code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
from contextlib import contextmanager
from pathlib import Path
import pytest
from dagos.core.configuration import ConfigurationScanner
from dagos.core.configuration.configuration_domain import DefaultPlaceholder
from dagos.exceptions import ValidationException
@contextmanager
def does_not_raise():
yield
@pytest.mark.parametrize(
"file,expectation",
[
("config/basic.yml", does_not_raise()),
("does_not_exist", pytest.raises(ValidationException)),
("config/invalid.yml", pytest.raises(ValidationException)),
],
)
def test_load_configuration(test_data_dir: Path, file, expectation):
config_file = test_data_dir.joinpath(file)
with expectation:
instance = ConfigurationScanner()
instance.load_configuration(config_file)
assert instance.configuration.verbosity == 1
assert not isinstance(instance.configuration.verbosity, DefaultPlaceholder)
def test_load_multiple_configurations(test_data_dir: Path):
instance = ConfigurationScanner()
instance.load_configuration(test_data_dir.joinpath("config/basic.yml"))
instance.load_configuration(test_data_dir.joinpath("config/another.yml"))
assert instance.configuration.verbosity == 1
assert len(instance.configuration.search_paths) == 1
assert not isinstance(instance.configuration._search_paths, DefaultPlaceholder)
assert not isinstance(instance.configuration._verbosity, DefaultPlaceholder)
assert not isinstance(
instance.configuration._component_search_paths, DefaultPlaceholder
)
assert not isinstance(
instance.configuration._environment_search_paths, DefaultPlaceholder
)
| [
"pytest.raises",
"dagos.core.configuration.ConfigurationScanner"
] | [((1004, 1026), 'dagos.core.configuration.ConfigurationScanner', 'ConfigurationScanner', ([], {}), '()\n', (1024, 1026), False, 'from dagos.core.configuration import ConfigurationScanner\n'), ((717, 739), 'dagos.core.configuration.ConfigurationScanner', 'ConfigurationScanner', ([], {}), '()\n', (737, 739), False, 'from dagos.core.configuration import ConfigurationScanner\n'), ((446, 480), 'pytest.raises', 'pytest.raises', (['ValidationException'], {}), '(ValidationException)\n', (459, 480), False, 'import pytest\n'), ((514, 548), 'pytest.raises', 'pytest.raises', (['ValidationException'], {}), '(ValidationException)\n', (527, 548), False, 'import pytest\n')] |
"""
Test the maximum a posteriori estimates
"""
import time
import numpy as np
from .test_model import prepare_dla_model
def test_DLA_MAP():
# test 1
dla_gp = prepare_dla_model(plate=5309, mjd=55929, fiber_id=362, z_qso=3.166)
tic = time.time()
max_dlas = 4
log_likelihoods_dla = dla_gp.log_model_evidences(max_dlas)
toc = time.time()
# very time consuming: ~ 4 mins for a single spectrum without parallelized.
print("spent {} mins; {} seconds".format((toc - tic) // 60, (toc - tic) % 60))
catalog_MAP_log_nhis = np.array(
[
[22.28420156, np.nan, np.nan, np.nan],
[20.63417494, 22.28420156, np.nan, np.nan],
[20.60601572, 22.28420156, 20.63417494, np.nan],
[20.12721363, 22.28420156, 20.63417494, 20.36967609],
]
)
catalog_MAP_z_dlas = np.array(
[
[3.03175723, np.nan, np.nan, np.nan],
[2.52182382, 3.03175723, np.nan, np.nan],
[2.39393537, 3.03175723, 2.52182382, np.nan],
[2.94786938, 3.03175723, 2.52182382, 2.38944805],
]
)
mapind = np.nanargmax(log_likelihoods_dla)
MAP_z_dla, MAP_log_nhi = dla_gp.maximum_a_posteriori()
nanind = np.isnan(catalog_MAP_z_dlas[mapind])
assert np.all(
np.abs(MAP_z_dla[mapind][~nanind] - catalog_MAP_z_dlas[mapind][~nanind]) < 1e-1
)
assert np.all(
np.abs(MAP_log_nhi[mapind][~nanind] - catalog_MAP_log_nhis[mapind][~nanind])
< 1e-1
)
# test 2
dla_gp = prepare_dla_model(plate=3816, mjd=55272, fiber_id=76, z_qso=3.68457627)
tic = time.time()
max_dlas = 4
log_likelihoods_dla = dla_gp.log_model_evidences(max_dlas)
toc = time.time()
# very time consuming: ~ 4 mins for a single spectrum without parallelized.
print("spent {} mins; {} seconds".format((toc - tic) // 60, (toc - tic) % 60))
catalog_MAP_log_nhis = np.array(
[
[21.05371292, np.nan, np.nan, np.nan],
[20.0073665, 20.94707037, np.nan, np.nan],
[20.00838815, 20.94707037, 20.0073665, np.nan],
[20.20539934, 20.94707037, 20.0073665, 20.0134955],
]
)
catalog_MAP_z_dlas = np.array(
[
[3.42520566, np.nan, np.nan, np.nan],
[2.69422714, 3.42710284, np.nan, np.nan],
[3.41452521, 3.42710284, 2.69422714, np.nan],
[3.43813463, 3.42710284, 2.69422714, 3.41262802],
]
)
mapind = np.nanargmax(log_likelihoods_dla)
MAP_z_dla, MAP_log_nhi = dla_gp.maximum_a_posteriori()
nanind = np.isnan(catalog_MAP_z_dlas[mapind])
assert np.all(
np.abs(MAP_z_dla[mapind][~nanind] - catalog_MAP_z_dlas[mapind][~nanind]) < 1e-1
)
assert np.all(
np.abs(MAP_log_nhi[mapind][~nanind] - catalog_MAP_log_nhis[mapind][~nanind])
< 1e-1
)
| [
"numpy.nanargmax",
"numpy.abs",
"numpy.array",
"numpy.isnan",
"time.time"
] | [((249, 260), 'time.time', 'time.time', ([], {}), '()\n', (258, 260), False, 'import time\n'), ((353, 364), 'time.time', 'time.time', ([], {}), '()\n', (362, 364), False, 'import time\n'), ((556, 761), 'numpy.array', 'np.array', (['[[22.28420156, np.nan, np.nan, np.nan], [20.63417494, 22.28420156, np.nan,\n np.nan], [20.60601572, 22.28420156, 20.63417494, np.nan], [20.12721363,\n 22.28420156, 20.63417494, 20.36967609]]'], {}), '([[22.28420156, np.nan, np.nan, np.nan], [20.63417494, 22.28420156,\n np.nan, np.nan], [20.60601572, 22.28420156, 20.63417494, np.nan], [\n 20.12721363, 22.28420156, 20.63417494, 20.36967609]])\n', (564, 761), True, 'import numpy as np\n'), ((852, 1048), 'numpy.array', 'np.array', (['[[3.03175723, np.nan, np.nan, np.nan], [2.52182382, 3.03175723, np.nan, np.\n nan], [2.39393537, 3.03175723, 2.52182382, np.nan], [2.94786938, \n 3.03175723, 2.52182382, 2.38944805]]'], {}), '([[3.03175723, np.nan, np.nan, np.nan], [2.52182382, 3.03175723, np\n .nan, np.nan], [2.39393537, 3.03175723, 2.52182382, np.nan], [\n 2.94786938, 3.03175723, 2.52182382, 2.38944805]])\n', (860, 1048), True, 'import numpy as np\n'), ((1126, 1159), 'numpy.nanargmax', 'np.nanargmax', (['log_likelihoods_dla'], {}), '(log_likelihoods_dla)\n', (1138, 1159), True, 'import numpy as np\n'), ((1234, 1270), 'numpy.isnan', 'np.isnan', (['catalog_MAP_z_dlas[mapind]'], {}), '(catalog_MAP_z_dlas[mapind])\n', (1242, 1270), True, 'import numpy as np\n'), ((1619, 1630), 'time.time', 'time.time', ([], {}), '()\n', (1628, 1630), False, 'import time\n'), ((1723, 1734), 'time.time', 'time.time', ([], {}), '()\n', (1732, 1734), False, 'import time\n'), ((1926, 2127), 'numpy.array', 'np.array', (['[[21.05371292, np.nan, np.nan, np.nan], [20.0073665, 20.94707037, np.nan,\n np.nan], [20.00838815, 20.94707037, 20.0073665, np.nan], [20.20539934, \n 20.94707037, 20.0073665, 20.0134955]]'], {}), '([[21.05371292, np.nan, np.nan, np.nan], [20.0073665, 20.94707037,\n np.nan, np.nan], [20.00838815, 
20.94707037, 20.0073665, np.nan], [\n 20.20539934, 20.94707037, 20.0073665, 20.0134955]])\n', (1934, 2127), True, 'import numpy as np\n'), ((2218, 2414), 'numpy.array', 'np.array', (['[[3.42520566, np.nan, np.nan, np.nan], [2.69422714, 3.42710284, np.nan, np.\n nan], [3.41452521, 3.42710284, 2.69422714, np.nan], [3.43813463, \n 3.42710284, 2.69422714, 3.41262802]]'], {}), '([[3.42520566, np.nan, np.nan, np.nan], [2.69422714, 3.42710284, np\n .nan, np.nan], [3.41452521, 3.42710284, 2.69422714, np.nan], [\n 3.43813463, 3.42710284, 2.69422714, 3.41262802]])\n', (2226, 2414), True, 'import numpy as np\n'), ((2492, 2525), 'numpy.nanargmax', 'np.nanargmax', (['log_likelihoods_dla'], {}), '(log_likelihoods_dla)\n', (2504, 2525), True, 'import numpy as np\n'), ((2600, 2636), 'numpy.isnan', 'np.isnan', (['catalog_MAP_z_dlas[mapind]'], {}), '(catalog_MAP_z_dlas[mapind])\n', (2608, 2636), True, 'import numpy as np\n'), ((1298, 1370), 'numpy.abs', 'np.abs', (['(MAP_z_dla[mapind][~nanind] - catalog_MAP_z_dlas[mapind][~nanind])'], {}), '(MAP_z_dla[mapind][~nanind] - catalog_MAP_z_dlas[mapind][~nanind])\n', (1304, 1370), True, 'import numpy as np\n'), ((1411, 1487), 'numpy.abs', 'np.abs', (['(MAP_log_nhi[mapind][~nanind] - catalog_MAP_log_nhis[mapind][~nanind])'], {}), '(MAP_log_nhi[mapind][~nanind] - catalog_MAP_log_nhis[mapind][~nanind])\n', (1417, 1487), True, 'import numpy as np\n'), ((2665, 2737), 'numpy.abs', 'np.abs', (['(MAP_z_dla[mapind][~nanind] - catalog_MAP_z_dlas[mapind][~nanind])'], {}), '(MAP_z_dla[mapind][~nanind] - catalog_MAP_z_dlas[mapind][~nanind])\n', (2671, 2737), True, 'import numpy as np\n'), ((2778, 2854), 'numpy.abs', 'np.abs', (['(MAP_log_nhi[mapind][~nanind] - catalog_MAP_log_nhis[mapind][~nanind])'], {}), '(MAP_log_nhi[mapind][~nanind] - catalog_MAP_log_nhis[mapind][~nanind])\n', (2784, 2854), True, 'import numpy as np\n')] |
#!/usr/local/bin/python
# various by python (v3.3) stuff while I learn the langugage
# (taking my features.rb script and porting it to python)
# Really useful to me:
# google course on python: https://developers.google.com/edu/python/
# software carpentry bootcamp: http://software-carpentry.org/v4/python/index.html
# loops
for x in range(1, 10):
print(x, end="")
print()
for x in range(9, 4, -1):
print(x, end="")
print()
for x in range(0, 21, 5):
print(x, end=" ")
print()
# string methods
print('Length of "This is a test"', end="")
print(len('This is a test'))
print('This is a test'.lower())
print('This is a test'.upper())
print('This is a test'.swapcase())
# reverse ("extended slice syntax": begin:end:step)
print("This is a test"[::-1])
# string manipulation & reg ex
import re
# sub just the first
print(re.sub('bar', 'foo', 'foobarfoobar', count=1))
# global sub
print(re.sub('bar','foo','foobarfoobar'))
print(re.sub(r'\s+', '|', "This is a test", count=1))
print(re.sub(r'\s+', '|', 'This is a test'))
text = '<h3 align="center">Popularity in 1990</h3>\n'
year = re.search(r'Popularity in (\d\d\d\d)', text).group(1)
print(year)
text = '<tr align="right"><td>7</td><td>Andrew</td><td>Stephanie</td>'
rank_and_names = re.search(r'<td>(\d+)</td><td>(\w+)</td><td>(\w+)</td>', text)
print(rank_and_names.group())
print(rank_and_names.group(1))
print(rank_and_names.group(2))
print(rank_and_names.group(3))
rank_and_names = re.findall(r'<td>(\d+)</td><td>(\w+)</td><td>(\w+)</td>', text)
print(rank_and_names)
text2 = '''<tr align="right"><td>7</td><td>Andrew</td><td>Stephanie</td>
<tr align="right"><td>8</td><td>James</td><td>Jennifer</td>
<tr align="right"><td>9</td><td>Justin</td><td>Elizabeth</td>'''
rank_and_names = re.findall(r'<td>(\d+)</td><td>(\w+)</td><td>(\w+)</td>', text2)
print(rank_and_names)
# split on whitespace
print("Blah blah blah. ".split())
# arrays (lists)
arr = [1, "test", 2, 3, 4]
for x in arr:
print(str(x) + "X ", end="")
print()
# formated print
for x in arr:
print("%sX " % x, end="")
print()
# map
x = list(map(lambda x:x+1, range(6)))
print(x)
# list comprehension
x = [y+1 for y in range(6)]
print(x)
y = list(map(lambda x:x+2, range(6)))
print(' '.join(map(str, y)))
# ranges
x = list(range(6))
y = list(range(1, 7))
z = list(range(3, 50, 5))
print(x, y, z)
# loops
for i in range(1,6):
print("%d^2 = %d" % (i, i**2))
i = 1
while i <= 5:
print("%d^2 = %d" % (i, i**2))
i += 1
# other array methods
x = list(range(1,6))
y = [2, 4, 1]
print(x+y)
print(":".join(map(str, x+y)))
# aliasing
x = list(range(1,6))
y = [2, 4, 1]
z = x # aliased
zz = list(x) # a copy
id(x) == id(z) # True
id(x) != id(zz) # True
for yy in y:
if yy in x: x.remove(yy)
print(x, z, zz)
print(3 in x)
print(7 in x)
print(x[0]) # first element
print(x[-1]) # last element
z = range(5, 9)
print(z[-2:]) # a range
z = list(z)
print(z[-2:]) # now a list
zz = z.reverse() # doesn't return
print(z, zz) # zz = None
zz = reversed(z)
print(zz) # an iterator
zz = list(reversed(z))
print(zz) # a list
# hashes (hash is called a 'dict')
x = {"a" : 1, "b" : 2, "c" : 3}
print(x['a'])
for (value,key) in enumerate(x):
print(key, ' -> ', value)
print(list(x.keys()))
x.pop("a")
print(x)
x = {"a" : 1, "b" : 2, "c" : 3}
z = list(x.keys()) # need list() since I'll be modifying the keys in place
for key in z: # "for key in x:" would work if I weren't modifying the keys in place
if x[key] == 2:
x.pop(key)
print(x)
# alternatively:
x = {"a" : 1, "b" : 2, "c" : 3}
z = [key for key in x.keys() if x[key] == 2]
for key in z:
x.pop(key)
print(x)
x = {"a":1, "b":2}
x['d'] = x['d']+1 if 'd' in x else 1
## slices of arrays, negative index to start from end
a = list(range(2, 13, 2))
print(a[1:3])
print(a[-1])
print(a[-3:-2])
print(a[-3])
print(a[-3:-1])
## conversion between classes
int("5") # to integer
float("6") # to float
str(252.3) # to string
## a bit of text manipulation
text = '''We may at once admit that any inference from the particular
to the general must be attended with some degree of uncertainty,
but this is not the same as to admit that such inference cannot
be absolutely rigorous, for the nature and degree of the uncertainty
may itself be capable of rigorous expression.'''
stopwords = 'the a by on for of are with just but and to my in I has some'.lower().split()
words = text.lower().split()
keywords = [word for word in words if word not in stopwords]
print(' '.join(keywords))
print("no. char =", len(' '.join(keywords)))
print("no. words =", len(keywords))
## playing with map
n = 8
counts = map(lambda x: 0, range(n))
print(' '.join(map(str, counts)))
import random
x = map(lambda z: random.randint(1,8), range(1000))
counts = []
for i in range(1,9):
counts.append( sum(z==i for z in y) )
print(' '.join(map(str, counts)))
## looping over hashes (also sorting)
words = '''We may at once admit that any inference from the particular to the general
must be attended with some degree of uncertainty, but this is not the same as to
admit that such inference cannot be absolutely rigorous, for the nature and
degree of the uncertainty may itself be capable of rigorous expression.'''.split()
import re
words = list(map(lambda word: re.sub(r'[,\.]', '', word), words))
wordcount = {}
for word in words:
wordcount[word] = wordcount[word]+1 if word in wordcount else 1
# sort by word length
sorted(wordcount.keys(), key=len)
# sort by count
sorted(wordcount.keys(), key=lambda x: wordcount[x])
# by count then word length
sorted(wordcount.keys(), key=lambda x: [wordcount[x], len(x)])
# by word length then count
sorted(wordcount.keys(), key=lambda x: [len(x), wordcount[x]])
# by count then word length, but reversed
sorted(wordcount.keys(), key=lambda x: [wordcount[x], len(x)], reverse=True)
# using a function
def count_and_length (a):
return [wordcount[a], len(a)]
sorted(wordcount.keys(), key=count_and_length)
## regex
import re
if not re.search(r'AM', 'am'):
print('ok 1')
if re.search(r'(?i)AM', 'am'):
print('ok 2')
if re.search(r'AM', 'am', re.IGNORECASE):
print('ok 3')
multi = 'blah a number of special\nAll of these are'
if re.search(r'\Ablah', multi):
print('ok 4')
if not re.search(r'\AAll', multi):
print('ok 5')
if re.search(r'^blah', multi):
print('ok 6')
if not re.search(r'^All', multi):
print('ok 7')
if re.search(r'^A', multi, re.MULTILINE):
print('ok 8')
if not re.search(r'special\Z', multi):
print('ok 9')
if re.search(r'special$', multi, re.MULTILINE):
print('ok 10')
if re.search(r'are\Z', multi, re.MULTILINE):
print('ok 11')
if re.search(r'are\Z', multi):
print('ok 12')
if not re.search(r'special$', multi):
print('ok 13')
if re.search(r'special$', multi, re.MULTILINE):
print('ok 14')
if re.search(r'are$', multi):
print('ok 15')
if not re.search(r'blah.*are', multi):
print('ok 16')
if re.search(r'blah.*are', multi, re.DOTALL):
print('ok 17')
x = 'Today is 11/26/2013, while tomorrow is 11/27/2013.'
z = re.search(r'(\d+)/(\d+)/(\d+)', x)
if z:
print('Month = %s, day = %s, year = %s' % (z.group(1), z.group(2), z.group(3)))
zz = re.findall(r'(\d+)/(\d+)/(\d+)', x)
if zz:
print('Month = %s, day = %s, year = %s' % (zz[0][0], zz[0][1], zz[0][2]))
if len(zz) > 1:
print('Month = %s, day = %s, year = %s' % (zz[1][0], zz[1][1], zz[1][2]))
| [
"re.sub",
"re.findall",
"random.randint",
"re.search"
] | [((1261, 1325), 're.search', 're.search', (['"""<td>(\\\\d+)</td><td>(\\\\w+)</td><td>(\\\\w+)</td>"""', 'text'], {}), "('<td>(\\\\d+)</td><td>(\\\\w+)</td><td>(\\\\w+)</td>', text)\n", (1270, 1325), False, 'import re\n'), ((1465, 1530), 're.findall', 're.findall', (['"""<td>(\\\\d+)</td><td>(\\\\w+)</td><td>(\\\\w+)</td>"""', 'text'], {}), "('<td>(\\\\d+)</td><td>(\\\\w+)</td><td>(\\\\w+)</td>', text)\n", (1475, 1530), False, 'import re\n'), ((1767, 1833), 're.findall', 're.findall', (['"""<td>(\\\\d+)</td><td>(\\\\w+)</td><td>(\\\\w+)</td>"""', 'text2'], {}), "('<td>(\\\\d+)</td><td>(\\\\w+)</td><td>(\\\\w+)</td>', text2)\n", (1777, 1833), False, 'import re\n'), ((6022, 6047), 're.search', 're.search', (['"""(?i)AM"""', '"""am"""'], {}), "('(?i)AM', 'am')\n", (6031, 6047), False, 'import re\n'), ((6069, 6105), 're.search', 're.search', (['"""AM"""', '"""am"""', 're.IGNORECASE'], {}), "('AM', 'am', re.IGNORECASE)\n", (6078, 6105), False, 'import re\n'), ((6180, 6207), 're.search', 're.search', (['"""\\\\Ablah"""', 'multi'], {}), "('\\\\Ablah', multi)\n", (6189, 6207), False, 'import re\n'), ((6279, 6304), 're.search', 're.search', (['"""^blah"""', 'multi'], {}), "('^blah', multi)\n", (6288, 6304), False, 'import re\n'), ((6376, 6412), 're.search', 're.search', (['"""^A"""', 'multi', 're.MULTILINE'], {}), "('^A', multi, re.MULTILINE)\n", (6385, 6412), False, 'import re\n'), ((6489, 6531), 're.search', 're.search', (['"""special$"""', 'multi', 're.MULTILINE'], {}), "('special$', multi, re.MULTILINE)\n", (6498, 6531), False, 'import re\n'), ((6554, 6594), 're.search', 're.search', (['"""are\\\\Z"""', 'multi', 're.MULTILINE'], {}), "('are\\\\Z', multi, re.MULTILINE)\n", (6563, 6594), False, 'import re\n'), ((6616, 6642), 're.search', 're.search', (['"""are\\\\Z"""', 'multi'], {}), "('are\\\\Z', multi)\n", (6625, 6642), False, 'import re\n'), ((6719, 6761), 're.search', 're.search', (['"""special$"""', 'multi', 're.MULTILINE'], {}), "('special$', multi, 
re.MULTILINE)\n", (6728, 6761), False, 'import re\n'), ((6784, 6808), 're.search', 're.search', (['"""are$"""', 'multi'], {}), "('are$', multi)\n", (6793, 6808), False, 'import re\n'), ((6887, 6927), 're.search', 're.search', (['"""blah.*are"""', 'multi', 're.DOTALL'], {}), "('blah.*are', multi, re.DOTALL)\n", (6896, 6927), False, 'import re\n'), ((7008, 7044), 're.search', 're.search', (['"""(\\\\d+)/(\\\\d+)/(\\\\d+)"""', 'x'], {}), "('(\\\\d+)/(\\\\d+)/(\\\\d+)', x)\n", (7017, 7044), False, 'import re\n'), ((7136, 7173), 're.findall', 're.findall', (['"""(\\\\d+)/(\\\\d+)/(\\\\d+)"""', 'x'], {}), "('(\\\\d+)/(\\\\d+)/(\\\\d+)', x)\n", (7146, 7173), False, 'import re\n'), ((840, 885), 're.sub', 're.sub', (['"""bar"""', '"""foo"""', '"""foobarfoobar"""'], {'count': '(1)'}), "('bar', 'foo', 'foobarfoobar', count=1)\n", (846, 885), False, 'import re\n'), ((907, 943), 're.sub', 're.sub', (['"""bar"""', '"""foo"""', '"""foobarfoobar"""'], {}), "('bar', 'foo', 'foobarfoobar')\n", (913, 943), False, 'import re\n'), ((950, 996), 're.sub', 're.sub', (['"""\\\\s+"""', '"""|"""', '"""This is a test"""'], {'count': '(1)'}), "('\\\\s+', '|', 'This is a test', count=1)\n", (956, 996), False, 'import re\n'), ((1005, 1042), 're.sub', 're.sub', (['"""\\\\s+"""', '"""|"""', '"""This is a test"""'], {}), "('\\\\s+', '|', 'This is a test')\n", (1011, 1042), False, 'import re\n'), ((5979, 6000), 're.search', 're.search', (['"""AM"""', '"""am"""'], {}), "('AM', 'am')\n", (5988, 6000), False, 'import re\n'), ((6232, 6258), 're.search', 're.search', (['"""\\\\AAll"""', 'multi'], {}), "('\\\\AAll', multi)\n", (6241, 6258), False, 'import re\n'), ((6330, 6354), 're.search', 're.search', (['"""^All"""', 'multi'], {}), "('^All', multi)\n", (6339, 6354), False, 'import re\n'), ((6438, 6468), 're.search', 're.search', (['"""special\\\\Z"""', 'multi'], {}), "('special\\\\Z', multi)\n", (6447, 6468), False, 'import re\n'), ((6668, 6696), 're.search', 're.search', (['"""special$"""', 'multi'], 
{}), "('special$', multi)\n", (6677, 6696), False, 'import re\n'), ((6835, 6864), 're.search', 're.search', (['"""blah.*are"""', 'multi'], {}), "('blah.*are', multi)\n", (6844, 6864), False, 'import re\n'), ((1106, 1153), 're.search', 're.search', (['"""Popularity in (\\\\d\\\\d\\\\d\\\\d)"""', 'text'], {}), "('Popularity in (\\\\d\\\\d\\\\d\\\\d)', text)\n", (1115, 1153), False, 'import re\n'), ((4714, 4734), 'random.randint', 'random.randint', (['(1)', '(8)'], {}), '(1, 8)\n', (4728, 4734), False, 'import random\n'), ((5260, 5286), 're.sub', 're.sub', (['"""[,\\\\.]"""', '""""""', 'word'], {}), "('[,\\\\.]', '', word)\n", (5266, 5286), False, 'import re\n')] |
"""
"""
import datetime
import os
# import sys
import logging
import numpy as np
import scipy as sp
import scipy.optimize # noqa
import tqdm
import h5py
import zcode.inout as zio
import zcode.math as zmath
from . import spectra, radiation # , utils
from . import PATH_DATA, MASS_EXTR, FEDD_EXTR, RADS_EXTR
from . constants import MSOL, MELC, MPRT, SPLC, K_BLTZ, H_PLNK
NUM = 10
np.seterr(divide='ignore', invalid='ignore', over='raise')
# MASS_EXTR = [1e6, 5e10]
# FEDD_EXTR = [1e-5, 1e-1]
# RADS_EXTR = [3.0, 1e5]
GRID_NAMES = ['mass', 'fedd', 'rmin', 'rmax']
ALPHA_VISC = 0.1
BETA_GP = 0.5
FRAC_ADV = 0.5
GAMMA_SH = (32 - 24*BETA_GP - 3*BETA_GP**2) / (24 - 21*BETA_GP)
EPS = (5/3 - GAMMA_SH) / (GAMMA_SH - 1.0)
EPS_PRIME = EPS / FRAC_ADV
DELTA = MELC/MPRT
GAE = np.sqrt(1.0 + 18.0 * np.square(ALPHA_VISC/(5.0 + 2*EPS_PRIME))) - 1.0
C1 = GAE * (5 + 2*EPS_PRIME) / (3 * np.square(ALPHA_VISC))
# C2 = np.sqrt(2 * EPS_PRIME * C1 / 3)
C3 = 2 * C1 / 3
MEC2 = MELC * SPLC**2
S1 = 1.42e9 * np.sqrt(1 - BETA_GP) * np.sqrt(C3 / C1 / ALPHA_VISC)
S3 = 1.05e-24
KB_OVER_MEC2 = K_BLTZ / MEC2
META = dict(ALPHA_VISC=ALPHA_VISC, BETA_GP=BETA_GP, FRAC_ADV=FRAC_ADV)
def main(num=None, recreate=True):
if num is None:
num = NUM
fname = grid_fname(num)
exists = os.path.exists(fname)
logging.warning("Grid for num={} exists: {} ({})".format(num, exists, fname))
logging.info("recreate: {}".format(recreate))
if not exists or recreate:
grid, grid_names, grid_temps, grid_valid = get_temp_grid(num)
save_grid(fname, grid, grid_names, grid_temps, grid_valid)
return
def get_interp(num=None):
if num is None:
num = NUM
fname = grid_fname(num)
grid, grid_names, grid_temps, grid_valid = load_grid(fname=fname)
grid_temps[~grid_valid] = np.mean(grid_temps[grid_valid])
# mesh = np.meshgrid(*grid)
# mesh = np.log10(mesh)
mesh = [np.log10(gg) for gg in grid]
grid_temps = np.log10(grid_temps)
interp_ll = sp.interpolate.RegularGridInterpolator(mesh, grid_temps)
def interp(xx):
try:
res = 10**interp_ll(np.log10(xx))
except ValueError:
logging.error("ValueError for argument: '{}'".format(xx))
logging.error("ValueError for argument: log: '{}'".format(np.log10(xx)))
for gg in interp_ll.grid:
logging.error("\t{}".format(zmath.minmax(gg)))
raise
return res
return interp
def grid_fname(num):
fname = "temp_grid_n{}.hdf5".format(num)
fname = os.path.join(PATH_DATA, fname)
return fname
def save_grid(fname, grid, grid_names, grid_temps, grid_valid):
fname = os.path.abspath(fname)
with h5py.File(fname, 'w') as out:
group = out.create_group('grid')
for nn, vv in zip(grid_names, grid):
group.create_dataset(nn, data=vv)
group = out.create_group('parameters')
for nn, vv in META.items():
group.create_dataset(nn, data=vv)
out.create_dataset('temps', data=grid_temps)
out.create_dataset('valid', data=grid_valid)
logging.info("Saved to '{}' size '{}'".format(fname, zio.get_file_size(fname)))
return
def load_grid(*args, num=None, fname=None):
if len(args):
raise ValueError("Only passed kwargs to `load_grid()`!")
if fname is None:
if num is None:
num = NUM
fname = grid_fname(num)
fname = os.path.abspath(fname)
if not os.path.exists(fname):
raise ValueError("fname '{}' does not exist!".format(fname))
with h5py.File(fname, 'r') as h5:
grid_group = h5['grid']
# grid_names = list(grid_group.keys())
grid_names = []
grid = []
for nn in GRID_NAMES:
grid.append(grid_group[nn][:])
grid_names.append(nn)
grid_temps = h5['temps'][:]
grid_valid = h5['valid'][:]
return grid, grid_names, grid_temps, grid_valid
def get_temp_grid(num, fix=True):
grid_extr = [np.array(MASS_EXTR)*MSOL, FEDD_EXTR, RADS_EXTR, RADS_EXTR]
grid_names = ['mass', 'fedd', 'rmin', 'rmax']
grid = [np.logspace(*np.log10(extr), num) for extr in grid_extr]
shape = [num for ii in range(len(grid))]
tot = np.product(shape)
grid_temps = np.zeros(shape)
grid_valid = np.ones(shape, dtype=bool)
cnt = 0
beg = datetime.datetime.now()
for idx in tqdm.tqdm(np.ndindex(*shape), total=tot):
# print(idx)
vals = [gg[ii] for gg, ii in zip(grid, idx)]
if vals[2] >= vals[3]:
grid_valid[idx] = False
continue
tt = solve_adaf_temp(*vals)
if tt is not None:
grid_temps[idx] = tt
cnt += 1
end = datetime.datetime.now()
dur = (end - beg)
dur_per = dur.total_seconds()/cnt
bads_nan = np.isnan(grid_temps)
grid_temps = np.nan_to_num(grid_temps)
bads = grid_valid & np.isclose(grid_temps, 0.0)
logging.warning("Success on : {}".format(zmath.frac_str(grid_temps[grid_valid] > 0.0)))
logging.warning("nan values: {}".format(zmath.frac_str(bads_nan)))
logging.warning("Bad values: {}".format(zmath.frac_str(bads)))
logging.warning("Done after {}, per iteration: {}".format(str(dur), dur_per))
if fix:
grid_temps = interp_bad_grid_vals(grid, grid_temps, grid_valid)
return grid, grid_names, grid_temps, grid_valid
def solve_adaf_temp(mass, fedd, rmin, rmax, debug=False):
msol = mass / MSOL
lvl = logging.WARNING
def heat_cool(temp):
"""Calculate heating and cooling rates for disk as a whole.
"""
nonlocal mass, fedd, rmin, rmax, msol
alpha = ALPHA_VISC
beta = BETA_GP
eps_prime = EPS_PRIME
delta = DELTA
rmin = rmin
rmax = rmax
theta_e = KB_OVER_MEC2 * temp
xm = spectra.xm_from_te(temp, msol, fedd)
tau_es = 23.87 * fedd * (0.3 / alpha) * (0.5 / C1) * np.sqrt(3/rmin)
mean_amp_a = 1.0 + 4.0 * theta_e + 16*np.square(theta_e)
alpha_crit = - np.log(tau_es) / np.log(mean_amp_a)
s2 = 1.19e-13 * xm
# Viscous Heating
# ---------------
_ge = radiation._heat_func_g(theta_e)
q1 = 1.2e38 * _ge * C3 * beta * msol * np.square(fedd) / np.square(alpha*C1) / rmin
q2 = delta * 9.39e38 * eps_prime * C3 * msol * fedd / rmin
heat_elc = q1 + q2
# Synchrotron
# -----------
# Eq. 24 [Hz]
f_p = S1 * s2 * np.sqrt(fedd/msol) * np.square(temp) * np.power(rmin, -1.25)
lum_synch_peak = np.power(S1 * s2, 3) * S3 * np.power(rmin, -1.75) * np.sqrt(msol)
lum_synch_peak *= np.power(fedd, 1.5) * np.power(temp, 7) / f_p
# Eq. 26
power_synch = 5.3e35 * np.power(xm/1000, 3) * np.power(alpha/0.3, -1.5)
power_synch *= np.power((1 - beta)/0.5, 1.5) * np.power(C1/0.5, -1.5)
# Bremsstrahlung
# --------------
# Eq. 29
power_brems = 4.78e34 * np.log(rmax/rmin) / np.square(alpha * C1)
power_brems *= radiation._brems_fit_func_f(theta_e) * fedd * msol
# Compton
# -------
power_compt = lum_synch_peak * f_p / (1 - alpha_crit)
power_compt *= (np.power(6.2e7 * (temp/1e9) / (f_p/1e12), 1 - alpha_crit) - 1.0)
return heat_elc, power_synch, power_brems, power_compt
def _func(logt):
tt = np.power(10.0, logt)
qv, qs, qb, qc = heat_cool(tt)
rv = qv - (qs + qb + qc)
return rv
start_temps = [1e11, 1e10, 1e12, 1e9, 1e8]
success = False
for ii, t0 in enumerate(start_temps):
try:
logt = sp.optimize.newton(_func, np.log10(t0), tol=1e-4, maxiter=100)
temp_e = np.power(10.0, logt)
except (RuntimeError, FloatingPointError) as err:
if debug:
logging.warn("Trial '{}' (t={:.1e}) optimization failed: {}".format(
ii, t0, str(err)))
else:
success = True
break
if success:
# logging.log(lvl, "Success with `t0`={:.2e} ==> t={:.2e}".format(t0, temp_e))
pass
else:
err = ("Unable to find electron temperature!"
"\nIf the eddington factor is larger than 1e-2, "
"this may be expected!")
if debug:
logging.log(lvl, "FAILED to find electron temperature!")
logging.log(lvl, "m = {:.2e}, f = {:.2e}".format(msol, fedd))
logging.log(lvl, err)
# raise RuntimeError(err)
return None
qv, qs, qb, qc = heat_cool(temp_e)
heat = qv
cool = qs + qb + qc
diff = np.fabs(heat - cool) / heat
if diff < 1e-2:
if debug:
logging.log(lvl, "Heating vs. cooling frac-diff: {:.2e}".format(diff))
else:
if debug:
err = "Electron temperature seems inconsistent (Te = {:.2e})!".format(temp_e)
err += "\n\tm: {:.2e}, f: {:.2e}".format(msol, fedd)
err += "\n\tHeating: {:.2e}, Cooling: {:.2e}, diff: {:.4e}".format(heat, cool, diff)
err += "\n\tThis may mean there is an input error (e.g. mdot may be too large... or small?)."
logging.log(lvl, err)
return None
return temp_e
def interp_bad_grid_vals(grid, grid_temps, grid_valid):
grid_temps = np.copy(grid_temps)
bads = grid_valid & np.isclose(grid_temps, 0.0)
shape = [len(gg) for gg in grid]
logging.warning("Fixing bad values: {}".format(zmath.frac_str(bads)))
neighbors = []
good_neighbors = []
bads_inds = np.array(np.where(bads)).T
for bad in tqdm.tqdm(bads_inds):
nbs = []
# print(bad)
cnt = 0
for dim in range(4):
for side in [-1, +1]:
test = [bb for bb in bad]
test[dim] += side
if test[dim] < 0 or test[dim] >= shape[dim]:
continue
test = tuple(test)
# print("\t", test)
# print("\t", temps[test])
nbs.append(test)
if grid_temps[test] > 0.0:
cnt += 1
neighbors.append(nbs)
good_neighbors.append(cnt)
num_nbs = [len(nbs) for nbs in neighbors]
logging.warning("All neighbors: {}".format(zmath.stats_str(num_nbs)))
logging.warning("Good neighbors: {}".format(zmath.stats_str(good_neighbors)))
goods = np.zeros(len(neighbors))
MAX_TRIES = 10
still_bad = list(np.argsort(good_neighbors)[::-1])
tries = 0
while len(still_bad) > 0 and tries < MAX_TRIES:
keep_bad = []
for kk, ii in enumerate(still_bad):
values = np.zeros(num_nbs[ii])
for jj, nbr in enumerate(neighbors[ii]):
values[jj] = grid_temps[nbr]
cnt = np.count_nonzero(values)
if cnt == 0:
keep_bad.append(kk)
continue
new = np.sum(np.log10(values[values > 0])) / cnt
loc = tuple(bads_inds[ii])
# print("\t", loc, new, cnt)
grid_temps[loc] = 10**new
goods[ii] = cnt
still_bad = [still_bad[kk] for kk in keep_bad]
num_still = len(still_bad)
logging.warning("Try: {}, still_bad: {}".format(tries, num_still))
if (tries+1 >= MAX_TRIES) and (num_still > 0):
logging.error("After {} tries, still {} bad!!".format(tries, num_still))
tries += 1
logging.warning("Filled neighbors: {}".format(zmath.stats_str(goods)))
logging.warning("Full temps array: {}".format(zmath.stats_str(grid_temps[grid_valid])))
return grid_temps
def plot_grid(grid, grid_names, temps, valid, interp=None):
import matplotlib.pyplot as plt
import zcode.plot as zplot
extr = zmath.minmax(temps, filter='>')
smap = zplot.colormap(extr, 'viridis')
# bads = valid & np.isclose(temps, 0.0)
num = len(grid)
fig, axes = plt.subplots(figsize=[14, 14], nrows=num, ncols=num)
plt.subplots_adjust(hspace=0.4, wspace=0.4)
def_idx = [-4, -4, 4, -4]
for (ii, jj), ax in np.ndenumerate(axes):
if ii < jj:
ax.set_visible(False)
continue
ax.set(xscale='log', yscale='log')
xx = grid[jj]
if ii == jj:
# print(grid_names[ii], zmath.minmax(grid[ii], filter='>'))
# idx = list(range(num))
# idx.pop(ii)
# idx = tuple(idx)
# vals = np.mean(temps, axis=idx)
idx = [slice(None) if aa == ii else def_idx[aa] for aa in range(num)]
vals = temps[tuple(idx)]
ax.plot(xx, vals, 'k-')
if interp is not None:
num_test = 10
test = [np.ones(num_test)*grid[aa][def_idx[aa]] for aa in range(num)]
test[ii] = zmath.spacing(grid[ii], 'log', num_test)
test_vals = [interp(tt) for tt in np.array(test).T]
ax.plot(test[ii], test_vals, 'r--')
# bad_vals = np.count_nonzero(bads, axis=idx)
# tw = ax.twinx()
# tw.plot(xx, bad_vals, 'r--')
else:
# print(ii, jj)
# print("\t", ii, grid_names[ii], zmath.minmax(grid[ii], filter='>'))
# print("\t", jj, grid_names[jj], zmath.minmax(grid[jj], filter='>'))
# idx = [0, 1, 2, 3]
# idx.pop(np.max([ii, jj]))
# idx.pop(np.min([ii, jj]))
# vals = np.mean(temps, axis=tuple(idx))
# idx = [slice(None) if aa in [ii, jj] else num//2 for aa in range(num)]
idx = [slice(None) if aa in [ii, jj] else def_idx[aa] for aa in range(num)]
vals = temps[tuple(idx)]
if len(vals) == 0:
continue
yy = grid[ii]
xx, yy = np.meshgrid(xx, yy, indexing='ij')
ax.pcolor(xx, yy, vals, cmap=smap.cmap, norm=smap.norm)
if np.count_nonzero(vals > 0.0) == 0:
continue
tit = "{:.1e}, {:.1e}".format(*zmath.minmax(vals, filter='>'))
ax.set_title(tit, size=10)
# bad_vals = np.count_nonzero(bads, axis=tuple(idx))
# idx = (bad_vals > 0.0)
# aa = xx[idx]
# bb = yy[idx]
# cc = bad_vals[idx]
# ax.scatter(aa, bb, s=2*cc**2, color='0.5', alpha=0.5)
# ax.scatter(aa, bb, s=cc**2, color='r')
if interp is not None:
for kk in range(10):
idx = (vals > 0.0)
x0 = 10**np.random.uniform(*zmath.minmax(np.log10(xx[idx])))
y0 = 10**np.random.uniform(*zmath.minmax(np.log10(yy[idx])))
# y0 = np.random.choice(yy[idx])
temp = [grid[ll][def_idx[ll]] for ll in range(num)]
temp[ii] = y0
temp[jj] = x0
if temp[2] >= temp[3]:
temp[2] = 3.1
iv = interp(temp)
if not np.isfinite(iv) or np.isclose(iv, 0.0):
print("\nBAD")
print(temp)
print(iv)
for kk in range(num):
if def_idx[kk] == 0:
temp[kk] = temp[kk] * 1.11
elif def_idx[kk] == -1:
temp[kk] = 0.99 * temp[kk]
iv = interp(temp)
print("\t", temp)
print("\t", iv)
cc = smap.to_rgba(iv)
ss = 20
ax.scatter(temp[jj], temp[ii], color='0.5', s=2*ss)
ax.scatter(temp[jj], temp[ii], color=cc, s=ss)
if ii == num-1:
ax.set_xlabel(grid_names[jj])
if jj == 0 and ii != 0:
ax.set_ylabel(grid_names[ii])
return fig
class Fast_Mahadevan96:
def __init__(self, mass, fedd, rmin, rmax, temp_e=None, interp=None):
"""
"""
self.mass = mass
# Mass in units of solar=masses
self.msol = mass/MSOL
self.fedd = fedd
self.rmin = rmin
self.rmax = rmax
if temp_e is None:
if interp is None:
interp = get_interp()
temp_e = interp([mass, fedd, rmin, rmax])
self.temp_e = temp_e
xm_e = spectra.xm_from_te(temp_e, self.msol, fedd)
self.s2 = 1.19e-13 * xm_e
theta_e = radiation.dimensionless_temperature_theta(temp_e, MELC)
# Eq. 31
tau_es = 23.87 * fedd * (0.3 / ALPHA_VISC) * (0.5 / C1) * np.sqrt(3/rmin)
# Eq. 32
mean_amp_a = 1.0 + 4.0 * theta_e + 16*np.square(theta_e)
# Eq. 34
self.alpha_crit = - np.log(tau_es) / np.log(mean_amp_a)
return
def spectrum(self, freqs):
synch = self._calc_spectrum_synch(freqs)
brems = self._calc_spectrum_brems(freqs)
compt = self._calc_spectrum_compt(freqs)
spectrum = synch + brems + compt
return spectrum
def _calc_spectrum_synch(self, freqs):
"""Mahadevan 1996 - Eq. 25
Cutoff above peak frequency (i.e. ignore exponential portion).
Ignore low-frequency transition to steeper (22/13 slope) from rmax.
"""
msol = self.msol
fedd = self.fedd
scalar = np.isscalar(freqs)
freqs = np.atleast_1d(freqs)
lnu = S3 * np.power(S1*self.s2, 1.6)
lnu *= np.power(msol, 1.2) * np.power(fedd, 0.8)
lnu *= np.power(self.temp_e, 4.2) * np.power(freqs, 0.4)
nu_p = self._freq_synch_peak(self.temp_e, msol, fedd)
lnu[freqs > nu_p] = 0.0
if scalar:
lnu = np.squeeze(lnu)
return lnu
def _calc_spectrum_brems(self, freqs):
"""Mahadevan 1996 - Eq. 30
"""
msol = self.msol
fedd = self.fedd
temp = self.temp_e
const = 2.29e24 # erg/s/Hz
scalar = np.isscalar(freqs)
freqs = np.atleast_1d(freqs)
t1 = np.log(self.rmax/self.rmin) / np.square(ALPHA_VISC * C1)
t2 = np.exp(-H_PLNK*freqs / (K_BLTZ * temp)) * msol * np.square(fedd) / temp
fe = radiation._brems_fit_func_f(temp)
lbrems = const * t1 * fe * t2
if scalar:
lbrems = np.squeeze(lbrems)
return lbrems
def _calc_spectrum_compt(self, freqs):
"""Compton Scattering spectrum from upscattering of Synchrotron photons.
Mahadevan 1996 - Eq. 38
"""
fedd = self.fedd
temp = self.temp_e
scalar = np.isscalar(freqs)
freqs = np.atleast_1d(freqs)
f_p, l_p = self._synch_peak(fedd, self.msol, temp)
lsp = np.power(freqs/f_p, -self.alpha_crit) * l_p
lsp[freqs < f_p] = 0.0
# See Eq. 35
max_freq = 3*K_BLTZ*temp/H_PLNK
lsp[freqs > max_freq] = 0.0
if scalar:
lsp = np.squeeze(lsp)
return lsp
def _freq_synch_peak(self, temp, msol, fedd):
"""Mahadevan 1996 Eq. 24
"""
nu_p = S1 * self.s2 * np.sqrt(fedd/msol) * np.square(temp) * np.power(self.rmin, -1.25)
return nu_p
def _synch_peak(self, fedd, msol, temp):
f_p = self._freq_synch_peak(temp, msol, fedd)
l_p = np.power(S1 * self.s2, 3) * S3 * np.power(self.rmin, -1.75) * np.sqrt(msol)
l_p *= np.power(fedd, 1.5) * np.power(temp, 7) / f_p
return f_p, l_p
class Fast_Mahadevan96_Array:
def __init__(self, mass, fedd, rmin, rmax, temp_e=None, interp=None):
"""
"""
self.mass = mass
# Mass in units of solar=masses
self.msol = mass/MSOL
self.fedd = fedd
self.rmin = rmin
self.rmax = rmax
if temp_e is None:
if interp is None:
interp = get_interp()
args = [mass, fedd, rmin, rmax]
shp = np.shape(args[0])
if not np.all([shp == np.shape(aa) for aa in args]):
all_shps = [np.shape(aa) for aa in args]
print("all shapes = ", all_shps)
raise ValueError("Shape mismatch!")
args = [aa.flatten() for aa in args]
args = np.array(args).T
temp_e = interp(args)
temp_e = temp_e.reshape(shp)
assert np.shape(temp_e) == np.shape(mass), "Output shape mismatch!"
self.temp_e = temp_e
xm_e = spectra.xm_from_te(temp_e, self.msol, fedd)
self.s2 = 1.19e-13 * xm_e
theta_e = radiation.dimensionless_temperature_theta(temp_e, MELC)
# Eq. 31
tau_es = 23.87 * fedd * (0.3 / ALPHA_VISC) * (0.5 / C1) * np.sqrt(3/rmin)
# Eq. 32
mean_amp_a = 1.0 + 4.0 * theta_e + 16*np.square(theta_e)
# Eq. 34
self.alpha_crit = - np.log(tau_es) / np.log(mean_amp_a)
return
def spectrum(self, freqs):
synch = self._calc_spectrum_synch(freqs)
brems = self._calc_spectrum_brems(freqs)
compt = self._calc_spectrum_compt(freqs)
spectrum = synch + brems + compt
return spectrum
def _calc_spectrum_synch(self, freqs):
"""Mahadevan 1996 - Eq. 25
Cutoff above peak frequency (i.e. ignore exponential portion).
Ignore low-frequency transition to steeper (22/13 slope) from rmax.
"""
msol = self.msol
fedd = self.fedd
scalar = np.isscalar(freqs)
freqs = np.atleast_1d(freqs)
lnu = S3 * np.power(S1*self.s2, 1.6)
# lnu *= np.power(msol, 1.2) * np.power(fedd, 0.8)
# lnu *= np.power(self.temp_e, 4.2) * np.power(freqs, 0.4)
lnu = lnu * np.power(msol, 1.2) * np.power(fedd, 0.8)
lnu = lnu * np.power(self.temp_e, 4.2) * np.power(freqs, 0.4)
nu_p = self._freq_synch_peak(self.temp_e, msol, fedd)
lnu[freqs > nu_p] = 0.0
if scalar:
lnu = np.squeeze(lnu)
return lnu
def _calc_spectrum_brems(self, freqs):
"""Mahadevan 1996 - Eq. 30
"""
msol = self.msol
fedd = self.fedd
temp = self.temp_e
const = 2.29e24 # erg/s/Hz
scalar = np.isscalar(freqs)
freqs = np.atleast_1d(freqs)
t1 = np.log(self.rmax/self.rmin) / np.square(ALPHA_VISC * C1)
t2 = np.exp(-H_PLNK*freqs / (K_BLTZ * temp)) * msol * np.square(fedd) / temp
fe = radiation._brems_fit_func_f(temp)
lbrems = const * t1 * fe * t2
if scalar:
lbrems = np.squeeze(lbrems)
return lbrems
def _calc_spectrum_compt(self, freqs):
"""Compton Scattering spectrum from upscattering of Synchrotron photons.
Mahadevan 1996 - Eq. 38
"""
fedd = self.fedd
temp = self.temp_e
scalar = np.isscalar(freqs)
freqs = np.atleast_1d(freqs)
f_p, l_p = self._synch_peak(fedd, self.msol, temp)
lsp = np.power(freqs/f_p, -self.alpha_crit) * l_p
lsp[freqs < f_p] = 0.0
# See Eq. 35
max_freq = 3*K_BLTZ*temp/H_PLNK
lsp[freqs > max_freq] = 0.0
if scalar:
lsp = np.squeeze(lsp)
return lsp
def _freq_synch_peak(self, temp, msol, fedd):
"""Mahadevan 1996 Eq. 24
"""
nu_p = S1 * self.s2 * np.sqrt(fedd/msol) * np.square(temp) * np.power(self.rmin, -1.25)
return nu_p
def _synch_peak(self, fedd, msol, temp):
f_p = self._freq_synch_peak(temp, msol, fedd)
l_p = np.power(S1 * self.s2, 3) * S3 * np.power(self.rmin, -1.75) * np.sqrt(msol)
l_p *= np.power(fedd, 1.5) * np.power(temp, 7) / f_p
return f_p, l_p
if __name__ == "__main__":
main()
| [
"numpy.product",
"zcode.plot.colormap",
"numpy.log10",
"numpy.sqrt",
"numpy.log",
"zcode.math.stats_str",
"numpy.argsort",
"numpy.array",
"logging.log",
"numpy.count_nonzero",
"numpy.isfinite",
"zcode.inout.get_file_size",
"os.path.exists",
"numpy.mean",
"numpy.isscalar",
"scipy.interp... | [((384, 442), 'numpy.seterr', 'np.seterr', ([], {'divide': '"""ignore"""', 'invalid': '"""ignore"""', 'over': '"""raise"""'}), "(divide='ignore', invalid='ignore', over='raise')\n", (393, 442), True, 'import numpy as np\n'), ((1018, 1047), 'numpy.sqrt', 'np.sqrt', (['(C3 / C1 / ALPHA_VISC)'], {}), '(C3 / C1 / ALPHA_VISC)\n', (1025, 1047), True, 'import numpy as np\n'), ((1281, 1302), 'os.path.exists', 'os.path.exists', (['fname'], {}), '(fname)\n', (1295, 1302), False, 'import os\n'), ((1810, 1841), 'numpy.mean', 'np.mean', (['grid_temps[grid_valid]'], {}), '(grid_temps[grid_valid])\n', (1817, 1841), True, 'import numpy as np\n'), ((1961, 1981), 'numpy.log10', 'np.log10', (['grid_temps'], {}), '(grid_temps)\n', (1969, 1981), True, 'import numpy as np\n'), ((1998, 2054), 'scipy.interpolate.RegularGridInterpolator', 'sp.interpolate.RegularGridInterpolator', (['mesh', 'grid_temps'], {}), '(mesh, grid_temps)\n', (2036, 2054), True, 'import scipy as sp\n'), ((2554, 2584), 'os.path.join', 'os.path.join', (['PATH_DATA', 'fname'], {}), '(PATH_DATA, fname)\n', (2566, 2584), False, 'import os\n'), ((2680, 2702), 'os.path.abspath', 'os.path.abspath', (['fname'], {}), '(fname)\n', (2695, 2702), False, 'import os\n'), ((3449, 3471), 'os.path.abspath', 'os.path.abspath', (['fname'], {}), '(fname)\n', (3464, 3471), False, 'import os\n'), ((4253, 4270), 'numpy.product', 'np.product', (['shape'], {}), '(shape)\n', (4263, 4270), True, 'import numpy as np\n'), ((4288, 4303), 'numpy.zeros', 'np.zeros', (['shape'], {}), '(shape)\n', (4296, 4303), True, 'import numpy as np\n'), ((4321, 4347), 'numpy.ones', 'np.ones', (['shape'], {'dtype': 'bool'}), '(shape, dtype=bool)\n', (4328, 4347), True, 'import numpy as np\n'), ((4371, 4394), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4392, 4394), False, 'import datetime\n'), ((4738, 4761), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4759, 4761), False, 'import datetime\n'), 
((4837, 4857), 'numpy.isnan', 'np.isnan', (['grid_temps'], {}), '(grid_temps)\n', (4845, 4857), True, 'import numpy as np\n'), ((4875, 4900), 'numpy.nan_to_num', 'np.nan_to_num', (['grid_temps'], {}), '(grid_temps)\n', (4888, 4900), True, 'import numpy as np\n'), ((9339, 9358), 'numpy.copy', 'np.copy', (['grid_temps'], {}), '(grid_temps)\n', (9346, 9358), True, 'import numpy as np\n'), ((9624, 9644), 'tqdm.tqdm', 'tqdm.tqdm', (['bads_inds'], {}), '(bads_inds)\n', (9633, 9644), False, 'import tqdm\n'), ((11798, 11829), 'zcode.math.minmax', 'zmath.minmax', (['temps'], {'filter': '""">"""'}), "(temps, filter='>')\n", (11810, 11829), True, 'import zcode.math as zmath\n'), ((11841, 11872), 'zcode.plot.colormap', 'zplot.colormap', (['extr', '"""viridis"""'], {}), "(extr, 'viridis')\n", (11855, 11872), True, 'import zcode.plot as zplot\n'), ((11955, 12007), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '[14, 14]', 'nrows': 'num', 'ncols': 'num'}), '(figsize=[14, 14], nrows=num, ncols=num)\n', (11967, 12007), True, 'import matplotlib.pyplot as plt\n'), ((12012, 12055), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': '(0.4)', 'wspace': '(0.4)'}), '(hspace=0.4, wspace=0.4)\n', (12031, 12055), True, 'import matplotlib.pyplot as plt\n'), ((12112, 12132), 'numpy.ndenumerate', 'np.ndenumerate', (['axes'], {}), '(axes)\n', (12126, 12132), True, 'import numpy as np\n'), ((881, 902), 'numpy.square', 'np.square', (['ALPHA_VISC'], {}), '(ALPHA_VISC)\n', (890, 902), True, 'import numpy as np\n'), ((995, 1015), 'numpy.sqrt', 'np.sqrt', (['(1 - BETA_GP)'], {}), '(1 - BETA_GP)\n', (1002, 1015), True, 'import numpy as np\n'), ((1915, 1927), 'numpy.log10', 'np.log10', (['gg'], {}), '(gg)\n', (1923, 1927), True, 'import numpy as np\n'), ((2712, 2733), 'h5py.File', 'h5py.File', (['fname', '"""w"""'], {}), "(fname, 'w')\n", (2721, 2733), False, 'import h5py\n'), ((3483, 3504), 'os.path.exists', 'os.path.exists', (['fname'], {}), '(fname)\n', (3497, 
3504), False, 'import os\n'), ((3584, 3605), 'h5py.File', 'h5py.File', (['fname', '"""r"""'], {}), "(fname, 'r')\n", (3593, 3605), False, 'import h5py\n'), ((4420, 4438), 'numpy.ndindex', 'np.ndindex', (['*shape'], {}), '(*shape)\n', (4430, 4438), True, 'import numpy as np\n'), ((4925, 4952), 'numpy.isclose', 'np.isclose', (['grid_temps', '(0.0)'], {}), '(grid_temps, 0.0)\n', (4935, 4952), True, 'import numpy as np\n'), ((7410, 7430), 'numpy.power', 'np.power', (['(10.0)', 'logt'], {}), '(10.0, logt)\n', (7418, 7430), True, 'import numpy as np\n'), ((8655, 8675), 'numpy.fabs', 'np.fabs', (['(heat - cool)'], {}), '(heat - cool)\n', (8662, 8675), True, 'import numpy as np\n'), ((9383, 9410), 'numpy.isclose', 'np.isclose', (['grid_temps', '(0.0)'], {}), '(grid_temps, 0.0)\n', (9393, 9410), True, 'import numpy as np\n'), ((17426, 17444), 'numpy.isscalar', 'np.isscalar', (['freqs'], {}), '(freqs)\n', (17437, 17444), True, 'import numpy as np\n'), ((17461, 17481), 'numpy.atleast_1d', 'np.atleast_1d', (['freqs'], {}), '(freqs)\n', (17474, 17481), True, 'import numpy as np\n'), ((18041, 18059), 'numpy.isscalar', 'np.isscalar', (['freqs'], {}), '(freqs)\n', (18052, 18059), True, 'import numpy as np\n'), ((18076, 18096), 'numpy.atleast_1d', 'np.atleast_1d', (['freqs'], {}), '(freqs)\n', (18089, 18096), True, 'import numpy as np\n'), ((18660, 18678), 'numpy.isscalar', 'np.isscalar', (['freqs'], {}), '(freqs)\n', (18671, 18678), True, 'import numpy as np\n'), ((18695, 18715), 'numpy.atleast_1d', 'np.atleast_1d', (['freqs'], {}), '(freqs)\n', (18708, 18715), True, 'import numpy as np\n'), ((21493, 21511), 'numpy.isscalar', 'np.isscalar', (['freqs'], {}), '(freqs)\n', (21504, 21511), True, 'import numpy as np\n'), ((21528, 21548), 'numpy.atleast_1d', 'np.atleast_1d', (['freqs'], {}), '(freqs)\n', (21541, 21548), True, 'import numpy as np\n'), ((22244, 22262), 'numpy.isscalar', 'np.isscalar', (['freqs'], {}), '(freqs)\n', (22255, 22262), True, 'import numpy as np\n'), ((22279, 
22299), 'numpy.atleast_1d', 'np.atleast_1d', (['freqs'], {}), '(freqs)\n', (22292, 22299), True, 'import numpy as np\n'), ((22863, 22881), 'numpy.isscalar', 'np.isscalar', (['freqs'], {}), '(freqs)\n', (22874, 22881), True, 'import numpy as np\n'), ((22898, 22918), 'numpy.atleast_1d', 'np.atleast_1d', (['freqs'], {}), '(freqs)\n', (22911, 22918), True, 'import numpy as np\n'), ((3169, 3193), 'zcode.inout.get_file_size', 'zio.get_file_size', (['fname'], {}), '(fname)\n', (3186, 3193), True, 'import zcode.inout as zio\n'), ((4020, 4039), 'numpy.array', 'np.array', (['MASS_EXTR'], {}), '(MASS_EXTR)\n', (4028, 4039), True, 'import numpy as np\n'), ((4999, 5043), 'zcode.math.frac_str', 'zmath.frac_str', (['(grid_temps[grid_valid] > 0.0)'], {}), '(grid_temps[grid_valid] > 0.0)\n', (5013, 5043), True, 'import zcode.math as zmath\n'), ((5090, 5114), 'zcode.math.frac_str', 'zmath.frac_str', (['bads_nan'], {}), '(bads_nan)\n', (5104, 5114), True, 'import zcode.math as zmath\n'), ((5161, 5181), 'zcode.math.frac_str', 'zmath.frac_str', (['bads'], {}), '(bads)\n', (5175, 5181), True, 'import zcode.math as zmath\n'), ((5962, 5979), 'numpy.sqrt', 'np.sqrt', (['(3 / rmin)'], {}), '(3 / rmin)\n', (5969, 5979), True, 'import numpy as np\n'), ((6083, 6101), 'numpy.log', 'np.log', (['mean_amp_a'], {}), '(mean_amp_a)\n', (6089, 6101), True, 'import numpy as np\n'), ((6546, 6567), 'numpy.power', 'np.power', (['rmin', '(-1.25)'], {}), '(rmin, -1.25)\n', (6554, 6567), True, 'import numpy as np\n'), ((6645, 6658), 'numpy.sqrt', 'np.sqrt', (['msol'], {}), '(msol)\n', (6652, 6658), True, 'import numpy as np\n'), ((6803, 6830), 'numpy.power', 'np.power', (['(alpha / 0.3)', '(-1.5)'], {}), '(alpha / 0.3, -1.5)\n', (6811, 6830), True, 'import numpy as np\n'), ((6852, 6883), 'numpy.power', 'np.power', (['((1 - beta) / 0.5)', '(1.5)'], {}), '((1 - beta) / 0.5, 1.5)\n', (6860, 6883), True, 'import numpy as np\n'), ((6884, 6908), 'numpy.power', 'np.power', (['(C1 / 0.5)', '(-1.5)'], {}), '(C1 / 
0.5, -1.5)\n', (6892, 6908), True, 'import numpy as np\n'), ((7027, 7048), 'numpy.square', 'np.square', (['(alpha * C1)'], {}), '(alpha * C1)\n', (7036, 7048), True, 'import numpy as np\n'), ((7246, 7336), 'numpy.power', 'np.power', (['(62000000.0 * (temp / 1000000000.0) / (f_p / 1000000000000.0))', '(1 - alpha_crit)'], {}), '(62000000.0 * (temp / 1000000000.0) / (f_p / 1000000000000.0), 1 -\n alpha_crit)\n', (7254, 7336), True, 'import numpy as np\n'), ((7747, 7767), 'numpy.power', 'np.power', (['(10.0)', 'logt'], {}), '(10.0, logt)\n', (7755, 7767), True, 'import numpy as np\n'), ((8347, 8403), 'logging.log', 'logging.log', (['lvl', '"""FAILED to find electron temperature!"""'], {}), "(lvl, 'FAILED to find electron temperature!')\n", (8358, 8403), False, 'import logging\n'), ((8490, 8511), 'logging.log', 'logging.log', (['lvl', 'err'], {}), '(lvl, err)\n', (8501, 8511), False, 'import logging\n'), ((9203, 9224), 'logging.log', 'logging.log', (['lvl', 'err'], {}), '(lvl, err)\n', (9214, 9224), False, 'import logging\n'), ((9499, 9519), 'zcode.math.frac_str', 'zmath.frac_str', (['bads'], {}), '(bads)\n', (9513, 9519), True, 'import zcode.math as zmath\n'), ((9591, 9605), 'numpy.where', 'np.where', (['bads'], {}), '(bads)\n', (9599, 9605), True, 'import numpy as np\n'), ((10309, 10333), 'zcode.math.stats_str', 'zmath.stats_str', (['num_nbs'], {}), '(num_nbs)\n', (10324, 10333), True, 'import zcode.math as zmath\n'), ((10384, 10415), 'zcode.math.stats_str', 'zmath.stats_str', (['good_neighbors'], {}), '(good_neighbors)\n', (10399, 10415), True, 'import zcode.math as zmath\n'), ((10496, 10522), 'numpy.argsort', 'np.argsort', (['good_neighbors'], {}), '(good_neighbors)\n', (10506, 10522), True, 'import numpy as np\n'), ((10683, 10704), 'numpy.zeros', 'np.zeros', (['num_nbs[ii]'], {}), '(num_nbs[ii])\n', (10691, 10704), True, 'import numpy as np\n'), ((10822, 10846), 'numpy.count_nonzero', 'np.count_nonzero', (['values'], {}), '(values)\n', (10838, 10846), True, 'import 
numpy as np\n'), ((11518, 11540), 'zcode.math.stats_str', 'zmath.stats_str', (['goods'], {}), '(goods)\n', (11533, 11540), True, 'import zcode.math as zmath\n'), ((11593, 11632), 'zcode.math.stats_str', 'zmath.stats_str', (['grid_temps[grid_valid]'], {}), '(grid_temps[grid_valid])\n', (11608, 11632), True, 'import zcode.math as zmath\n'), ((13824, 13858), 'numpy.meshgrid', 'np.meshgrid', (['xx', 'yy'], {'indexing': '"""ij"""'}), "(xx, yy, indexing='ij')\n", (13835, 13858), True, 'import numpy as np\n'), ((16680, 16697), 'numpy.sqrt', 'np.sqrt', (['(3 / rmin)'], {}), '(3 / rmin)\n', (16687, 16697), True, 'import numpy as np\n'), ((16840, 16858), 'numpy.log', 'np.log', (['mean_amp_a'], {}), '(mean_amp_a)\n', (16846, 16858), True, 'import numpy as np\n'), ((17502, 17529), 'numpy.power', 'np.power', (['(S1 * self.s2)', '(1.6)'], {}), '(S1 * self.s2, 1.6)\n', (17510, 17529), True, 'import numpy as np\n'), ((17543, 17562), 'numpy.power', 'np.power', (['msol', '(1.2)'], {}), '(msol, 1.2)\n', (17551, 17562), True, 'import numpy as np\n'), ((17565, 17584), 'numpy.power', 'np.power', (['fedd', '(0.8)'], {}), '(fedd, 0.8)\n', (17573, 17584), True, 'import numpy as np\n'), ((17600, 17626), 'numpy.power', 'np.power', (['self.temp_e', '(4.2)'], {}), '(self.temp_e, 4.2)\n', (17608, 17626), True, 'import numpy as np\n'), ((17629, 17649), 'numpy.power', 'np.power', (['freqs', '(0.4)'], {}), '(freqs, 0.4)\n', (17637, 17649), True, 'import numpy as np\n'), ((17782, 17797), 'numpy.squeeze', 'np.squeeze', (['lnu'], {}), '(lnu)\n', (17792, 17797), True, 'import numpy as np\n'), ((18111, 18140), 'numpy.log', 'np.log', (['(self.rmax / self.rmin)'], {}), '(self.rmax / self.rmin)\n', (18117, 18140), True, 'import numpy as np\n'), ((18141, 18167), 'numpy.square', 'np.square', (['(ALPHA_VISC * C1)'], {}), '(ALPHA_VISC * C1)\n', (18150, 18167), True, 'import numpy as np\n'), ((18378, 18396), 'numpy.squeeze', 'np.squeeze', (['lbrems'], {}), '(lbrems)\n', (18388, 18396), True, 'import numpy as 
np\n'), ((18790, 18829), 'numpy.power', 'np.power', (['(freqs / f_p)', '(-self.alpha_crit)'], {}), '(freqs / f_p, -self.alpha_crit)\n', (18798, 18829), True, 'import numpy as np\n'), ((19000, 19015), 'numpy.squeeze', 'np.squeeze', (['lsp'], {}), '(lsp)\n', (19010, 19015), True, 'import numpy as np\n'), ((19201, 19227), 'numpy.power', 'np.power', (['self.rmin', '(-1.25)'], {}), '(self.rmin, -1.25)\n', (19209, 19227), True, 'import numpy as np\n'), ((19424, 19437), 'numpy.sqrt', 'np.sqrt', (['msol'], {}), '(msol)\n', (19431, 19437), True, 'import numpy as np\n'), ((19985, 20002), 'numpy.shape', 'np.shape', (['args[0]'], {}), '(args[0])\n', (19993, 20002), True, 'import numpy as np\n'), ((20747, 20764), 'numpy.sqrt', 'np.sqrt', (['(3 / rmin)'], {}), '(3 / rmin)\n', (20754, 20764), True, 'import numpy as np\n'), ((20907, 20925), 'numpy.log', 'np.log', (['mean_amp_a'], {}), '(mean_amp_a)\n', (20913, 20925), True, 'import numpy as np\n'), ((21569, 21596), 'numpy.power', 'np.power', (['(S1 * self.s2)', '(1.6)'], {}), '(S1 * self.s2, 1.6)\n', (21577, 21596), True, 'import numpy as np\n'), ((21763, 21782), 'numpy.power', 'np.power', (['fedd', '(0.8)'], {}), '(fedd, 0.8)\n', (21771, 21782), True, 'import numpy as np\n'), ((21832, 21852), 'numpy.power', 'np.power', (['freqs', '(0.4)'], {}), '(freqs, 0.4)\n', (21840, 21852), True, 'import numpy as np\n'), ((21985, 22000), 'numpy.squeeze', 'np.squeeze', (['lnu'], {}), '(lnu)\n', (21995, 22000), True, 'import numpy as np\n'), ((22314, 22343), 'numpy.log', 'np.log', (['(self.rmax / self.rmin)'], {}), '(self.rmax / self.rmin)\n', (22320, 22343), True, 'import numpy as np\n'), ((22344, 22370), 'numpy.square', 'np.square', (['(ALPHA_VISC * C1)'], {}), '(ALPHA_VISC * C1)\n', (22353, 22370), True, 'import numpy as np\n'), ((22581, 22599), 'numpy.squeeze', 'np.squeeze', (['lbrems'], {}), '(lbrems)\n', (22591, 22599), True, 'import numpy as np\n'), ((22993, 23032), 'numpy.power', 'np.power', (['(freqs / f_p)', '(-self.alpha_crit)'], 
{}), '(freqs / f_p, -self.alpha_crit)\n', (23001, 23032), True, 'import numpy as np\n'), ((23203, 23218), 'numpy.squeeze', 'np.squeeze', (['lsp'], {}), '(lsp)\n', (23213, 23218), True, 'import numpy as np\n'), ((23404, 23430), 'numpy.power', 'np.power', (['self.rmin', '(-1.25)'], {}), '(self.rmin, -1.25)\n', (23412, 23430), True, 'import numpy as np\n'), ((23627, 23640), 'numpy.sqrt', 'np.sqrt', (['msol'], {}), '(msol)\n', (23634, 23640), True, 'import numpy as np\n'), ((796, 841), 'numpy.square', 'np.square', (['(ALPHA_VISC / (5.0 + 2 * EPS_PRIME))'], {}), '(ALPHA_VISC / (5.0 + 2 * EPS_PRIME))\n', (805, 841), True, 'import numpy as np\n'), ((4154, 4168), 'numpy.log10', 'np.log10', (['extr'], {}), '(extr)\n', (4162, 4168), True, 'import numpy as np\n'), ((6024, 6042), 'numpy.square', 'np.square', (['theta_e'], {}), '(theta_e)\n', (6033, 6042), True, 'import numpy as np\n'), ((6066, 6080), 'numpy.log', 'np.log', (['tau_es'], {}), '(tau_es)\n', (6072, 6080), True, 'import numpy as np\n'), ((6294, 6315), 'numpy.square', 'np.square', (['(alpha * C1)'], {}), '(alpha * C1)\n', (6303, 6315), True, 'import numpy as np\n'), ((6528, 6543), 'numpy.square', 'np.square', (['temp'], {}), '(temp)\n', (6537, 6543), True, 'import numpy as np\n'), ((6621, 6642), 'numpy.power', 'np.power', (['rmin', '(-1.75)'], {}), '(rmin, -1.75)\n', (6629, 6642), True, 'import numpy as np\n'), ((6685, 6704), 'numpy.power', 'np.power', (['fedd', '(1.5)'], {}), '(fedd, 1.5)\n', (6693, 6704), True, 'import numpy as np\n'), ((6707, 6724), 'numpy.power', 'np.power', (['temp', '(7)'], {}), '(temp, 7)\n', (6715, 6724), True, 'import numpy as np\n'), ((6780, 6802), 'numpy.power', 'np.power', (['(xm / 1000)', '(3)'], {}), '(xm / 1000, 3)\n', (6788, 6802), True, 'import numpy as np\n'), ((7007, 7026), 'numpy.log', 'np.log', (['(rmax / rmin)'], {}), '(rmax / rmin)\n', (7013, 7026), True, 'import numpy as np\n'), ((7689, 7701), 'numpy.log10', 'np.log10', (['t0'], {}), '(t0)\n', (7697, 7701), True, 'import 
numpy as np\n'), ((12843, 12883), 'zcode.math.spacing', 'zmath.spacing', (['grid[ii]', '"""log"""', 'num_test'], {}), "(grid[ii], 'log', num_test)\n", (12856, 12883), True, 'import zcode.math as zmath\n'), ((13943, 13971), 'numpy.count_nonzero', 'np.count_nonzero', (['(vals > 0.0)'], {}), '(vals > 0.0)\n', (13959, 13971), True, 'import numpy as np\n'), ((16759, 16777), 'numpy.square', 'np.square', (['theta_e'], {}), '(theta_e)\n', (16768, 16777), True, 'import numpy as np\n'), ((16823, 16837), 'numpy.log', 'np.log', (['tau_es'], {}), '(tau_es)\n', (16829, 16837), True, 'import numpy as np\n'), ((18230, 18245), 'numpy.square', 'np.square', (['fedd'], {}), '(fedd)\n', (18239, 18245), True, 'import numpy as np\n'), ((19183, 19198), 'numpy.square', 'np.square', (['temp'], {}), '(temp)\n', (19192, 19198), True, 'import numpy as np\n'), ((19395, 19421), 'numpy.power', 'np.power', (['self.rmin', '(-1.75)'], {}), '(self.rmin, -1.75)\n', (19403, 19421), True, 'import numpy as np\n'), ((19453, 19472), 'numpy.power', 'np.power', (['fedd', '(1.5)'], {}), '(fedd, 1.5)\n', (19461, 19472), True, 'import numpy as np\n'), ((19475, 19492), 'numpy.power', 'np.power', (['temp', '(7)'], {}), '(temp, 7)\n', (19483, 19492), True, 'import numpy as np\n'), ((20294, 20308), 'numpy.array', 'np.array', (['args'], {}), '(args)\n', (20302, 20308), True, 'import numpy as np\n'), ((20405, 20421), 'numpy.shape', 'np.shape', (['temp_e'], {}), '(temp_e)\n', (20413, 20421), True, 'import numpy as np\n'), ((20425, 20439), 'numpy.shape', 'np.shape', (['mass'], {}), '(mass)\n', (20433, 20439), True, 'import numpy as np\n'), ((20826, 20844), 'numpy.square', 'np.square', (['theta_e'], {}), '(theta_e)\n', (20835, 20844), True, 'import numpy as np\n'), ((20890, 20904), 'numpy.log', 'np.log', (['tau_es'], {}), '(tau_es)\n', (20896, 20904), True, 'import numpy as np\n'), ((21741, 21760), 'numpy.power', 'np.power', (['msol', '(1.2)'], {}), '(msol, 1.2)\n', (21749, 21760), True, 'import numpy as np\n'), 
((21803, 21829), 'numpy.power', 'np.power', (['self.temp_e', '(4.2)'], {}), '(self.temp_e, 4.2)\n', (21811, 21829), True, 'import numpy as np\n'), ((22433, 22448), 'numpy.square', 'np.square', (['fedd'], {}), '(fedd)\n', (22442, 22448), True, 'import numpy as np\n'), ((23386, 23401), 'numpy.square', 'np.square', (['temp'], {}), '(temp)\n', (23395, 23401), True, 'import numpy as np\n'), ((23598, 23624), 'numpy.power', 'np.power', (['self.rmin', '(-1.75)'], {}), '(self.rmin, -1.75)\n', (23606, 23624), True, 'import numpy as np\n'), ((23656, 23675), 'numpy.power', 'np.power', (['fedd', '(1.5)'], {}), '(fedd, 1.5)\n', (23664, 23675), True, 'import numpy as np\n'), ((23678, 23695), 'numpy.power', 'np.power', (['temp', '(7)'], {}), '(temp, 7)\n', (23686, 23695), True, 'import numpy as np\n'), ((2121, 2133), 'numpy.log10', 'np.log10', (['xx'], {}), '(xx)\n', (2129, 2133), True, 'import numpy as np\n'), ((6276, 6291), 'numpy.square', 'np.square', (['fedd'], {}), '(fedd)\n', (6285, 6291), True, 'import numpy as np\n'), ((6507, 6527), 'numpy.sqrt', 'np.sqrt', (['(fedd / msol)'], {}), '(fedd / msol)\n', (6514, 6527), True, 'import numpy as np\n'), ((6593, 6613), 'numpy.power', 'np.power', (['(S1 * s2)', '(3)'], {}), '(S1 * s2, 3)\n', (6601, 6613), True, 'import numpy as np\n'), ((10959, 10987), 'numpy.log10', 'np.log10', (['values[values > 0]'], {}), '(values[values > 0])\n', (10967, 10987), True, 'import numpy as np\n'), ((14047, 14077), 'zcode.math.minmax', 'zmath.minmax', (['vals'], {'filter': '""">"""'}), "(vals, filter='>')\n", (14059, 14077), True, 'import zcode.math as zmath\n'), ((18181, 18222), 'numpy.exp', 'np.exp', (['(-H_PLNK * freqs / (K_BLTZ * temp))'], {}), '(-H_PLNK * freqs / (K_BLTZ * temp))\n', (18187, 18222), True, 'import numpy as np\n'), ((19162, 19182), 'numpy.sqrt', 'np.sqrt', (['(fedd / msol)'], {}), '(fedd / msol)\n', (19169, 19182), True, 'import numpy as np\n'), ((19362, 19387), 'numpy.power', 'np.power', (['(S1 * self.s2)', '(3)'], {}), '(S1 * 
self.s2, 3)\n', (19370, 19387), True, 'import numpy as np\n'), ((20096, 20108), 'numpy.shape', 'np.shape', (['aa'], {}), '(aa)\n', (20104, 20108), True, 'import numpy as np\n'), ((22384, 22425), 'numpy.exp', 'np.exp', (['(-H_PLNK * freqs / (K_BLTZ * temp))'], {}), '(-H_PLNK * freqs / (K_BLTZ * temp))\n', (22390, 22425), True, 'import numpy as np\n'), ((23365, 23385), 'numpy.sqrt', 'np.sqrt', (['(fedd / msol)'], {}), '(fedd / msol)\n', (23372, 23385), True, 'import numpy as np\n'), ((23565, 23590), 'numpy.power', 'np.power', (['(S1 * self.s2)', '(3)'], {}), '(S1 * self.s2, 3)\n', (23573, 23590), True, 'import numpy as np\n'), ((2302, 2314), 'numpy.log10', 'np.log10', (['xx'], {}), '(xx)\n', (2310, 2314), True, 'import numpy as np\n'), ((12754, 12771), 'numpy.ones', 'np.ones', (['num_test'], {}), '(num_test)\n', (12761, 12771), True, 'import numpy as np\n'), ((15063, 15082), 'numpy.isclose', 'np.isclose', (['iv', '(0.0)'], {}), '(iv, 0.0)\n', (15073, 15082), True, 'import numpy as np\n'), ((2399, 2415), 'zcode.math.minmax', 'zmath.minmax', (['gg'], {}), '(gg)\n', (2411, 2415), True, 'import zcode.math as zmath\n'), ((12934, 12948), 'numpy.array', 'np.array', (['test'], {}), '(test)\n', (12942, 12948), True, 'import numpy as np\n'), ((15044, 15059), 'numpy.isfinite', 'np.isfinite', (['iv'], {}), '(iv)\n', (15055, 15059), True, 'import numpy as np\n'), ((20037, 20049), 'numpy.shape', 'np.shape', (['aa'], {}), '(aa)\n', (20045, 20049), True, 'import numpy as np\n'), ((14602, 14619), 'numpy.log10', 'np.log10', (['xx[idx]'], {}), '(xx[idx])\n', (14610, 14619), True, 'import numpy as np\n'), ((14683, 14700), 'numpy.log10', 'np.log10', (['yy[idx]'], {}), '(yy[idx])\n', (14691, 14700), True, 'import numpy as np\n')] |
from django import template
register = template.Library()
@register.simple_tag
def format_date_range(date_from, date_to, separator=" - ",
format_str="%B %d, %Y", year_f=", %Y", month_f="%B", date_f=" %d"):
""" Takes a start date, end date, separator and formatting strings and
returns a pretty date range string
"""
if (date_to and date_to != date_from):
from_format = to_format = format_str
if (date_from.year == date_to.year):
from_format = from_format.replace(year_f, '')
if (date_from.month == date_to.month):
to_format = to_format.replace(month_f, '')
return separator.join((date_from.strftime(from_format), date_to.strftime(to_format)))
else:
return date_from.strftime(format_str) | [
"django.template.Library"
] | [((40, 58), 'django.template.Library', 'template.Library', ([], {}), '()\n', (56, 58), False, 'from django import template\n')] |
#!/usr/bin/env python3
from library_api_util import *
import json
# This variable is to decide how many nearest libraries you will look for.
LATITUDE = 35.7
LONGTITUDE = 139.8
ISBN = '4577002086'
LIBRARY_API_SEARCH_NUM = 5
try:
with open('apikey.json', 'r') as f:
api_data = json.load(f)
library_api = LibraryAPI()
data = library_api.get(api_data, LATITUDE, LONGTITUDE, ISBN, LIBRARY_API_SEARCH_NUM)
print(data)
f = open('json/sample2.json', 'w')
f.write(json.dumps(data, ensure_ascii=False, indent=4))
f.close()
except json.decoder.JSONDecodeError as e:
print("JSON Decode Error: {}".format(e))
except:
print("Unknown Error: ", sys.exc_info()[0])
raise
| [
"json.load",
"json.dumps"
] | [((290, 302), 'json.load', 'json.load', (['f'], {}), '(f)\n', (299, 302), False, 'import json\n'), ((490, 536), 'json.dumps', 'json.dumps', (['data'], {'ensure_ascii': '(False)', 'indent': '(4)'}), '(data, ensure_ascii=False, indent=4)\n', (500, 536), False, 'import json\n')] |
#!/usr/bin/env python3
import argparse
import sys, os
import subprocess
from collections import defaultdict
import logging
import re
import math
sys.path.insert(0, os.path.sep.join([os.path.dirname(os.path.realpath(__file__)), "../PyLib"]))
import ctat_util
logging.basicConfig(stream=sys.stderr, level=logging.INFO)
logger = logging.getLogger(__name__)
def contains_homopolymer(side_window_size, window_seq, ref_base_nuc, edit_base_nuc):
homopolymer = False
window_seq = window_seq.upper()
ref_base_nuc = ref_base_nuc.upper()
edit_base_nuc = edit_base_nuc.upper()
# iterate of the sequence
# check if the side_window_size consecutive nucleotides are the edited nucleotide
# if true will set th variable homopolymer to TRUE
for k in range(len(window_seq)-side_window_size):
## check edit base
all_match_flag = True
for pos in range(side_window_size):
if window_seq[pos] != edit_base_nuc:
all_match_flag = False
if all_match_flag:
return True
## check ref base
all_match_flag = True
for pos in range(side_window_size):
if window_seq[pos] != ref_base_nuc:
all_match_flag = False
if all_match_flag:
return True
# no homopolymer found
return False
def compute_entropy(window_seq):
window_seq = window_seq.upper()
window_length = len(window_seq)
base_counter_dict = defaultdict(int)
for nucleotide in window_seq:
base_counter_dict[ nucleotide ] += 1
entropy = 0
for nucleotide, count in base_counter_dict.items():
p_val = count / window_length
entropy += -1 * p_val * math.log2(p_val)
entropy = "{:0.3f}".format(entropy)
return entropy
def main():
    """Annotate a VCF with homopolymer and sequence-entropy INFO fields.

    Pipeline: (1) write a BED file of windows centered on each variant,
    (2) extract the window sequences with bedtools' fastaFromBed,
    (3) flag homopolymers and compute entropy per window,
    (4) rewrite the VCF with Homopolymer/Entropy INFO annotations.
    """
    #add options to inputs
    parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter,
        description = "Adds repeat feature annotations to vcf file.\n")
    parser.add_argument('--input_vcf', required=True, help="input vcf file")
    parser.add_argument('--ref_genome_fa', required=True, help='reference file')
    parser.add_argument('--output_vcf', required=True,
        help="output vcf file including annotation for distance to splice neighbor")
    parser.add_argument("--side_window_size", type=int, default=3,
        help="a window is centered at the variant position with side_window_size on each side, so window length = (2 * side_window_size) + 1 ")
    parser.add_argument('--debug', default=False, action='store_true',
        help='debug mode, retains temporary intermediate files')
    parser.add_argument("--tmpdir", default="/tmp", help="tmp directory")
    args = parser.parse_args()
    input_vcf_file = args.input_vcf
    genome_fa_file = args.ref_genome_fa
    side_window_size = args.side_window_size
    output_vcf_file = args.output_vcf
    DEBUG_MODE = args.debug
    if DEBUG_MODE:
        logger.setLevel('DEBUG')
    tmpdir = args.tmpdir
    # 1) Create the bed file containing the windowed variants
    #------------------------
    windowed_variants_bed_file = os.path.join(tmpdir, os.path.basename(input_vcf_file) + ".windowed_{}_variants.bed".format(2*side_window_size+1))
    with open(windowed_variants_bed_file, 'w') as ofh:
        with ctat_util.open_file_for_reading(input_vcf_file) as fh:
            for line in fh:
                # Skip VCF header lines.
                if re.match("#", line):
                    continue
                vals = line.split("\t")
                chr_val = vals[0]
                chr_coord = int(vals[1])
                ref_base_nuc = vals[3]
                edit_base_nuc = vals[4]
                # Token carries chrom:pos:ref:alt so the window can be mapped
                # back to its variant after fastaFromBed.
                chrpostoken = "{}:{}:{}:{}".format(chr_val, chr_coord, ref_base_nuc, edit_base_nuc)
                # BED is 0-based half-open; VCF positions are 1-based.
                ofh.write("\t".join([chr_val,
                                     str(chr_coord - side_window_size - 1),
                                     str(chr_coord + side_window_size),
                                     chrpostoken]) + "\n")
    ## get fasta coordinates for feature
    window_seqs_file = windowed_variants_bed_file + ".seqs"
    cmd = "fastaFromBed -name -tab -fi {} -bed {} -fo {}".format(genome_fa_file, windowed_variants_bed_file, window_seqs_file)
    logger.info("CMD: {}".format(cmd))
    subprocess.check_call(cmd, shell=True)
    # 2) Scan each extracted window for homopolymers and compute its entropy.
    chrpos_homopolymer_set = set()
    chrpos_entropy_dict = dict()
    with open(window_seqs_file) as fh:
        for line in fh:
            line = line.rstrip()
            vals = line.split("\t")
            chrpostoken = vals[0]
            window_seq = vals[1]
            # NOTE(review): the token is chrom:pos:ref:alt, so index 2 here is
            # actually the REF base despite being bound to edit_base_nuc —
            # and ref_base_nuc below is the stale value left over from the
            # last record of the BED-writing loop. Looks like a bug; confirm.
            (chrom, position, edit_base_nuc) = chrpostoken.split(":")[0:3]
            chrpos = "{}:{}".format(chrom, position)
            # set the constant homopolymer to false for this line
            homopolymer_flag = contains_homopolymer(side_window_size, window_seq, ref_base_nuc, edit_base_nuc)
            if homopolymer_flag:
                chrpos_homopolymer_set.add(chrpos)
                #print("{}\t{}\tHOMOP".format(window_seq, chrpostoken))
            entropy_val = compute_entropy(window_seq)
            chrpos_entropy_dict[chrpos] = entropy_val
            if DEBUG_MODE:
                logger.debug("\t".join([chrpostoken, window_seq, "entropy:{}".format(entropy_val), "homopolymer:{}".format(homopolymer_flag)]))
    ###############################################
    ## Add feature annotations to original vcf file
    logger.info("Adding entropy and homopolymer annotations")
    ## make output a vcf formatted file:
    with open(output_vcf_file, 'w') as ofh:
        with ctat_util.open_file_for_reading(input_vcf_file) as fh:
            for line in fh:
                if line[0] == "#":
                    if re.match("#CHROM\t", line):
                        # add header info line for the repeat annotation type
                        ofh.write("##INFO=<ID=Homopolymer,Number=1,Type=Integer,Description=\"Variant is located in or near a homopolymer sequence\">\n")
                        ofh.write("##INFO=<ID=Entropy,Number=1,Type=Float,Description=\"Entropy for sequence in window of length {} centered at the variant position\">\n".format(2*side_window_size+1))
                    ofh.write(line)
                else:
                    line = line.rstrip()
                    vals = line.split("\t")
                    chrpos = "{}:{}".format(vals[0], vals[1])
                    # Append the computed annotations to the INFO column.
                    if chrpos in chrpos_homopolymer_set:
                        vals[7] += ";Homopolymer=1"
                    entropy_val = chrpos_entropy_dict[chrpos]
                    vals[7] += ";Entropy={}".format(entropy_val)
                    ofh.write("\t".join(vals) + "\n")
    # cleanup
    # NOTE(review): this branch is a no-op — the temporary BED/seq files are
    # never removed even outside debug mode; presumably deletion was intended.
    if not DEBUG_MODE:
        pass
    sys.exit(0)
if __name__ == "__main__":
    # Script entry point.
    main()
| [
"logging.basicConfig",
"logging.getLogger",
"argparse.ArgumentParser",
"subprocess.check_call",
"math.log2",
"re.match",
"os.path.realpath",
"collections.defaultdict",
"os.path.basename",
"ctat_util.open_file_for_reading",
"sys.exit"
] | [((263, 321), 'logging.basicConfig', 'logging.basicConfig', ([], {'stream': 'sys.stderr', 'level': 'logging.INFO'}), '(stream=sys.stderr, level=logging.INFO)\n', (282, 321), False, 'import logging\n'), ((331, 358), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (348, 358), False, 'import logging\n'), ((1504, 1520), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (1515, 1520), False, 'from collections import defaultdict\n'), ((1888, 2027), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.RawTextHelpFormatter', 'description': '"""Adds repeat feature annotations to vcf file.\n"""'}), '(formatter_class=argparse.RawTextHelpFormatter,\n description="""Adds repeat feature annotations to vcf file.\n""")\n', (1911, 2027), False, 'import argparse\n'), ((4440, 4478), 'subprocess.check_call', 'subprocess.check_call', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (4461, 4478), False, 'import subprocess\n'), ((7029, 7040), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (7037, 7040), False, 'import sys, os\n'), ((1747, 1763), 'math.log2', 'math.log2', (['p_val'], {}), '(p_val)\n', (1756, 1763), False, 'import math\n'), ((3274, 3306), 'os.path.basename', 'os.path.basename', (['input_vcf_file'], {}), '(input_vcf_file)\n', (3290, 3306), False, 'import sys, os\n'), ((3435, 3482), 'ctat_util.open_file_for_reading', 'ctat_util.open_file_for_reading', (['input_vcf_file'], {}), '(input_vcf_file)\n', (3466, 3482), False, 'import ctat_util\n'), ((5801, 5848), 'ctat_util.open_file_for_reading', 'ctat_util.open_file_for_reading', (['input_vcf_file'], {}), '(input_vcf_file)\n', (5832, 5848), False, 'import ctat_util\n'), ((201, 227), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (217, 227), False, 'import sys, os\n'), ((3537, 3556), 're.match', 're.match', (['"""#"""', 'line'], {}), "('#', line)\n", (3545, 3556), False, 'import re\n'), ((5963, 5989), 
're.match', 're.match', (['"""#CHROM\t"""', 'line'], {}), "('#CHROM\\t', line)\n", (5971, 5989), False, 'import re\n')] |
import os
from abc import ABC, abstractmethod
from typing import List, Optional
import feedparser as fp
import requests
from feedgen.feed import FeedGenerator
class Feed(ABC):
    """Abstract Atom feed builder wrapping a feedgen FeedGenerator.

    Subclasses supply ``feed_url`` and ``transform_and_add`` to adapt a
    parsed upstream feed entry into an entry of the generated feed.
    """

    def __init__(
        self,
        id_: str,
        links: List[dict],
        title: str,
        updated: Optional[str],
    ):
        """Initialize the underlying FeedGenerator with feed-level metadata."""
        fg = FeedGenerator()
        fg.id(id_)
        fg.link(links)
        fg.title(title)
        fg.updated(updated)
        # Empty generator string suppresses the default generator tag content.
        fg.generator("")
        self._feed_generator = fg

    @classmethod
    def from_xml(cls, feed_xml: str):
        """Build a feed from raw XML: keep the 10 newest entries, added oldest-first."""
        parsed = fp.parse(feed_xml)
        head = parsed["feed"]
        entries = parsed["entries"]
        # Fall back to the feed link when no explicit id is present.
        id_ = head.get("id") or head["link"]
        self = cls(id_, head["links"], head["title"], head.get("updated"))
        # Pick whichever timestamp key the first entry provides.
        # NOTE(review): raises IndexError on a feed with zero entries — confirm
        # whether empty upstream feeds can occur.
        key, *_ = [k for k in ("published_parsed", "updated_parsed") if k in entries[0]]
        entries = sorted(entries, key=lambda x: x[key], reverse=True)[:10]
        for entry in sorted(entries, key=lambda x: x[key]):
            self.transform_and_add(entry)
        return self

    @classmethod
    def from_url(cls, feed_url: str):
        """Fetch the feed XML over HTTP and parse it."""
        return cls.from_xml(requests.get(feed_url).text)

    @classmethod
    def from_upstream(cls):
        """Build the feed from the subclass's canonical feed_url."""
        # cls.feed_url is read as a class attribute here; subclasses are
        # expected to define it at class level.
        return cls.from_url(cls.feed_url)

    @property
    @abstractmethod
    def feed_url(self) -> str:
        """Canonical upstream URL of the source feed (subclass-provided)."""
        pass

    def add_entry(
        self,
        title: str,
        link: str,
        updated: str,
        image_link: str,
        mimetype: str = "image/gif",
        content: Optional[str] = None,
    ) -> None:
        """Append one entry to the generated feed; the link doubles as the id."""
        fe = self._feed_generator.add_entry()
        fe.id(link)
        fe.updated(updated)
        fe.title(title)
        fe.link([{"rel": "alternate", "type": mimetype, "href": image_link}])
        if content:
            fe.content(content)

    @abstractmethod
    def transform_and_add(self, entry: dict) -> None:
        """Convert one parsed upstream entry and add it via add_entry()."""
        pass

    def to_string(self) -> str:
        """Serialize the feed as a pretty-printed Atom XML string."""
        return self._feed_generator.atom_str(pretty=True).decode("utf-8")

    def to_file(self, path: str) -> None:
        """Write the Atom XML to path, creating parent directories as needed."""
        dirname, basename = os.path.split(path)
        os.makedirs(dirname, exist_ok=True)
        with open(path, "w") as w:
            w.write(self.to_string())
| [
"os.makedirs",
"feedparser.parse",
"requests.get",
"os.path.split",
"feedgen.feed.FeedGenerator"
] | [((328, 343), 'feedgen.feed.FeedGenerator', 'FeedGenerator', ([], {}), '()\n', (341, 343), False, 'from feedgen.feed import FeedGenerator\n'), ((570, 588), 'feedparser.parse', 'fp.parse', (['feed_xml'], {}), '(feed_xml)\n', (578, 588), True, 'import feedparser as fp\n'), ((2066, 2085), 'os.path.split', 'os.path.split', (['path'], {}), '(path)\n', (2079, 2085), False, 'import os\n'), ((2094, 2129), 'os.makedirs', 'os.makedirs', (['dirname'], {'exist_ok': '(True)'}), '(dirname, exist_ok=True)\n', (2105, 2129), False, 'import os\n'), ((1145, 1167), 'requests.get', 'requests.get', (['feed_url'], {}), '(feed_url)\n', (1157, 1167), False, 'import requests\n')] |
#!/usr/bin/env python3
import argparse
import os
import shutil
import re
from builder import Builder
from database_deployer import FlywayDatabaseDeployer
from token_fetcher import TokenFetcher
from web_deployer import WebDeployer
from web_static_deployer import WebStaticDeployer
from util import extract_zipfile, get_directories_in_directory, load_json_file, load_config, pretty_string_to_bool
def build(options):
    """Run a project build.

    Loads the pydeployer config and the project config, fetches build tokens,
    stamps the build number/version into the project config, and hands off to
    Builder. Silently skips the build when no project config could be loaded.

    options -- argparse namespace with .config_file, .project_config,
               .build_number and .build_version.
    """
    config = load_config(options.config_file)
    tokens = TokenFetcher(config).fetch_build_tokens()
    #TODO: Why are we loading the config here vs passing in the config file path?
    project_config = load_config(options.project_config)
    # Guard BEFORE mutating: the previous version assigned build_number /
    # build_version first, which raised TypeError when load_config returned
    # None instead of skipping the build as the trailing guard intended.
    if not project_config:
        return
    project_config["build_number"] = options.build_number
    project_config["build_version"] = options.build_version
    builder = Builder(project_config, tokens)
    builder.build()
def get_project_name(zipfile_name, metadata):
    """Derive the lower-cased project name.

    Preference order: explicit "name" in the package metadata, then the
    prefix of a versioned package filename (name-X.Y.Z[-alpha.N].pydist),
    then everything before the first dot of the filename.
    """
    if "name" in metadata:
        return metadata["name"].lower()
    versioned = re.match(r"(.*)-\d+\.\d+\.\d+(-alpha.\d+)?\.pydist", zipfile_name)
    if versioned:
        # Versioned filename: the part before the version is the name.
        return versioned.group(1).lower()
    # Fallback: take the filename up to the first dot.
    return zipfile_name.split(".")[0].lower()
def deploy(options):
    """Deploy a .pydist package: extract it to a staging directory, resolve
    tokens, then deploy each component directory (database / web / web-static)
    to its configured output path. The staging directory is removed at the end.
    """
    #Load the pydeployer config
    config = load_config(options.config_file)
    zipfile_name = os.path.basename(options.deploy_file)
    staging_dir_prefix = zipfile_name.split(".pydist")[0].lower().replace(".", "-") # TODO: only allow one dot for now
    staging_dir = os.path.join(os.getcwd(), staging_dir_prefix + "_pkg")
    # NOTE(review): os.makedirs fails if a stale staging dir from a previous
    # aborted deploy still exists — confirm whether that is intended.
    os.makedirs(staging_dir)
    # Extract the zip file
    print("Deploy: Extracting zipfile {zipfile} to {staging_dir}".format(
        zipfile=options.deploy_file,
        staging_dir=staging_dir
    ))
    extract_zipfile(options.deploy_file, staging_dir)
    # Load up the metadata file
    metadata_file = os.path.join(staging_dir, "metadata.json")
    if os.path.isfile(metadata_file):
        metadata = load_json_file(metadata_file)
    else:
        raise Exception("Unable to load package metadata!")
    project_name = get_project_name(zipfile_name, metadata)
    #Fetch the tokens
    # if there was a token file specified as a parameter use the tokens from there
    # otherwise use the token service
    if getattr(options, "tokens_file", None):
        tokens = load_json_file(options.tokens_file)
    else:
        tokens = TokenFetcher(config).fetch_deploy_tokens(metadata["uuid"])
    # Populate build tokens in tokens file if they exist
    build_tokens_file = os.path.join(staging_dir, "build_tokens.json")
    if os.path.isfile(build_tokens_file):
        build_tokens = load_json_file(build_tokens_file)
        # Build tokens take precedence over deploy tokens of the same name.
        tokens.update(build_tokens)
    # Run through each of the projects in the zip
    # Have a config file or just use folder names?
    directories = get_directories_in_directory(staging_dir)
    print("Deploy: Parsing directories {0}".format(directories))
    # Store the dictionary for the output paths for each different target type
    # (key None holds the default output path).
    target_outputs = {}
    # If output path is defined in the config, use it as the default output directory
    if "output_path" in config:
        target_outputs[None] = config["output_path"]
    # If targets have been specified in the config, load them
    if "targets" in config:
        target_outputs.update(config["targets"])

    def get_output_path_for_target(target):
        # Resolve a target-specific output path, falling back to the default.
        if target in target_outputs:
            return target_outputs[target]
        if None in target_outputs:
            return target_outputs[None]
        raise Exception("No output path defined for target {target}".format(target=target))

    for directory in directories:
        if directory == "database":
            print("Deploy: Starting deploying database.")
            project_config = metadata.get("database", {})
            scripts_directory = os.path.join(staging_dir, directory, project_config.pop("scriptDirectory", "scripts"))
            db_config = create_database_config(tokens, scripts_directory)
            deployer = FlywayDatabaseDeployer(db_config)
            # TODO: Do some sort of error handling? Otherwise we have no idea if database deploy was successful or not
            deployer.deploy()
            print("Deploy: Ended deploying database.")
        elif directory == "web":
            project_config = metadata.get("web", {})
            project_directory = os.path.join(staging_dir, directory)
            deploy_dir = os.path.join(get_output_path_for_target(project_config.get("target", directory)), project_name)
            deployer = WebDeployer(dict())
            deployer.deploy(project_directory, deploy_dir, tokens, project_name, project_config)
            print("Deploy: Ended deploying web.")
        elif directory == "web-static":
            project_config = metadata.get("web-static", {})
            project_directory = os.path.join(staging_dir, directory)
            deploy_as_root = pretty_string_to_bool(tokens.get("deploy_as_root", "false"))
            if deploy_as_root:
                #If Deploy as Root is set, then we just deploy directly to the output directory
                deploy_dir = get_output_path_for_target(project_config.get("target", directory))
            else:
                deploy_dir = os.path.join(get_output_path_for_target(project_config.get("target", directory)), project_name)
            deployer = WebStaticDeployer(dict())
            # Deploying as root must not wipe the shared output directory.
            deployer.deploy(project_directory, deploy_dir, tokens, project_name, project_config,
                            delete_root_dir=not deploy_as_root)
    # delete staging directory once done
    shutil.rmtree(staging_dir)
def create_database_config(tokens, scripts_directory):
    """Assemble the database deployer configuration from token values.

    Deploy-specific credentials (database_deploy_user / password) override
    the generic ones when present and truthy. Connection details must exist
    in the tokens or a KeyError is raised.
    """
    user = tokens.get("database_deploy_user") or tokens["database_user"]
    password = tokens.get("database_deploy_password") or tokens["database_password"]
    return {
        "user": user,
        "password": password,
        "host": tokens["database_host"],
        "port": tokens["database_port"],
        "schema": tokens["database_schema"],
        "scripts_directory": scripts_directory,
    }
if __name__ == "__main__":
    # Command-line entry point: parse arguments and dispatch to build()/deploy().
    parser = argparse.ArgumentParser()
    parser.add_argument("command", nargs="?", default="build",
                        help="Command to execute: build, deploy. default: build")
    parser.add_argument("deploy_file", nargs="?", default=None,
                        help="File to deploy. Required if deploy is specified as command")
    parser.add_argument("-b", "--build-number", dest="build_number", default="0")
    parser.add_argument("-v", "--version", dest="build_version", default="0.0.1")
    parser.add_argument("-c", "--config-file", dest="config_file", default="/opt/pydeployer/conf/config.yaml",
                        help="Location of the pydeployer configuration file")
    parser.add_argument("-p", "--project-config", dest="project_config", default="config.yaml",
                        help="Location of the project's config file. default: config.yaml")
    parser.add_argument("-d", "--project-directory", dest="project_directory", default=None)
    parser.add_argument("-t", "--tokens-file", dest="tokens_file", default=None,
                        help="Path to the file containing the deployment tokens")
    parser.add_argument("-u", "--build-tokens-file", dest="build_tokens_file", default=None,
                        help="Path to the file containing the build tokens")
    args = parser.parse_args()
    if args.command == "build":
        build(args)
    elif args.command == "deploy":
        # deploy requires a package file argument.
        if not args.deploy_file:
            print("Deploy file is required.")
        else:
            deploy(args)
    else:
        print("Unknown command: " + args.command)
"""
- Create a installation/deployment package
-Output of the compile / build
-Run the build
-Copy build output to the resulting zipfile
-Skeleton config file
-Database scripts to update
- Application to perform the deployment
- Stop the currently running application
- Copy the build output to the destination
- Create any needed config files
- Deploy the database
- Start the currently running application
- Web Application to store the configuration
- Stores the configuration for the application
- Database connection info
- API Keys
- Any environment dependent config
- API Endpoints to fetch the configuration keys
-Certificate authentication?
- UI to add projects and their environment config
-- Running the build
- Need to define the build steps / how to actually do the build
- Different types of projects
-Web project that needs to download dependencies / perform post processing
-.NET Projects that need to be built
"""
| [
"builder.Builder",
"os.makedirs",
"argparse.ArgumentParser",
"re.match",
"os.path.join",
"os.getcwd",
"os.path.isfile",
"token_fetcher.TokenFetcher",
"os.path.basename",
"database_deployer.FlywayDatabaseDeployer",
"shutil.rmtree",
"util.get_directories_in_directory",
"util.load_json_file",
... | [((432, 464), 'util.load_config', 'load_config', (['options.config_file'], {}), '(options.config_file)\n', (443, 464), False, 'from util import extract_zipfile, get_directories_in_directory, load_json_file, load_config, pretty_string_to_bool\n'), ((623, 658), 'util.load_config', 'load_config', (['options.project_config'], {}), '(options.project_config)\n', (634, 658), False, 'from util import extract_zipfile, get_directories_in_directory, load_json_file, load_config, pretty_string_to_bool\n'), ((1003, 1075), 're.match', 're.match', (['"""(.*)-\\\\d+\\\\.\\\\d+\\\\.\\\\d+(-alpha.\\\\d+)?\\\\.pydist"""', 'zipfile_name'], {}), "('(.*)-\\\\d+\\\\.\\\\d+\\\\.\\\\d+(-alpha.\\\\d+)?\\\\.pydist', zipfile_name)\n", (1011, 1075), False, 'import re\n'), ((1386, 1418), 'util.load_config', 'load_config', (['options.config_file'], {}), '(options.config_file)\n', (1397, 1418), False, 'from util import extract_zipfile, get_directories_in_directory, load_json_file, load_config, pretty_string_to_bool\n'), ((1439, 1476), 'os.path.basename', 'os.path.basename', (['options.deploy_file'], {}), '(options.deploy_file)\n', (1455, 1476), False, 'import os\n'), ((1675, 1699), 'os.makedirs', 'os.makedirs', (['staging_dir'], {}), '(staging_dir)\n', (1686, 1699), False, 'import os\n'), ((1882, 1931), 'util.extract_zipfile', 'extract_zipfile', (['options.deploy_file', 'staging_dir'], {}), '(options.deploy_file, staging_dir)\n', (1897, 1931), False, 'from util import extract_zipfile, get_directories_in_directory, load_json_file, load_config, pretty_string_to_bool\n'), ((1985, 2027), 'os.path.join', 'os.path.join', (['staging_dir', '"""metadata.json"""'], {}), "(staging_dir, 'metadata.json')\n", (1997, 2027), False, 'import os\n'), ((2035, 2064), 'os.path.isfile', 'os.path.isfile', (['metadata_file'], {}), '(metadata_file)\n', (2049, 2064), False, 'import os\n'), ((2657, 2703), 'os.path.join', 'os.path.join', (['staging_dir', '"""build_tokens.json"""'], {}), "(staging_dir, 
'build_tokens.json')\n", (2669, 2703), False, 'import os\n'), ((2711, 2744), 'os.path.isfile', 'os.path.isfile', (['build_tokens_file'], {}), '(build_tokens_file)\n', (2725, 2744), False, 'import os\n'), ((2962, 3003), 'util.get_directories_in_directory', 'get_directories_in_directory', (['staging_dir'], {}), '(staging_dir)\n', (2990, 3003), False, 'from util import extract_zipfile, get_directories_in_directory, load_json_file, load_config, pretty_string_to_bool\n'), ((5774, 5800), 'shutil.rmtree', 'shutil.rmtree', (['staging_dir'], {}), '(staging_dir)\n', (5787, 5800), False, 'import shutil\n'), ((6283, 6308), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (6306, 6308), False, 'import argparse\n'), ((818, 849), 'builder.Builder', 'Builder', (['project_config', 'tokens'], {}), '(project_config, tokens)\n', (825, 849), False, 'from builder import Builder\n'), ((1629, 1640), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1638, 1640), False, 'import os\n'), ((2085, 2114), 'util.load_json_file', 'load_json_file', (['metadata_file'], {}), '(metadata_file)\n', (2099, 2114), False, 'from util import extract_zipfile, get_directories_in_directory, load_json_file, load_config, pretty_string_to_bool\n'), ((2453, 2488), 'util.load_json_file', 'load_json_file', (['options.tokens_file'], {}), '(options.tokens_file)\n', (2467, 2488), False, 'from util import extract_zipfile, get_directories_in_directory, load_json_file, load_config, pretty_string_to_bool\n'), ((2769, 2802), 'util.load_json_file', 'load_json_file', (['build_tokens_file'], {}), '(build_tokens_file)\n', (2783, 2802), False, 'from util import extract_zipfile, get_directories_in_directory, load_json_file, load_config, pretty_string_to_bool\n'), ((478, 498), 'token_fetcher.TokenFetcher', 'TokenFetcher', (['config'], {}), '(config)\n', (490, 498), False, 'from token_fetcher import TokenFetcher\n'), ((4181, 4214), 'database_deployer.FlywayDatabaseDeployer', 'FlywayDatabaseDeployer', (['db_config'], 
{}), '(db_config)\n', (4203, 4214), False, 'from database_deployer import FlywayDatabaseDeployer\n'), ((2516, 2536), 'token_fetcher.TokenFetcher', 'TokenFetcher', (['config'], {}), '(config)\n', (2528, 2536), False, 'from token_fetcher import TokenFetcher\n'), ((4539, 4575), 'os.path.join', 'os.path.join', (['staging_dir', 'directory'], {}), '(staging_dir, directory)\n', (4551, 4575), False, 'import os\n'), ((5022, 5058), 'os.path.join', 'os.path.join', (['staging_dir', 'directory'], {}), '(staging_dir, directory)\n', (5034, 5058), False, 'import os\n')] |
# This file contains fairly exhaustive tests of almost all the methods
# supported by the Python `str` type, and tests that `untrusted.string` type:
# * correctly supports the same methods
# * accepts `str` and/or `untrusted.string` arguments interchangeably
# * never returns `str` or any iterable of `str`, only an
# appropriate `untrusted.*` type.
# Also tests that subclassed instances of untrusted.string work
import untrusted
from sys import stderr
import html
class customstring(untrusted.string):
    """Subclass of untrusted.string used to verify that subclass instances
    behave like the base class throughout the tests below."""
    pass
def same(a, b):
    """Strict comparison helper for the tests below.

    Returns True only when a and b have exactly the same type (subclasses are
    deliberately NOT considered the same) and hold equal values; any
    untrusted.string wrapper is unwrapped to its raw value before comparing.
    """
    # `is not` on the type objects is the idiomatic exact-type check.
    if type(a) is not type(b):
        return False
    if isinstance(a, untrusted.string):
        a = a.value
    if isinstance(b, untrusted.string):
        b = b.value
    # Equal types established above, so value equality decides the result.
    return a == b
# Test the test
assert same("cat", "cat")
assert not same("cat", "dog")
assert same(untrusted.string("cat"), untrusted.string("cat"))
assert not same(untrusted.string("cat"), untrusted.string("dog"))
assert not same(untrusted.string("cat"), "cat")
assert not same("cat", untrusted.string("cat"))
assert not same("cat", None)
assert not same(untrusted.string("cat"), None)
assert not same(untrusted.string("cat"), customstring("cat"))
assert same(None, None)
# Test an untrusted.string is never None!
try:
_ = untrusted.string(None)
raise AssertionError
except TypeError:
pass
# Test an untrusted.string doesn't print!
try:
print(untrusted.string("Hello"))
raise AssertionError
except TypeError:
pass # expected!
# Test the subclassed string doesn't print!
try:
print(customstring("Hello"))
raise AssertionError
except TypeError:
pass # expected!
# Test equality checks still work
assert "cat" == untrusted.string("cat")
assert untrusted.string("cat") == "cat"
assert "cat" == customstring("cat")
assert customstring("cat") == "cat"
assert customstring("cat") == untrusted.string("cat")
assert untrusted.string("cat") == customstring("cat")
# Test hashable with set membership
assert "cat" in [untrusted.string("cat"), untrusted.string("dog")]
assert untrusted.string("cat") in [untrusted.string("cat"), untrusted.string("dog")]
assert untrusted.string("cat") in ["cat", "dog"]
# Test hashable with sorted
it = iter(sorted(["cat", "dog", "aligator", "zebra", "mouse"]))
assert same("aligator", next(it))
assert same("cat", next(it))
assert same("dog", next(it))
assert same("mouse", next(it))
assert same("zebra", next(it))
it = iter(sorted([untrusted.string("cat"),
untrusted.string("dog"),
untrusted.string("aligator"),
untrusted.string("zebra"),
untrusted.string("mouse")]))
assert same(untrusted.string("aligator"), next(it))
assert same(untrusted.string("cat"), next(it))
assert same(untrusted.string("dog"), next(it))
assert same(untrusted.string("mouse"), next(it))
assert same(untrusted.string("zebra"), next(it))
it = iter(sorted(untrusted.sequence(["cat", "dog", "aligator", "zebra", "mouse"])))
assert same(untrusted.string("aligator"), next(it))
assert same(untrusted.string("cat"), next(it))
assert same(untrusted.string("dog"), next(it))
assert same(untrusted.string("mouse"), next(it))
assert same(untrusted.string("zebra"), next(it))
# container iteration
for i in "cat":
assert i in ("c", "a", "t")
for i in untrusted.string("cat"):
assert i in ("c", "a", "t")
assert same(i, untrusted.string("c")) or same(i, untrusted.string("a")) or same(i, untrusted.string("t"))
# "Strings implement all of the common sequence operations"
# https://docs.python.org/3.4/library/stdtypes.html#typesseq-common
# membership: x in s
assert "a" in "cat"
assert "a" in untrusted.string("cat")
assert untrusted.string("a") in untrusted.string("cat")
assert not ("b" in "cat")
assert not ("b" in untrusted.string("cat"))
assert not (untrusted.string("b") in untrusted.string("cat"))
assert "cat" in "dogcatmouse"
assert "cat" in untrusted.string("dogcatmouse")
assert untrusted.string("cat") in untrusted.string("dogcatmouse")
assert customstring("a") in untrusted.string("cat")
assert untrusted.string("a") in customstring("a")
# membership: x not in s
assert "b" not in "cat"
assert "b" not in untrusted.string("cat")
assert untrusted.string("b") not in untrusted.string("cat")
assert not ("a" not in "cat")
assert not ("a" not in untrusted.string("cat"))
assert not (untrusted.string("a") not in untrusted.string("cat"))
assert customstring("b") not in untrusted.string("cat")
# concatenation: s + t
# --- sequence-protocol behaviour: +, *, indexing, slicing, len/min/max,
# index/count, join, reversed, iteration ---
# NOTE(review): same() and customstring() are defined earlier in this file;
# from their use below, same() appears to compare value AND concrete type
# (it distinguishes str / untrusted.string / customstring) — confirm against
# its definition.
assert same("cat"+"dog", "catdog")
assert same(untrusted.string("cat") + "dog", untrusted.string("catdog"))
assert same("cat" + untrusted.string("dog"), untrusted.string("catdog"))
assert same(untrusted.string("cat") + untrusted.string("dog"), untrusted.string("catdog"))
# concatenation with subclasses - becomes left-most class
assert same(untrusted.string("a") + customstring("b"), untrusted.string("ab"))
assert same(customstring("a") + untrusted.string("b"), customstring("ab"))
# s * n or n * s - "equivalent to adding s to itself n times"
assert same(3*"cat", "catcatcat")
assert same(3*untrusted.string("cat"), untrusted.string("catcatcat"))
assert same(3*customstring("cat"), customstring("catcatcat"))
assert same("cat"*3, "catcatcat")
assert same(untrusted.string("cat")*3, untrusted.string("catcatcat"))
assert same(customstring("cat")*3, customstring("catcatcat"))
# multiplying by zero yields an empty string of the same class
assert same(0*"cat", "")
assert same(0*untrusted.string("cat"), untrusted.string(""))
assert same("cat"*0, "")
assert same(untrusted.string("cat")*0, untrusted.string(""))
# s[i] - item at index i
assert same("cat"[1], "a")
assert same(untrusted.string("cat")[1], untrusted.string("a"))
assert same("cat"[-1], "t")
assert same(untrusted.string("cat")[-1], untrusted.string("t"))
# out-of-range indexing must raise IndexError for both classes
try:
    _ = "cat"[4]
    raise AssertionError
except IndexError:
    pass # expected!
try:
    _ = untrusted.string("cat")[4]
    raise AssertionError
except IndexError:
    pass # expected!
# s[i:j:k] - slice i to j with step k
assert same("dogcatmouse"[3:6], "cat")
assert same(untrusted.string("dogcatmouse")[3:6], untrusted.string("cat"))
assert same(customstring("dogcatmouse")[3:6], customstring("cat"))
assert same("dogcatmouse"[3:6:2], "ct")
assert same(untrusted.string("dogcatmouse")[3:6:2], untrusted.string("ct"))
assert same(customstring("dogcatmouse")[3:6:2], customstring("ct"))
# len(s)
assert len("cat") == 3
assert len(untrusted.string("cat")) == 3
# min(s) smallest item of s
assert same(min("cat"), "a")
assert same(min(untrusted.string("cat")), untrusted.string("a"))
# max(s) largest item of s
assert same(max("cat"), "t")
assert same(max(untrusted.string("cat")), untrusted.string("t"))
# s.index(x[, i[, j]])
# "index of the first occurrence of x in s
# (at or after index i and before index j)"
assert "cat".index("a") == 1
assert untrusted.string("cat").index("a") == 1
assert "dogcatmouse".index("cat") == 3
assert untrusted.string("dogcatmouse").index("cat") == 3
assert untrusted.string("dogcatmouse").index(untrusted.string("cat")) == 3
# s.count(x) - occurrences of x in s
assert "cat".count("a") == 1
assert untrusted.string("cat").count("a") == 1
assert untrusted.string("cat").count(untrusted.string("a")) == 1
assert "cataclasm".count("a") == 3
assert untrusted.string("cataclasm").count("a") == 3
assert untrusted.string("cataclasm").count(untrusted.string("a")) == 3
assert "cat attack".count("at") == 2
assert untrusted.string("cat attack").count("at") == 2
assert untrusted.string("cat attack").count(untrusted.string("at")) == 2
# x.join(y)
assert same(''.join([]), "")
assert same(untrusted.string('').join([]), untrusted.string(""))
assert same(''.join("cat"), "cat")
assert same(untrusted.string('').join("cat"), untrusted.string("cat"))
assert same(untrusted.string('').join(untrusted.string("cat")), untrusted.string("cat"))
assert same(','.join(["cat", "dog", "mouse"]), "cat,dog,mouse")
assert same(untrusted.string(',').join(["cat", "dog", "mouse"]), untrusted.string("cat,dog,mouse"))
assert same(untrusted.string(',').join([untrusted.string("cat"), untrusted.string("dog"), untrusted.string("mouse")]), untrusted.string("cat,dog,mouse"))
# sorry, str('').join(untrusted.string(...)) won't work
# but let's make sure we get an exception
# to be certain that an untrusted.string doesn't ever leak into a normal str
try:
    _ = ''.join(untrusted.string("hello"))
    raise AssertionError
except TypeError:
    pass # expected
try:
    _ = ''.join(customstring("hello"))
    raise AssertionError
except TypeError:
    pass # expected
# reversed(x) - reversal preserves the string class via join
assert same(''.join(reversed("cat")), "tac")
assert same(untrusted.string('').join(reversed(untrusted.string("cat"))), untrusted.string("tac"))
# iteration - each character keeps the container's class
for i in "cat":
    assert same(i, "c") or same(i, "a") or same(i, "t")
for i in untrusted.string("cat"):
    assert same(i, untrusted.string("c")) or same(i, untrusted.string("a")) or same(i, untrusted.string("t"))
# --- str method coverage, first half: capitalize .. find, plus
# format/format_map taint propagation ---
# Each method is checked on plain str and on untrusted.string, with the
# expectation that untrusted.string mirrors str behaviour but returns
# untrusted.string results.
# string methods
# https://docs.python.org/3.4/library/stdtypes.html#string-methods
# str.capitalize()
assert same("cAt".capitalize(), "Cat")
assert same(untrusted.string("cAt").capitalize(), untrusted.string("Cat"))
# str.casefold()
assert same("Catß".casefold(), "catss")
assert same(untrusted.string("Catß").casefold(), untrusted.string("catss"))
# str.center(width[, fillchar])
assert same("cat".center(7), "  cat  ")
assert same(untrusted.string("cat").center(7), untrusted.string("  cat  "))
assert same("cat".center(7, "-"), "--cat--")
assert same(untrusted.string("cat").center(7, "-"), untrusted.string("--cat--"))
assert same(untrusted.string("cat").center(7, untrusted.string("-")), untrusted.string("--cat--"))
# str.count(sub[, start[, end]])
assert "dogcatmousecat".count("cat", 0, 3) == 0
assert "dogcatmousecat".count("cat", 3, 6) == 1
assert "dogcatmousecat".count("cat", 3) == 2
assert untrusted.string("dogcatmousecat").count("cat", 0, 3) == 0
assert untrusted.string("dogcatmousecat").count("cat", 3, 6) == 1
assert untrusted.string("dogcatmousecat").count("cat", 3) == 2
assert untrusted.string("dogcatmousecat").count(untrusted.string("cat"), 0, 3) == 0
assert untrusted.string("dogcatmousecat").count(untrusted.string("cat"), 3, 6) == 1
assert untrusted.string("dogcatmousecat").count(untrusted.string("cat"), 3) == 2
# str.encode
# disabled on purpose for untrusted.string!!!
assert same("cat".encode("ascii"), b"cat")
try:
    _ = untrusted.string("cat").encode("ascii")
    raise AssertionError
except TypeError:
    pass # expected!
# str.endswith(suffix[, start[, end]])
assert "catdogmouse".endswith("mouse")
assert untrusted.string("catdogmouse").endswith("mouse")
assert untrusted.string("catdogmouse").endswith(untrusted.string("mouse"))
assert not "catdogmouse".endswith("cat")
assert not untrusted.string("catdogmouse").endswith("cat")
assert not untrusted.string("catdogmouse").endswith(untrusted.string("cat"))
assert "catdogmouse".endswith("dog", 0, 6)
assert untrusted.string("catdogmouse").endswith("dog", 0, 6)
assert untrusted.string("catdogmouse").endswith(untrusted.string("dog"), 0, 6)
assert not "catdogmouse".endswith("dog", 4)
assert not untrusted.string("catdogmouse").endswith("dog", 4)
assert not untrusted.string("catdogmouse").endswith(untrusted.string("dog"), 4)
# str.expandtabs(tabsize=8)
assert same("\tHello\tworld!".expandtabs(), "        Hello   world!")
assert same(untrusted.string("\tHello\tworld!").expandtabs(), untrusted.string("        Hello   world!"))
# str.find(sub[, start[, end]])
assert "dogcatmouse".find("cat") == 3
assert untrusted.string("dogcatmouse").find("cat") == 3
assert untrusted.string("dogcatmouse").find(untrusted.string("cat")) == 3
assert "dogcatmouse".find("cat", 4) == -1
assert untrusted.string("dogcatmouse").find("cat", 4) == -1
assert untrusted.string("dogcatmouse").find(untrusted.string("cat"), 4) == -1
# str.format(*args, **kwargs)
# with numeric argument:
assert same(
    "Hello {0}, UserID: {1}".format("Sarah", 123),
    "Hello Sarah, UserID: 123"
)
assert same(
    untrusted.string("Hello {0}, UserID: {1}").format("Sarah", 123),
    untrusted.string("Hello Sarah, UserID: 123")
)
assert same(
    untrusted.string("Hello {0}, UserID: {1}").format(untrusted.string("Sarah"), 123),
    untrusted.string("Hello Sarah, UserID: 123")
)
# ensure untrusted.string never leaks into a str...
try:
    _ = "Hello {0}, UserID: {1}".format(untrusted.string("Sarah"), 123),
    raise AssertionError
except TypeError:
    pass # expected!
# with named arguments:
assert same(
    "Hello {name}, UserID: {uid}".format(name="Sarah", uid=123),
    "Hello Sarah, UserID: 123"
)
assert same(
    untrusted.string("Hello {name}, UserID: {uid}").format(name="Sarah", uid=123),
    untrusted.string("Hello Sarah, UserID: 123")
)
assert same(
    untrusted.string("Hello {name}, UserID: {uid}").format(name=untrusted.string("Sarah"), uid=123),
    untrusted.string("Hello Sarah, UserID: 123")
)
# str.format_map(mapping)
assert same(
    "Hello {name}, UserID: {uid}".format_map({"name": "Sarah", "uid": 123}),
    "Hello Sarah, UserID: 123"
)
assert same(
    untrusted.string("Hello {name}, UserID: {uid}").format_map({"name": "Sarah", "uid": 123}),
    untrusted.string("Hello Sarah, UserID: 123")
)
assert same(
    untrusted.string("Hello {name}, UserID: {uid}").format_map({"name": untrusted.string("Sarah"), "uid": "123"}),
    untrusted.string("Hello Sarah, UserID: 123")
)
# --- format_map with untrusted.mapping containers ---
# An untrusted.string may be formatted from an untrusted.mapping whose KEYS
# are plain str; a mapping whose keys are themselves untrusted must be
# rejected with TypeError, and a plain str must never accept an
# untrusted mapping at all.
# advanced! format_map with an untrusted.mapping!!
myUntrustedDict = untrusted.mapping({'name': 'Sarah', "uid": "123"})
assert same(
    untrusted.string("Hello {name}, UserID: {uid}").format_map(myUntrustedDict),
    untrusted.string("Hello Sarah, UserID: 123")
)
# An untrusted mapping with untrusted keys is not allowed to format a string
# This is by design!
myUntrustedDict = untrusted.mappingOf(untrusted.string, untrusted.string)({'name': 'Sarah', "uid": "123"})
try:
    assert same(
        untrusted.string("Hello {name}, UserID: {uid}").format_map(myUntrustedDict),
        untrusted.string("Hello Sarah, UserID: 123")
    )
    # BUG FIX: was `raise AssrtionError` (typo) — if format_map wrongly
    # succeeded, the test raised a confusing NameError instead of
    # AssertionError.
    raise AssertionError
except TypeError:
    pass # expected
# ensure untrusted.mapping never leaks into a str...
try:
    # (dropped a stray trailing comma that silently wrapped `_` in a 1-tuple;
    # format_map raises TypeError before the tuple was ever built, so
    # behaviour is unchanged)
    _ = "Hello {name}, UserID: {uid}".format_map(myUntrustedDict)
    raise AssertionError
except TypeError:
    pass # expected!
# --- str.index error paths, the is*() predicates, ljust/lower/lstrip,
# and str.partition ---
# str.index(sub[, start[, end]])
# "Like find(), but raise ValueError when the substring is not found."
assert "dogcatmouse".index("cat") == 3
assert untrusted.string("dogcatmouse").index("cat") == 3
assert untrusted.string("dogcatmouse").index(untrusted.string("cat")) == 3
# a missing substring must raise ValueError for str and untrusted.string alike
try:
    _ = "dogcatmouse".index("tiger")
    raise AssertionError
except ValueError:
    pass # expected
try:
    _ = untrusted.string("dogcatmouse").index("tiger")
    raise AssertionError
except ValueError:
    pass # expected
try:
    _ = untrusted.string("dogcatmouse").index(untrusted.string("tiger"))
    raise AssertionError
except ValueError:
    pass # expected
try:
    _ = "dogcatmouse".index("cat", 4)
    raise AssertionError
except ValueError:
    pass # expected
try:
    _ = untrusted.string("dogcatmouse").index("cat", 4)
    raise AssertionError
except ValueError:
    pass # expected
try:
    _ = untrusted.string("dogcatmouse").index(untrusted.string("cat"), 4)
    raise AssertionError
except ValueError:
    pass # expected
# the boolean is*() predicates must return plain bools matching str behaviour
# str.isalnum()
assert "cat".isalnum()
assert untrusted.string("cat").isalnum()
assert not "£123".isalnum()
assert not untrusted.string("£123").isalnum()
# str.isalpha()
assert "cat".isalpha()
assert untrusted.string("cat").isalpha()
assert not "123".isalpha()
assert not untrusted.string("123").isalpha()
# str.isdecimal()
assert "123".isdecimal()
assert untrusted.string("123").isdecimal()
assert not "cat".isdecimal()
assert not untrusted.string("cat").isdecimal()
# str.isdigit()
assert "2²".isdigit()
assert untrusted.string("2²").isdigit()
# str.isidentifier()
assert "hello".isidentifier()
assert untrusted.string("hello").isidentifier()
assert not "123".isidentifier()
assert not untrusted.string("123").isidentifier()
# str.islower()
assert "hello".islower()
assert untrusted.string("hello").islower()
assert not "Hello".islower()
assert not untrusted.string("Hello").islower()
# str.isnumeric()
assert "123".isnumeric()
assert untrusted.string("123").isnumeric()
assert not "hello".isnumeric()
assert not untrusted.string("hello").isnumeric()
# str.isprintable()
assert "123".isprintable()
assert untrusted.string("123").isprintable()
assert not "\01".isprintable()
assert not untrusted.string("\01").isprintable()
# str.isspace()
assert " \t\r\n".isspace()
assert untrusted.string(" \t\r\n").isspace()
assert not "cat".isspace()
assert not untrusted.string("cat").isspace()
# str.istitle()
assert "Hello World".istitle()
assert untrusted.string("Hello World").istitle()
assert not "hello world".istitle()
assert not untrusted.string("hello world").istitle()
# str.isupper()
assert "CAT".isupper()
assert untrusted.string("CAT").isupper()
assert not "cat".isupper()
assert not untrusted.string("cat").isupper()
# str.join(iterable) - done
# str.ljust(width[, fillchar])
assert same("CAT".ljust(8, "-"), "CAT-----")
assert same(untrusted.string("CAT").ljust(8, "-"), untrusted.string("CAT-----"))
# str.lower()
assert same("Cat".lower(), "cat")
assert same(untrusted.string("Cat").lower(), untrusted.string("cat"))
# str.lstrip([chars])
assert same("  cat".lstrip(), "cat")
assert same(untrusted.string("  cat".lstrip()), untrusted.string("cat"))
assert same("  cat".lstrip(" ca"), "t")
assert same(untrusted.string("  cat").lstrip(" ca"), untrusted.string("t"))
assert same(untrusted.string("  cat").lstrip(untrusted.string(" ca")), untrusted.string("t"))
assert same(untrusted.string("  cat").lstrip(customstring(" ca")), untrusted.string("t"))
# str.partition(sep)
# partition always returns a 3-tuple; all three parts must keep the class
# of the string being partitioned, not the class of the separator
# no result
parts = "cat,dog,mouse".partition("X")
a, b, c = parts
assert same(a, "cat,dog,mouse")
assert same(b, "")
assert same(c, "")
parts = untrusted.string("cat,dog,mouse").partition("X")
a, b, c = parts
assert same(a, untrusted.string("cat,dog,mouse"))
assert same(b, untrusted.string(""))
assert same(c, untrusted.string(""))
parts = untrusted.string("cat,dog,mouse").partition(untrusted.string("X"))
a, b, c = parts
assert same(a, untrusted.string("cat,dog,mouse"))
assert same(b, untrusted.string(""))
assert same(c, untrusted.string(""))
parts = customstring("cat,dog,mouse").partition(untrusted.string("X"))
a, b, c = parts
assert same(a, customstring("cat,dog,mouse"))
assert same(b, customstring(""))
assert same(c, customstring(""))
parts = untrusted.string("cat,dog,mouse").partition(customstring("X"))
a, b, c = parts
assert same(a, untrusted.string("cat,dog,mouse"))
assert same(b, untrusted.string(""))
assert same(c, untrusted.string(""))
# result
parts = "cat,dog,mouse".partition(",")
a, b, c = parts
assert same(a, "cat")
assert same(b, ",")
assert same(c, "dog,mouse")
parts = untrusted.string("cat,dog,mouse").partition(",")
a, b, c = parts
assert same(a, untrusted.string("cat"))
assert same(b, untrusted.string(","))
assert same(c, untrusted.string("dog,mouse"))
parts = untrusted.string("cat,dog,mouse").partition(untrusted.string(","))
a, b, c = parts
assert same(a, untrusted.string("cat"))
assert same(b, untrusted.string(","))
assert same(c, untrusted.string("dog,mouse"))
parts = customstring("cat,dog,mouse").partition(untrusted.string(","))
a, b, c = parts
assert same(a, customstring("cat"))
assert same(b, customstring(","))
assert same(c, customstring("dog,mouse"))
parts = untrusted.string("cat,dog,mouse").partition(customstring(","))
a, b, c = parts
assert same(a, untrusted.string("cat"))
assert same(b, untrusted.string(","))
assert same(c, untrusted.string("dog,mouse"))
# --- str.replace, rfind, rindex error paths, and str.rpartition ---
# str.replace(old, new[, count])
assert same("cat,dog,hat".replace("at", "ave"), "cave,dog,have")
assert same(untrusted.string("cat,dog,hat").replace("at", "ave"), untrusted.string("cave,dog,have"))
assert same(untrusted.string("cat,dog,hat").replace(untrusted.string("at"), untrusted.string("ave")), untrusted.string("cave,dog,have"))
# str.rfind(sub[, start[, end]])
assert "dogcathat".rfind("at") == 7
assert untrusted.string("dogcathat").rfind("at") == 7
assert untrusted.string("dogcathat").rfind(untrusted.string("at")) == 7
assert "dogcathat".rfind("mouse") == -1
assert untrusted.string("mouse").rfind("at") == -1
assert untrusted.string("mouse").rfind(untrusted.string("at")) == -1
# str.rindex(sub[, start[, end]])
# Like rfind() but raises ValueError when the substring sub is not found.
try:
    _ = "dogcatmouse".rindex("tiger")
    raise AssertionError
except ValueError:
    pass # expected
try:
    _ = untrusted.string("dogcatmouse").rindex("tiger")
    raise AssertionError
except ValueError:
    pass # expected
try:
    _ = untrusted.string("dogcatmouse").rindex(untrusted.string("tiger"))
    raise AssertionError
except ValueError:
    pass # expected
try:
    _ = untrusted.string("dogcatmouse").rindex(customstring("tiger"))
    raise AssertionError
except ValueError:
    pass # expected
# str.rpartition(sep)
# like partition, but when the separator is absent the VALUE lands in the
# third slot (str semantics); all three parts keep the subject's class
# no result
parts = "cat,dog,mouse".rpartition("X")
a, b, c = parts
assert same(a, "")
assert same(b, "")
assert same(c, "cat,dog,mouse")
parts = untrusted.string("cat,dog,mouse").rpartition("X")
a, b, c = parts
assert same(a, untrusted.string(""))
assert same(b, untrusted.string(""))
assert same(c, untrusted.string("cat,dog,mouse"))
parts = untrusted.string("cat,dog,mouse").rpartition(untrusted.string("X"))
a, b, c = parts
assert same(a, untrusted.string(""))
assert same(b, untrusted.string(""))
assert same(c, untrusted.string("cat,dog,mouse"))
parts = customstring("cat,dog,mouse").rpartition(untrusted.string("X"))
a, b, c = parts
assert same(a, customstring(""))
assert same(b, customstring(""))
assert same(c, customstring("cat,dog,mouse"))
parts = untrusted.string("cat,dog,mouse").rpartition(customstring("X"))
a, b, c = parts
assert same(a, untrusted.string(""))
assert same(b, untrusted.string(""))
assert same(c, untrusted.string("cat,dog,mouse"))
# result
parts = "cat,dog,mouse".rpartition(",")
a, b, c = parts
assert same(a, "cat,dog")
assert same(b, ",")
assert same(c, "mouse")
parts = untrusted.string("cat,dog,mouse").rpartition(",")
a, b, c = parts
assert same(a, untrusted.string("cat,dog"))
assert same(b, untrusted.string(","))
assert same(c, untrusted.string("mouse"))
parts = untrusted.string("cat,dog,mouse").rpartition(untrusted.string(","))
a, b, c = parts
assert same(a, untrusted.string("cat,dog"))
assert same(b, untrusted.string(","))
assert same(c, untrusted.string("mouse"))
parts = customstring("cat,dog,mouse").rpartition(untrusted.string(","))
a, b, c = parts
assert same(a, customstring("cat,dog"))
assert same(b, customstring(","))
assert same(c, customstring("mouse"))
parts = untrusted.string("cat,dog,mouse").rpartition(customstring(","))
a, b, c = parts
assert same(a, untrusted.string("cat,dog"))
assert same(b, untrusted.string(","))
assert same(c, untrusted.string("mouse"))
# str.rsplit(sep=None, maxsplit=-1)
parts = "a,b,c,d".rsplit(",", maxsplit=2)
rest,c,d = parts
assert same(rest, "a,b")
assert same(c, "c")
assert same(d, "d")
parts = untrusted.string("a,b,c,d").rsplit(",", maxsplit=2)
rest,c,d = parts
assert same(rest, untrusted.string("a,b"))
assert same(c, untrusted.string("c"))
assert same(d, untrusted.string("d"))
parts = untrusted.string("a,b,c,d").rsplit(untrusted.string(","), maxsplit=2)
rest,c,d = parts
assert same(rest, untrusted.string("a,b"))
assert same(c, untrusted.string("c"))
assert same(d, untrusted.string("d"))
# str.rstrip([chars])
assert same("cat ".rstrip(), "cat")
assert same(untrusted.string("cat ".rstrip()), untrusted.string("cat"))
assert same("cat ".rstrip(" ta"), "c")
assert same(untrusted.string("cat ").rstrip(" ta"), untrusted.string("c"))
assert same(untrusted.string("cat ").rstrip(untrusted.string(" ta")), untrusted.string("c"))
assert same(untrusted.string("cat ").rstrip(customstring(" ta")), untrusted.string("c"))
# str.split(sep=None, maxsplit=-1)
parts = "a,b,c,d".split(",", maxsplit=2)
a,b,rest = parts
assert same(a, "a")
assert same(b, "b")
assert same(rest, "c,d")
parts = untrusted.string("a,b,c,d").split(",", maxsplit=2)
a,b,rest = parts
assert same(a, untrusted.string("a"))
assert same(b, untrusted.string("b"))
assert same(rest, untrusted.string("c,d"))
parts = untrusted.string("a,b,c,d").split(untrusted.string(","), maxsplit=2)
a,b,rest = parts
assert same(a, untrusted.string("a"))
assert same(b, untrusted.string("b"))
assert same(rest, untrusted.string("c,d"))
parts = customstring("a,b,c,d").split(",", maxsplit=2)
a,b,rest = parts
assert same(a, customstring("a"))
assert same(b, customstring("b"))
assert same(rest, customstring("c,d"))
parts = customstring("a,b,c,d").split(untrusted.string(","), maxsplit=2)
a,b,rest = parts
assert same(a, customstring("a"))
assert same(b, customstring("b"))
assert same(rest, customstring("c,d"))
# str.strip([chars])
assert same(" cat ".strip(), "cat")
assert same(untrusted.string(" cat ".strip()), untrusted.string("cat"))
assert same(" cat ".strip(" ct"), "a")
assert same(untrusted.string(" cat ").strip(" ct"), untrusted.string("a"))
assert same(untrusted.string(" cat ").strip(untrusted.string(" ct")), untrusted.string("a"))
assert same(untrusted.string(" cat ").strip(customstring(" ct")), untrusted.string("a"))
# str.swapcase()
assert same("Cat".swapcase(), "cAT")
assert same(untrusted.string("Cat").swapcase(), untrusted.string("cAT"))
assert same(customstring("Cat").swapcase(), customstring("cAT"))
# str.title()
assert same("hello world".title(), "Hello World")
assert same(untrusted.string("hello world").title(), untrusted.string("Hello World"))
# str.upper()
assert same("hello world".upper(), "HELLO WORLD")
assert same(untrusted.string("hello world").upper(), untrusted.string("HELLO WORLD"))
# str.zfill(width)
assert same("42".zfill(5), "00042")
assert same(untrusted.string("42").zfill(5), untrusted.string("00042"))
assert same("-42".zfill(5), "-0042")
assert same(untrusted.string("-42").zfill(5), untrusted.string("-0042"))
# TODO str.translate - not impleemnted
# TODO str.maketrans - not implemented
# hashable: a set of strings
parts = set(["cat", "dog", "tiger"])
assert "cat" in parts
parts = set([untrusted.string("cat"), untrusted.string("dog"), untrusted.string("tiger")])
assert "cat" in parts
assert untrusted.string("cat") in parts
assert customstring("cat") in parts
# --- %-style formatting and the escape() / "/" escaping operator ---
# %-style format also with a number
assert same("Hello %s aged %d" % ("Grace", 101), "Hello Grace aged 101")
assert same(untrusted.string("Hello %s aged %d") % ("Grace", 101), untrusted.string("Hello Grace aged 101"))
assert same(untrusted.string("Hello %s aged %d") % (untrusted.string("Grace"), 101), untrusted.string("Hello Grace aged 101"))
# %-style dict format (rare) also with number
assert same("Hello %(name)s aged %(age)d" % {"name": "Grace", "age": 101}, "Hello Grace aged 101")
assert same(untrusted.string("Hello %(name)s aged %(age)d") % {"name": "Grace", "age": 101}, untrusted.string("Hello Grace aged 101"))
assert same(untrusted.string("Hello %(name)s aged %(age)d") % {"name": untrusted.string("Grace"), "age": 101}, untrusted.string("Hello Grace aged 101"))
# An untrusted mapping with untrusted keys is not allowed to format a string
# This is by design!
try:
    _ = same(untrusted.string("Hello %(name)s aged %(age)d") % {untrusted.string("name"): untrusted.string("Grace"), "age": 101}, untrusted.string("Hello Grace aged 101"))
    # BUG FIX: was `raise AssrtionError` (typo) — on a wrongly-successful
    # format this raised a confusing NameError instead of AssertionError.
    raise AssertionError
except TypeError:
    pass # expected
# escape examples
# html.escape is the escaping function; untrusted.string exposes it both as
# .escape(fn, ...) and via the `/` operator (optionally with args/kwargs).
before = "<b>\"Hello\"</b>"
after_qt = "&lt;b&gt;&quot;Hello&quot;&lt;/b&gt;"
after_unqt = "&lt;b&gt;\"Hello\"&lt;/b&gt;"
assert same(html.escape(before), after_qt)
assert same(untrusted.string(before).escape(html.escape), after_qt)
assert same(customstring(before).escape(html.escape), after_qt)
assert same(untrusted.string(before) / html.escape, after_qt)
assert same(customstring(before) / html.escape, after_qt)
assert same(html.escape(before, quote=False), after_unqt)
assert same(untrusted.string(before).escape(html.escape, quote=False), after_unqt)
assert same(customstring(before).escape(html.escape, quote=False), after_unqt)
assert same(untrusted.string(before) / (html.escape, [], {'quote': False}), after_unqt)
assert same(customstring(before) / (html.escape, [], {'quote': False}), after_unqt)
| [
"untrusted.sequence",
"untrusted.string",
"untrusted.mappingOf",
"untrusted.mapping",
"html.escape"
] | [((3267, 3290), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (3283, 3290), False, 'import untrusted\n'), ((8790, 8813), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (8806, 8813), False, 'import untrusted\n'), ((13504, 13554), 'untrusted.mapping', 'untrusted.mapping', (["{'name': 'Sarah', 'uid': '123'}"], {}), "({'name': 'Sarah', 'uid': '123'})\n", (13521, 13554), False, 'import untrusted\n'), ((845, 868), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (861, 868), False, 'import untrusted\n'), ((870, 893), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (886, 893), False, 'import untrusted\n'), ((1280, 1302), 'untrusted.string', 'untrusted.string', (['None'], {}), '(None)\n', (1296, 1302), False, 'import untrusted\n'), ((1705, 1728), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (1721, 1728), False, 'import untrusted\n'), ((1736, 1759), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (1752, 1759), False, 'import untrusted\n'), ((1873, 1896), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (1889, 1896), False, 'import untrusted\n'), ((1904, 1927), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (1920, 1927), False, 'import untrusted\n'), ((2064, 2087), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (2080, 2087), False, 'import untrusted\n'), ((2149, 2172), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (2165, 2172), False, 'import untrusted\n'), ((2623, 2651), 'untrusted.string', 'untrusted.string', (['"""aligator"""'], {}), "('aligator')\n", (2639, 2651), False, 'import untrusted\n'), ((2675, 2698), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (2691, 2698), False, 'import untrusted\n'), ((2722, 2745), 'untrusted.string', 'untrusted.string', (['"""dog"""'], {}), 
"('dog')\n", (2738, 2745), False, 'import untrusted\n'), ((2769, 2794), 'untrusted.string', 'untrusted.string', (['"""mouse"""'], {}), "('mouse')\n", (2785, 2794), False, 'import untrusted\n'), ((2818, 2843), 'untrusted.string', 'untrusted.string', (['"""zebra"""'], {}), "('zebra')\n", (2834, 2843), False, 'import untrusted\n'), ((2952, 2980), 'untrusted.string', 'untrusted.string', (['"""aligator"""'], {}), "('aligator')\n", (2968, 2980), False, 'import untrusted\n'), ((3004, 3027), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (3020, 3027), False, 'import untrusted\n'), ((3051, 3074), 'untrusted.string', 'untrusted.string', (['"""dog"""'], {}), "('dog')\n", (3067, 3074), False, 'import untrusted\n'), ((3098, 3123), 'untrusted.string', 'untrusted.string', (['"""mouse"""'], {}), "('mouse')\n", (3114, 3123), False, 'import untrusted\n'), ((3147, 3172), 'untrusted.string', 'untrusted.string', (['"""zebra"""'], {}), "('zebra')\n", (3163, 3172), False, 'import untrusted\n'), ((3621, 3644), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (3637, 3644), False, 'import untrusted\n'), ((3652, 3673), 'untrusted.string', 'untrusted.string', (['"""a"""'], {}), "('a')\n", (3668, 3673), False, 'import untrusted\n'), ((3677, 3700), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (3693, 3700), False, 'import untrusted\n'), ((3881, 3912), 'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (3897, 3912), False, 'import untrusted\n'), ((3920, 3943), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (3936, 3943), False, 'import untrusted\n'), ((3947, 3978), 'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (3963, 3978), False, 'import untrusted\n'), ((4008, 4031), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (4024, 4031), False, 'import untrusted\n'), ((4039, 4060), 
'untrusted.string', 'untrusted.string', (['"""a"""'], {}), "('a')\n", (4055, 4060), False, 'import untrusted\n'), ((4151, 4174), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (4167, 4174), False, 'import untrusted\n'), ((4182, 4203), 'untrusted.string', 'untrusted.string', (['"""b"""'], {}), "('b')\n", (4198, 4203), False, 'import untrusted\n'), ((4211, 4234), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (4227, 4234), False, 'import untrusted\n'), ((4413, 4436), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (4429, 4436), False, 'import untrusted\n'), ((4542, 4568), 'untrusted.string', 'untrusted.string', (['"""catdog"""'], {}), "('catdog')\n", (4558, 4568), False, 'import untrusted\n'), ((4615, 4641), 'untrusted.string', 'untrusted.string', (['"""catdog"""'], {}), "('catdog')\n", (4631, 4641), False, 'import untrusted\n'), ((4706, 4732), 'untrusted.string', 'untrusted.string', (['"""catdog"""'], {}), "('catdog')\n", (4722, 4732), False, 'import untrusted\n'), ((4848, 4870), 'untrusted.string', 'untrusted.string', (['"""ab"""'], {}), "('ab')\n", (4864, 4870), False, 'import untrusted\n'), ((5084, 5113), 'untrusted.string', 'untrusted.string', (['"""catcatcat"""'], {}), "('catcatcat')\n", (5100, 5113), False, 'import untrusted\n'), ((5250, 5279), 'untrusted.string', 'untrusted.string', (['"""catcatcat"""'], {}), "('catcatcat')\n", (5266, 5279), False, 'import untrusted\n'), ((5408, 5428), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (5424, 5428), False, 'import untrusted\n'), ((5494, 5514), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (5510, 5514), False, 'import untrusted\n'), ((5609, 5630), 'untrusted.string', 'untrusted.string', (['"""a"""'], {}), "('a')\n", (5625, 5630), False, 'import untrusted\n'), ((5702, 5723), 'untrusted.string', 'untrusted.string', (['"""t"""'], {}), "('t')\n", (5718, 5723), False, 'import untrusted\n'), ((6048, 
6071), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (6064, 6071), False, 'import untrusted\n'), ((6233, 6255), 'untrusted.string', 'untrusted.string', (['"""ct"""'], {}), "('ct')\n", (6249, 6255), False, 'import untrusted\n'), ((6502, 6523), 'untrusted.string', 'untrusted.string', (['"""a"""'], {}), "('a')\n", (6518, 6523), False, 'import untrusted\n'), ((6626, 6647), 'untrusted.string', 'untrusted.string', (['"""t"""'], {}), "('t')\n", (6642, 6647), False, 'import untrusted\n'), ((7601, 7621), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (7617, 7621), False, 'import untrusted\n'), ((7705, 7728), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (7721, 7728), False, 'import untrusted\n'), ((7794, 7817), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (7810, 7817), False, 'import untrusted\n'), ((7949, 7982), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (7965, 7982), False, 'import untrusted\n'), ((8103, 8136), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (8119, 8136), False, 'import untrusted\n'), ((8669, 8692), 'untrusted.string', 'untrusted.string', (['"""tac"""'], {}), "('tac')\n", (8685, 8692), False, 'import untrusted\n'), ((9121, 9144), 'untrusted.string', 'untrusted.string', (['"""Cat"""'], {}), "('Cat')\n", (9137, 9144), False, 'import untrusted\n'), ((9255, 9280), 'untrusted.string', 'untrusted.string', (['"""catss"""'], {}), "('catss')\n", (9271, 9280), False, 'import untrusted\n'), ((9401, 9428), 'untrusted.string', 'untrusted.string', (['""" cat """'], {}), "(' cat ')\n", (9417, 9428), False, 'import untrusted\n'), ((9527, 9554), 'untrusted.string', 'untrusted.string', (['"""--cat--"""'], {}), "('--cat--')\n", (9543, 9554), False, 'import untrusted\n'), ((9626, 9653), 'untrusted.string', 'untrusted.string', (['"""--cat--"""'], {}), "('--cat--')\n", (9642, 
9653), False, 'import untrusted\n'), ((10684, 10709), 'untrusted.string', 'untrusted.string', (['"""mouse"""'], {}), "('mouse')\n", (10700, 10709), False, 'import untrusted\n'), ((11042, 11065), 'untrusted.string', 'untrusted.string', (['"""dog"""'], {}), "('dog')\n", (11058, 11065), False, 'import untrusted\n'), ((11422, 11464), 'untrusted.string', 'untrusted.string', (['""" Hello world!"""'], {}), "(' Hello world!')\n", (11438, 11464), False, 'import untrusted\n'), ((12092, 12136), 'untrusted.string', 'untrusted.string', (['"""Hello Sarah, UserID: 123"""'], {}), "('Hello Sarah, UserID: 123')\n", (12108, 12136), False, 'import untrusted\n'), ((12244, 12288), 'untrusted.string', 'untrusted.string', (['"""Hello Sarah, UserID: 123"""'], {}), "('Hello Sarah, UserID: 123')\n", (12260, 12288), False, 'import untrusted\n'), ((12728, 12772), 'untrusted.string', 'untrusted.string', (['"""Hello Sarah, UserID: 123"""'], {}), "('Hello Sarah, UserID: 123')\n", (12744, 12772), False, 'import untrusted\n'), ((12894, 12938), 'untrusted.string', 'untrusted.string', (['"""Hello Sarah, UserID: 123"""'], {}), "('Hello Sarah, UserID: 123')\n", (12910, 12938), False, 'import untrusted\n'), ((13206, 13250), 'untrusted.string', 'untrusted.string', (['"""Hello Sarah, UserID: 123"""'], {}), "('Hello Sarah, UserID: 123')\n", (13222, 13250), False, 'import untrusted\n'), ((13386, 13430), 'untrusted.string', 'untrusted.string', (['"""Hello Sarah, UserID: 123"""'], {}), "('Hello Sarah, UserID: 123')\n", (13402, 13430), False, 'import untrusted\n'), ((13654, 13698), 'untrusted.string', 'untrusted.string', (['"""Hello Sarah, UserID: 123"""'], {}), "('Hello Sarah, UserID: 123')\n", (13670, 13698), False, 'import untrusted\n'), ((13819, 13874), 'untrusted.mappingOf', 'untrusted.mappingOf', (['untrusted.string', 'untrusted.string'], {}), '(untrusted.string, untrusted.string)\n', (13838, 13874), False, 'import untrusted\n'), ((17256, 17284), 'untrusted.string', 'untrusted.string', 
(['"""CAT-----"""'], {}), "('CAT-----')\n", (17272, 17284), False, 'import untrusted\n'), ((17380, 17403), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (17396, 17403), False, 'import untrusted\n'), ((17511, 17534), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (17527, 17534), False, 'import untrusted\n'), ((17627, 17648), 'untrusted.string', 'untrusted.string', (['"""t"""'], {}), "('t')\n", (17643, 17648), False, 'import untrusted\n'), ((17720, 17741), 'untrusted.string', 'untrusted.string', (['"""t"""'], {}), "('t')\n", (17736, 17741), False, 'import untrusted\n'), ((17809, 17830), 'untrusted.string', 'untrusted.string', (['"""t"""'], {}), "('t')\n", (17825, 17830), False, 'import untrusted\n'), ((18082, 18115), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (18098, 18115), False, 'import untrusted\n'), ((18132, 18152), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (18148, 18152), False, 'import untrusted\n'), ((18169, 18189), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (18185, 18189), False, 'import untrusted\n'), ((18244, 18265), 'untrusted.string', 'untrusted.string', (['"""X"""'], {}), "('X')\n", (18260, 18265), False, 'import untrusted\n'), ((18298, 18331), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (18314, 18331), False, 'import untrusted\n'), ((18348, 18368), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (18364, 18368), False, 'import untrusted\n'), ((18385, 18405), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (18401, 18405), False, 'import untrusted\n'), ((18456, 18477), 'untrusted.string', 'untrusted.string', (['"""X"""'], {}), "('X')\n", (18472, 18477), False, 'import untrusted\n'), ((18710, 18743), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (18726, 18743), 
False, 'import untrusted\n'), ((18760, 18780), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (18776, 18780), False, 'import untrusted\n'), ((18797, 18817), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (18813, 18817), False, 'import untrusted\n'), ((19043, 19066), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (19059, 19066), False, 'import untrusted\n'), ((19083, 19104), 'untrusted.string', 'untrusted.string', (['""","""'], {}), "(',')\n", (19099, 19104), False, 'import untrusted\n'), ((19121, 19150), 'untrusted.string', 'untrusted.string', (['"""dog,mouse"""'], {}), "('dog,mouse')\n", (19137, 19150), False, 'import untrusted\n'), ((19205, 19226), 'untrusted.string', 'untrusted.string', (['""","""'], {}), "(',')\n", (19221, 19226), False, 'import untrusted\n'), ((19259, 19282), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (19275, 19282), False, 'import untrusted\n'), ((19299, 19320), 'untrusted.string', 'untrusted.string', (['""","""'], {}), "(',')\n", (19315, 19320), False, 'import untrusted\n'), ((19337, 19366), 'untrusted.string', 'untrusted.string', (['"""dog,mouse"""'], {}), "('dog,mouse')\n", (19353, 19366), False, 'import untrusted\n'), ((19417, 19438), 'untrusted.string', 'untrusted.string', (['""","""'], {}), "(',')\n", (19433, 19438), False, 'import untrusted\n'), ((19671, 19694), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (19687, 19694), False, 'import untrusted\n'), ((19711, 19732), 'untrusted.string', 'untrusted.string', (['""","""'], {}), "(',')\n", (19727, 19732), False, 'import untrusted\n'), ((19749, 19778), 'untrusted.string', 'untrusted.string', (['"""dog,mouse"""'], {}), "('dog,mouse')\n", (19765, 19778), False, 'import untrusted\n'), ((19946, 19979), 'untrusted.string', 'untrusted.string', (['"""cave,dog,have"""'], {}), "('cave,dog,have')\n", (19962, 19979), False, 'import untrusted\n'), ((20083, 20116), 
'untrusted.string', 'untrusted.string', (['"""cave,dog,have"""'], {}), "('cave,dog,have')\n", (20099, 20116), False, 'import untrusted\n'), ((21354, 21374), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (21370, 21374), False, 'import untrusted\n'), ((21391, 21411), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (21407, 21411), False, 'import untrusted\n'), ((21428, 21461), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (21444, 21461), False, 'import untrusted\n'), ((21517, 21538), 'untrusted.string', 'untrusted.string', (['"""X"""'], {}), "('X')\n", (21533, 21538), False, 'import untrusted\n'), ((21571, 21591), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (21587, 21591), False, 'import untrusted\n'), ((21608, 21628), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (21624, 21628), False, 'import untrusted\n'), ((21645, 21678), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (21661, 21678), False, 'import untrusted\n'), ((21730, 21751), 'untrusted.string', 'untrusted.string', (['"""X"""'], {}), "('X')\n", (21746, 21751), False, 'import untrusted\n'), ((21985, 22005), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (22001, 22005), False, 'import untrusted\n'), ((22022, 22042), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (22038, 22042), False, 'import untrusted\n'), ((22059, 22092), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (22075, 22092), False, 'import untrusted\n'), ((22320, 22347), 'untrusted.string', 'untrusted.string', (['"""cat,dog"""'], {}), "('cat,dog')\n", (22336, 22347), False, 'import untrusted\n'), ((22364, 22385), 'untrusted.string', 'untrusted.string', (['""","""'], {}), "(',')\n", (22380, 22385), False, 'import untrusted\n'), ((22402, 22427), 'untrusted.string', 'untrusted.string', 
(['"""mouse"""'], {}), "('mouse')\n", (22418, 22427), False, 'import untrusted\n'), ((22483, 22504), 'untrusted.string', 'untrusted.string', (['""","""'], {}), "(',')\n", (22499, 22504), False, 'import untrusted\n'), ((22537, 22564), 'untrusted.string', 'untrusted.string', (['"""cat,dog"""'], {}), "('cat,dog')\n", (22553, 22564), False, 'import untrusted\n'), ((22581, 22602), 'untrusted.string', 'untrusted.string', (['""","""'], {}), "(',')\n", (22597, 22602), False, 'import untrusted\n'), ((22619, 22644), 'untrusted.string', 'untrusted.string', (['"""mouse"""'], {}), "('mouse')\n", (22635, 22644), False, 'import untrusted\n'), ((22696, 22717), 'untrusted.string', 'untrusted.string', (['""","""'], {}), "(',')\n", (22712, 22717), False, 'import untrusted\n'), ((22951, 22978), 'untrusted.string', 'untrusted.string', (['"""cat,dog"""'], {}), "('cat,dog')\n", (22967, 22978), False, 'import untrusted\n'), ((22995, 23016), 'untrusted.string', 'untrusted.string', (['""","""'], {}), "(',')\n", (23011, 23016), False, 'import untrusted\n'), ((23033, 23058), 'untrusted.string', 'untrusted.string', (['"""mouse"""'], {}), "('mouse')\n", (23049, 23058), False, 'import untrusted\n'), ((23320, 23343), 'untrusted.string', 'untrusted.string', (['"""a,b"""'], {}), "('a,b')\n", (23336, 23343), False, 'import untrusted\n'), ((23360, 23381), 'untrusted.string', 'untrusted.string', (['"""c"""'], {}), "('c')\n", (23376, 23381), False, 'import untrusted\n'), ((23398, 23419), 'untrusted.string', 'untrusted.string', (['"""d"""'], {}), "('d')\n", (23414, 23419), False, 'import untrusted\n'), ((23466, 23487), 'untrusted.string', 'untrusted.string', (['""","""'], {}), "(',')\n", (23482, 23487), False, 'import untrusted\n'), ((23536, 23559), 'untrusted.string', 'untrusted.string', (['"""a,b"""'], {}), "('a,b')\n", (23552, 23559), False, 'import untrusted\n'), ((23576, 23597), 'untrusted.string', 'untrusted.string', (['"""c"""'], {}), "('c')\n", (23592, 23597), False, 'import untrusted\n'), 
((23614, 23635), 'untrusted.string', 'untrusted.string', (['"""d"""'], {}), "('d')\n", (23630, 23635), False, 'import untrusted\n'), ((23744, 23767), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (23760, 23767), False, 'import untrusted\n'), ((23860, 23881), 'untrusted.string', 'untrusted.string', (['"""c"""'], {}), "('c')\n", (23876, 23881), False, 'import untrusted\n'), ((23953, 23974), 'untrusted.string', 'untrusted.string', (['"""c"""'], {}), "('c')\n", (23969, 23974), False, 'import untrusted\n'), ((24042, 24063), 'untrusted.string', 'untrusted.string', (['"""c"""'], {}), "('c')\n", (24058, 24063), False, 'import untrusted\n'), ((24319, 24340), 'untrusted.string', 'untrusted.string', (['"""a"""'], {}), "('a')\n", (24335, 24340), False, 'import untrusted\n'), ((24357, 24378), 'untrusted.string', 'untrusted.string', (['"""b"""'], {}), "('b')\n", (24373, 24378), False, 'import untrusted\n'), ((24398, 24421), 'untrusted.string', 'untrusted.string', (['"""c,d"""'], {}), "('c,d')\n", (24414, 24421), False, 'import untrusted\n'), ((24467, 24488), 'untrusted.string', 'untrusted.string', (['""","""'], {}), "(',')\n", (24483, 24488), False, 'import untrusted\n'), ((24534, 24555), 'untrusted.string', 'untrusted.string', (['"""a"""'], {}), "('a')\n", (24550, 24555), False, 'import untrusted\n'), ((24572, 24593), 'untrusted.string', 'untrusted.string', (['"""b"""'], {}), "('b')\n", (24588, 24593), False, 'import untrusted\n'), ((24613, 24636), 'untrusted.string', 'untrusted.string', (['"""c,d"""'], {}), "('c,d')\n", (24629, 24636), False, 'import untrusted\n'), ((24857, 24878), 'untrusted.string', 'untrusted.string', (['""","""'], {}), "(',')\n", (24873, 24878), False, 'import untrusted\n'), ((25122, 25145), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (25138, 25145), False, 'import untrusted\n'), ((25238, 25259), 'untrusted.string', 'untrusted.string', (['"""a"""'], {}), "('a')\n", (25254, 25259), False, 'import 
untrusted\n'), ((25331, 25352), 'untrusted.string', 'untrusted.string', (['"""a"""'], {}), "('a')\n", (25347, 25352), False, 'import untrusted\n'), ((25420, 25441), 'untrusted.string', 'untrusted.string', (['"""a"""'], {}), "('a')\n", (25436, 25441), False, 'import untrusted\n'), ((25546, 25569), 'untrusted.string', 'untrusted.string', (['"""cAT"""'], {}), "('cAT')\n", (25562, 25569), False, 'import untrusted\n'), ((25754, 25785), 'untrusted.string', 'untrusted.string', (['"""Hello World"""'], {}), "('Hello World')\n", (25770, 25785), False, 'import untrusted\n'), ((25905, 25936), 'untrusted.string', 'untrusted.string', (['"""HELLO WORLD"""'], {}), "('HELLO WORLD')\n", (25921, 25936), False, 'import untrusted\n'), ((26039, 26064), 'untrusted.string', 'untrusted.string', (['"""00042"""'], {}), "('00042')\n", (26055, 26064), False, 'import untrusted\n'), ((26149, 26174), 'untrusted.string', 'untrusted.string', (['"""-0042"""'], {}), "('-0042')\n", (26165, 26174), False, 'import untrusted\n'), ((26467, 26490), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (26483, 26490), False, 'import untrusted\n'), ((26714, 26754), 'untrusted.string', 'untrusted.string', (['"""Hello Grace aged 101"""'], {}), "('Hello Grace aged 101')\n", (26730, 26754), False, 'import untrusted\n'), ((26841, 26881), 'untrusted.string', 'untrusted.string', (['"""Hello Grace aged 101"""'], {}), "('Hello Grace aged 101')\n", (26857, 26881), False, 'import untrusted\n'), ((27127, 27167), 'untrusted.string', 'untrusted.string', (['"""Hello Grace aged 101"""'], {}), "('Hello Grace aged 101')\n", (27143, 27167), False, 'import untrusted\n'), ((27280, 27320), 'untrusted.string', 'untrusted.string', (['"""Hello Grace aged 101"""'], {}), "('Hello Grace aged 101')\n", (27296, 27320), False, 'import untrusted\n'), ((27818, 27837), 'html.escape', 'html.escape', (['before'], {}), '(before)\n', (27829, 27837), False, 'import html\n'), ((28115, 28147), 'html.escape', 'html.escape', 
(['before'], {'quote': '(False)'}), '(before, quote=False)\n', (28126, 28147), False, 'import html\n'), ((911, 934), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (927, 934), False, 'import untrusted\n'), ((936, 959), 'untrusted.string', 'untrusted.string', (['"""dog"""'], {}), "('dog')\n", (952, 959), False, 'import untrusted\n'), ((978, 1001), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (994, 1001), False, 'import untrusted\n'), ((1033, 1056), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (1049, 1056), False, 'import untrusted\n'), ((1104, 1127), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (1120, 1127), False, 'import untrusted\n'), ((1152, 1175), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (1168, 1175), False, 'import untrusted\n'), ((1414, 1439), 'untrusted.string', 'untrusted.string', (['"""Hello"""'], {}), "('Hello')\n", (1430, 1439), False, 'import untrusted\n'), ((2007, 2030), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (2023, 2030), False, 'import untrusted\n'), ((2032, 2055), 'untrusted.string', 'untrusted.string', (['"""dog"""'], {}), "('dog')\n", (2048, 2055), False, 'import untrusted\n'), ((2092, 2115), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (2108, 2115), False, 'import untrusted\n'), ((2117, 2140), 'untrusted.string', 'untrusted.string', (['"""dog"""'], {}), "('dog')\n", (2133, 2140), False, 'import untrusted\n'), ((2873, 2937), 'untrusted.sequence', 'untrusted.sequence', (["['cat', 'dog', 'aligator', 'zebra', 'mouse']"], {}), "(['cat', 'dog', 'aligator', 'zebra', 'mouse'])\n", (2891, 2937), False, 'import untrusted\n'), ((3747, 3770), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (3763, 3770), False, 'import untrusted\n'), ((3784, 3805), 'untrusted.string', 'untrusted.string', (['"""b"""'], {}), "('b')\n", (3800, 
3805), False, 'import untrusted\n'), ((3809, 3832), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (3825, 3832), False, 'import untrusted\n'), ((4289, 4312), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (4305, 4312), False, 'import untrusted\n'), ((4326, 4347), 'untrusted.string', 'untrusted.string', (['"""a"""'], {}), "('a')\n", (4342, 4347), False, 'import untrusted\n'), ((4355, 4378), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (4371, 4378), False, 'import untrusted\n'), ((4509, 4532), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (4525, 4532), False, 'import untrusted\n'), ((4590, 4613), 'untrusted.string', 'untrusted.string', (['"""dog"""'], {}), "('dog')\n", (4606, 4613), False, 'import untrusted\n'), ((4655, 4678), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (4671, 4678), False, 'import untrusted\n'), ((4681, 4704), 'untrusted.string', 'untrusted.string', (['"""dog"""'], {}), "('dog')\n", (4697, 4704), False, 'import untrusted\n'), ((4805, 4826), 'untrusted.string', 'untrusted.string', (['"""a"""'], {}), "('a')\n", (4821, 4826), False, 'import untrusted\n'), ((4904, 4925), 'untrusted.string', 'untrusted.string', (['"""b"""'], {}), "('b')\n", (4920, 4925), False, 'import untrusted\n'), ((5059, 5082), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (5075, 5082), False, 'import untrusted\n'), ((5223, 5246), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (5239, 5246), False, 'import untrusted\n'), ((5383, 5406), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (5399, 5406), False, 'import untrusted\n'), ((5467, 5490), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (5483, 5490), False, 'import untrusted\n'), ((5581, 5604), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (5597, 5604), False, 
'import untrusted\n'), ((5673, 5696), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (5689, 5696), False, 'import untrusted\n'), ((5827, 5850), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (5843, 5850), False, 'import untrusted\n'), ((6010, 6041), 'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (6026, 6041), False, 'import untrusted\n'), ((6193, 6224), 'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (6209, 6224), False, 'import untrusted\n'), ((6370, 6393), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (6386, 6393), False, 'import untrusted\n'), ((6476, 6499), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (6492, 6499), False, 'import untrusted\n'), ((6600, 6623), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (6616, 6623), False, 'import untrusted\n'), ((6979, 7002), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (6995, 7002), False, 'import untrusted\n'), ((7161, 7182), 'untrusted.string', 'untrusted.string', (['"""a"""'], {}), "('a')\n", (7177, 7182), False, 'import untrusted\n'), ((7321, 7342), 'untrusted.string', 'untrusted.string', (['"""a"""'], {}), "('a')\n", (7337, 7342), False, 'import untrusted\n'), ((7486, 7508), 'untrusted.string', 'untrusted.string', (['"""at"""'], {}), "('at')\n", (7502, 7508), False, 'import untrusted\n'), ((7768, 7791), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (7784, 7791), False, 'import untrusted\n'), ((8335, 8360), 'untrusted.string', 'untrusted.string', (['"""hello"""'], {}), "('hello')\n", (8351, 8360), False, 'import untrusted\n'), ((9602, 9623), 'untrusted.string', 'untrusted.string', (['"""-"""'], {}), "('-')\n", (9618, 9623), False, 'import untrusted\n'), ((10075, 10098), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", 
(10091, 10098), False, 'import untrusted\n'), ((10159, 10182), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (10175, 10182), False, 'import untrusted\n'), ((10243, 10266), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (10259, 10266), False, 'import untrusted\n'), ((10586, 10617), 'untrusted.string', 'untrusted.string', (['"""catdogmouse"""'], {}), "('catdogmouse')\n", (10602, 10617), False, 'import untrusted\n'), ((10643, 10674), 'untrusted.string', 'untrusted.string', (['"""catdogmouse"""'], {}), "('catdogmouse')\n", (10659, 10674), False, 'import untrusted\n'), ((10864, 10887), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (10880, 10887), False, 'import untrusted\n'), ((10940, 10971), 'untrusted.string', 'untrusted.string', (['"""catdogmouse"""'], {}), "('catdogmouse')\n", (10956, 10971), False, 'import untrusted\n'), ((11001, 11032), 'untrusted.string', 'untrusted.string', (['"""catdogmouse"""'], {}), "('catdogmouse')\n", (11017, 11032), False, 'import untrusted\n'), ((11232, 11255), 'untrusted.string', 'untrusted.string', (['"""dog"""'], {}), "('dog')\n", (11248, 11255), False, 'import untrusted\n'), ((11638, 11661), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (11654, 11661), False, 'import untrusted\n'), ((11815, 11838), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (11831, 11838), False, 'import untrusted\n'), ((12207, 12232), 'untrusted.string', 'untrusted.string', (['"""Sarah"""'], {}), "('Sarah')\n", (12223, 12232), False, 'import untrusted\n'), ((14024, 14068), 'untrusted.string', 'untrusted.string', (['"""Hello Sarah, UserID: 123"""'], {}), "('Hello Sarah, UserID: 123')\n", (14040, 14068), False, 'import untrusted\n'), ((14577, 14600), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (14593, 14600), False, 'import untrusted\n'), ((14891, 14916), 'untrusted.string', 'untrusted.string', 
(['"""tiger"""'], {}), "('tiger')\n", (14907, 14916), False, 'import untrusted\n'), ((15268, 15291), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (15284, 15291), False, 'import untrusted\n'), ((15408, 15431), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (15424, 15431), False, 'import untrusted\n'), ((15563, 15586), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (15579, 15586), False, 'import untrusted\n'), ((15720, 15743), 'untrusted.string', 'untrusted.string', (['"""123"""'], {}), "('123')\n", (15736, 15743), False, 'import untrusted\n'), ((15878, 15900), 'untrusted.string', 'untrusted.string', (['"""2²"""'], {}), "('2²')\n", (15894, 15900), False, 'import untrusted\n'), ((15970, 15995), 'untrusted.string', 'untrusted.string', (['"""hello"""'], {}), "('hello')\n", (15986, 15995), False, 'import untrusted\n'), ((16142, 16167), 'untrusted.string', 'untrusted.string', (['"""hello"""'], {}), "('hello')\n", (16158, 16167), False, 'import untrusted\n'), ((16305, 16328), 'untrusted.string', 'untrusted.string', (['"""123"""'], {}), "('123')\n", (16321, 16328), False, 'import untrusted\n'), ((16476, 16499), 'untrusted.string', 'untrusted.string', (['"""123"""'], {}), "('123')\n", (16492, 16499), False, 'import untrusted\n'), ((16648, 16678), 'untrusted.string', 'untrusted.string', (["' \\t\\r\\n'"], {}), "(' \\t\\r\\n')\n", (16664, 16678), False, 'import untrusted\n'), ((16816, 16847), 'untrusted.string', 'untrusted.string', (['"""Hello World"""'], {}), "('Hello World')\n", (16832, 16847), False, 'import untrusted\n'), ((16993, 17016), 'untrusted.string', 'untrusted.string', (['"""CAT"""'], {}), "('CAT')\n", (17009, 17016), False, 'import untrusted\n'), ((17694, 17717), 'untrusted.string', 'untrusted.string', (['""" ca"""'], {}), "(' ca')\n", (17710, 17717), False, 'import untrusted\n'), ((18002, 18035), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), 
"('cat,dog,mouse')\n", (18018, 18035), False, 'import untrusted\n'), ((18200, 18233), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (18216, 18233), False, 'import untrusted\n'), ((18616, 18649), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (18632, 18649), False, 'import untrusted\n'), ((18963, 18996), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (18979, 18996), False, 'import untrusted\n'), ((19161, 19194), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (19177, 19194), False, 'import untrusted\n'), ((19577, 19610), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (19593, 19610), False, 'import untrusted\n'), ((20033, 20055), 'untrusted.string', 'untrusted.string', (['"""at"""'], {}), "('at')\n", (20049, 20055), False, 'import untrusted\n'), ((20057, 20080), 'untrusted.string', 'untrusted.string', (['"""ave"""'], {}), "('ave')\n", (20073, 20080), False, 'import untrusted\n'), ((20285, 20307), 'untrusted.string', 'untrusted.string', (['"""at"""'], {}), "('at')\n", (20301, 20307), False, 'import untrusted\n'), ((20445, 20467), 'untrusted.string', 'untrusted.string', (['"""at"""'], {}), "('at')\n", (20461, 20467), False, 'import untrusted\n'), ((20871, 20896), 'untrusted.string', 'untrusted.string', (['"""tiger"""'], {}), "('tiger')\n", (20887, 20896), False, 'import untrusted\n'), ((21273, 21306), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (21289, 21306), False, 'import untrusted\n'), ((21472, 21505), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (21488, 21505), False, 'import untrusted\n'), ((21890, 21923), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (21906, 21923), False, 'import 
untrusted\n'), ((22239, 22272), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (22255, 22272), False, 'import untrusted\n'), ((22438, 22471), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (22454, 22471), False, 'import untrusted\n'), ((22856, 22889), 'untrusted.string', 'untrusted.string', (['"""cat,dog,mouse"""'], {}), "('cat,dog,mouse')\n", (22872, 22889), False, 'import untrusted\n'), ((23233, 23260), 'untrusted.string', 'untrusted.string', (['"""a,b,c,d"""'], {}), "('a,b,c,d')\n", (23249, 23260), False, 'import untrusted\n'), ((23431, 23458), 'untrusted.string', 'untrusted.string', (['"""a,b,c,d"""'], {}), "('a,b,c,d')\n", (23447, 23458), False, 'import untrusted\n'), ((23927, 23950), 'untrusted.string', 'untrusted.string', (['""" ta"""'], {}), "(' ta')\n", (23943, 23950), False, 'import untrusted\n'), ((24236, 24263), 'untrusted.string', 'untrusted.string', (['"""a,b,c,d"""'], {}), "('a,b,c,d')\n", (24252, 24263), False, 'import untrusted\n'), ((24433, 24460), 'untrusted.string', 'untrusted.string', (['"""a,b,c,d"""'], {}), "('a,b,c,d')\n", (24449, 24460), False, 'import untrusted\n'), ((25305, 25328), 'untrusted.string', 'untrusted.string', (['""" ct"""'], {}), "(' ct')\n", (25321, 25328), False, 'import untrusted\n'), ((26360, 26383), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (26376, 26383), False, 'import untrusted\n'), ((26385, 26408), 'untrusted.string', 'untrusted.string', (['"""dog"""'], {}), "('dog')\n", (26401, 26408), False, 'import untrusted\n'), ((26410, 26435), 'untrusted.string', 'untrusted.string', (['"""tiger"""'], {}), "('tiger')\n", (26426, 26435), False, 'import untrusted\n'), ((26659, 26695), 'untrusted.string', 'untrusted.string', (['"""Hello %s aged %d"""'], {}), "('Hello %s aged %d')\n", (26675, 26695), False, 'import untrusted\n'), ((26768, 26804), 'untrusted.string', 'untrusted.string', (['"""Hello %s aged 
%d"""'], {}), "('Hello %s aged %d')\n", (26784, 26804), False, 'import untrusted\n'), ((27046, 27093), 'untrusted.string', 'untrusted.string', (['"""Hello %(name)s aged %(age)d"""'], {}), "('Hello %(name)s aged %(age)d')\n", (27062, 27093), False, 'import untrusted\n'), ((27181, 27228), 'untrusted.string', 'untrusted.string', (['"""Hello %(name)s aged %(age)d"""'], {}), "('Hello %(name)s aged %(age)d')\n", (27197, 27228), False, 'import untrusted\n'), ((27557, 27597), 'untrusted.string', 'untrusted.string', (['"""Hello Grace aged 101"""'], {}), "('Hello Grace aged 101')\n", (27573, 27597), False, 'import untrusted\n'), ((27994, 28018), 'untrusted.string', 'untrusted.string', (['before'], {}), '(before)\n', (28010, 28018), False, 'import untrusted\n'), ((28336, 28360), 'untrusted.string', 'untrusted.string', (['before'], {}), '(before)\n', (28352, 28360), False, 'import untrusted\n'), ((2459, 2482), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (2475, 2482), False, 'import untrusted\n'), ((2488, 2511), 'untrusted.string', 'untrusted.string', (['"""dog"""'], {}), "('dog')\n", (2504, 2511), False, 'import untrusted\n'), ((2517, 2545), 'untrusted.string', 'untrusted.string', (['"""aligator"""'], {}), "('aligator')\n", (2533, 2545), False, 'import untrusted\n'), ((2551, 2576), 'untrusted.string', 'untrusted.string', (['"""zebra"""'], {}), "('zebra')\n", (2567, 2576), False, 'import untrusted\n'), ((2582, 2607), 'untrusted.string', 'untrusted.string', (['"""mouse"""'], {}), "('mouse')\n", (2598, 2607), False, 'import untrusted\n'), ((3343, 3364), 'untrusted.string', 'untrusted.string', (['"""c"""'], {}), "('c')\n", (3359, 3364), False, 'import untrusted\n'), ((3377, 3398), 'untrusted.string', 'untrusted.string', (['"""a"""'], {}), "('a')\n", (3393, 3398), False, 'import untrusted\n'), ((3411, 3432), 'untrusted.string', 'untrusted.string', (['"""t"""'], {}), "('t')\n", (3427, 3432), False, 'import untrusted\n'), ((6797, 6820), 
'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (6813, 6820), False, 'import untrusted\n'), ((6884, 6915), 'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (6900, 6915), False, 'import untrusted\n'), ((6941, 6972), 'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (6957, 6972), False, 'import untrusted\n'), ((7084, 7107), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (7100, 7107), False, 'import untrusted\n'), ((7131, 7154), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (7147, 7154), False, 'import untrusted\n'), ((7232, 7261), 'untrusted.string', 'untrusted.string', (['"""cataclasm"""'], {}), "('cataclasm')\n", (7248, 7261), False, 'import untrusted\n'), ((7285, 7314), 'untrusted.string', 'untrusted.string', (['"""cataclasm"""'], {}), "('cataclasm')\n", (7301, 7314), False, 'import untrusted\n'), ((7394, 7424), 'untrusted.string', 'untrusted.string', (['"""cat attack"""'], {}), "('cat attack')\n", (7410, 7424), False, 'import untrusted\n'), ((7449, 7479), 'untrusted.string', 'untrusted.string', (['"""cat attack"""'], {}), "('cat attack')\n", (7465, 7479), False, 'import untrusted\n'), ((7570, 7590), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (7586, 7590), False, 'import untrusted\n'), ((7671, 7691), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (7687, 7691), False, 'import untrusted\n'), ((7742, 7762), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (7758, 7762), False, 'import untrusted\n'), ((7896, 7917), 'untrusted.string', 'untrusted.string', (['""","""'], {}), "(',')\n", (7912, 7917), False, 'import untrusted\n'), ((7996, 8017), 'untrusted.string', 'untrusted.string', (['""","""'], {}), "(',')\n", (8012, 8017), False, 'import untrusted\n'), ((8024, 8047), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", 
(8040, 8047), False, 'import untrusted\n'), ((8049, 8072), 'untrusted.string', 'untrusted.string', (['"""dog"""'], {}), "('dog')\n", (8065, 8072), False, 'import untrusted\n'), ((8074, 8099), 'untrusted.string', 'untrusted.string', (['"""mouse"""'], {}), "('mouse')\n", (8090, 8099), False, 'import untrusted\n'), ((8607, 8627), 'untrusted.string', 'untrusted.string', (['""""""'], {}), "('')\n", (8623, 8627), False, 'import untrusted\n'), ((8642, 8665), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (8658, 8665), False, 'import untrusted\n'), ((8834, 8855), 'untrusted.string', 'untrusted.string', (['"""c"""'], {}), "('c')\n", (8850, 8855), False, 'import untrusted\n'), ((8868, 8889), 'untrusted.string', 'untrusted.string', (['"""a"""'], {}), "('a')\n", (8884, 8889), False, 'import untrusted\n'), ((8902, 8923), 'untrusted.string', 'untrusted.string', (['"""t"""'], {}), "('t')\n", (8918, 8923), False, 'import untrusted\n'), ((9083, 9106), 'untrusted.string', 'untrusted.string', (['"""cAt"""'], {}), "('cAt')\n", (9099, 9106), False, 'import untrusted\n'), ((9217, 9241), 'untrusted.string', 'untrusted.string', (['"""Catß"""'], {}), "('Catß')\n", (9233, 9241), False, 'import untrusted\n'), ((9366, 9389), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (9382, 9389), False, 'import untrusted\n'), ((9487, 9510), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (9503, 9510), False, 'import untrusted\n'), ((9568, 9591), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (9584, 9591), False, 'import untrusted\n'), ((9838, 9872), 'untrusted.string', 'untrusted.string', (['"""dogcatmousecat"""'], {}), "('dogcatmousecat')\n", (9854, 9872), False, 'import untrusted\n'), ((9904, 9938), 'untrusted.string', 'untrusted.string', (['"""dogcatmousecat"""'], {}), "('dogcatmousecat')\n", (9920, 9938), False, 'import untrusted\n'), ((9970, 10004), 'untrusted.string', 'untrusted.string', 
(['"""dogcatmousecat"""'], {}), "('dogcatmousecat')\n", (9986, 10004), False, 'import untrusted\n'), ((10034, 10068), 'untrusted.string', 'untrusted.string', (['"""dogcatmousecat"""'], {}), "('dogcatmousecat')\n", (10050, 10068), False, 'import untrusted\n'), ((10118, 10152), 'untrusted.string', 'untrusted.string', (['"""dogcatmousecat"""'], {}), "('dogcatmousecat')\n", (10134, 10152), False, 'import untrusted\n'), ((10202, 10236), 'untrusted.string', 'untrusted.string', (['"""dogcatmousecat"""'], {}), "('dogcatmousecat')\n", (10218, 10236), False, 'import untrusted\n'), ((10395, 10418), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (10411, 10418), False, 'import untrusted\n'), ((10764, 10795), 'untrusted.string', 'untrusted.string', (['"""catdogmouse"""'], {}), "('catdogmouse')\n", (10780, 10795), False, 'import untrusted\n'), ((10823, 10854), 'untrusted.string', 'untrusted.string', (['"""catdogmouse"""'], {}), "('catdogmouse')\n", (10839, 10854), False, 'import untrusted\n'), ((11129, 11160), 'untrusted.string', 'untrusted.string', (['"""catdogmouse"""'], {}), "('catdogmouse')\n", (11145, 11160), False, 'import untrusted\n'), ((11191, 11222), 'untrusted.string', 'untrusted.string', (['"""catdogmouse"""'], {}), "('catdogmouse')\n", (11207, 11222), False, 'import untrusted\n'), ((11372, 11407), 'untrusted.string', 'untrusted.string', (['"""\tHello\tworld!"""'], {}), "('\\tHello\\tworld!')\n", (11388, 11407), False, 'import untrusted\n'), ((11545, 11576), 'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (11561, 11576), False, 'import untrusted\n'), ((11601, 11632), 'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (11617, 11632), False, 'import untrusted\n'), ((11718, 11749), 'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (11734, 11749), False, 'import untrusted\n'), ((11778, 11809), 'untrusted.string', 
'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (11794, 11809), False, 'import untrusted\n'), ((12023, 12065), 'untrusted.string', 'untrusted.string', (['"""Hello {0}, UserID: {1}"""'], {}), "('Hello {0}, UserID: {1}')\n", (12039, 12065), False, 'import untrusted\n'), ((12157, 12199), 'untrusted.string', 'untrusted.string', (['"""Hello {0}, UserID: {1}"""'], {}), "('Hello {0}, UserID: {1}')\n", (12173, 12199), False, 'import untrusted\n'), ((12391, 12416), 'untrusted.string', 'untrusted.string', (['"""Sarah"""'], {}), "('Sarah')\n", (12407, 12416), False, 'import untrusted\n'), ((12645, 12692), 'untrusted.string', 'untrusted.string', (['"""Hello {name}, UserID: {uid}"""'], {}), "('Hello {name}, UserID: {uid}')\n", (12661, 12692), False, 'import untrusted\n'), ((12793, 12840), 'untrusted.string', 'untrusted.string', (['"""Hello {name}, UserID: {uid}"""'], {}), "('Hello {name}, UserID: {uid}')\n", (12809, 12840), False, 'import untrusted\n'), ((12853, 12878), 'untrusted.string', 'untrusted.string', (['"""Sarah"""'], {}), "('Sarah')\n", (12869, 12878), False, 'import untrusted\n'), ((13111, 13158), 'untrusted.string', 'untrusted.string', (['"""Hello {name}, UserID: {uid}"""'], {}), "('Hello {name}, UserID: {uid}')\n", (13127, 13158), False, 'import untrusted\n'), ((13271, 13318), 'untrusted.string', 'untrusted.string', (['"""Hello {name}, UserID: {uid}"""'], {}), "('Hello {name}, UserID: {uid}')\n", (13287, 13318), False, 'import untrusted\n'), ((13339, 13364), 'untrusted.string', 'untrusted.string', (['"""Sarah"""'], {}), "('Sarah')\n", (13355, 13364), False, 'import untrusted\n'), ((13573, 13620), 'untrusted.string', 'untrusted.string', (['"""Hello {name}, UserID: {uid}"""'], {}), "('Hello {name}, UserID: {uid}')\n", (13589, 13620), False, 'import untrusted\n'), ((14482, 14513), 'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (14498, 14513), False, 'import untrusted\n'), ((14539, 14570), 
'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (14555, 14570), False, 'import untrusted\n'), ((14728, 14759), 'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (14744, 14759), False, 'import untrusted\n'), ((14853, 14884), 'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (14869, 14884), False, 'import untrusted\n'), ((15104, 15135), 'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (15120, 15135), False, 'import untrusted\n'), ((15230, 15261), 'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (15246, 15261), False, 'import untrusted\n'), ((15481, 15505), 'untrusted.string', 'untrusted.string', (['"""£123"""'], {}), "('£123')\n", (15497, 15505), False, 'import untrusted\n'), ((15635, 15658), 'untrusted.string', 'untrusted.string', (['"""123"""'], {}), "('123')\n", (15651, 15658), False, 'import untrusted\n'), ((15796, 15819), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (15812, 15819), False, 'import untrusted\n'), ((16054, 16077), 'untrusted.string', 'untrusted.string', (['"""123"""'], {}), "('123')\n", (16070, 16077), False, 'import untrusted\n'), ((16218, 16243), 'untrusted.string', 'untrusted.string', (['"""Hello"""'], {}), "('Hello')\n", (16234, 16243), False, 'import untrusted\n'), ((16383, 16408), 'untrusted.string', 'untrusted.string', (['"""hello"""'], {}), "('hello')\n", (16399, 16408), False, 'import untrusted\n'), ((16556, 16580), 'untrusted.string', 'untrusted.string', (['"""\x01"""'], {}), "('\\x01')\n", (16572, 16580), False, 'import untrusted\n'), ((16727, 16750), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (16743, 16750), False, 'import untrusted\n'), ((16904, 16935), 'untrusted.string', 'untrusted.string', (['"""hello world"""'], {}), "('hello world')\n", (16920, 16935), False, 'import 
untrusted\n'), ((17065, 17088), 'untrusted.string', 'untrusted.string', (['"""cat"""'], {}), "('cat')\n", (17081, 17088), False, 'import untrusted\n'), ((17217, 17240), 'untrusted.string', 'untrusted.string', (['"""CAT"""'], {}), "('CAT')\n", (17233, 17240), False, 'import untrusted\n'), ((17347, 17370), 'untrusted.string', 'untrusted.string', (['"""Cat"""'], {}), "('Cat')\n", (17363, 17370), False, 'import untrusted\n'), ((17587, 17611), 'untrusted.string', 'untrusted.string', (['""" cat"""'], {}), "(' cat')\n", (17603, 17611), False, 'import untrusted\n'), ((17662, 17686), 'untrusted.string', 'untrusted.string', (['""" cat"""'], {}), "(' cat')\n", (17678, 17686), False, 'import untrusted\n'), ((17755, 17779), 'untrusted.string', 'untrusted.string', (['""" cat"""'], {}), "(' cat')\n", (17771, 17779), False, 'import untrusted\n'), ((19892, 19923), 'untrusted.string', 'untrusted.string', (['"""cat,dog,hat"""'], {}), "('cat,dog,hat')\n", (19908, 19923), False, 'import untrusted\n'), ((19993, 20024), 'untrusted.string', 'untrusted.string', (['"""cat,dog,hat"""'], {}), "('cat,dog,hat')\n", (20009, 20024), False, 'import untrusted\n'), ((20195, 20224), 'untrusted.string', 'untrusted.string', (['"""dogcathat"""'], {}), "('dogcathat')\n", (20211, 20224), False, 'import untrusted\n'), ((20249, 20278), 'untrusted.string', 'untrusted.string', (['"""dogcathat"""'], {}), "('dogcathat')\n", (20265, 20278), False, 'import untrusted\n'), ((20362, 20387), 'untrusted.string', 'untrusted.string', (['"""mouse"""'], {}), "('mouse')\n", (20378, 20387), False, 'import untrusted\n'), ((20413, 20438), 'untrusted.string', 'untrusted.string', (['"""mouse"""'], {}), "('mouse')\n", (20429, 20438), False, 'import untrusted\n'), ((20706, 20737), 'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (20722, 20737), False, 'import untrusted\n'), ((20832, 20863), 'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (20848, 
20863), False, 'import untrusted\n'), ((20976, 21007), 'untrusted.string', 'untrusted.string', (['"""dogcatmouse"""'], {}), "('dogcatmouse')\n", (20992, 21007), False, 'import untrusted\n'), ((23820, 23844), 'untrusted.string', 'untrusted.string', (['"""cat """'], {}), "('cat ')\n", (23836, 23844), False, 'import untrusted\n'), ((23895, 23919), 'untrusted.string', 'untrusted.string', (['"""cat """'], {}), "('cat ')\n", (23911, 23919), False, 'import untrusted\n'), ((23988, 24012), 'untrusted.string', 'untrusted.string', (['"""cat """'], {}), "('cat ')\n", (24004, 24012), False, 'import untrusted\n'), ((25198, 25223), 'untrusted.string', 'untrusted.string', (['""" cat """'], {}), "(' cat ')\n", (25214, 25223), False, 'import untrusted\n'), ((25273, 25298), 'untrusted.string', 'untrusted.string', (['""" cat """'], {}), "(' cat ')\n", (25289, 25298), False, 'import untrusted\n'), ((25366, 25391), 'untrusted.string', 'untrusted.string', (['""" cat """'], {}), "(' cat ')\n", (25382, 25391), False, 'import untrusted\n'), ((25510, 25533), 'untrusted.string', 'untrusted.string', (['"""Cat"""'], {}), "('Cat')\n", (25526, 25533), False, 'import untrusted\n'), ((25713, 25744), 'untrusted.string', 'untrusted.string', (['"""hello world"""'], {}), "('hello world')\n", (25729, 25744), False, 'import untrusted\n'), ((25864, 25895), 'untrusted.string', 'untrusted.string', (['"""hello world"""'], {}), "('hello world')\n", (25880, 25895), False, 'import untrusted\n'), ((26006, 26028), 'untrusted.string', 'untrusted.string', (['"""42"""'], {}), "('42')\n", (26022, 26028), False, 'import untrusted\n'), ((26115, 26138), 'untrusted.string', 'untrusted.string', (['"""-42"""'], {}), "('-42')\n", (26131, 26138), False, 'import untrusted\n'), ((26808, 26833), 'untrusted.string', 'untrusted.string', (['"""Grace"""'], {}), "('Grace')\n", (26824, 26833), False, 'import untrusted\n'), ((27240, 27265), 'untrusted.string', 'untrusted.string', (['"""Grace"""'], {}), "('Grace')\n", (27256, 27265), 
False, 'import untrusted\n'), ((27440, 27487), 'untrusted.string', 'untrusted.string', (['"""Hello %(name)s aged %(age)d"""'], {}), "('Hello %(name)s aged %(age)d')\n", (27456, 27487), False, 'import untrusted\n'), ((27861, 27885), 'untrusted.string', 'untrusted.string', (['before'], {}), '(before)\n', (27877, 27885), False, 'import untrusted\n'), ((28173, 28197), 'untrusted.string', 'untrusted.string', (['before'], {}), '(before)\n', (28189, 28197), False, 'import untrusted\n'), ((13939, 13986), 'untrusted.string', 'untrusted.string', (['"""Hello {name}, UserID: {uid}"""'], {}), "('Hello {name}, UserID: {uid}')\n", (13955, 13986), False, 'import untrusted\n'), ((27491, 27515), 'untrusted.string', 'untrusted.string', (['"""name"""'], {}), "('name')\n", (27507, 27515), False, 'import untrusted\n'), ((27517, 27542), 'untrusted.string', 'untrusted.string', (['"""Grace"""'], {}), "('Grace')\n", (27533, 27542), False, 'import untrusted\n')] |
import os
import shutil
import tempfile
import mne
from meggie.mainwindow.preferences import PreferencesHandler
from meggie.experiment import initialize_new_experiment
from meggie.experiment import open_existing_experiment
def test_experiment_and_subject():
    """End-to-end round trip: create an experiment with two subjects,
    save it, reopen it, and verify the reloaded state matches."""
    with tempfile.TemporaryDirectory() as workspace:
        sample_root = mne.datasets.sample.data_path()
        raw_path = os.path.join(sample_root, 'MEG', 'sample',
                                'sample_audvis_raw.fif')

        # Preferences carry the working directory; keep the prefs file
        # inside the temporary workspace too so nothing leaks outside it.
        preferences = PreferencesHandler()
        preferences.workspace = workspace
        preferences.prefs_path = os.path.join(workspace, '.meggieprefs')

        # Creating the experiment makes its directory inside the workspace,
        # records it as the previous experiment, and writes the .exp file.
        experiment = initialize_new_experiment(
            'Test experiment äö€ê*ë', 'Author', preferences)

        # Each call copies the raw file into the subject's own directory
        # and registers a subject object on the experiment.
        for subject_name in ('subject_1', 'subject_2'):
            experiment.create_subject(subject_name, raw_path)

        experiment.save_experiment_settings()

        reloaded = open_existing_experiment(preferences,
                                            path=experiment.path)

        assert reloaded.name == experiment.name
        assert set(reloaded.subjects.keys()) == set(experiment.subjects.keys())
        assert (reloaded.subjects['subject_1'].name ==
                experiment.subjects['subject_1'].name)
| [
"meggie.experiment.initialize_new_experiment",
"tempfile.TemporaryDirectory",
"meggie.mainwindow.preferences.PreferencesHandler",
"meggie.experiment.open_existing_experiment",
"os.path.join",
"mne.datasets.sample.data_path"
] | [((270, 299), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (297, 299), False, 'import tempfile\n'), ((336, 367), 'mne.datasets.sample.data_path', 'mne.datasets.sample.data_path', ([], {}), '()\n', (365, 367), False, 'import mne\n'), ((391, 460), 'os.path.join', 'os.path.join', (['sample_folder', '"""MEG"""', '"""sample"""', '"""sample_audvis_raw.fif"""'], {}), "(sample_folder, 'MEG', 'sample', 'sample_audvis_raw.fif')\n", (403, 460), False, 'import os\n'), ((586, 606), 'meggie.mainwindow.preferences.PreferencesHandler', 'PreferencesHandler', ([], {}), '()\n', (604, 606), False, 'from meggie.mainwindow.preferences import PreferencesHandler\n'), ((668, 705), 'os.path.join', 'os.path.join', (['dirpath', '""".meggieprefs"""'], {}), "(dirpath, '.meggieprefs')\n", (680, 705), False, 'import os\n'), ((955, 1001), 'meggie.experiment.initialize_new_experiment', 'initialize_new_experiment', (['name', 'author', 'prefs'], {}), '(name, author, prefs)\n', (980, 1001), False, 'from meggie.experiment import initialize_new_experiment\n'), ((1453, 1506), 'meggie.experiment.open_existing_experiment', 'open_existing_experiment', (['prefs'], {'path': 'experiment.path'}), '(prefs, path=experiment.path)\n', (1477, 1506), False, 'from meggie.experiment import open_existing_experiment\n')] |
# Generated by Django 3.1.13 on 2021-07-16 21:44
from django.db import migrations, models
import nautobot.extras.models.models
import uuid
class Migration(migrations.Migration):
    """Add file-storage models and adjust JobResult model options.

    Creates ``FileAttachment`` (raw bytes stored in the database) and
    ``FileProxy`` (a Django ``FileField`` facade over those bytes), and
    changes ``JobResult`` ordering to newest-first by creation time.
    """

    dependencies = [
        ("extras", "0010_change_cf_validation_max_min_field_to_bigint"),
    ]

    operations = [
        migrations.CreateModel(
            name="FileAttachment",
            fields=[
                (
                    "id",
                    # Random UUID primary key (not auto-increment int).
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True
                    ),
                ),
                # Raw file content kept directly in the database row.
                ("bytes", models.BinaryField()),
                ("filename", models.CharField(max_length=255)),
                ("mimetype", models.CharField(max_length=50)),
            ],
            options={"ordering": ["filename"]},
        ),
        migrations.CreateModel(
            name="FileProxy",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True
                    ),
                ),
                ("name", models.CharField(max_length=255)),
                (
                    "file",
                    # Custom storage backend persists uploads into
                    # FileAttachment rows; upload_to encodes which
                    # FileAttachment fields back each file.
                    models.FileField(
                        storage=nautobot.extras.models.models.database_storage,
                        upload_to="extras.FileAttachment/bytes/filename/mimetype",
                    ),
                ),
                ("uploaded_at", models.DateTimeField(auto_now_add=True)),
            ],
            options={
                "get_latest_by": "uploaded_at",
                "ordering": ["name"],
                "verbose_name_plural": "file proxies",
            },
        ),
        # JobResults now sort newest-first and support .latest().
        migrations.AlterModelOptions(
            name="jobresult",
            options={"get_latest_by": "created", "ordering": ["-created"]},
        ),
    ]
| [
"django.db.models.UUIDField",
"django.db.models.FileField",
"django.db.migrations.AlterModelOptions",
"django.db.models.BinaryField",
"django.db.models.DateTimeField",
"django.db.models.CharField"
] | [((1820, 1934), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""jobresult"""', 'options': "{'get_latest_by': 'created', 'ordering': ['-created']}"}), "(name='jobresult', options={'get_latest_by':\n 'created', 'ordering': ['-created']})\n", (1848, 1934), False, 'from django.db import migrations, models\n'), ((454, 558), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)', 'unique': '(True)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n serialize=False, unique=True)\n', (470, 558), False, 'from django.db import migrations, models\n'), ((647, 667), 'django.db.models.BinaryField', 'models.BinaryField', ([], {}), '()\n', (665, 667), False, 'from django.db import migrations, models\n'), ((699, 731), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (715, 731), False, 'from django.db import migrations, models\n'), ((763, 794), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (779, 794), False, 'from django.db import migrations, models\n'), ((1018, 1122), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)', 'unique': '(True)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n serialize=False, unique=True)\n', (1034, 1122), False, 'from django.db import migrations, models\n'), ((1210, 1242), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (1226, 1242), False, 'from django.db import migrations, models\n'), ((1311, 1446), 'django.db.models.FileField', 'models.FileField', ([], {'storage': 'nautobot.extras.models.models.database_storage', 'upload_to': '"""extras.FileAttachment/bytes/filename/mimetype"""'}), 
"(storage=nautobot.extras.models.models.database_storage,\n upload_to='extras.FileAttachment/bytes/filename/mimetype')\n", (1327, 1446), False, 'from django.db import migrations, models\n'), ((1566, 1605), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1586, 1605), False, 'from django.db import migrations, models\n')] |
import os

from flask import Flask

# Absolute path to the front-end build output shipped next to this file.
BUILD_DIR = os.path.join(os.path.dirname(__file__), 'build')

app = Flask(__name__, static_url_path='', static_folder=BUILD_DIR)


@app.route('/')
def index():
    """Serve the single-page-app entry point from the build directory."""
    return app.send_static_file('index.html')


if __name__ == "__main__":
    # Fix: environment variables are strings, and Werkzeug's dev server
    # needs an int port — passing e.g. "8080" straight through fails.
    # NOTE(review): debug=True should not be enabled in production.
    app.run(host='0.0.0.0', port=int(os.environ.get('PORT', 5000)), debug=True)
"os.path.dirname",
"os.environ.get",
"flask.Flask"
] | [((103, 163), 'flask.Flask', 'Flask', (['__name__'], {'static_url_path': '""""""', 'static_folder': 'BUILD_DIR'}), "(__name__, static_url_path='', static_folder=BUILD_DIR)\n", (108, 163), False, 'from flask import Flask\n'), ((60, 85), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (75, 85), False, 'import os\n'), ((303, 331), 'os.environ.get', 'os.environ.get', (['"""PORT"""', '(5000)'], {}), "('PORT', 5000)\n", (317, 331), False, 'import os\n')] |
# coding: utf-8
# In[1]:
import glob
import os
import re
import pickle
import csv
import pandas as pd
import numpy as np
from unidecode import unidecode
import string
import language_check
import nltk
from nltk.tokenize import word_tokenize
from nltk.stem import WordNetLemmatizer
from nltk.corpus import wordnet, stopwords
import cleaning_functions
# In[2]:
# Bundle the shared NLP resources (grammar checker, lemmatizer, stop
# words, sentiment lexicon) into one dict that cleaning_functions uses.
meta_dict = dict()
meta_dict["language_check_tool"] = language_check.LanguageTool('en-US')
meta_dict["lemmatizer"] = WordNetLemmatizer()
meta_dict["stop_words_en"] = set(stopwords.words('english'))

# The lexicon file stores space-separated "key=value" tokens per line
# (MPQA-style); read them as plain columns first.
column_names = ['type','len', 'word', 'POS', 'stemmed', 'priorpolarity']
sentiments = pd.read_csv("sentiment/sentiment.txt", sep = " ", header=None, names = column_names)
# Strip the literal "key=" prefixes from each column; the slice offsets
# equal the prefix lengths (e.g. "type=" -> 5, "priorpolarity=" -> 14).
sentiments['type'] = [x[5:] for x in sentiments['type']]
sentiments['len'] = [x[4:] for x in sentiments['len']]
sentiments['word'] = [x[6:] for x in sentiments['word']]
sentiments['POS'] = [x[5:] for x in sentiments['POS']]
sentiments['stemmed'] = [x[9:] for x in sentiments['stemmed']]
sentiments['priorpolarity'] = [x[14:] for x in sentiments['priorpolarity']]
meta_dict["sentiments"] = sentiments
# Persist the resources so later runs can skip this setup.
pickle.dump(meta_dict, open( "meta_dict.p", "wb" ) )
# In[4]:
# Load every annotation file into a DataFrame keyed by the file's base
# name (extension stripped).
meme_annotations = dict()
for filename in glob.glob('../collection/annotations 2/*.txt'):
    with open(filename, 'r', encoding="utf8") as annotations_file:
        annotations_list = annotations_file.read().lower().splitlines()
        # Fixed-offset line format: chars 0-6 are the meme id, the text
        # starts at char 9 and the final char is dropped — presumably a
        # surrounding quote; TODO confirm against the annotation files.
        data = {'id' : [meme[:7] for meme in annotations_list],
                'text' : [meme[9:-1] for meme in annotations_list]}
        meme_annotations[os.path.basename(filename)[:-4]] = pd.DataFrame(data)

cleaned_data = dict()
# In[5]:
# Clean every annotation of every meme; progress is printed per meme
# and every 50 rows.
for meme_name, meme_df in meme_annotations.items():
    print(meme_name + ":")
    meme_cleaned_data = list()
    for index, row in meme_df.iterrows():
        if(index % 50 == 0): print(index)
        # row['text'][1:] drops the first character — presumably a
        # leftover opening quote; TODO confirm with the loader above.
        lemmas = cleaning_functions.clean_sentence(meta_dict, row['text'][1:])
        # Skip annotations that cleaned down to nothing.
        if(len(lemmas) == 0):
            continue
        meme_cleaned_data.append(lemmas)
    cleaned_data[meme_name] = meme_cleaned_data
# In[7]:
# Dump the cleaned tokens as a TSV: one row per (meme, annotation, token),
# with each token carrying its text, POS tag, and sentiment label.
with open('cleaned_memes.tsv', 'w', encoding="utf-8") as output:
    header = ("meme", "meme_id", "token", "pos", "sentiment")
    output.write('\t'.join(header) + '\n')
    for meme_name, cleaned_memes in cleaned_data.items():
        for counter, meme in enumerate(cleaned_memes):
            for token in meme:
                row = (meme_name, str(counter), token[0], token[1], token[2])
                output.write('\t'.join(row) + '\n')
| [
"language_check.LanguageTool",
"nltk.corpus.stopwords.words",
"pandas.read_csv",
"nltk.stem.WordNetLemmatizer",
"cleaning_functions.clean_sentence",
"os.path.basename",
"pandas.DataFrame",
"glob.glob"
] | [((426, 462), 'language_check.LanguageTool', 'language_check.LanguageTool', (['"""en-US"""'], {}), "('en-US')\n", (453, 462), False, 'import language_check\n'), ((489, 508), 'nltk.stem.WordNetLemmatizer', 'WordNetLemmatizer', ([], {}), '()\n', (506, 508), False, 'from nltk.stem import WordNetLemmatizer\n'), ((657, 742), 'pandas.read_csv', 'pd.read_csv', (['"""sentiment/sentiment.txt"""'], {'sep': '""" """', 'header': 'None', 'names': 'column_names'}), "('sentiment/sentiment.txt', sep=' ', header=None, names=column_names\n )\n", (668, 742), True, 'import pandas as pd\n'), ((1251, 1297), 'glob.glob', 'glob.glob', (['"""../collection/annotations 2/*.txt"""'], {}), "('../collection/annotations 2/*.txt')\n", (1260, 1297), False, 'import glob\n'), ((542, 568), 'nltk.corpus.stopwords.words', 'stopwords.words', (['"""english"""'], {}), "('english')\n", (557, 568), False, 'from nltk.corpus import wordnet, stopwords\n'), ((1639, 1657), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (1651, 1657), True, 'import pandas as pd\n'), ((1903, 1964), 'cleaning_functions.clean_sentence', 'cleaning_functions.clean_sentence', (['meta_dict', "row['text'][1:]"], {}), "(meta_dict, row['text'][1:])\n", (1936, 1964), False, 'import cleaning_functions\n'), ((1604, 1630), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (1620, 1630), False, 'import os\n')] |
import numpy as np
def zShift(seq, pos):
    """Return the Z-curve step vector for the base at ``seq[pos]``.

    The three components encode the standard Z-curve axes:
        [0]: purine (A, G) vs. pyrimidine (C, T)
        [1]: amino  (A, C) vs. keto       (G, T)
        [2]: weak   (A, T) vs. strong     (G, C)

    Args:
        seq: Nucleotide sequence (string of 'A', 'C', 'G', 'T').
        pos: Index of the base to encode.

    Returns:
        numpy.ndarray of shape (3,) with entries in {-1, 1}.

    Raises:
        ValueError: If the base is not one of A, C, G, T.

    Fix: the original if-chain implicitly returned ``None`` for any
    other character (lowercase, 'N', gaps), which only surfaced later
    as an opaque failure in ``np.add``; now it fails fast and clearly.
    """
    steps = {
        "A": (1, 1, 1),
        "G": (1, -1, -1),
        "C": (-1, 1, -1),
        "T": (-1, -1, 1),
    }
    base = seq[pos]
    try:
        return np.array(steps[base])
    except KeyError:
        raise ValueError(
            "unknown nucleotide %r at position %d" % (base, pos)) from None
def zCurve(seq):
    """Return the 3-dimensional Z curve of ``seq``.

    Row ``n`` holds the running sum of the per-base step vectors:
    ``zcurve[n] = zcurve[n-1] + zShift(seq, n)``.
    """
    curve = np.zeros((len(seq), 3), dtype=int)
    curve[0] = zShift(seq, 0)
    for idx in range(1, len(seq)):
        curve[idx] = curve[idx - 1] + zShift(seq, idx)
    return curve
| [
"numpy.array"
] | [((265, 284), 'numpy.array', 'np.array', (['[1, 1, 1]'], {}), '([1, 1, 1])\n', (273, 284), True, 'import numpy as np\n'), ((324, 345), 'numpy.array', 'np.array', (['[1, -1, -1]'], {}), '([1, -1, -1])\n', (332, 345), True, 'import numpy as np\n'), ((385, 406), 'numpy.array', 'np.array', (['[-1, 1, -1]'], {}), '([-1, 1, -1])\n', (393, 406), True, 'import numpy as np\n'), ((446, 467), 'numpy.array', 'np.array', (['[-1, -1, 1]'], {}), '([-1, -1, 1])\n', (454, 467), True, 'import numpy as np\n')] |
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
sys.path.append('..')
from auto_scan_test import FusePassAutoScanTest, IgnoreReasons
from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place
import numpy as np
from functools import partial
from typing import Optional, List, Callable, Dict, Any, Set
import unittest
import hypothesis
from hypothesis import given, settings, seed, example, assume, reproduce_failure
from test_elementwise_util import trim_trailing_singular_dims, check_input_shape_available
import hypothesis.strategies as st
class TestElementwiseScaleFuse(FusePassAutoScanTest):
    """Fusion-pass test: an ``elementwise_mul`` followed by ``scale``
    should be fused by ``lite_elementwise_scale_fuse_pass``.

    Programs are drawn with hypothesis and executed on the registered
    OpenCL/Host places; fused results are compared with tolerance 1e-5.
    """

    def __init__(self, *args, **kwargs):
        FusePassAutoScanTest.__init__(self, *args, **kwargs)
        # Register every OpenCL precision/layout combination plus a
        # Host FP32 fallback as valid execution targets.
        opencl_places = [
            Place(TargetType.OpenCL, PrecisionType.FP16,
                  DataLayoutType.ImageDefault),
            Place(TargetType.OpenCL, PrecisionType.FP16,
                  DataLayoutType.ImageFolder),
            Place(TargetType.OpenCL, PrecisionType.FP32, DataLayoutType.NCHW),
            Place(TargetType.OpenCL, PrecisionType.Any,
                  DataLayoutType.ImageDefault),
            Place(TargetType.OpenCL, PrecisionType.Any,
                  DataLayoutType.ImageFolder),
            Place(TargetType.OpenCL, PrecisionType.Any, DataLayoutType.NCHW),
            Place(TargetType.Host, PrecisionType.FP32)
        ]
        self.enable_testing_on_place(places=opencl_places)

    def is_program_valid(self,
                         program_config: ProgramConfig,
                         predictor_config: CxxConfig) -> bool:
        """Reject programs the fused kernel cannot handle.

        Inputs above rank 4 are unsupported, and the fusion only
        applies when the scale op has ``bias_after_scale=True``.
        """
        if len(program_config.inputs["input_data_x"].shape) > 4:
            return False
        if len(program_config.inputs["input_data_y"].shape) > 4:
            return False
        # Truthiness test instead of the original `== False` comparison.
        if not program_config.ops[1].attrs["bias_after_scale"]:
            return False
        return True

    def sample_program_configs(self, draw):
        """Draw a random elementwise_mul + scale program.

        Shapes are rank 2-5 with dims in [1, 20]; shape/axis
        combinations that cannot broadcast are filtered via `assume`.
        """
        in_shape_x = draw(
            st.lists(
                st.integers(min_value=1, max_value=20),
                min_size=2, max_size=5))
        in_shape_y = draw(
            st.lists(
                st.integers(min_value=1, max_value=20),
                min_size=2, max_size=5))
        axis = draw(
            st.integers(
                min_value=-1, max_value=max(len(in_shape_x), len(in_shape_y))))
        # Discard draws where broadcasting x/y along `axis` is impossible
        # (idiom fix: pass the boolean directly instead of `== True`).
        assume(
            check_input_shape_available(
                in_shape_x=in_shape_x, in_shape_y=in_shape_y, axis=axis))

        # Scale-op parameters.
        scale = draw(st.floats(min_value=0.5, max_value=5))
        bias = draw(st.floats(min_value=0, max_value=1))
        bias_after_scale = draw(st.sampled_from([False, True]))

        elementwise_op = OpConfig(
            type='elementwise_mul',
            inputs={"X": ["input_data_x"],
                    "Y": ["input_data_y"]},
            outputs={"Out": ["elementwise_output_data"]},
            attrs={"data_format": 'nchw',
                   "axis": axis})

        scale_op = OpConfig(
            type='scale',
            inputs={"X": ["elementwise_output_data"]},
            outputs={"Out": ["output_data"]},
            attrs={
                "scale": scale,
                "bias": bias,
                "bias_after_scale": bias_after_scale
            })

        ops = [elementwise_op, scale_op]
        return ProgramConfig(
            ops=ops,
            weights={},
            inputs={
                "input_data_x": TensorConfig(shape=in_shape_x),
                "input_data_y": TensorConfig(shape=in_shape_y)
            },
            outputs=["output_data"])

    def sample_predictor_configs(self):
        """Return predictor configs, the op expected after fusion, and
        the (atol, rtol) comparison tolerances.

        Fix: removed an unused local ``config = CxxConfig()`` that was
        never referenced.
        """
        return self.get_predictor_configs(), ['elementwise_mul'], (1e-5, 1e-5)

    def add_ignore_pass_case(self):
        """No known-failing cases to ignore for this pass."""
        pass

    def test(self, *args, **kwargs):
        """Run the auto-scan fuzzing with the fuse pass enabled."""
        self.run_and_statis(
            quant=False,
            max_examples=1000,
            passes=["lite_elementwise_scale_fuse_pass"])
if __name__ == "__main__":
    # argv=[''] keeps unittest from consuming the real command-line
    # arguments (e.g. flags passed by the CI test runner).
    unittest.main(argv=[''])
| [
"hypothesis.strategies.sampled_from",
"hypothesis.strategies.integers",
"program_config.TensorConfig",
"auto_scan_test.FusePassAutoScanTest.__init__",
"hypothesis.strategies.floats",
"program_config.Place",
"unittest.main",
"program_config.OpConfig",
"sys.path.append",
"program_config.CxxConfig",
... | [((621, 642), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (636, 642), False, 'import sys\n'), ((4696, 4720), 'unittest.main', 'unittest.main', ([], {'argv': "['']"}), "(argv=[''])\n", (4709, 4720), False, 'import unittest\n'), ((1289, 1341), 'auto_scan_test.FusePassAutoScanTest.__init__', 'FusePassAutoScanTest.__init__', (['self', '*args'], {}), '(self, *args, **kwargs)\n', (1318, 1341), False, 'from auto_scan_test import FusePassAutoScanTest, IgnoreReasons\n'), ((3353, 3541), 'program_config.OpConfig', 'OpConfig', ([], {'type': '"""elementwise_mul"""', 'inputs': "{'X': ['input_data_x'], 'Y': ['input_data_y']}", 'outputs': "{'Out': ['elementwise_output_data']}", 'attrs': "{'data_format': 'nchw', 'axis': axis}"}), "(type='elementwise_mul', inputs={'X': ['input_data_x'], 'Y': [\n 'input_data_y']}, outputs={'Out': ['elementwise_output_data']}, attrs={\n 'data_format': 'nchw', 'axis': axis})\n", (3361, 3541), False, 'from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place\n'), ((3640, 3824), 'program_config.OpConfig', 'OpConfig', ([], {'type': '"""scale"""', 'inputs': "{'X': ['elementwise_output_data']}", 'outputs': "{'Out': ['output_data']}", 'attrs': "{'scale': scale, 'bias': bias, 'bias_after_scale': bias_after_scale}"}), "(type='scale', inputs={'X': ['elementwise_output_data']}, outputs={\n 'Out': ['output_data']}, attrs={'scale': scale, 'bias': bias,\n 'bias_after_scale': bias_after_scale})\n", (3648, 3824), False, 'from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place\n'), ((4342, 4353), 'program_config.CxxConfig', 'CxxConfig', ([], {}), '()\n', (4351, 4353), False, 'from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place\n'), ((1380, 1453), 'program_config.Place', 'Place', (['TargetType.OpenCL', 'PrecisionType.FP16', 
'DataLayoutType.ImageDefault'], {}), '(TargetType.OpenCL, PrecisionType.FP16, DataLayoutType.ImageDefault)\n', (1385, 1453), False, 'from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place\n'), ((1473, 1545), 'program_config.Place', 'Place', (['TargetType.OpenCL', 'PrecisionType.FP16', 'DataLayoutType.ImageFolder'], {}), '(TargetType.OpenCL, PrecisionType.FP16, DataLayoutType.ImageFolder)\n', (1478, 1545), False, 'from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place\n'), ((1604, 1669), 'program_config.Place', 'Place', (['TargetType.OpenCL', 'PrecisionType.FP32', 'DataLayoutType.NCHW'], {}), '(TargetType.OpenCL, PrecisionType.FP32, DataLayoutType.NCHW)\n', (1609, 1669), False, 'from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place\n'), ((1683, 1755), 'program_config.Place', 'Place', (['TargetType.OpenCL', 'PrecisionType.Any', 'DataLayoutType.ImageDefault'], {}), '(TargetType.OpenCL, PrecisionType.Any, DataLayoutType.ImageDefault)\n', (1688, 1755), False, 'from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place\n'), ((1775, 1846), 'program_config.Place', 'Place', (['TargetType.OpenCL', 'PrecisionType.Any', 'DataLayoutType.ImageFolder'], {}), '(TargetType.OpenCL, PrecisionType.Any, DataLayoutType.ImageFolder)\n', (1780, 1846), False, 'from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place\n'), ((1905, 1969), 'program_config.Place', 'Place', (['TargetType.OpenCL', 'PrecisionType.Any', 'DataLayoutType.NCHW'], {}), '(TargetType.OpenCL, PrecisionType.Any, DataLayoutType.NCHW)\n', (1910, 1969), False, 'from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, 
Place\n'), ((1983, 2025), 'program_config.Place', 'Place', (['TargetType.Host', 'PrecisionType.FP32'], {}), '(TargetType.Host, PrecisionType.FP32)\n', (1988, 2025), False, 'from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place\n'), ((3167, 3204), 'hypothesis.strategies.floats', 'st.floats', ([], {'min_value': '(0.5)', 'max_value': '(5)'}), '(min_value=0.5, max_value=5)\n', (3176, 3204), True, 'import hypothesis.strategies as st\n'), ((3226, 3261), 'hypothesis.strategies.floats', 'st.floats', ([], {'min_value': '(0)', 'max_value': '(1)'}), '(min_value=0, max_value=1)\n', (3235, 3261), True, 'import hypothesis.strategies as st\n'), ((3295, 3325), 'hypothesis.strategies.sampled_from', 'st.sampled_from', (['[False, True]'], {}), '([False, True])\n', (3310, 3325), True, 'import hypothesis.strategies as st\n'), ((2608, 2646), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(1)', 'max_value': '(20)'}), '(min_value=1, max_value=20)\n', (2619, 2646), True, 'import hypothesis.strategies as st\n'), ((2759, 2797), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(1)', 'max_value': '(20)'}), '(min_value=1, max_value=20)\n', (2770, 2797), True, 'import hypothesis.strategies as st\n'), ((3001, 3089), 'test_elementwise_util.check_input_shape_available', 'check_input_shape_available', ([], {'in_shape_x': 'in_shape_x', 'in_shape_y': 'in_shape_y', 'axis': 'axis'}), '(in_shape_x=in_shape_x, in_shape_y=in_shape_y,\n axis=axis)\n', (3028, 3089), False, 'from test_elementwise_util import trim_trailing_singular_dims, check_input_shape_available\n'), ((4107, 4137), 'program_config.TensorConfig', 'TensorConfig', ([], {'shape': 'in_shape_x'}), '(shape=in_shape_x)\n', (4119, 4137), False, 'from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place\n'), ((4171, 4201), 'program_config.TensorConfig', 'TensorConfig', 
([], {'shape': 'in_shape_y'}), '(shape=in_shape_y)\n', (4183, 4201), False, 'from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place\n')] |
import requests
import re
import datetime
import functools
from flask import current_app as app
from app import cache
def cache_timeout(f):
    """Decorator that refreshes ``f.cache_timeout`` before every call.

    The attribute is set to the number of seconds remaining until
    23:59 today — presumably read by the memoizing cache wrapper so
    entries expire at the end of the day; confirm with the cache layer.
    """
    @functools.wraps(f)
    def decorated_function(*args, **kwargs):
        current = datetime.datetime.now()
        end_of_day = current.replace(hour=23, minute=59)
        f.cache_timeout = (end_of_day - current).seconds
        return f(*args, **kwargs)
    return decorated_function
def github_url(name, author):
    """Return the GitHub API base URL for the repository ``author/name``."""
    return "/".join(["https://api.github.com/repos", author, name])
def github_readme(name, author):
    """Return the GitHub API URL for the repository's readme endpoint."""
    # Inlined equivalent of github_url(name, author) + "/readme".
    return "https://api.github.com/repos/" + author + "/" + name + "/readme"
def github_paginated_url(name, author, string, page=None):
    """Return the paginated API URL for resource ``string`` (5 per page).

    ``page`` (a string), when given, selects a specific results page.
    """
    # Inlined equivalent of github_url(name, author) for the base URL.
    base = ("https://api.github.com/repos/" + author + "/" + name
            + "/" + string + "?" + "per_page=5")
    if page is None:
        return base
    return base + "&page=" + page
def get_readme(name, author, headers):
    """Fetch the repository readme via the GitHub API.

    Returns the raw response body on HTTP 200, otherwise an empty string.
    """
    response = requests.get(github_readme(name, author), headers=headers)
    if response.status_code == requests.codes.ok:
        return response.content
    return ""
def get_count(name, author, string, headers):
    """Count items of resource ``string`` ('pulls'/'issues') for a repo.

    Pages are requested 5 items at a time; the total is derived from
    the pagination links as ``5 * (last_page - 1) + len(last_page)``.
    """
    url = github_paginated_url(name, author, string)
    data = requests.get(url, headers=headers)
    if data.links == {}:
        # No Link header: everything fits on one page, just count it.
        return len(data.json())
    last_url = data.links['last']['url']
    match = re.match(r'.*page=(?P<no>\d+)', last_url)
    if match is None:
        # Defensive: unexpected 'last' URL shape; fall back to the
        # first page's item count.
        return len(data.json())
    page = match.groupdict()['no']
    # Fetch the last page to count its (possibly partial) items.
    url = github_paginated_url(name, author, string, page)
    data = requests.get(url, headers=headers)
    return (int(page) - 1) * 5 + len(data.json())
@cache_timeout
@cache.memoize()
def github_data(name, author, url):
    """Collect readme, issue, and pull-request data for a repository.

    Results are memoized until end of day via ``cache_timeout``.

    Args:
        name: Repository name.
        author: Repository owner; overridden by ``url`` when given.
        url: Optional URL whose second-to-last path segment is taken
            as the owner — presumably a repo URL; confirm the format
            callers pass in.

    Returns:
        dict with 'readme' (HTML body), and 'issues'/'pull' sub-dicts
        each carrying a browse 'url' and a 'count'.
    """
    headers = {
        "Authorization": "token " + app.config.get("API_KEY"),
        # Ask GitHub to render the readme as HTML.
        "Accept": "application/vnd.github.VERSION.html"
    }
    if url != "":
        # NOTE(review): the group is named 'name' but is used as the
        # owner/author.
        match = re.match(r'.*\/(?P<name>.*)\/', url)
        if match is not None:
            author = match.groupdict()['name']
    json_obj = dict()
    json_obj['readme'] = get_readme(name, author, headers)
    pull_count = get_count(name, author, 'pulls', headers)
    issue_count = get_count(name, author, 'issues', headers)
    json_obj['issues'] = {
        'url': 'https://github.com/' + author + '/' + name + '/issues',
        # GitHub's issues listing includes pull requests, so subtract
        # them to report genuine issues only.
        'count': (issue_count - pull_count)
    }
    json_obj['pull'] = {
        'url': 'https://github.com/' + author + '/' + name + '/pulls',
        'count': pull_count
    }
    return json_obj
| [
"re.match",
"app.cache.memoize",
"requests.get",
"functools.wraps",
"datetime.datetime.now",
"flask.current_app.config.get"
] | [((1577, 1592), 'app.cache.memoize', 'cache.memoize', ([], {}), '()\n', (1590, 1592), False, 'from app import cache\n'), ((147, 165), 'functools.wraps', 'functools.wraps', (['f'], {}), '(f)\n', (162, 165), False, 'import functools\n'), ((1126, 1160), 'requests.get', 'requests.get', (['url'], {'headers': 'headers'}), '(url, headers=headers)\n', (1138, 1160), False, 'import requests\n'), ((1272, 1313), 're.match', 're.match', (['""".*page=(?P<no>\\\\d+)"""', 'last_url'], {}), "('.*page=(?P<no>\\\\d+)', last_url)\n", (1280, 1313), False, 'import re\n'), ((1474, 1508), 'requests.get', 'requests.get', (['url'], {'headers': 'headers'}), '(url, headers=headers)\n', (1486, 1508), False, 'import requests\n'), ((225, 248), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (246, 248), False, 'import datetime\n'), ((1805, 1842), 're.match', 're.match', (['""".*\\\\/(?P<name>.*)\\\\/"""', 'url'], {}), "('.*\\\\/(?P<name>.*)\\\\/', url)\n", (1813, 1842), False, 'import re\n'), ((1681, 1706), 'flask.current_app.config.get', 'app.config.get', (['"""API_KEY"""'], {}), "('API_KEY')\n", (1695, 1706), True, 'from flask import current_app as app\n')] |
""" Services
This module is reponsible to handle all interactions to the database
and bussiness rules
"""
import typing
import environs
import dotenv
import requests
import sqlalchemy.orm
from . import models, schemas, cache
env = environs.Env()
dotenv.load_dotenv()
SECRET_KEY_RECAPTCHA = env("RECAPTCHA_SECRET_KEY", "")
VALIDATE_RECAPTCHA = env.bool("VALIDATE_RECAPTCHA", True)
class ServiceException(Exception):
""" Service Exception
This error is raised when data passed to the function is not valid
"""
class ValidationError(ServiceException):
pass
_cache = cache.get_cache()
class CityService:
""" City Service
Service class designed to provide reusable functionalities relate to city
"""
_db: sqlalchemy.orm.Session
def __init__(self, db: sqlalchemy.orm.Session):
"""
This constructor set database to others methods
"""
self._db = db
def __getstate__(self):
"""
Prevent database connection to be cached
"""
state = self.__dict__.copy()
state.pop("_db") # do not pickle _db session
return state
def get_city_by_id(self, name: str, state: str) -> models.City:
""" Get City By ID
This method is used to get City
Args:
name (str): City's name
state (str): City's state
Returns:
Instance of models.City
"""
db_city = (
self._db.query(models.City)
.filter_by(id=models.City.generate_id(name=name, state=state))
.first()
)
return db_city
def create_city(self, city: schemas.CityBase) -> models.City:
""" Create City
This method is used to create a City
Args:
city (schemas.CityInput): City's fields
Returns:
models.City
"""
if None in city.dict().values():
raise ValidationError("Invalid Post")
db_city = self.get_city_by_id(name=city.name, state=city.state)
if db_city:
raise ValidationError("City already exist")
city = models.City(**city.dict())
city.id = models.City.generate_id(name=city.name, state=city.state)
self._db.add(city)
self._db.commit()
self._db.flush()
self.cached_filter_city.invalidate_all()
return city
def filter_city(self, name: str) -> typing.List[models.City]:
""" Filter City
This method is used to filter a Cities
Args:
name (str): City's name
Returns:
list of cities
"""
query = self._db.query(models.City).filter(
models.City.name.contains(name)
)
return query.all()
@_cache.cache(ttl=60)
def cached_filter_city(self, name: str):
""" Cached Filter City
Cached version of filter_city it prevents from hitting
database for alredy cached queries
Args:
name (str): City's name
Returns:
list of cities
"""
return self.filter_city(name)
class GoogleService:
_RECAPTCHA_SITEVERIFY_URL = (
"https://www.google.com/recaptcha/api/siteverify"
)
def validate_recaptcha(self, response_token: str) -> bool:
if not VALIDATE_RECAPTCHA:
return True
data = {
"response": response_token,
"secret": SECRET_KEY_RECAPTCHA,
}
response = requests.post(self._RECAPTCHA_SITEVERIFY_URL, data=data)
if response.json().get("success") is not True:
return False
return True
| [
"environs.Env",
"requests.post",
"dotenv.load_dotenv"
] | [((237, 251), 'environs.Env', 'environs.Env', ([], {}), '()\n', (249, 251), False, 'import environs\n'), ((252, 272), 'dotenv.load_dotenv', 'dotenv.load_dotenv', ([], {}), '()\n', (270, 272), False, 'import dotenv\n'), ((3496, 3552), 'requests.post', 'requests.post', (['self._RECAPTCHA_SITEVERIFY_URL'], {'data': 'data'}), '(self._RECAPTCHA_SITEVERIFY_URL, data=data)\n', (3509, 3552), False, 'import requests\n')] |
import inspect
def get_func_parameter_index_by_name(func, parameter_name: str) -> str:
parameters = inspect.signature(func).parameters
if parameter_name not in parameters:
raise ValueError("parameter named: `{}`. dose not exists in the decorated function. `{}` ".format(parameter_name, func.__name__))
return list(parameters).index(parameter_name)
def get_func_parameters_as_dict(func, *args, **kwargs) -> dict:
parameters = inspect.signature(func).parameters
parameters_lst = list(parameters)
result = {}
for key, val in parameters.items():
param_index = parameters_lst.index(key)
if param_index < len(args):
result[key] = args[param_index]
elif key in kwargs:
result[key] = kwargs[key]
return result
| [
"inspect.signature"
] | [((106, 129), 'inspect.signature', 'inspect.signature', (['func'], {}), '(func)\n', (123, 129), False, 'import inspect\n'), ((454, 477), 'inspect.signature', 'inspect.signature', (['func'], {}), '(func)\n', (471, 477), False, 'import inspect\n')] |
#-*- coding:utf-8 -*-
import wx
class BlockWindow(wx.Panel):
def __init__(self, parent, ID = -1, label = "", pos = wx.DefaultPosition, size = (100, 25)):
super(BlockWindow, self).__init__(parent, ID, pos, size, wx.RAISED_BORDER, label)
self.lable = label
self.SetBackgroundColour('#FFF')
self.SetMinSize(size)
self.Bind(wx.EVT_PAINT, self.onPaint)
def onPaint(self, event):
sz = self.GetClientSize()
dc = wx.PaintDC(self)
w,h = dc.GetTextExtent(self.lable)
dc.SetFont(self.GetFont())
dc.DrawText(self.lable, (sz.width-w)/2, (sz.height-h)/2)
class GridSizerFrame(wx.Frame):
labels = "one two three four five six seven eight nine".split()
flags = {
"one": wx.ALIGN_BOTTOM,
"two": wx.ALIGN_CENTER,
"four": wx.ALIGN_RIGHT,
"six": wx.EXPAND,
"seven": wx.EXPAND,
"eight": wx.SHAPED
}
def __init__(self):
super(GridSizerFrame, self).__init__(None, -1, "Sizer Demo")
sizer = wx.GridSizer(rows = 3, cols = 3, hgap = 5, vgap = 5)
for label in self.labels:
bw = BlockWindow(self, label = label, size = (300 ,200))
flag = self.flags.get(label, 0)
sizer.Add(bw, 0, flag|wx.ALL, 10)
self.SetSizer(sizer)
self.Fit()
self.SetBackgroundColour('#0000FF')
class GridBagSizerFrame(wx.Frame):
labels = "one two three four five six seven eight nine".split()
def __init__(self):
super(GridBagSizerFrame, self).__init__(None, -1, "Sizer Demo")
sizer = wx.GridBagSizer(hgap = 5, vgap = 5)
for col in range(3):
for row in range(3):
bw = BlockWindow(self, label=self.labels[row*3+col])
sizer.Add(bw, pos=(row, col))
#跨行
bw = BlockWindow(self, label="span 3 rows")
sizer.Add(bw, pos=(0, 3), span=(3, 1), flag=wx.EXPAND)
#跨列
bw = BlockWindow(self, label="all")
sizer.Add(bw, pos=(3, 0), span=(1, 4), flag=wx.EXPAND)
sizer.AddGrowableCol(3)
sizer.AddGrowableRow(3)
self.SetSizer(sizer)
self.Fit()
class App(wx.App):
def OnPreInit(self):
self.frame = GridBagSizerFrame()
self.frame.Centre(True)
self.frame.Show(True)
return True
if __name__ == '__main__':
app = App(False)
app.MainLoop()
| [
"wx.GridSizer",
"wx.PaintDC",
"wx.GridBagSizer"
] | [((473, 489), 'wx.PaintDC', 'wx.PaintDC', (['self'], {}), '(self)\n', (483, 489), False, 'import wx\n'), ((1043, 1087), 'wx.GridSizer', 'wx.GridSizer', ([], {'rows': '(3)', 'cols': '(3)', 'hgap': '(5)', 'vgap': '(5)'}), '(rows=3, cols=3, hgap=5, vgap=5)\n', (1055, 1087), False, 'import wx\n'), ((1601, 1632), 'wx.GridBagSizer', 'wx.GridBagSizer', ([], {'hgap': '(5)', 'vgap': '(5)'}), '(hgap=5, vgap=5)\n', (1616, 1632), False, 'import wx\n')] |
# coding: utf-8 -*-
'''
GFS.py contains utility functions for GFS
'''
__all__ = ['get_akbk',
'get_pcoord',
'read_atcf']
import numpy as _np
import pandas as _pd
def get_akbk():
'''
Returns ak,bk for 64 level GFS model
vcoord is obtained from global_fcst.fd/gfsio_module.f
ak,bk are as computed from treadeo.gfsio.f for
hybrid = .true. and idvc == 2
'''
vcoord = _np.array([1.0000000,0.99467099,0.98863202,0.98180002,0.97408301, \
0.96538502,0.95560300,0.94463098,0.93235999,0.91867799,0.90347999, \
0.88666302,0.86813903,0.84783000,0.82568502,0.80167699,0.77581102, \
0.74813300,0.71872902,0.68773103,0.65531600,0.62170500,0.58715999, \
0.55197400,0.51646298,0.48095500,0.44577801,0.41124901,0.37765899, \
0.34526899,0.31430000,0.28492799,0.25728399,0.23145400,0.20748200, \
0.18537199,0.16509899,0.14660800,0.12982300,0.11465500,0.10100200, \
0.88756002E-01,0.77808000E-01,0.68048999E-01,0.59370000E-01, \
0.51670998E-01,0.44854999E-01,0.38830999E-01,0.33514999E-01, \
0.28829999E-01,0.24707999E-01,0.21083999E-01,0.17901000E-01, \
0.15107000E-01,0.12658000E-01,0.10511000E-01,0.86310003E-02, \
0.69849999E-02,0.55439998E-02,0.42840000E-02,0.31830000E-02, \
0.22199999E-02,0.13780000E-02,0.64200000E-03,0.0000000])
ak = vcoord / 1000.
bk = vcoord / 1.
return ak,bk
def get_pcoord():
'''
Returns the pressure levels in hPa of the native GFS model with 64 levels.
OUTPUT:
pres = pressure levels (hPa) assuming pref=1013.0
'''
ak,bk = get_akbk()
pref = 101.3
pres = ak[:-1] + bk[:-1]*pref
return pres * 10.
def read_atcf(filename):
'''
Read an ATCF file into a dataframe for ease of processing.
INPUT:
filename = ATCF filename
The file contents are specified at:
http://www.nrlmry.navy.mil/atcf_web/docs/database/new/abdeck.html
OUTPUT:
df = DataFrame containing the file contents
'''
def _to_number(s):
tmp = 0.1 * _np.float(s[:-1])
if s[-1] in ['S','W']:
v = -1.0 * tmp if s[-1] in ['S'] else 360.0 - tmp
else:
v = tmp
return v
# column names
names = ['BASIN','CY','YYYYMMDDHH','TECHNUM','TECH','TAU','LAT','LON','VMAX','MSLP','TY','RAD','WINDCODE','RAD1','RAD2','RAD3','RAD4','POUTER','ROUTER','RMW','GUSTS','EYE','SUBREGION','MAXSEAS','INITIALS','DIR','SPEED','STORMNAME','DEPTH','SEAS','SEASCODE','SEAS1','SEAS2','SEAS3','SEAS4','USERDEFINE1','USERDATA1','USERDEFINE2','USERDATA2','USERDEFINE3','USERDATA3','USERDEFINE4','USERDATA4','USERDEFINE5','USERDATA5']
# column datatypes
dtypes = {'BASIN':str,'CY':str,'YYYYMMDDHH':str,'TECHNUM':_np.float,'TECH':str,'TAU':_np.float,'LAT':str,'LON':str,'VMAX':_np.float,'MSLP':_np.float,'TY':str,'RAD':_np.float,'WINDCODE':str,'RAD1':_np.float,'RAD2':_np.float,'RAD3':_np.float,'RAD4':_np.float,'POUTER':_np.float,'ROUTER':_np.float,'RMW':_np.float,'GUSTS':_np.float,'EYE':_np.float,'SUBREGION':str,'MAXSEAS':_np.float,'INITIALS':str,'DIR':_np.float,'SPEED':_np.float,'STORMNAME':str,'DEPTH':str,'SEAS':_np.float,'SEASCODE':str,'SEAS1':_np.float,'SEAS2':_np.float,'SEAS3':_np.float,'SEAS4':_np.float,'USERDEFINE1':str,'USERDATA1':str,'USERDEFINE2':str,'USERDATA2':str,'USERDEFINE3':str,'USERDATA3':str,'USERDEFINE4':str,'USERDATA4':str,'USERDEFINE5':str,'USERDATA5':str}
df = _pd.read_csv(filename,skipinitialspace=True,header=None,names=names,dtype=dtypes)
# convert YYYYMMDDHH into datetime
df['YYYYMMDDHH'] = _pd.to_datetime(df['YYYYMMDDHH'], format='%Y%m%d%H')
# set index columns
index_cols = ['BASIN','CY','YYYYMMDDHH','TECHNUM','TECH','TAU','TY','SUBREGION']
df.set_index(index_cols, inplace=True)
# drop columns that have no information
df.dropna(axis=1,how='all',inplace=True)
# convert Lat/Lon to floats from hemisphere info
df['LAT'] = df['LAT'].apply(lambda f: _to_number(f))
df['LON'] = df['LON'].apply(lambda f: _to_number(f))
return df
| [
"numpy.array",
"pandas.to_datetime",
"numpy.float",
"pandas.read_csv"
] | [((417, 1206), 'numpy.array', '_np.array', (['[1.0, 0.99467099, 0.98863202, 0.98180002, 0.97408301, 0.96538502, 0.955603,\n 0.94463098, 0.93235999, 0.91867799, 0.90347999, 0.88666302, 0.86813903,\n 0.84783, 0.82568502, 0.80167699, 0.77581102, 0.748133, 0.71872902, \n 0.68773103, 0.655316, 0.621705, 0.58715999, 0.551974, 0.51646298, \n 0.480955, 0.44577801, 0.41124901, 0.37765899, 0.34526899, 0.3143, \n 0.28492799, 0.25728399, 0.231454, 0.207482, 0.18537199, 0.16509899, \n 0.146608, 0.129823, 0.114655, 0.101002, 0.088756002, 0.077808, \n 0.068048999, 0.05937, 0.051670998, 0.044854999, 0.038830999, \n 0.033514999, 0.028829999, 0.024707999, 0.021083999, 0.017901, 0.015107,\n 0.012658, 0.010511, 0.0086310003, 0.0069849999, 0.0055439998, 0.004284,\n 0.003183, 0.0022199999, 0.001378, 0.000642, 0.0]'], {}), '([1.0, 0.99467099, 0.98863202, 0.98180002, 0.97408301, 0.96538502,\n 0.955603, 0.94463098, 0.93235999, 0.91867799, 0.90347999, 0.88666302, \n 0.86813903, 0.84783, 0.82568502, 0.80167699, 0.77581102, 0.748133, \n 0.71872902, 0.68773103, 0.655316, 0.621705, 0.58715999, 0.551974, \n 0.51646298, 0.480955, 0.44577801, 0.41124901, 0.37765899, 0.34526899, \n 0.3143, 0.28492799, 0.25728399, 0.231454, 0.207482, 0.18537199, \n 0.16509899, 0.146608, 0.129823, 0.114655, 0.101002, 0.088756002, \n 0.077808, 0.068048999, 0.05937, 0.051670998, 0.044854999, 0.038830999, \n 0.033514999, 0.028829999, 0.024707999, 0.021083999, 0.017901, 0.015107,\n 0.012658, 0.010511, 0.0086310003, 0.0069849999, 0.0055439998, 0.004284,\n 0.003183, 0.0022199999, 0.001378, 0.000642, 0.0])\n', (426, 1206), True, 'import numpy as _np\n'), ((3502, 3591), 'pandas.read_csv', '_pd.read_csv', (['filename'], {'skipinitialspace': '(True)', 'header': 'None', 'names': 'names', 'dtype': 'dtypes'}), '(filename, skipinitialspace=True, header=None, names=names,\n dtype=dtypes)\n', (3514, 3591), True, 'import pandas as _pd\n'), ((3647, 3699), 'pandas.to_datetime', '_pd.to_datetime', (["df['YYYYMMDDHH']"], {'format': 
'"""%Y%m%d%H"""'}), "(df['YYYYMMDDHH'], format='%Y%m%d%H')\n", (3662, 3699), True, 'import pandas as _pd\n'), ((2125, 2142), 'numpy.float', '_np.float', (['s[:-1]'], {}), '(s[:-1])\n', (2134, 2142), True, 'import numpy as _np\n')] |
# Generated by Django 3.1 on 2020-10-30 11:41
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("attribute", "0001_initial"),
("product", "0137_drop_attribute_models"),
]
operations = [
migrations.AlterModelTable(name="assignedpageattribute", table=None,),
migrations.AlterModelTable(name="assignedproductattribute", table=None,),
migrations.AlterModelTable(name="assignedvariantattribute", table=None,),
migrations.AlterModelTable(name="attribute", table=None,),
migrations.AlterModelTable(name="attributepage", table=None,),
migrations.AlterModelTable(name="attributeproduct", table=None,),
migrations.AlterModelTable(name="attributetranslation", table=None,),
migrations.AlterModelTable(name="attributevalue", table=None,),
migrations.AlterModelTable(name="attributevaluetranslation", table=None,),
migrations.AlterModelTable(name="attributevariant", table=None,),
]
| [
"django.db.migrations.AlterModelTable"
] | [((267, 335), 'django.db.migrations.AlterModelTable', 'migrations.AlterModelTable', ([], {'name': '"""assignedpageattribute"""', 'table': 'None'}), "(name='assignedpageattribute', table=None)\n", (293, 335), False, 'from django.db import migrations\n'), ((346, 417), 'django.db.migrations.AlterModelTable', 'migrations.AlterModelTable', ([], {'name': '"""assignedproductattribute"""', 'table': 'None'}), "(name='assignedproductattribute', table=None)\n", (372, 417), False, 'from django.db import migrations\n'), ((428, 499), 'django.db.migrations.AlterModelTable', 'migrations.AlterModelTable', ([], {'name': '"""assignedvariantattribute"""', 'table': 'None'}), "(name='assignedvariantattribute', table=None)\n", (454, 499), False, 'from django.db import migrations\n'), ((510, 566), 'django.db.migrations.AlterModelTable', 'migrations.AlterModelTable', ([], {'name': '"""attribute"""', 'table': 'None'}), "(name='attribute', table=None)\n", (536, 566), False, 'from django.db import migrations\n'), ((577, 637), 'django.db.migrations.AlterModelTable', 'migrations.AlterModelTable', ([], {'name': '"""attributepage"""', 'table': 'None'}), "(name='attributepage', table=None)\n", (603, 637), False, 'from django.db import migrations\n'), ((648, 711), 'django.db.migrations.AlterModelTable', 'migrations.AlterModelTable', ([], {'name': '"""attributeproduct"""', 'table': 'None'}), "(name='attributeproduct', table=None)\n", (674, 711), False, 'from django.db import migrations\n'), ((722, 789), 'django.db.migrations.AlterModelTable', 'migrations.AlterModelTable', ([], {'name': '"""attributetranslation"""', 'table': 'None'}), "(name='attributetranslation', table=None)\n", (748, 789), False, 'from django.db import migrations\n'), ((800, 861), 'django.db.migrations.AlterModelTable', 'migrations.AlterModelTable', ([], {'name': '"""attributevalue"""', 'table': 'None'}), "(name='attributevalue', table=None)\n", (826, 861), False, 'from django.db import migrations\n'), ((872, 944), 
'django.db.migrations.AlterModelTable', 'migrations.AlterModelTable', ([], {'name': '"""attributevaluetranslation"""', 'table': 'None'}), "(name='attributevaluetranslation', table=None)\n", (898, 944), False, 'from django.db import migrations\n'), ((955, 1018), 'django.db.migrations.AlterModelTable', 'migrations.AlterModelTable', ([], {'name': '"""attributevariant"""', 'table': 'None'}), "(name='attributevariant', table=None)\n", (981, 1018), False, 'from django.db import migrations\n')] |
from itertools import combinations
from sklearn.metrics.pairwise import cosine_similarity
import numpy as np
embeddings = {}
with open("scripts/etm_w2v_embedding.txt", "r") as file:
for line in file.readlines():
splitted = line.split()
word = splitted[0]
embeddings[word] = np.array([float(n) for n in splitted[1:]])
# print(f'Embeddings: {list(embeddings.keys())[:5]}')
topics = []
with open("scripts/topics.txt", "r") as file:
for line in file.readlines():
topics.append(line.split())
print(f'Topics: {topics[:5]}')
topic_embeddings = [[embeddings[word] for word in topic] for topic in topics]
# print(f'Topic embeddings: {topic_embeddings[:5]}')
print(f'Topic embeddings length: {len(topic_embeddings)}')
combs = list(combinations(range(len(topic_embeddings)), 2))
# print(f'total combinations = {list(combs)}')
print(f'combs length = {len(combs)}')
similarities = np.array([])
for xi, yi in combs:
print(f'xi={xi}')
print(f'yi={yi}')
print(f'topic_embeddings[xi]={topic_embeddings[xi]}')
print(f'topic_embeddings[yi]={topic_embeddings[yi]}')
similarity = np.average(cosine_similarity(topic_embeddings[xi], topic_embeddings[yi]))
print(f'avg similarity = {similarity}')
print(f'avg cos = {np.average(similarity)}')
similarities = np.append(similarities, similarity)
print(f'similarities length = {len(similarities)}')
print(similarities)
max_idx = np.argmax(similarities)
print(f'max idx similarities = {max_idx}')
print(f'max similarity = {similarities[max_idx]}')
first_topic_idx, second_topic_idx = combs[max_idx]
print(f'best comb = first: {first_topic_idx}, second: {second_topic_idx}')
print(f'most similar topics: 1 - {", ".join(topics[first_topic_idx])}\n2 - {", ".join(topics[second_topic_idx])}\n')
# X = np.array([[1, 1, 1], [0.98, 0.1, 0.21], [0, 0, 0], [0.8, 0, 1]])
# Y = np.array([[1, 0, 1], [0, 1, 0], [1, 1, 1], [0.99, 1, 0.7]])
# print(X[0:1])
# similarity = cosine_similarity(X, Y)
# print(f'avg cos = {np.average(similarity)}')
# print(f'cos = {similarity}\n')
| [
"sklearn.metrics.pairwise.cosine_similarity",
"numpy.average",
"numpy.argmax",
"numpy.append",
"numpy.array"
] | [((914, 926), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (922, 926), True, 'import numpy as np\n'), ((1429, 1452), 'numpy.argmax', 'np.argmax', (['similarities'], {}), '(similarities)\n', (1438, 1452), True, 'import numpy as np\n'), ((1311, 1346), 'numpy.append', 'np.append', (['similarities', 'similarity'], {}), '(similarities, similarity)\n', (1320, 1346), True, 'import numpy as np\n'), ((1136, 1197), 'sklearn.metrics.pairwise.cosine_similarity', 'cosine_similarity', (['topic_embeddings[xi]', 'topic_embeddings[yi]'], {}), '(topic_embeddings[xi], topic_embeddings[yi])\n', (1153, 1197), False, 'from sklearn.metrics.pairwise import cosine_similarity\n'), ((1266, 1288), 'numpy.average', 'np.average', (['similarity'], {}), '(similarity)\n', (1276, 1288), True, 'import numpy as np\n')] |
import random
class RPG(object):
"""."""
def __init__(self):
"""."""
self.health = 10
self.stamina = 15
self.strength = 10
self.intelligence = 5
self.dex = 5
self.luck = 2
self.hp = 0
self.attack = None
self.bonus = None
self.mp = 5
class Fighter(RPG):
"""."""
self.health = random.randint(10, 25)
self.hp = self.health * 1.5
self.stamina = self.stamin + self.health
self.dex = 5 * random.randint(1, 4)
moves = ['Slam', 'Rage', 'Slash']
x = random.randit(0, 2)
self.attack = moves[x]
class Mage(RPG):
"""."""
self.hp = self.health * 1.5
self.stamina = self.stamin + self.health
self.mp = random.randint(10, 25)
self.intelligence = self.mp * .9
moves = ['Fireball', 'Ice Orb', 'Tornado', 'Static', 'Heal']
x = random.randit(0, 3)
self.attack = moves[x]
if self.mp > 24:
self.bonus = moves[4]
| [
"random.randint",
"random.randit"
] | [((395, 417), 'random.randint', 'random.randint', (['(10)', '(25)'], {}), '(10, 25)\n', (409, 417), False, 'import random\n'), ((601, 620), 'random.randit', 'random.randit', (['(0)', '(2)'], {}), '(0, 2)\n', (614, 620), False, 'import random\n'), ((789, 811), 'random.randint', 'random.randint', (['(10)', '(25)'], {}), '(10, 25)\n', (803, 811), False, 'import random\n'), ((934, 953), 'random.randit', 'random.randit', (['(0)', '(3)'], {}), '(0, 3)\n', (947, 953), False, 'import random\n'), ((526, 546), 'random.randint', 'random.randint', (['(1)', '(4)'], {}), '(1, 4)\n', (540, 546), False, 'import random\n')] |
import datetime as dt
from functools import singledispatch
__holidays = {}
__cache = set()
__min_year = None
__max_year = None
class _Holiday(object):
'''Container for public holiday meta information.
Most holidays recur on the same day every year so these
only require the month and day. Once-off holidays occur
at a fixed point in time so they require the exact date
to be specified. The exact date of other holidays (such
as Easter) is based on a formula and will differ each
year. These types of holidays can be specified either as
once-off holidays or by specfiying actual the formula.
'''
def __init__(self, name, type_cd, value):
self.__name = name
self.__type_cd = type_cd
self.__value = value
def __str__(self):
return f'{self.__name} with type code {self.__type_cd} with value {self.__value}'
def get_type_cd(self):
return self.__type_cd
def get_name(self):
return self.__name
def get_effective_date(self, year=dt.datetime.today().year):
'''Get the effective date of the holiday (adjusted if the actual date
falls on Sunday).
'''
return self.__get_date(year, True)
def get_actual_date(self, year=dt.datetime.today().year):
'''Get the actual date of the holiday, as specified in the source file.'''
return self.__get_date(year)
def __get_date(self, year, effective=False):
'''Get the date of the holiday, optionally adjusted to reflect the
effective date, if the actual date falls on a Sunday.'''
if self.__type_cd == 'ONCE':
date = dt.datetime.strptime(self.__value, '%Y%m%d')
elif self.__type_cd == 'RECUR':
date = dt.datetime.strptime(f'{year}{self.__value}', '%Y%m%d')
else:
date = dt.datetime.strptime(f'{year}0101', '%Y%m%d')
if effective:
if date.weekday() == 6:
date = date + dt.timedelta(1)
return date
def is_business_day(date):
'''Return true if the given date is a valid business day.'''
__check_and_update(date.year)
return date not in __cache and date.weekday() < 5
def get_business_days(start_date, end_date=dt.datetime.today()):
'''Gets the list of business days between to dates (inclusive).
The end date defaults to the current day.
'''
__check_and_update(start_date.year)
__check_and_update(end_date.year)
dates = list({start_date + dt.timedelta(s) for s in range((end_date - start_date).days + 1)
if (start_date + dt.timedelta(s)).weekday() < 5} - __cache)
dates.sort()
return dates
def get_holidays(year=dt.datetime.today().year):
'''Return the list of named holidays and their corresponding dates.
Defaults to the list of holidays for the current year.
'''
__check_and_update(year)
holidays = list({(h.get_actual_date(year), h.get_name())
for h in __holidays if h.get_actual_date(year).year == year})
holidays.sort()
return holidays
def get_holiday_effective_dates(year=dt.datetime.today().year):
'''Get the list of effective public holiday dates for the given year.
Defaults to the list of holidays for the current year.
The effective date can differ if the public holiday falls on a Sunday,
where the following Monday is then given as the holiday date.
'''
__check_and_update(year)
dates = list({h.get_effective_date(year)
for h in __holidays if h.get_effective_date(year).year == year})
dates.sort()
return dates
def get_previous_business_day(date_val=dt.date.today(), *args):
'''Return business day prior to the date specified.
If called without args then returns the date of the most recent
business day.
The date_val can be either a datetime/date object or a string.
If date_val is a string, then you can optionally pass the format
string as the second parameter (default is ISO 8601 %Y-%m-%d).
'''
return __get_previous_business_day_date(date_val, args)
@singledispatch
def __get_previous_business_day_date(date_val, *args):
'''Internal method to handle date/datetime objects.'''
prev_day = date_val
while True:
prev_day -= dt.timedelta(1)
if is_business_day(prev_day):
break
return prev_day
@__get_previous_business_day_date.register(str)
def _get_previous_business_day_str(date_val, *args):
'''Internal method to handle dates passed as a string.'''
if len(args) and len(args[0]):
date_fmt = args[0][0]
else:
date_fmt = '%Y-%m-%d'
return __get_previous_business_day_date(dt.datetime.strptime(date_val, date_fmt)).strftime(date_fmt)
def __check_year(year):
'''Check whether the given year in is within cached range.'''
return year >= __min_year and year <= __max_year
def __check_and_update(year):
'''Load holidays for year into cache if given year is not within cached range.'''
if not __check_year(year):
__load_holidays_for_year(year)
def __load_holidays_for_year(year):
'''Load holidays for year into cache.'''
global __min_year
__min_year = min(year, __min_year)
global __max_year
__max_year = max(year, __max_year)
for i in range(__max_year - __min_year + 1):
for h in [h for h in __holidays]:
__cache.add(h.get_effective_date(__min_year + i))
def __process_line(line):
parts = line.split(',')
return _Holiday(parts[0], parts[1], parts[2])
with open('public_holidays.csv', 'r') as f:
f.readline()
__holidays = [__process_line(l.strip()) for l in f.readlines() if l.strip()]
__min_year = dt.datetime.today().year
__max_year = __min_year
__load_holidays_for_year(dt.datetime.today().year)
| [
"datetime.datetime.today",
"datetime.date.today",
"datetime.datetime.strptime",
"datetime.timedelta"
] | [((2242, 2261), 'datetime.datetime.today', 'dt.datetime.today', ([], {}), '()\n', (2259, 2261), True, 'import datetime as dt\n'), ((3663, 3678), 'datetime.date.today', 'dt.date.today', ([], {}), '()\n', (3676, 3678), True, 'import datetime as dt\n'), ((5720, 5739), 'datetime.datetime.today', 'dt.datetime.today', ([], {}), '()\n', (5737, 5739), True, 'import datetime as dt\n'), ((2699, 2718), 'datetime.datetime.today', 'dt.datetime.today', ([], {}), '()\n', (2716, 2718), True, 'import datetime as dt\n'), ((3119, 3138), 'datetime.datetime.today', 'dt.datetime.today', ([], {}), '()\n', (3136, 3138), True, 'import datetime as dt\n'), ((4295, 4310), 'datetime.timedelta', 'dt.timedelta', (['(1)'], {}), '(1)\n', (4307, 4310), True, 'import datetime as dt\n'), ((5795, 5814), 'datetime.datetime.today', 'dt.datetime.today', ([], {}), '()\n', (5812, 5814), True, 'import datetime as dt\n'), ((1034, 1053), 'datetime.datetime.today', 'dt.datetime.today', ([], {}), '()\n', (1051, 1053), True, 'import datetime as dt\n'), ((1256, 1275), 'datetime.datetime.today', 'dt.datetime.today', ([], {}), '()\n', (1273, 1275), True, 'import datetime as dt\n'), ((1649, 1693), 'datetime.datetime.strptime', 'dt.datetime.strptime', (['self.__value', '"""%Y%m%d"""'], {}), "(self.__value, '%Y%m%d')\n", (1669, 1693), True, 'import datetime as dt\n'), ((1753, 1808), 'datetime.datetime.strptime', 'dt.datetime.strptime', (['f"""{year}{self.__value}"""', '"""%Y%m%d"""'], {}), "(f'{year}{self.__value}', '%Y%m%d')\n", (1773, 1808), True, 'import datetime as dt\n'), ((1842, 1887), 'datetime.datetime.strptime', 'dt.datetime.strptime', (['f"""{year}0101"""', '"""%Y%m%d"""'], {}), "(f'{year}0101', '%Y%m%d')\n", (1862, 1887), True, 'import datetime as dt\n'), ((4704, 4744), 'datetime.datetime.strptime', 'dt.datetime.strptime', (['date_val', 'date_fmt'], {}), '(date_val, date_fmt)\n', (4724, 4744), True, 'import datetime as dt\n'), ((1977, 1992), 'datetime.timedelta', 'dt.timedelta', (['(1)'], {}), '(1)\n', 
(1989, 1992), True, 'import datetime as dt\n'), ((2497, 2512), 'datetime.timedelta', 'dt.timedelta', (['s'], {}), '(s)\n', (2509, 2512), True, 'import datetime as dt\n'), ((2597, 2612), 'datetime.timedelta', 'dt.timedelta', (['s'], {}), '(s)\n', (2609, 2612), True, 'import datetime as dt\n')] |
import argparse
import os
import pathlib
import imageio
import matplotlib.pyplot as plt
import numpy as np
import omegaconf
import torch
import mbrl.env.termination_fns
import mbrl.models
import mbrl.planning
import mbrl.util.common
from mbrl.third_party.dmc2gym.wrappers import DMCWrapper
class PlanetVisualizer:
    """Side-by-side comparison of a PlaNet model's imagined rollout vs. reality.

    Loads a trained PlaNet model, runs the planning agent in the true
    dm_control environment, and — starting at ``start_step`` — replays the same
    action sequence inside the learned latent model for ``lookahead`` steps.
    Paired (predicted, true) frames are saved and stitched into a GIF under
    ``<model_dir>/diagnostics``.
    """

    def __init__(
        self,
        start_step: int,
        lookahead: int,
        model_dir: str,
        env_name: str,
        device: torch.device,
        seed: int,
    ):
        """Build env, model, model-env wrapper and CEM planning agent.

        env_name is expected to look like "dmcontrol___cheetah--run"
        (prefix___domain--task).
        """
        self.seed = seed
        self.start_step = start_step
        self.lookahead = lookahead
        self.device = device
        self.model_dir = pathlib.Path(model_dir)
        self.vis_dir = self.model_dir / "diagnostics"
        pathlib.Path.mkdir(self.vis_dir, exist_ok=True)
        # "dmcontrol___cheetah--run" -> domain="cheetah", task="run"
        domain, task = env_name.split("___")[1].split("--")
        self.env = DMCWrapper(
            domain,
            task,
            task_kwargs={"random": 0},
            visualize_reward=False,
            height=64,
            width=64,
            from_pixels=True,  # observations are 64x64 RGB frames
            frame_skip=4,
        )
        # Architecture constants mirror the training configuration
        # (obs shape, embedding size, encoder/decoder conv stacks, latent
        # sizes) — TODO confirm against mbrl.models.PlaNetModel's signature.
        self.model = mbrl.models.PlaNetModel(
            (3, 64, 64),
            1024,
            ((3, 32, 4, 2), (32, 64, 4, 2), (64, 128, 4, 2), (128, 256, 4, 2)),
            (
                (1024, 1, 1),
                ((1024, 128, 5, 2), (128, 64, 5, 2), (64, 32, 6, 2), (32, 3, 6, 2)),
            ),
            30,
            self.env.action_space.shape[0],
            200,
            200,
            device,
            free_nats=3.0,
            kl_scale=1.0,
        )
        self.model.load(self.model_dir / "planet.pth")
        # Seeded generator so the model-env's sampling is reproducible.
        rng = torch.Generator(device=device)
        rng.manual_seed(seed)
        self.model_env = mbrl.models.ModelEnv(
            self.env, self.model, mbrl.env.termination_fns.no_termination, generator=rng
        )
        # CEM trajectory optimizer; lower/upper bounds are filled in by
        # create_trajectory_optim_agent_for_model from the env's action space.
        agent_cfg = omegaconf.OmegaConf.create(
            {
                "_target_": "mbrl.planning.TrajectoryOptimizerAgent",
                "action_lb": "???",
                "action_ub": "???",
                "planning_horizon": 12,
                "optimizer_cfg": {
                    "_target_": "mbrl.planning.CEMOptimizer",
                    "num_iterations": 10,
                    "elite_ratio": 0.1,
                    "population_size": 1000,
                    "alpha": 0.1,
                    "lower_bound": "???",
                    "upper_bound": "???",
                    "return_mean_elites": True,
                    "device": device,
                },
                "replan_freq": 1,
                "verbose": True,
            }
        )
        self.agent = mbrl.planning.create_trajectory_optim_agent_for_model(
            self.model_env, agent_cfg
        )

    def run(self):
        """Roll out the agent, replay actions in the model, save a GIF."""
        current_step = 0  # NOTE(review): incremented but never read
        true_obs = []
        true_total_reward = 0.0
        actions = []
        obs = self.env.reset()
        self.agent.reset()
        # Act in the real env; only steps from start_step on are recorded.
        for step in range(self.start_step + self.lookahead):
            action = self.agent.act(obs)
            next_obs, reward, done, _ = self.env.step(action)
            if step >= self.start_step:
                true_obs.append(obs)
                actions.append(action)
                true_total_reward += reward
            obs = next_obs
            if done:
                break
            current_step += 1
        # Now check what the model thinks will happen with the same sequence of actions.
        cur_obs = true_obs[0].copy()
        pred_total_reward = 0.0
        latent = self.model_env.reset(cur_obs[None, :], return_as_np=False)
        pred_obs = [self.model.render(latent)[0]]
        for a in actions:
            latent, reward, *_ = self.model_env.step(a.copy()[None, :])
            pred_obs.append(self.model.render(latent)[0])
            pred_total_reward += reward.item()
        print(
            f"True total reward: {true_total_reward}. Predicted total reward: {pred_total_reward}"
        )
        filenames = []
        for idx in range(self.lookahead):
            fname = self.vis_dir / f"frame_{idx}.png"
            filenames.append(fname)
            # Left panel: model's rendered prediction; right panel: true frame
            # (true obs are channel-first, hence the transpose to HWC) —
            # presumably both end up HxWx3 uint8; confirm against render().
            fig, axs = plt.subplots(1, 2, figsize=(12, 6))
            axs[0].imshow(pred_obs[idx].astype(np.uint8))
            axs[1].imshow(true_obs[idx].transpose(1, 2, 0))
            # save frame
            plt.savefig(fname)
            plt.close()
        with imageio.get_writer(
            self.vis_dir
            / f"visualization_{self.start_step}_{self.lookahead}_{self.seed}.gif",
            mode="I",
        ) as writer:
            for filename in filenames:
                image = imageio.imread(filename)
                writer.append_data(image)
        # Remove the per-frame PNGs once the GIF is written.
        for filename in set(filenames):
            os.remove(filename)
if __name__ == "__main__":
    # Command-line entry point: parse options, build the visualizer, run it.
    cli = argparse.ArgumentParser()
    cli.add_argument(
        "--model_dir",
        type=str,
        default=None,
        help="The directory where the model was saved.",
    )
    # Integer options share the same shape; register them in one pass.
    for flag, default in (("--lookahead", 50), ("--start_step", 0), ("--seed", 1234)):
        cli.add_argument(flag, type=int, default=default)
    cli.add_argument("--env_name", type=str, default="dmcontrol___cheetah--run")
    options = cli.parse_args()
    PlanetVisualizer(
        options.start_step,
        options.lookahead,
        options.model_dir,
        options.env_name,
        "cuda:0",
        options.seed,
    ).run()
| [
"matplotlib.pyplot.savefig",
"argparse.ArgumentParser",
"pathlib.Path",
"matplotlib.pyplot.close",
"os.remove",
"pathlib.Path.mkdir",
"imageio.imread",
"mbrl.third_party.dmc2gym.wrappers.DMCWrapper",
"omegaconf.OmegaConf.create",
"matplotlib.pyplot.subplots",
"imageio.get_writer",
"torch.Gener... | [((4860, 4885), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4883, 4885), False, 'import argparse\n'), ((653, 676), 'pathlib.Path', 'pathlib.Path', (['model_dir'], {}), '(model_dir)\n', (665, 676), False, 'import pathlib\n'), ((739, 786), 'pathlib.Path.mkdir', 'pathlib.Path.mkdir', (['self.vis_dir'], {'exist_ok': '(True)'}), '(self.vis_dir, exist_ok=True)\n', (757, 786), False, 'import pathlib\n'), ((867, 999), 'mbrl.third_party.dmc2gym.wrappers.DMCWrapper', 'DMCWrapper', (['domain', 'task'], {'task_kwargs': "{'random': 0}", 'visualize_reward': '(False)', 'height': '(64)', 'width': '(64)', 'from_pixels': '(True)', 'frame_skip': '(4)'}), "(domain, task, task_kwargs={'random': 0}, visualize_reward=False,\n height=64, width=64, from_pixels=True, frame_skip=4)\n", (877, 999), False, 'from mbrl.third_party.dmc2gym.wrappers import DMCWrapper\n'), ((1664, 1694), 'torch.Generator', 'torch.Generator', ([], {'device': 'device'}), '(device=device)\n', (1679, 1694), False, 'import torch\n'), ((1892, 2330), 'omegaconf.OmegaConf.create', 'omegaconf.OmegaConf.create', (["{'_target_': 'mbrl.planning.TrajectoryOptimizerAgent', 'action_lb': '???',\n 'action_ub': '???', 'planning_horizon': 12, 'optimizer_cfg': {\n '_target_': 'mbrl.planning.CEMOptimizer', 'num_iterations': 10,\n 'elite_ratio': 0.1, 'population_size': 1000, 'alpha': 0.1,\n 'lower_bound': '???', 'upper_bound': '???', 'return_mean_elites': True,\n 'device': device}, 'replan_freq': 1, 'verbose': True}"], {}), "({'_target_':\n 'mbrl.planning.TrajectoryOptimizerAgent', 'action_lb': '???',\n 'action_ub': '???', 'planning_horizon': 12, 'optimizer_cfg': {\n '_target_': 'mbrl.planning.CEMOptimizer', 'num_iterations': 10,\n 'elite_ratio': 0.1, 'population_size': 1000, 'alpha': 0.1,\n 'lower_bound': '???', 'upper_bound': '???', 'return_mean_elites': True,\n 'device': device}, 'replan_freq': 1, 'verbose': True})\n", (1918, 2330), False, 'import omegaconf\n'), ((4172, 4207), 
'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {'figsize': '(12, 6)'}), '(1, 2, figsize=(12, 6))\n', (4184, 4207), True, 'import matplotlib.pyplot as plt\n'), ((4364, 4382), 'matplotlib.pyplot.savefig', 'plt.savefig', (['fname'], {}), '(fname)\n', (4375, 4382), True, 'import matplotlib.pyplot as plt\n'), ((4395, 4406), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (4404, 4406), True, 'import matplotlib.pyplot as plt\n'), ((4421, 4541), 'imageio.get_writer', 'imageio.get_writer', (["(self.vis_dir /\n f'visualization_{self.start_step}_{self.lookahead}_{self.seed}.gif')"], {'mode': '"""I"""'}), "(self.vis_dir /\n f'visualization_{self.start_step}_{self.lookahead}_{self.seed}.gif',\n mode='I')\n", (4439, 4541), False, 'import imageio\n'), ((4798, 4817), 'os.remove', 'os.remove', (['filename'], {}), '(filename)\n', (4807, 4817), False, 'import os\n'), ((4655, 4679), 'imageio.imread', 'imageio.imread', (['filename'], {}), '(filename)\n', (4669, 4679), False, 'import imageio\n')] |
from snippets import Icon, generate
# Snippet table: trigger -> body, or trigger -> (description, body).
# $1/$0/${1:x} are editor tab stops and $SEL0 is the current selection;
# "==>" presumably separates label text from the inserted body — confirm
# against the generator in `snippets.generate`.
snippets = {
    "pl": "print($SEL0)",
    "ss": ("self.x = x", "self.$1 = $1"),
    "pld": ("pylint disable", "# pylint: disable="),
    "main": ('if __name__ == "__main__"', 'if __name__ == "__main__":==>${0:main()}'),
    "fi": ("from import", "from $1 import $0"),
    "init": ("__init__", "def __init__(self$1):"),
    "ie": ("if x else y", "if $1 else $0"),
    "lm": "lambda ${1:x}: ${0:None}",
}
# Completion lists grouped by (category label, icon); block entries ending
# in ":" get an indented placeholder body via the expand_colon mutator.
completions = {
    ("Keyword", Icon.KEYWORD): [
        "break",
        "continue",
        "from",
        "import",
        "pass",
        "raise",
        "return",
        "yield",
    ],
    ("Block", Icon.BLOCK): [
        "class $1:",
        "def ${1:run}($2):",
        "elif $1:",
        "else:",
        "except",
        "for $1 in $2:",
        "if $1:",
        "try:",
        "while $1:",
        "with $1 as $2:",
    ],
    ("Support", Icon.FUNCTION): [
        "__init__",
        "isinstance",
        "super",
    ],
    ("Decorator", Icon.META): [
        "classmethod",
        "contextmanager",
        "dataclass",
        "property",
        "staticmethod",
    ],
    ("Constant", Icon.CONSTANT): [
        "False",
        "None",
        "True",
    ],
    ("Variable", Icon.VARIABLE): [
        "self",
    ],
}
def expand_colon(content):
    """Append an indented ``${0:pass}`` placeholder body to snippets ending in ':'."""
    if content.endswith(":"):
        return content + "\n\t${0:pass}"
    return content
# Emit the snippet/completion definitions for the Python scope.
generate("source.python", snippets, completions, mutators=[expand_colon])
| [
"snippets.generate"
] | [((1394, 1467), 'snippets.generate', 'generate', (['"""source.python"""', 'snippets', 'completions'], {'mutators': '[expand_colon]'}), "('source.python', snippets, completions, mutators=[expand_colon])\n", (1402, 1467), False, 'from snippets import Icon, generate\n')] |
import pytest
from truman import agent_registration
def fake_agent_factory(env, param_1, param_2):
    """Factory stand-in verifying the registry forwards env and kwargs verbatim."""
    assert (env, param_1, param_2) == ("FAKE_ENVIRONMENT", "PARAM_1", "PARAM_2")
class FakeAgent:
    """Agent stand-in whose constructor verifies the forwarded arguments."""

    def __init__(self, env, param_1, param_2):
        assert (env, param_1, param_2) == ("FAKE_ENVIRONMENT", "PARAM_1", "PARAM_2")
@pytest.mark.parametrize(
    "entry_point",
    [
        fake_agent_factory,
        FakeAgent,
        "tests.test_agent_registration:fake_agent_factory",
        "tests.test_agent_registration:FakeAgent",
    ],
)
def test_registration(entry_point):
    """Registering then making an agent forwards env/kwargs to the entry point."""
    reg = agent_registration.AgentRegistry()
    reg.register(
        id="SomeAgent-v0",
        entry_point=entry_point,
        kwargs={"param_1": "PARAM_1", "param_2": "PARAM_2"},
    )
    # Mocks cannot cover the string entry points, so the factory/class assert
    # on their own arguments. Guard against a silently-passing test by also
    # checking that a wrong environment really trips those assertions.
    reg.make(id="SomeAgent-v0", env="FAKE_ENVIRONMENT")
    with pytest.raises(AssertionError):
        reg.make(id="SomeAgent-v0", env="WRONG_ENVIRONMENT")
    specs = list(reg.all())
    assert len(specs) == 1
    assert specs[0].id == "SomeAgent-v0"
def test_register_duplicate_id():
    """Re-registering an already-registered ID is rejected."""
    reg = agent_registration.AgentRegistry()
    reg.register("Agent-v0")
    with pytest.raises(ValueError, match=r"Cannot re-register ID"):
        reg.register("Agent-v0")
def test_make_missing_agent():
    """Making an ID that was never registered raises KeyError."""
    reg = agent_registration.AgentRegistry()
    with pytest.raises(KeyError, match=r"No registered agent with ID"):
        reg.make("Agent-v0")
def test_make_deprecated_agent():
    """An ID registered with entry_point=None is deprecated and cannot be made."""
    reg = agent_registration.AgentRegistry()
    reg.register("Agent-v0", entry_point=None)
    with pytest.raises(ValueError, match=r"Attempting to make deprecated agent"):
        reg.make("Agent-v0")
@pytest.mark.parametrize(
    "bad_id",
    [
        "AgentMissingVersionName",
        "AgentNoDashBetweenVersionv0",
        "user1/user2/AgentTwoUsers-v0",
        "AgentNaughty$ymbols-v0",
    ],
)
def test_malformed_agent_id(bad_id):
    """IDs that do not match the expected name-vN pattern are rejected."""
    reg = agent_registration.AgentRegistry()
    with pytest.raises(ValueError, match=r"Attempted to register malformed agent ID"):
        reg.register(bad_id)
| [
"pytest.mark.parametrize",
"truman.agent_registration.AgentRegistry",
"pytest.raises"
] | [((385, 559), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""entry_point"""', "[fake_agent_factory, FakeAgent,\n 'tests.test_agent_registration:fake_agent_factory',\n 'tests.test_agent_registration:FakeAgent']"], {}), "('entry_point', [fake_agent_factory, FakeAgent,\n 'tests.test_agent_registration:fake_agent_factory',\n 'tests.test_agent_registration:FakeAgent'])\n", (408, 559), False, 'import pytest\n'), ((2087, 2246), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""bad_id"""', "['AgentMissingVersionName', 'AgentNoDashBetweenVersionv0',\n 'user1/user2/AgentTwoUsers-v0', 'AgentNaughty$ymbols-v0']"], {}), "('bad_id', ['AgentMissingVersionName',\n 'AgentNoDashBetweenVersionv0', 'user1/user2/AgentTwoUsers-v0',\n 'AgentNaughty$ymbols-v0'])\n", (2110, 2246), False, 'import pytest\n'), ((653, 687), 'truman.agent_registration.AgentRegistry', 'agent_registration.AgentRegistry', ([], {}), '()\n', (685, 687), False, 'from truman import agent_registration\n'), ((1463, 1497), 'truman.agent_registration.AgentRegistry', 'agent_registration.AgentRegistry', ([], {}), '()\n', (1495, 1497), False, 'from truman import agent_registration\n'), ((1687, 1721), 'truman.agent_registration.AgentRegistry', 'agent_registration.AgentRegistry', ([], {}), '()\n', (1719, 1721), False, 'from truman import agent_registration\n'), ((1880, 1914), 'truman.agent_registration.AgentRegistry', 'agent_registration.AgentRegistry', ([], {}), '()\n', (1912, 1914), False, 'from truman import agent_registration\n'), ((2341, 2375), 'truman.agent_registration.AgentRegistry', 'agent_registration.AgentRegistry', ([], {}), '()\n', (2373, 2375), False, 'from truman import agent_registration\n'), ((1201, 1230), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (1214, 1230), False, 'import pytest\n'), ((1542, 1598), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""Cannot re-register ID"""'}), "(ValueError, match='Cannot re-register ID')\n", 
(1555, 1598), False, 'import pytest\n'), ((1732, 1792), 'pytest.raises', 'pytest.raises', (['KeyError'], {'match': '"""No registered agent with ID"""'}), "(KeyError, match='No registered agent with ID')\n", (1745, 1792), False, 'import pytest\n'), ((1977, 2047), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""Attempting to make deprecated agent"""'}), "(ValueError, match='Attempting to make deprecated agent')\n", (1990, 2047), False, 'import pytest\n'), ((2386, 2461), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""Attempted to register malformed agent ID"""'}), "(ValueError, match='Attempted to register malformed agent ID')\n", (2399, 2461), False, 'import pytest\n')] |
import markdown
import re
# Matches an opening "<a " tag that does not already carry rel="nofollow"
# (either quoting style), via a negative lookahead over the tag's attributes.
R_NOFOLLOW = re.compile('<a (?![^>]*rel=["\']nofollow[\'"])')
# Replacement: the same tag opener with the attribute prepended.
S_NOFOLLOW = '<a rel="nofollow" '
class NofollowPostprocessor(markdown.postprocessors.Postprocessor):
    """Rewrites serialized HTML so every <a> tag carries rel="nofollow"."""

    def run(self, text):
        # One pass of the precompiled pattern over the whole document.
        rewritten = R_NOFOLLOW.sub(S_NOFOLLOW, text)
        return rewritten
class NofollowExtension(markdown.Extension):
    """Markdown extension adding rel="nofollow" to all generated links."""

    def extendMarkdown(self, md, md_globals):
        # Run after all other postprocessors so every link is covered.
        postprocessor = NofollowPostprocessor(md)
        md.postprocessors.add('nofollow', postprocessor, '_end')
def makeExtension(configs=None):
    """Entry point used by Markdown to instantiate the extension.

    ``configs`` previously defaulted to a mutable ``{}``; Python evaluates
    default values once, so every no-argument call would share (and could
    mutate) the same dict. Defaulting to ``None`` and building a fresh dict
    per call fixes the pitfall while staying backward compatible.
    """
    return NofollowExtension(configs={} if configs is None else configs)
"re.compile"
] | [((40, 88), 're.compile', 're.compile', (['"""<a (?![^>]*rel=["\']nofollow[\'"])"""'], {}), '(\'<a (?![^>]*rel=["\\\']nofollow[\\\'"])\')\n', (50, 88), False, 'import re\n')] |
from zipfile import ZipFile
import os
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
from wordcloud import WordCloud, STOPWORDS
from tqdm import tqdm
import nltk
import re
from nltk.tokenize import word_tokenize
# extract zip file
def extract_zip(file):
    """Unpack every member of the archive *file* into the working directory."""
    with ZipFile(file) as archive:  # default mode is "r"
        archive.extractall()
    print("Done extracting", file)
# remove zip file after successfully extraction of file
def remove_zip(file):
    """Delete *file* from disk and report the deletion."""
    target = file
    os.remove(target)
    print("Successfully deleted ", target)
# Archives holding the training data; each is extracted then deleted.
# NOTE(review): the loop variable shadows the builtin `zip` — rename if edited.
zips = ["train.csv.zip"]
for zip in zips:
    extract_zip(zip)
    remove_zip(zip)
# Load the Quora training data extracted above.
train = pd.read_csv("train.csv")
# Class balance: counts of sincere (0) vs insincere (1) questions.
target_count = train["target"].value_counts()
# Bar plot of the target distribution.
# NOTE(review): positional x/y args are deprecated in newer seaborn.
plt.figure(figsize=(8, 6))
ax = sns.barplot(target_count.index, target_count.values)
ax.set_title("Question distribution of train dataset")
ax.set_xlabel("Question type")
ax.set_ylabel("No of questions")
# Question length measured in whitespace-separated tokens.
train["quest_len"] = train["question_text"].apply(lambda x: len(x.split()))
# Split the frame by class for the per-class plots below.
sincere = train[train["target"] == 0]
insincere = train[train["target"] == 1]
# Overlaid length distributions of both classes in one figure.
# NOTE(review): sns.distplot is deprecated (histplot/displot in seaborn>=0.11).
plt.figure(figsize=(15, 8))
sns.distplot(sincere["quest_len"], hist=True, label="sincere")
sns.distplot(insincere["quest_len"], hist=True, label="insincere")
plt.legend()
plt.xlabel("Question length")
plt.title("Questions Length Distribution of both question")
# Length distribution of sincere questions only.
plt.figure(figsize=(15, 8))
sns.distplot(sincere["quest_len"], hist=True, label="sincere")
plt.legend()
plt.xlabel("Question length")
plt.title("Questions Length Distribution of sincere question")
# Length distribution of insincere questions only.
plt.figure(figsize=(15, 8))
sns.distplot(insincere["quest_len"], hist=True, label="insincere")
plt.legend()
plt.xlabel("Question length")
plt.title("Questions Length Distribution of insincere question")
# pre processing
puncts = [
",",
".",
'"',
":",
")",
"(",
"-",
"!",
"?",
"|",
";",
"'",
"$",
"&",
"/",
"[",
"]",
">",
"%",
"=",
"#",
"*",
"+",
"\\",
"•",
"~",
"@",
"£",
"·",
"_",
"{",
"}",
"©",
"^",
"®",
"`",
"<",
"→",
"°",
"€",
"™",
"›",
"♥",
"←",
"×",
"§",
"″",
"′",
"Â",
"█",
"½",
"à",
"…",
"“",
"★",
"”",
"–",
"●",
"â",
"►",
"−",
"¢",
"²",
"¬",
"░",
"¶",
"↑",
"±",
"¿",
"▾",
"═",
"¦",
"║",
"―",
"¥",
"▓",
"—",
"‹",
"─",
"▒",
":",
"¼",
"⊕",
"▼",
"▪",
"†",
"■",
"’",
"▀",
"¨",
"▄",
"♫",
"☆",
"é",
"¯",
"♦",
"¤",
"▲",
"è",
"¸",
"¾",
"Ã",
"⋅",
"‘",
"∞",
"∙",
")",
"↓",
"、",
"│",
"(",
"»",
",",
"♪",
"╩",
"╚",
"³",
"・",
"╦",
"╣",
"╔",
"╗",
"▬",
"❤",
"ï",
"Ø",
"¹",
"≤",
"‡",
"√",
]
# clean some punctuation
def clean_text(x):
    """Remove every punctuation mark listed in the module-level ``puncts``."""
    cleaned = str(x)
    for symbol in puncts:
        cleaned = cleaned.replace(symbol, "")
    return cleaned
# Space-delimited single letters " a " .. " z ", used for stripping
# stand-alone one-letter words out of question text.
one_letter_word = [
    " a ",
    " b ",
    " c ",
    " d ",
    " e ",
    " f ",
    " g ",
    " h ",
    " i ",
    " j ",
    " k ",
    " l ",
    " m ",
    " n ",
    " o ",
    " p ",
    " q ",
    " r ",
    " s ",
    " t ",
    " u ",
    " v ",
    " w ",
    " x ",
    " y ",
    " z ",
]
# clean one letter words
def clean_one_letter_word(x):
    """Drop stand-alone single-letter ASCII words from *x*.

    The previous implementation replaced each ``" <letter> "`` pattern with an
    empty string, which (a) glued the neighbouring words together because both
    surrounding spaces were consumed (``"x a y" -> "xy"``), (b) could not
    remove consecutive one-letter words (the shared space is consumed by the
    first match), and (c) missed one-letter words at the start or end of the
    string. Filtering on whitespace-split tokens fixes all three. Runs of
    whitespace are collapsed to single spaces as a side effect, which is
    harmless for this cleaning pipeline. Upper-case single letters are also
    dropped; the pipeline lower-cases text beforehand, so this only
    generalizes standalone use.
    """
    tokens = str(x).split()
    kept = [w for w in tokens if not (len(w) == 1 and w.isalpha() and w.isascii())]
    return " ".join(kept)
# clean numbers
def clean_numbers(x):
    """Delete every run of two or more consecutive digits; single digits survive.

    Equivalent to the original cascade of {5,}, {4}, {3}, {2} substitutions:
    each maximal digit run of length >= 2 is removed exactly once.
    """
    return re.sub("[0-9]{2,}", "", x)
mispell_dict = {
"ain't": "is not",
"aren't": "are not",
"can't": "cannot",
"'cause": "because",
"could've": "could have",
"couldn't": "could not",
"didn't": "did not",
"doesn't": "does not",
"don't": "do not",
"hadn't": "had not",
"hasn't": "has not",
"haven't": "have not",
"he'd": "he would",
"he'll": "he will",
"he's": "he is",
"how'd": "how did",
"how'd'y": "how do you",
"how'll": "how will",
"how's": "how is",
"I'd": "I would",
"I'd've": "I would have",
"I'll": "I will",
"I'll've": "I will have",
"I'm": "I am",
"I've": "I have",
"i'd": "i would",
"i'd've": "i would have",
"i'll": "i will",
"i'll've": "i will have",
"i'm": "i am",
"i've": "i have",
"isn't": "is not",
"it'd": "it would",
"it'd've": "it would have",
"it'll": "it will",
"it'll've": "it will have",
"it's": "it is",
"let's": "let us",
"ma'am": "madam",
"mayn't": "may not",
"might've": "might have",
"mightn't": "might not",
"mightn't've": "might not have",
"must've": "must have",
"mustn't": "must not",
"mustn't've": "must not have",
"needn't": "need not",
"needn't've": "need not have",
"o'clock": "of the clock",
"oughtn't": "ought not",
"oughtn't've": "ought not have",
"shan't": "shall not",
"sha'n't": "shall not",
"shan't've": "shall not have",
"she'd": "she would",
"she'd've": "she would have",
"she'll": "she will",
"she'll've": "she will have",
"she's": "she is",
"should've": "should have",
"shouldn't": "should not",
"shouldn't've": "should not have",
"so've": "so have",
"so's": "so as",
"this's": "this is",
"that'd": "that would",
"that'd've": "that would have",
"that's": "that is",
"there'd": "there would",
"there'd've": "there would have",
"there's": "there is",
"here's": "here is",
"they'd": "they would",
"they'd've": "they would have",
"they'll": "they will",
"they'll've": "they will have",
"they're": "they are",
"they've": "they have",
"to've": "to have",
"wasn't": "was not",
"we'd": "we would",
"we'd've": "we would have",
"we'll": "we will",
"we'll've": "we will have",
"we're": "we are",
"we've": "we have",
"weren't": "were not",
"what'll": "what will",
"what'll've": "what will have",
"what're": "what are",
"what's": "what is",
"what've": "what have",
"when's": "when is",
"when've": "when have",
"where'd": "where did",
"where's": "where is",
"where've": "where have",
"who'll": "who will",
"who'll've": "who will have",
"who's": "who is",
"who've": "who have",
"why's": "why is",
"why've": "why have",
"will've": "will have",
"won't": "will not",
"won't've": "will not have",
"would've": "would have",
"wouldn't": "would not",
"wouldn't've": "would not have",
"y'all": "you all",
"y'all'd": "you all would",
"y'all'd've": "you all would have",
"y'all're": "you all are",
"y'all've": "you all have",
"you'd": "you would",
"you'd've": "you would have",
"you'll": "you will",
"you'll've": "you will have",
"you're": "you are",
"you've": "you have",
"colour": "color",
"centre": "center",
"favourite": "favorite",
"travelling": "traveling",
"counselling": "counseling",
"theatre": "theater",
"cancelled": "canceled",
"labour": "labor",
"organisation": "organization",
"wwii": "world war 2",
"citicise": "criticize",
"youtu ": "youtube ",
"Qoura": "Quora",
"sallary": "salary",
"Whta": "What",
"narcisist": "narcissist",
"howdo": "how do",
"whatare": "what are",
"howcan": "how can",
"howmuch": "how much",
"howmany": "how many",
"whydo": "why do",
"doI": "do I",
"theBest": "the best",
"howdoes": "how does",
"mastrubation": "masturbation",
"mastrubate": "masturbate",
"mastrubating": "masturbating",
"pennis": "penis",
"Etherium": "Ethereum",
"narcissit": "narcissist",
"bigdata": "big data",
"2k17": "2017",
"2k18": "2018",
"qouta": "quota",
"exboyfriend": "ex boyfriend",
"airhostess": "air hostess",
"whst": "what",
"watsapp": "whatsapp",
"demonitisation": "demonetization",
"demonitization": "demonetization",
"demonetisation": "demonetization",
}
def _get_mispell(mispell_dict):
mispell_re = re.compile("(%s)" % "|".join(mispell_dict.keys()))
return mispell_dict, mispell_re
# Module-level misspelling table and its precompiled alternation regex,
# shared by replace_typical_misspell below.
mispellings, mispellings_re = _get_mispell(mispell_dict)
def replace_typical_misspell(text):
    """Substitute every known misspelling in *text* with its correction."""
    return mispellings_re.sub(lambda match: mispellings[match.group(0)], text)
# Pre process a text
def preprocess(text):
    """Full cleaning pipeline: lower-case, strip punctuation, one-letter
    words and digit runs, then fix common misspellings (in that order)."""
    pipeline = (
        str.lower,
        clean_text,
        clean_one_letter_word,
        clean_numbers,
        replace_typical_misspell,
    )
    for step in pipeline:
        text = step(text)
    return text
# Register tqdm's progress_apply on pandas, then clean both question sets
# in place (a progress bar tracks per-row preprocessing).
tqdm.pandas()
sincere["question_text"] = sincere["question_text"].progress_apply(preprocess)
insincere["question_text"] = insincere["question_text"].progress_apply(preprocess)
# Build and display a word cloud for one class of questions.
def create_wordcloud(data, title):
    """Render a token-frequency word cloud of ``data.question_text`` and show it.

    Parameters
    ----------
    data : DataFrame with a ``question_text`` string column.
    title : figure title displayed above the cloud.
    """
    nltk.download("punkt")  # tokenizer models required by word_tokenize
    # Concatenate all questions into one string before tokenizing.
    question_text = data.question_text.str.cat(
        sep=" "
    )
    tokens = word_tokenize(question_text)
    vocabulary = set(tokens)
    print(len(vocabulary))  # vocabulary size, printed for eyeballing only
    stop_words = set(STOPWORDS)
    tokens = [w for w in tokens if w not in stop_words]
    frequency_dist = nltk.FreqDist(tokens)
    wordcloud = WordCloud(
        width=800,
        height=800,
        background_color="white",
        stopwords=stop_words,
        min_font_size=10,
    ).generate_from_frequencies(frequency_dist)
    plt.figure(figsize=(8, 8), facecolor=None)
    plt.imshow(wordcloud)
    plt.title(title)
    plt.axis("off")
    plt.show()
# Word cloud of the most frequent tokens in insincere questions.
create_wordcloud(insincere, "Insincere question word cloud")
# Word cloud of the most frequent tokens in sincere questions.
create_wordcloud(sincere, "Sincere question word cloud")
| [
"matplotlib.pyplot.imshow",
"pandas.read_csv",
"seaborn.distplot",
"nltk.download",
"matplotlib.pyplot.show",
"matplotlib.pyplot.xlabel",
"nltk.FreqDist",
"zipfile.ZipFile",
"nltk.tokenize.word_tokenize",
"matplotlib.pyplot.axis",
"wordcloud.WordCloud",
"matplotlib.pyplot.figure",
"re.sub",
... | [((634, 658), 'pandas.read_csv', 'pd.read_csv', (['"""train.csv"""'], {}), "('train.csv')\n", (645, 658), True, 'import pandas as pd\n'), ((768, 794), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 6)'}), '(figsize=(8, 6))\n', (778, 794), True, 'import matplotlib.pyplot as plt\n'), ((800, 852), 'seaborn.barplot', 'sns.barplot', (['target_count.index', 'target_count.values'], {}), '(target_count.index, target_count.values)\n', (811, 852), True, 'import seaborn as sns\n'), ((1314, 1341), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(15, 8)'}), '(figsize=(15, 8))\n', (1324, 1341), True, 'import matplotlib.pyplot as plt\n'), ((1342, 1404), 'seaborn.distplot', 'sns.distplot', (["sincere['quest_len']"], {'hist': '(True)', 'label': '"""sincere"""'}), "(sincere['quest_len'], hist=True, label='sincere')\n", (1354, 1404), True, 'import seaborn as sns\n'), ((1405, 1471), 'seaborn.distplot', 'sns.distplot', (["insincere['quest_len']"], {'hist': '(True)', 'label': '"""insincere"""'}), "(insincere['quest_len'], hist=True, label='insincere')\n", (1417, 1471), True, 'import seaborn as sns\n'), ((1472, 1484), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (1482, 1484), True, 'import matplotlib.pyplot as plt\n'), ((1485, 1514), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Question length"""'], {}), "('Question length')\n", (1495, 1514), True, 'import matplotlib.pyplot as plt\n'), ((1515, 1574), 'matplotlib.pyplot.title', 'plt.title', (['"""Questions Length Distribution of both question"""'], {}), "('Questions Length Distribution of both question')\n", (1524, 1574), True, 'import matplotlib.pyplot as plt\n'), ((1633, 1660), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(15, 8)'}), '(figsize=(15, 8))\n', (1643, 1660), True, 'import matplotlib.pyplot as plt\n'), ((1661, 1723), 'seaborn.distplot', 'sns.distplot', (["sincere['quest_len']"], {'hist': '(True)', 'label': '"""sincere"""'}), "(sincere['quest_len'], hist=True, 
label='sincere')\n", (1673, 1723), True, 'import seaborn as sns\n'), ((1724, 1736), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (1734, 1736), True, 'import matplotlib.pyplot as plt\n'), ((1737, 1766), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Question length"""'], {}), "('Question length')\n", (1747, 1766), True, 'import matplotlib.pyplot as plt\n'), ((1767, 1829), 'matplotlib.pyplot.title', 'plt.title', (['"""Questions Length Distribution of sincere question"""'], {}), "('Questions Length Distribution of sincere question')\n", (1776, 1829), True, 'import matplotlib.pyplot as plt\n'), ((1890, 1917), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(15, 8)'}), '(figsize=(15, 8))\n', (1900, 1917), True, 'import matplotlib.pyplot as plt\n'), ((1918, 1984), 'seaborn.distplot', 'sns.distplot', (["insincere['quest_len']"], {'hist': '(True)', 'label': '"""insincere"""'}), "(insincere['quest_len'], hist=True, label='insincere')\n", (1930, 1984), True, 'import seaborn as sns\n'), ((1985, 1997), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (1995, 1997), True, 'import matplotlib.pyplot as plt\n'), ((1998, 2027), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Question length"""'], {}), "('Question length')\n", (2008, 2027), True, 'import matplotlib.pyplot as plt\n'), ((2028, 2092), 'matplotlib.pyplot.title', 'plt.title', (['"""Questions Length Distribution of insincere question"""'], {}), "('Questions Length Distribution of insincere question')\n", (2037, 2092), True, 'import matplotlib.pyplot as plt\n'), ((9214, 9227), 'tqdm.tqdm.pandas', 'tqdm.pandas', ([], {}), '()\n', (9225, 9227), False, 'from tqdm import tqdm\n'), ((467, 482), 'os.remove', 'os.remove', (['file'], {}), '(file)\n', (476, 482), False, 'import os\n'), ((3938, 3964), 're.sub', 're.sub', (['"""[0-9]{5,}"""', '""""""', 'x'], {}), "('[0-9]{5,}', '', x)\n", (3944, 3964), False, 'import re\n'), ((3973, 3998), 're.sub', 're.sub', (['"""[0-9]{4}"""', '""""""', 'x'], 
{}), "('[0-9]{4}', '', x)\n", (3979, 3998), False, 'import re\n'), ((4007, 4032), 're.sub', 're.sub', (['"""[0-9]{3}"""', '""""""', 'x'], {}), "('[0-9]{3}', '', x)\n", (4013, 4032), False, 'import re\n'), ((4041, 4066), 're.sub', 're.sub', (['"""[0-9]{2}"""', '""""""', 'x'], {}), "('[0-9]{2}', '', x)\n", (4047, 4066), False, 'import re\n'), ((9468, 9490), 'nltk.download', 'nltk.download', (['"""punkt"""'], {}), "('punkt')\n", (9481, 9490), False, 'import nltk\n'), ((9610, 9638), 'nltk.tokenize.word_tokenize', 'word_tokenize', (['question_text'], {}), '(question_text)\n', (9623, 9638), False, 'from nltk.tokenize import word_tokenize\n'), ((9804, 9825), 'nltk.FreqDist', 'nltk.FreqDist', (['tokens'], {}), '(tokens)\n', (9817, 9825), False, 'import nltk\n'), ((10034, 10076), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 8)', 'facecolor': 'None'}), '(figsize=(8, 8), facecolor=None)\n', (10044, 10076), True, 'import matplotlib.pyplot as plt\n'), ((10081, 10102), 'matplotlib.pyplot.imshow', 'plt.imshow', (['wordcloud'], {}), '(wordcloud)\n', (10091, 10102), True, 'import matplotlib.pyplot as plt\n'), ((10107, 10123), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (10116, 10123), True, 'import matplotlib.pyplot as plt\n'), ((10128, 10143), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (10136, 10143), True, 'import matplotlib.pyplot as plt\n'), ((10148, 10158), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (10156, 10158), True, 'import matplotlib.pyplot as plt\n'), ((292, 310), 'zipfile.ZipFile', 'ZipFile', (['file', '"""r"""'], {}), "(file, 'r')\n", (299, 310), False, 'from zipfile import ZipFile\n'), ((9842, 9945), 'wordcloud.WordCloud', 'WordCloud', ([], {'width': '(800)', 'height': '(800)', 'background_color': '"""white"""', 'stopwords': 'stop_words', 'min_font_size': '(10)'}), "(width=800, height=800, background_color='white', stopwords=\n stop_words, min_font_size=10)\n", (9851, 9945), False, 
'from wordcloud import WordCloud, STOPWORDS\n')] |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
import json
import os
import codecs
from collections import Counter
import numpy as np
import tensorflow as tf
from parser.structs.vocabs.base_vocabs import CountVocab
from parser.structs.vocabs.token_vocabs import TokenVocab,GraphTokenVocab
from parser.structs.vocabs.index_vocabs import IndexVocab,GraphIndexVocab
from parser.structs.vocabs.second_order_vocab import GraphSecondIndexVocab
from parser.structs.vocabs.pointer_generator import PointerGenerator
from . import mrp_vocabs as mv
from parser.neural import nn, nonlin, embeddings, classifiers, recurrent
import sys
sys.path.append('./THUMT')
import thumt.layers as layers
from thumt.models.rnnsearch import _decoder as seq2seq_decoder
# from THUMT.thumt.models.rnnsearch import _decoder as seq2seq_decoder
import pdb
class RNNDecoderVocab(TokenVocab):
  """Token vocab whose outputs are decoded by an attentional RNN (THUMT
  rnnsearch decoder). Encoder memories, the final encoder state and the
  decoder embeddings are each projected to ``recur_size`` before decoding."""
  #_save_str = 'tokens'
  #=============================================================
  def __init__(self, *args, **kwargs):
    """Default the placeholder shape to [batch, time], then defer to TokenVocab."""
    if 'placeholder_shape' not in kwargs:
      kwargs['placeholder_shape'] = [None, None]
    super(RNNDecoderVocab, self).__init__(*args, **kwargs)
    return
  def forward(self, layers, decoder_embeddings, sentence_feat, token_weights, sequence_length, input_feed=None, target_copy_hidden_states=None, coverage=None,\
    variable_scope=None, reuse=False, debug=False):
    """Run the seq2seq decoder over the encoder outputs.

    decoder_embeddings: [batch_size, decoder_seq_length, embedding_size]
    layers: BiLSTM outputs, [batch_size, seq_length, hidden_size]
    sentence_feat: final encoder RNN state, [num_encoder_layers, batch_size, hidden_size]
    token_weights: mask
    input_feed / target_copy_hidden_states / coverage:
      NOTE(review): accepted but never used by this implementation.
    """
    #pdb.set_trace()
    with tf.variable_scope('Seq2SeqDecoder'):
      # Project state, memories and embeddings into the decoder's recurrent
      # size before handing off to the THUMT decoder.
      with tf.variable_scope('linear'):
        sentence_feat = classifiers.hidden(sentence_feat, self.recur_size,hidden_func=self.hidden_func,hidden_keep_prob=self.hidden_keep_prob)
      with tf.variable_scope('memory_linear'):
        layers = classifiers.hidden(layers, self.recur_size,hidden_func=self.hidden_func,hidden_keep_prob=self.hidden_keep_prob)
      with tf.variable_scope('embedding_linear'):
        decoder_embeddings = classifiers.hidden(decoder_embeddings, self.recur_size,hidden_func=self.hidden_func,hidden_keep_prob=self.hidden_keep_prob)
      # self.cell is presumably configured elsewhere (subclass/base) — TODO confirm.
      result = seq2seq_decoder(self.cell,decoder_embeddings,layers,sequence_length,sentence_feat)
    return result
  def count(self, mrp):
    """Count node tokens from an MRP json file, then build the index."""
    # pdb.set_trace()
    mrp_file=json.load(open(mrp))  # NOTE(review): file handle is never closed
    for sentence_id in mrp_file:
      for current_data in mrp_file[sentence_id]['nodes']:
        token = current_data[self.field]
        self._count(token)
    self.index_by_counts()
    return True
  def count_mrp(self, mrp):
    """No-op counterpart of count(); always reports success."""
    return True
  def _count(self, token):
    # Lower-case unless the vocab is case-sensitive, then bump the count.
    if not self.cased:
      token = token.lower()
    self.counts[token] += 1
    return
  def get_root(self):
    """Index used for the root token."""
    return 0
  def add_sequence(self,tokens):
    # Map empty-string tokens to index 0; pass everything else through.
    indices=[x if x!='' else 0 for x in tokens]
    return indices
  @property
  def recur_size(self):
    # Decoder hidden size, read from this vocab's config section.
    return self._config.getint(self, 'recur_size')
  @property
  def get_nodes_path(self):
    # Path to the nodes file configured on the network.
    return self._config.get('BaseNetwork', 'nodes_path')
class Seq2SeqIDVocab(RNNDecoderVocab, mv.NodeIDVocab):
  """RNN-decoder vocab for node IDs; BOS and EOS both map to index 0."""

  def set_placeholders(self, indices, feed_dict=None):
    """Insert *indices* into *feed_dict* under this vocab's placeholder.

    ``feed_dict`` previously defaulted to a mutable ``{}``; Python shares
    default values across calls, so every no-argument call mutated and
    returned the same dict. Defaulting to None and creating a fresh dict
    per call fixes that while keeping the signature backward compatible.
    """
    if feed_dict is None:
      feed_dict = {}
    feed_dict[self.placeholder] = indices
    return feed_dict

  #=============================================================
  def get_bos(self):
    """Beginning-of-sequence index."""
    return 0

  #=============================================================
  def get_eos(self):
    """End-of-sequence index."""
    return 0
class Seq2SeqNodeLabelPredictionVocab(TokenVocab, mv.LabelVocab):
def __init__(self, *args, **kwargs):
""""""
kwargs['placeholder_shape'] = [None, None]
super(Seq2SeqNodeLabelPredictionVocab, self).__init__(*args, **kwargs)
return
#=============================================================
def get_bos(self):
""""""
return '<BOS>'
#=============================================================
def get_eos(self):
""""""
return '<EOS>'
def forward(self, hiddens, source_attentions, target_attentions, pointer_generator_inputs, invalid_indexes=None,\
variable_scope=None, reuse=False, debug=False):
"""
Compute a distribution over the target dictionary
extended by the dynamic dictionary implied by copying target nodes.
:param hiddens: decoder outputs, [batch_size, num_target_nodes, hidden_size]
:param source_attentions: attention of each source node,
[batch_size, num_target_nodes, num_source_nodes]
:param source_attention_maps: a sparse indicator matrix
mapping each source node to its index in the dynamic vocabulary.
[batch_size, num_source_nodes, dynamic_vocab_size]
:param target_attentions: attention of each target node,
[batch_size, num_target_nodes, num_target_nodes]
:param target_attention_maps: a sparse indicator matrix
mapping each target node to its index in the dynamic vocabulary.
[batch_size, num_target_nodes, dynamic_vocab_size]
:param invalid_indexes: indexes which are not considered in prediction.
"""
#pdb.set_trace()
# target=self.placeholder['vocab_targets']
# copy_targets=self.placeholder['copy_targets']
# coref_targets=self.placeholder['coref_targets']
with tf.variable_scope('Seq2SeqNodeLabelPredictionVocab'):
source_attention_maps=pointer_generator_inputs['SrcCopyMap']
target_attention_maps=pointer_generator_inputs['TgtCopyMap'][:,1:]
outputs=self.predictor.forward(hiddens, source_attentions, source_attention_maps, target_attentions, target_attention_maps, invalid_indexes=None,debug=debug)
copy_targets=pointer_generator_inputs['SrcCopyIndices'][:,1:]
coref_targets=pointer_generator_inputs['TgtCopyIndices'][:,1:]
# pdb.set_trace()
loss_outputs = self.predictor.compute_loss(outputs['probabilities'],outputs['predictions'],self.placeholder,copy_targets,outputs['source_dynamic_vocab_size'],coref_targets,outputs['source_dynamic_vocab_size'],None,target_attentions,debug=debug)
outputs.update(loss_outputs)
outputs['loss'] = outputs['loss']*self.loss_interpolation
# outputs['loss']=tf.zeros(1,tf.float32)[0]
# outputs['n_correct_tokens']=tf.zeros(1,tf.float32)[0]
# outputs['n_correct_sequences'] = tf.zeros(1,tf.float32)[0]
return outputs
def decode(self, memory_bank, mask, states, copy_attention_maps, copy_vocabs, tag_luts, invalid_indexes, decoder_inputs):
# [batch_size, 1]
batch_size = tf.shape(memory_bank)[0]
tokens = tt.ones([batch_size, 1]) * self.index('<BOS>')
pos_tags = torch.ones(batch_size, 1) * self.index('<EOS>')
corefs = torch.zeros(batch_size, 1)
decoder_input_history = []
decoder_outputs = []
rnn_outputs = []
copy_attentions = []
coref_attentions = []
predictions = []
coref_indexes = []
decoder_mask = []
input_feed = None
coref_inputs = []
# A sparse indicator matrix mapping each node to its index in the dynamic vocab.
# Here the maximum size of the dynamic vocab is just max_decode_length.
coref_attention_maps = tf.cast(tf.zeros([batch_size, self.max_decode_length, self.max_decode_length + 1]), tf.float32)
# A matrix D where the element D_{ij} is for instance i the real vocab index of
# the generated node at the decoding step `i'.
coref_vocab_maps = tf.zeros([batch_size, self.max_decode_length + 1])
coverage = None
if self.use_coverage:
coverage = memory_bank.new_zeros(batch_size, 1, memory_bank.size(1))
for step_i in range(self.max_decode_length):
# 2. Decode one step.
decoder_output_dict = self.decoder(
decoder_inputs, memory_bank, mask, states, input_feed, coref_inputs, coverage)
_decoder_outputs = decoder_output_dict['decoder_hidden_states']
_rnn_outputs = decoder_output_dict['rnn_hidden_states']
_copy_attentions = decoder_output_dict['source_copy_attentions']
_coref_attentions = decoder_output_dict['target_copy_attentions']
states = decoder_output_dict['last_hidden_state']
input_feed = decoder_output_dict['input_feed']
coverage = decoder_output_dict['coverage']
# 3. Run pointer/generator.
if step_i == 0:
_coref_attention_maps = coref_attention_maps[:, :step_i + 1]
else:
_coref_attention_maps = coref_attention_maps[:, :step_i]
generator_output = self.generator(
_decoder_outputs, _copy_attentions, copy_attention_maps,
_coref_attentions, _coref_attention_maps, invalid_indexes)
_predictions = generator_output['predictions']
# 4. Update maps and get the next token input.
tokens, _predictions, pos_tags, corefs, _mask = self._update_maps_and_get_next_input(
step_i,
generator_output['predictions'].squeeze(1),
generator_output['source_dynamic_vocab_size'],
coref_attention_maps,
coref_vocab_maps,
copy_vocabs,
decoder_mask,
tag_luts,
invalid_indexes
)
# 5. Update variables.
decoder_input_history += [decoder_inputs]
decoder_outputs += [_decoder_outputs]
rnn_outputs += [_rnn_outputs]
copy_attentions += [_copy_attentions]
coref_attentions += [_coref_attentions]
predictions += [_predictions]
# Add the coref info for the next input.
coref_indexes += [corefs]
# Add the mask for the next input.
decoder_mask += [_mask]
# 6. Do the following chunking for the graph decoding input.
# Exclude the hidden state for BOS.
decoder_input_history = torch.cat(decoder_input_history[1:], dim=1)
decoder_outputs = torch.cat(decoder_outputs[1:], dim=1)
rnn_outputs = torch.cat(rnn_outputs[1:], dim=1)
# Exclude coref/mask for EOS.
# TODO: Answer "What if the last one is not EOS?"
predictions = torch.cat(predictions[:-1], dim=1)
coref_indexes = torch.cat(coref_indexes[:-1], dim=1)
decoder_mask = 1 - torch.cat(decoder_mask[:-1], dim=1)
return dict(
# [batch_size, max_decode_length]
predictions=predictions,
coref_indexes=coref_indexes,
decoder_mask=decoder_mask,
# [batch_size, max_decode_length, hidden_size]
decoder_inputs=decoder_input_history,
decoder_memory_bank=decoder_outputs,
decoder_rnn_memory_bank=rnn_outputs,
# [batch_size, max_decode_length, encoder_length]
copy_attentions=copy_attentions,
coref_attentions=coref_attentions
)
class Seq2SeqSrcCopyMapVocab(RNNDecoderVocab, mv.SrcCopyMapVocab):
def __init__(self, *args, **kwargs):
""""""
self._depth=-2
kwargs['placeholder_shape'] = [None, None, None]
super(Seq2SeqSrcCopyMapVocab, self).__init__(*args, **kwargs)
return
class Seq2SeqTgtCopyMapVocab(RNNDecoderVocab, mv.TgtCopyMapVocab):
def __init__(self, *args, **kwargs):
""""""
self._depth=-2
kwargs['placeholder_shape'] = [None, None, None]
super(Seq2SeqTgtCopyMapVocab, self).__init__(*args, **kwargs)
return
class Seq2SeqSrcCopyIndicesVocab(RNNDecoderVocab, mv.SrcCopyIndicesVocab):
def __init__(self, *args, **kwargs):
""""""
kwargs['placeholder_shape'] = [None, None]
super(Seq2SeqSrcCopyIndicesVocab, self).__init__(*args, **kwargs)
return
class Seq2SeqTgtCopyIndicesVocab(RNNDecoderVocab, mv.TgtCopyIndicesVocab):
def __init__(self, *args, **kwargs):
""""""
kwargs['placeholder_shape'] = [None, None]
super(Seq2SeqTgtCopyIndicesVocab, self).__init__(*args, **kwargs)
return
class Seq2SeqDecoderVocab(RNNDecoderVocab, mv.WordVocab):
def __init__(self, *args, **kwargs):
""""""
kwargs['placeholder_shape'] = [None, None]
super(Seq2SeqDecoderVocab, self).__init__(*args, **kwargs)
self.cell = layers.rnn_cell.LegacyGRUCell(self.recur_size)
# self.predictor = PointerGenerator(self, input_size, switch_input_size, vocab_size, vocab_pad_idx, force_copy)
return
#=============================================================
def get_bos(self):
""""""
return 0
#=============================================================
def get_eos(self):
""""""
return 0
def forward(self, layers, decoder_embeddings, sentence_feat, token_weights, sequence_length, input_feed=None, target_copy_hidden_states=None, coverage=None,\
variable_scope=None, reuse=False, debug=False):
"""
decoder embeddings [batch_size, decoder_seq_length, embedding_size]
layers: outputs of BiLSTM [batch_size, seq_length, hidden_size]
sentence_feat: the final output state of RNN [num_encoder_layers, batch_size, hidden_size]
token_weights: mask
input_feed: None or [batch_size, 1, hidden_size]
target_copy_hidden_states: None or [batch_size, seq_length, hidden_size]
coverage: None or [batch_size, 1, encode_seq_length]
"""
with tf.variable_scope('Seq2SeqDecoder'):
with tf.variable_scope('linear'):
sentence_feat = classifiers.hidden(sentence_feat, self.recur_size,hidden_func=self.hidden_func,hidden_keep_prob=self.hidden_keep_prob)
with tf.variable_scope('memory_linear'):
layers = classifiers.hidden(layers, self.recur_size,hidden_func=self.hidden_func,hidden_keep_prob=self.hidden_keep_prob)
with tf.variable_scope('embedding_linear'):
decoder_embeddings = classifiers.hidden(decoder_embeddings, self.recur_size,hidden_func=self.hidden_func,hidden_keep_prob=self.hidden_keep_prob)
result = seq2seq_decoder(self.cell,decoder_embeddings,layers,sequence_length,sentence_feat)
return result
class Seq2SeqAnchorPredictionVocab(RNNDecoderVocab, mv.AnchorVocab):
pass
class Seq2SeqGraphTokenVocab(GraphTokenVocab, mv.SemrelVocab):
def count(self, mrp):
""""""
# pdb.set_trace()
mrp_file=json.load(open(mrp))
for sentence_id in mrp_file:
for current_data in mrp_file[sentence_id]['nodes']:
token = current_data[self.field]
self._count(token)
self.index_by_counts()
return True
def _count(self, node):
if node not in ('_', ''):
node = node.split('|')
for edge in node:
edge = edge.split(':', 1)
head, rel = edge
self.counts[rel] += 1
return
#=============================================================
def get_bos(self):
""""""
return '_'
#=============================================================
def get_eos(self):
""""""
return '_'
#=============================================================
# def add(self, token):
# """"""
# indices=self.index(token)
# indices=[(index[0]+1,index[1]) for index in indices]
# return indices
class Seq2SeqGraphIndexVocab(GraphIndexVocab, mv.SemheadVocab):
def count(self, mrp):
""""""
# pdb.set_trace()
mrp_file=json.load(open(mrp))
for sentence_id in mrp_file:
for current_data in mrp_file[sentence_id]['nodes']:
token = current_data[self.field]
self._count(token)
self.index_by_counts()
return True
def _count(self, node):
if node not in ('_', ''):
node = node.split('|')
for edge in node:
edge = edge.split(':', 1)
head, rel = edge
self.counts[rel] += 1
return
# def add(self, token):
# """"""
# indices=self.index(token)
# indices=[index+1 for index in indices]
# return indices
#=============================================================
def get_bos(self):
""""""
return '_'
#=============================================================
def get_eos(self):
""""""
return '_'
class Seq2SeqSecondOrderGraphIndexVocab(GraphSecondIndexVocab, mv.SemheadVocab):
def count(self, mrp):
""""""
# pdb.set_trace()
mrp_file=json.load(open(mrp))
for sentence_id in mrp_file:
for current_data in mrp_file[sentence_id]['nodes']:
token = current_data[self.field]
self._count(token)
self.index_by_counts()
return True
def _count(self, node):
if node not in ('_', ''):
node = node.split('|')
for edge in node:
edge = edge.split(':', 1)
head, rel = edge
self.counts[rel] += 1
return
#=============================================================
def get_bos(self):
""""""
return '_'
#=============================================================
def get_eos(self):
""""""
return '_'
# def add(self, token):
# """"""
# indices=self.index(token)
# indices=[index+1 for index in indices]
# return indices | [
"tensorflow.shape",
"tensorflow.variable_scope",
"parser.neural.classifiers.hidden",
"thumt.models.rnnsearch._decoder",
"thumt.layers.rnn_cell.LegacyGRUCell",
"sys.path.append",
"tensorflow.zeros"
] | [((721, 747), 'sys.path.append', 'sys.path.append', (['"""./THUMT"""'], {}), "('./THUMT')\n", (736, 747), False, 'import sys\n'), ((7666, 7716), 'tensorflow.zeros', 'tf.zeros', (['[batch_size, self.max_decode_length + 1]'], {}), '([batch_size, self.max_decode_length + 1])\n', (7674, 7716), True, 'import tensorflow as tf\n'), ((11933, 11979), 'thumt.layers.rnn_cell.LegacyGRUCell', 'layers.rnn_cell.LegacyGRUCell', (['self.recur_size'], {}), '(self.recur_size)\n', (11962, 11979), True, 'import thumt.layers as layers\n'), ((1998, 2033), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Seq2SeqDecoder"""'], {}), "('Seq2SeqDecoder')\n", (2015, 2033), True, 'import tensorflow as tf\n'), ((2595, 2685), 'thumt.models.rnnsearch._decoder', 'seq2seq_decoder', (['self.cell', 'decoder_embeddings', 'layers', 'sequence_length', 'sentence_feat'], {}), '(self.cell, decoder_embeddings, layers, sequence_length,\n sentence_feat)\n', (2610, 2685), True, 'from thumt.models.rnnsearch import _decoder as seq2seq_decoder\n'), ((5606, 5658), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Seq2SeqNodeLabelPredictionVocab"""'], {}), "('Seq2SeqNodeLabelPredictionVocab')\n", (5623, 5658), True, 'import tensorflow as tf\n'), ((6809, 6830), 'tensorflow.shape', 'tf.shape', (['memory_bank'], {}), '(memory_bank)\n', (6817, 6830), True, 'import tensorflow as tf\n'), ((7423, 7497), 'tensorflow.zeros', 'tf.zeros', (['[batch_size, self.max_decode_length, self.max_decode_length + 1]'], {}), '([batch_size, self.max_decode_length, self.max_decode_length + 1])\n', (7431, 7497), True, 'import tensorflow as tf\n'), ((13009, 13044), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Seq2SeqDecoder"""'], {}), "('Seq2SeqDecoder')\n", (13026, 13044), True, 'import tensorflow as tf\n'), ((13606, 13696), 'thumt.models.rnnsearch._decoder', 'seq2seq_decoder', (['self.cell', 'decoder_embeddings', 'layers', 'sequence_length', 'sentence_feat'], {}), '(self.cell, decoder_embeddings, layers, 
sequence_length,\n sentence_feat)\n', (13621, 13696), True, 'from thumt.models.rnnsearch import _decoder as seq2seq_decoder\n'), ((2044, 2071), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""linear"""'], {}), "('linear')\n", (2061, 2071), True, 'import tensorflow as tf\n'), ((2094, 2219), 'parser.neural.classifiers.hidden', 'classifiers.hidden', (['sentence_feat', 'self.recur_size'], {'hidden_func': 'self.hidden_func', 'hidden_keep_prob': 'self.hidden_keep_prob'}), '(sentence_feat, self.recur_size, hidden_func=self.\n hidden_func, hidden_keep_prob=self.hidden_keep_prob)\n', (2112, 2219), False, 'from parser.neural import nn, nonlin, embeddings, classifiers, recurrent\n'), ((2222, 2256), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""memory_linear"""'], {}), "('memory_linear')\n", (2239, 2256), True, 'import tensorflow as tf\n'), ((2272, 2389), 'parser.neural.classifiers.hidden', 'classifiers.hidden', (['layers', 'self.recur_size'], {'hidden_func': 'self.hidden_func', 'hidden_keep_prob': 'self.hidden_keep_prob'}), '(layers, self.recur_size, hidden_func=self.hidden_func,\n hidden_keep_prob=self.hidden_keep_prob)\n', (2290, 2389), False, 'from parser.neural import nn, nonlin, embeddings, classifiers, recurrent\n'), ((2393, 2430), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""embedding_linear"""'], {}), "('embedding_linear')\n", (2410, 2430), True, 'import tensorflow as tf\n'), ((2458, 2588), 'parser.neural.classifiers.hidden', 'classifiers.hidden', (['decoder_embeddings', 'self.recur_size'], {'hidden_func': 'self.hidden_func', 'hidden_keep_prob': 'self.hidden_keep_prob'}), '(decoder_embeddings, self.recur_size, hidden_func=self.\n hidden_func, hidden_keep_prob=self.hidden_keep_prob)\n', (2476, 2588), False, 'from parser.neural import nn, nonlin, embeddings, classifiers, recurrent\n'), ((13055, 13082), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""linear"""'], {}), "('linear')\n", (13072, 13082), True, 'import tensorflow as tf\n'), 
((13105, 13230), 'parser.neural.classifiers.hidden', 'classifiers.hidden', (['sentence_feat', 'self.recur_size'], {'hidden_func': 'self.hidden_func', 'hidden_keep_prob': 'self.hidden_keep_prob'}), '(sentence_feat, self.recur_size, hidden_func=self.\n hidden_func, hidden_keep_prob=self.hidden_keep_prob)\n', (13123, 13230), False, 'from parser.neural import nn, nonlin, embeddings, classifiers, recurrent\n'), ((13233, 13267), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""memory_linear"""'], {}), "('memory_linear')\n", (13250, 13267), True, 'import tensorflow as tf\n'), ((13283, 13400), 'parser.neural.classifiers.hidden', 'classifiers.hidden', (['layers', 'self.recur_size'], {'hidden_func': 'self.hidden_func', 'hidden_keep_prob': 'self.hidden_keep_prob'}), '(layers, self.recur_size, hidden_func=self.hidden_func,\n hidden_keep_prob=self.hidden_keep_prob)\n', (13301, 13400), False, 'from parser.neural import nn, nonlin, embeddings, classifiers, recurrent\n'), ((13404, 13441), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""embedding_linear"""'], {}), "('embedding_linear')\n", (13421, 13441), True, 'import tensorflow as tf\n'), ((13469, 13599), 'parser.neural.classifiers.hidden', 'classifiers.hidden', (['decoder_embeddings', 'self.recur_size'], {'hidden_func': 'self.hidden_func', 'hidden_keep_prob': 'self.hidden_keep_prob'}), '(decoder_embeddings, self.recur_size, hidden_func=self.\n hidden_func, hidden_keep_prob=self.hidden_keep_prob)\n', (13487, 13599), False, 'from parser.neural import nn, nonlin, embeddings, classifiers, recurrent\n')] |
# 1)
import os
from random import randint
print("1)")
def create_domains_list():
with open("domains.txt", 'r') as file:
data = []
my_list = []
for line in file.readlines():
data.append(line.strip())
for element in data:
new_element = element.replace(".", "")
my_list.append(new_element)
return my_list
domains_list = create_domains_list() # переменная равная 1й функции
print(domains_list)
#######################################################
# 2)
print("2)")
def create_surnames_list():
with open("names.txt", 'r') as file:
data_1 = []
for line in file.readlines():
data_1.append(line.split()[1])
return data_1
surnames_list = create_surnames_list() # переменная равная 2й функции
print(surnames_list)
#######################################################
# 3)
print("3)")
def create_email():
my_symbol = ""
rand_surname = randint(0, len(surnames_list) - 1) # surnames_list это переменная равная вызову 2й функции
rand_number = randint(100, 999)
for symbol in range(randint(5, 7)):
rand_symbol = chr(randint(97, 122))
my_symbol += rand_symbol
rand_domain = randint(0, len(domains_list) - 1) # domains_list это переменная равная вызову 1й функции
my_email = f"{surnames_list[rand_surname]}.{rand_number}@{my_symbol}.{domains_list[rand_domain]}"
# try и except были так как выпадала иногда ошибка.
# Я ее нашел (в строке 33 и 38 надо было поставить -1 после surnames_list и domains_list). Теперь все работает
return my_email
result = create_email()
print(result) | [
"random.randint"
] | [((1065, 1082), 'random.randint', 'randint', (['(100)', '(999)'], {}), '(100, 999)\n', (1072, 1082), False, 'from random import randint\n'), ((1107, 1120), 'random.randint', 'randint', (['(5)', '(7)'], {}), '(5, 7)\n', (1114, 1120), False, 'from random import randint\n'), ((1149, 1165), 'random.randint', 'randint', (['(97)', '(122)'], {}), '(97, 122)\n', (1156, 1165), False, 'from random import randint\n')] |
from helpers.np import mod_to_num, pp
from helpers.db import get_last_beatmap
from helpers.config import config
import pyosu
api = pyosu.OsuApi(config["osuapikey"])
async def mods(ctx, args):
try:
modlist = args[1]
except:
modlist = 0
map = await get_last_beatmap(ctx.username)
map = map[0][0]
if not map:
return "No recent map!"
mode = await api.get_beatmap(beatmap_id=map)
return await pp(map, mod_to_num(modlist), mode.mode)
aliases = ["with"] | [
"helpers.np.mod_to_num",
"pyosu.OsuApi",
"helpers.db.get_last_beatmap"
] | [((132, 165), 'pyosu.OsuApi', 'pyosu.OsuApi', (["config['osuapikey']"], {}), "(config['osuapikey'])\n", (144, 165), False, 'import pyosu\n'), ((282, 312), 'helpers.db.get_last_beatmap', 'get_last_beatmap', (['ctx.username'], {}), '(ctx.username)\n', (298, 312), False, 'from helpers.db import get_last_beatmap\n'), ((475, 494), 'helpers.np.mod_to_num', 'mod_to_num', (['modlist'], {}), '(modlist)\n', (485, 494), False, 'from helpers.np import mod_to_num, pp\n')] |
from __future__ import print_function, absolute_import
import os.path as osp
import numpy as np
from ..utils.data import Dataset
from ..utils.osutils import mkdir_if_missing
from ..utils.serialization import write_json, read_json
from ..utils.data.dataset import _pluck
class SynergyReID(Dataset):
md5 = '05050b5d9388563021315a81b531db7d'
def __init__(self, root, split_id=0, num_val=100, download=True):
super(SynergyReID, self).__init__(root, split_id=split_id)
if download:
self.download()
if not self._check_integrity():
raise RuntimeError("Dataset not found or corrupted. " +
"You can use download=True to download it.")
self.load(num_val)
def download(self):
if self._check_integrity():
print("Files already downloaded and verified")
return
import hashlib
import shutil
from glob import glob
from zipfile import ZipFile
raw_dir = osp.join(self.root, 'raw')
mkdir_if_missing(raw_dir)
# Open the raw zip file
fpath = osp.join(raw_dir, 'synergyreid_data.zip')
if osp.isfile(fpath) and \
hashlib.md5(open(fpath, 'rb').read()).hexdigest() == self.md5:
print("Using downloaded file: " + fpath)
else:
raise RuntimeError("Please move data to {} "
.format(fpath))
# Extract the file
exdir = osp.join(raw_dir, 'data_reid')
if not osp.isdir(exdir):
print("Extracting zip file")
with ZipFile(fpath) as z:
z.extractall(path=raw_dir)
# Format
images_dir = osp.join(self.root, 'images')
mkdir_if_missing(images_dir)
# 487 identities (+1 for background) with 2 camera views each
# Here we use the convention that camera 0 is for query and
# camera 1 is for gallery
identities = [[[] for _ in range(2)] for _ in range(487)]
def register(subdir):
fpaths = sorted(glob(osp.join(exdir, subdir, '*.jpeg')))
pids = set()
for fpath in fpaths:
fname = osp.basename(fpath)
pid = int(fname.split('_')[0])
cam = 1 if 'gallery' in subdir else 0
pids.add(pid)
fname = ('{:08d}_{:02d}_{:04d}.jpg'
.format(pid, cam, len(identities[pid][cam])))
identities[pid][cam].append(fname)
shutil.copy(fpath, osp.join(images_dir, fname))
return pids
trainval_pids = register('reid_training')
query_val_pids = register('reid_val/query')
gallery_val_pids = register('reid_val/gallery')
assert query_val_pids <= gallery_val_pids
assert trainval_pids.isdisjoint(query_val_pids)
identities_test = [[[] for _ in range(2)] for _ in range(9172)]
def register_test(subdir, n=0):
fpaths = sorted(glob(osp.join(exdir, subdir, '*.jpeg')))
pids = set()
for pindx, fpath in enumerate(fpaths):
fname = osp.basename(fpath)
pid = int(fname.split('.')[0])
cam = 1 if 'gallery' in subdir else 0
pids.add(pid)
fname = ('{:08d}_{:02d}_{:04d}.jpg'
.format(pid, cam, 0))
identities_test[pindx+n][cam].append(fname)
shutil.copy(fpath, osp.join(images_dir, fname))
return pids
query_test_pids = register_test('reid_test/query')
gallery_test_pids = register_test('reid_test/gallery',
n=len(query_test_pids))
# Save the training / val / test splits
splits = [{
'trainval': sorted(list(trainval_pids)),
'query_val': sorted(list(query_val_pids)),
'gallery_val': sorted(list(gallery_val_pids)),
'query_test': sorted(list(query_test_pids)),
'gallery_test': sorted(list(gallery_test_pids))}]
write_json(splits, osp.join(self.root, 'splits.json'))
# Save meta information into a json file
meta = {'name': 'SynergyReID', 'shot': 'multiple', 'num_cameras': 2,
'identities': identities, 'identities_test': identities_test}
write_json(meta, osp.join(self.root, 'meta.json'))
def load(self, verbose=True):
splits = read_json(osp.join(self.root, 'splits.json'))
if self.split_id >= len(splits):
raise ValueError("split_id exceeds total splits {}"
.format(len(splits)))
self.split = splits[self.split_id]
trainval_pids = np.concatenate((np.asarray(self.split['trainval']),
np.asarray(self.split['query_val'])))
def _pluck_val(identities, indices, relabel=False, cam=0):
ret = []
for index, pid in enumerate(indices):
pid_images = identities[pid]
for camid, cam_images in enumerate(pid_images):
if camid == cam:
for fname in cam_images:
name = osp.splitext(fname)[0]
x, y, _ = map(int, name.split('_'))
assert pid == x and camid == y
if relabel:
ret.append((fname, index, camid))
else:
ret.append((fname, pid, camid))
return ret
def _pluck_test(identities, indices, n=0):
ret = []
for index, pid in enumerate(indices):
pid_images = identities[index+n]
for camid, cam_images in enumerate(pid_images):
for fname in cam_images:
ret.append((fname, pid, camid))
return ret
self.meta = read_json(osp.join(self.root, 'meta.json'))
identities = self.meta['identities']
identities_test = self.meta['identities_test']
self.train = _pluck(identities, self.split['trainval'], relabel=True)
self.trainval = _pluck(identities, trainval_pids, relabel=True)
self.query_val = _pluck_val(identities, self.split['query_val'], cam=0)
self.gallery_val = _pluck_val(identities, self.split['gallery_val'], cam=1)
self.query_test = _pluck_test(identities_test, self.split['query_test'])
self.gallery_test = _pluck_test(identities_test, self.split['gallery_test'], n=len(self.split['query_test']))
self.num_train_ids = len(self.split['trainval'])
self.num_val_ids = len(self.split['query_val'])
self.num_trainval_ids = len(trainval_pids)
if verbose:
print(self.__class__.__name__, "dataset loaded")
print(" subset | # ids | # images")
print(" ---------------------------")
print(" train | {:5d} | {:8d}"
.format(self.num_train_ids, len(self.train)))
print(" query val | {:5d} | {:8d}"
.format(len(self.split['query_val']), len(self.query_val)))
print(" gallery val | {:5d} | {:8d}"
.format(len(self.split['gallery_val']), len(self.gallery_val)))
print(" trainval | {:5d} | {:8d}"
.format(self.num_trainval_ids, len(self.trainval)))
print(" ---------------------------")
print(" query test | {:5d} | {:8d}"
.format(len(self.split['query_test']), len(self.query_test)))
print(" gallery test | {:5d} | {:8d}"
.format(len(self.split['gallery_test']), len(self.gallery_test)))
| [
"zipfile.ZipFile",
"os.path.join",
"numpy.asarray",
"os.path.splitext",
"os.path.isfile",
"os.path.isdir",
"os.path.basename"
] | [((1017, 1043), 'os.path.join', 'osp.join', (['self.root', '"""raw"""'], {}), "(self.root, 'raw')\n", (1025, 1043), True, 'import os.path as osp\n'), ((1127, 1168), 'os.path.join', 'osp.join', (['raw_dir', '"""synergyreid_data.zip"""'], {}), "(raw_dir, 'synergyreid_data.zip')\n", (1135, 1168), True, 'import os.path as osp\n'), ((1493, 1523), 'os.path.join', 'osp.join', (['raw_dir', '"""data_reid"""'], {}), "(raw_dir, 'data_reid')\n", (1501, 1523), True, 'import os.path as osp\n'), ((1718, 1747), 'os.path.join', 'osp.join', (['self.root', '"""images"""'], {}), "(self.root, 'images')\n", (1726, 1747), True, 'import os.path as osp\n'), ((1180, 1197), 'os.path.isfile', 'osp.isfile', (['fpath'], {}), '(fpath)\n', (1190, 1197), True, 'import os.path as osp\n'), ((1539, 1555), 'os.path.isdir', 'osp.isdir', (['exdir'], {}), '(exdir)\n', (1548, 1555), True, 'import os.path as osp\n'), ((4136, 4170), 'os.path.join', 'osp.join', (['self.root', '"""splits.json"""'], {}), "(self.root, 'splits.json')\n", (4144, 4170), True, 'import os.path as osp\n'), ((4402, 4434), 'os.path.join', 'osp.join', (['self.root', '"""meta.json"""'], {}), "(self.root, 'meta.json')\n", (4410, 4434), True, 'import os.path as osp\n'), ((4498, 4532), 'os.path.join', 'osp.join', (['self.root', '"""splits.json"""'], {}), "(self.root, 'splits.json')\n", (4506, 4532), True, 'import os.path as osp\n'), ((6020, 6052), 'os.path.join', 'osp.join', (['self.root', '"""meta.json"""'], {}), "(self.root, 'meta.json')\n", (6028, 6052), True, 'import os.path as osp\n'), ((1615, 1629), 'zipfile.ZipFile', 'ZipFile', (['fpath'], {}), '(fpath)\n', (1622, 1629), False, 'from zipfile import ZipFile\n'), ((2206, 2225), 'os.path.basename', 'osp.basename', (['fpath'], {}), '(fpath)\n', (2218, 2225), True, 'import os.path as osp\n'), ((3168, 3187), 'os.path.basename', 'osp.basename', (['fpath'], {}), '(fpath)\n', (3180, 3187), True, 'import os.path as osp\n'), ((4774, 4808), 'numpy.asarray', 'np.asarray', 
(["self.split['trainval']"], {}), "(self.split['trainval'])\n", (4784, 4808), True, 'import numpy as np\n'), ((4849, 4884), 'numpy.asarray', 'np.asarray', (["self.split['query_val']"], {}), "(self.split['query_val'])\n", (4859, 4884), True, 'import numpy as np\n'), ((2088, 2121), 'os.path.join', 'osp.join', (['exdir', 'subdir', '"""*.jpeg"""'], {}), "(exdir, subdir, '*.jpeg')\n", (2096, 2121), True, 'import os.path as osp\n'), ((2566, 2593), 'os.path.join', 'osp.join', (['images_dir', 'fname'], {}), '(images_dir, fname)\n', (2574, 2593), True, 'import os.path as osp\n'), ((3032, 3065), 'os.path.join', 'osp.join', (['exdir', 'subdir', '"""*.jpeg"""'], {}), "(exdir, subdir, '*.jpeg')\n", (3040, 3065), True, 'import os.path as osp\n'), ((3513, 3540), 'os.path.join', 'osp.join', (['images_dir', 'fname'], {}), '(images_dir, fname)\n', (3521, 3540), True, 'import os.path as osp\n'), ((5256, 5275), 'os.path.splitext', 'osp.splitext', (['fname'], {}), '(fname)\n', (5268, 5275), True, 'import os.path as osp\n')] |
import colored
from time import strftime, localtime
def colorize(text, color = 'green'):
return colored.stylize(text, colored.fg(color))
def time_colored(color = "gold_1", reverse = True):
return colored.stylize(
text = strftime('[%Y-%m-%d %H:%M:%S]', localtime()),
styles = [colored.fg(color), colored.attr('reverse')] if reverse else colored.fg(color)
)
| [
"time.localtime",
"colored.fg",
"colored.attr"
] | [((124, 141), 'colored.fg', 'colored.fg', (['color'], {}), '(color)\n', (134, 141), False, 'import colored\n'), ((272, 283), 'time.localtime', 'localtime', ([], {}), '()\n', (281, 283), False, 'from time import strftime, localtime\n'), ((364, 381), 'colored.fg', 'colored.fg', (['color'], {}), '(color)\n', (374, 381), False, 'import colored\n'), ((304, 321), 'colored.fg', 'colored.fg', (['color'], {}), '(color)\n', (314, 321), False, 'import colored\n'), ((323, 346), 'colored.attr', 'colored.attr', (['"""reverse"""'], {}), "('reverse')\n", (335, 346), False, 'import colored\n')] |
#!/usr/bin/python3
""" Test Bed for Diyhas System Status class """
import time
import datetime
from threading import Thread
import socket
from Adafruit_Python_LED_Backpack.Adafruit_LED_Backpack import SevenSegment
TIME_MODE = 0
WHO_MODE = 1
COUNT_MODE = 2
MAXIMUM_COUNT = 9999
class TimeDisplay:
""" display time """
def __init__(self, display):
""" initialize special feature and display format """
self.seven_segment = display
self.colon = False
self.alarm = False
self.time_format = "%l%M"
def set_format(self, hour_format):
""" set the time display in 12 or 24 hour format """
self.time_format = hour_format
def set_alarm(self, alarm):
""" set alarm indictor pixel """
self.alarm = alarm
def display(self,):
""" display time of day in 12 or 24 hour format """
digit_string = time.strftime(self.time_format)
self.seven_segment.clear()
self.seven_segment.print_number_str(digit_string)
self.seven_segment.set_colon(self.colon)
if self.colon:
self.colon = False
else:
self.colon = True
self.seven_segment.set_decimal(3, self.alarm)
now = datetime.datetime.now()
if now.hour > 11:
self.seven_segment.set_decimal(1, True)
else:
self.seven_segment.set_decimal(1, False)
self.seven_segment.write_display()
class WhoDisplay:
""" display IP address in who mode """
def __init__(self, display):
""" prepare to show ip address on who message """
self.seven_segment = display
self.iterations = 0
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.connect(("8.8.8.8", 80))
host_ip = sock.getsockname()[0]
sock.close()
self.ip_address = host_ip.split(".")
def display(self,):
""" display 3 digits of ip address """
self.seven_segment.clear()
self.seven_segment.set_brightness(15)
self.seven_segment.print_number_str(self.ip_address[self.iterations])
self.iterations += 1
if self.iterations >= 4:
self.iterations = 0
self.seven_segment.write_display()
class CountdownDisplay:
""" display countdown in countdown mode """
def __init__(self, display):
""" prepare to show counting down """
self.seven_segment = display
self.iterations = MAXIMUM_COUNT
self.max_count = MAXIMUM_COUNT
def display(self,):
""" display 3 digits of ip address """
self.iterations -= 1
digits = '{0:d}'.format(self.iterations)
self.seven_segment.print_number_str(digits)
if self.iterations == 0:
self.iterations = self.max_count
self.seven_segment.write_display()
def set_maximum(self, new_maximum):
""" set a new count down maximum less than 1000 """
if new_maximum <= MAXIMUM_COUNT:
self.iterations = new_maximum
self.max_count = new_maximum
class LedClock:
""" LED seven segment display object """
def __init__(self, lock):
"""Create display instance on default I2C address (0x70) and bus number"""
self.bus_lock = lock
self.display = SevenSegment.SevenSegment(address=0x71)
# Initialize the display. Must be called once before using the display.
self.display.begin()
self.brightness = 15
self.display.set_brightness(self.brightness)
self.display.set_blink(0)
self.mode = TIME_MODE
self.clock = TimeDisplay(self.display)
self.who = WhoDisplay(self.display)
self.count = CountdownDisplay(self.display)
self.tu_thread = Thread(target=self.time_update_thread)
self.tu_thread.daemon = True
def time_update_thread(self,):
""" print "started timeUpdateThread """
while True:
time.sleep(1.0)
self.bus_lock.acquire(True)
if self.mode == TIME_MODE:
self.clock.display()
elif self.mode == COUNT_MODE:
self.count.display()
else:
self.who.display()
self.bus_lock.release()
def set_mode(self, mode):
""" set alarm indicator """
self.mode = mode
def set_hour_format(self, hour_format=True):
""" set 12 or 24 hour clock format """
if hour_format:
self.clock.set_format("%I%M")
else:
self.clock.set_format("%l%M")
def set_alarm(self, alarm):
""" set alarm indicator """
self.clock.alarm = alarm
def set_brightness(self, val):
""" set brightness in range from 1 to 15 """
# print("set brightness="+str(val))
self.brightness = val
self.bus_lock.acquire(True)
self.display.set_brightness(self.brightness)
self.bus_lock.release()
def increase_brightness(self,):
""" increase brightness by 1 """
self.brightness = self.brightness + 1
if self.brightness > 15:
self.brightness = 15
self.bus_lock.acquire(True)
self.display.set_brightness(self.brightness)
self.bus_lock.release()
def decrease_brightness(self,):
""" decrease brightness by 1 """
self.brightness = self.brightness - 1
if self.brightness < 0:
self.brightness = 0
self.bus_lock.acquire(True)
self.display.set_brightness(self.brightness)
self.bus_lock.release()
def run(self,):
""" start the clock thread """
self.tu_thread.start()
if __name__ == '__main__':
exit()
| [
"socket.socket",
"time.strftime",
"Adafruit_Python_LED_Backpack.Adafruit_LED_Backpack.SevenSegment.SevenSegment",
"time.sleep",
"datetime.datetime.now",
"threading.Thread"
] | [((896, 927), 'time.strftime', 'time.strftime', (['self.time_format'], {}), '(self.time_format)\n', (909, 927), False, 'import time\n'), ((1236, 1259), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1257, 1259), False, 'import datetime\n'), ((1682, 1730), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (1695, 1730), False, 'import socket\n'), ((3288, 3326), 'Adafruit_Python_LED_Backpack.Adafruit_LED_Backpack.SevenSegment.SevenSegment', 'SevenSegment.SevenSegment', ([], {'address': '(113)'}), '(address=113)\n', (3313, 3326), False, 'from Adafruit_Python_LED_Backpack.Adafruit_LED_Backpack import SevenSegment\n'), ((3751, 3789), 'threading.Thread', 'Thread', ([], {'target': 'self.time_update_thread'}), '(target=self.time_update_thread)\n', (3757, 3789), False, 'from threading import Thread\n'), ((3943, 3958), 'time.sleep', 'time.sleep', (['(1.0)'], {}), '(1.0)\n', (3953, 3958), False, 'import time\n')] |
#!/usr/bin/env python3
import os
import signal
import time
import subprocess
import sys
from amp.player import PlayerBackend
_thisPlayer = None
def _skip(signum, frame):
self = _thisPlayer
self.stopped = 0
self.tell("stop\n")
print("Skipping song...")
def _stop(signum, frame):
self = _thisPlayer
self.stopped = 1
self.tell("stop\n")
self.db.DeletePlayer(self.player_id)
def _pause(signum, frame):
self = _thisPlayer
self.tell("pause\n")
def _volume(signum, frame):
self = _thisPlayer
self.updatePlayer()
self.tell("volume %d 1\n" % self.player["volume"])
self.tell("get_volume\n")
class PlayerImpl(PlayerBackend):
def start(self):
"""
To start a player instance for MPlayer, we fork to the background,
set up the local id of the player to be our PID, and then start
playing music through mplayer. We also maintain a pipe to our
mplayer instance so we can safely quit, adjust volume, etc.
"""
if self.player:
print("There seems to already be a player (with pid %d). Please try 'stop' before running 'start', or use 'zap' if that doesn't help things." %
int(self.player["local_id"]))
return
pid = os.fork()
if pid == 0:
os.setsid()
self.start_player()
sys.exit(0)
print("Started backgrounded player instance with PID#%d" % pid)
return
def zap(self, player):
print("Zap! Deleting %s" % player)
self.db.DeletePlayer(player)
def start_player(self):
self.pid = os.getpid()
print("> I am the backgrounded process. My PID is %d" % self.pid)
print("> My player_id is %s" % self.player_id)
self.db.CreatePlayer(self.player_id, self.pid, 80)
self.updatePlayer()
while True:
self.loop()
def loop(self):
global _thisPlayer
_thisPlayer = self
self.stopped = 0
# XXX: Need to actually get the next song CORRECTLY!
song = self.db.NextSong(self.player_id)
start_time = time.time()
self.db.UpdatePlayer(self.player_id, {
"song_id": song["song_id"],
"song_start": start_time
})
self.song_id = song["song_id"]
print("Playing %s from PID... %d" % (song['path'], os.getpid()))
self.mplayer = subprocess.Popen(
['mplayer', '-vc', 'null', '-vo', 'null', '-novideo', '-slave', '-really-quiet', '-input',
'nodefault-bindings', '-noconfig', 'all', '-volume', str(self.player['volume']), song['path']], stdin=subprocess.PIPE, stdout=None)
signal.signal(signal.SIGHUP, _skip)
signal.signal(signal.SIGINT, _stop)
signal.signal(signal.SIGUSR1, _volume)
signal.signal(signal.SIGUSR2, _pause)
print("Waiting for player to finish...")
self.mplayer.wait()
print("Player has finished.")
if self.stopped:
self.db.DeletePlayer(self.player_id)
sys.exit(0)
if 'who' in song:
for i in song['who']:
self.db.SetPlayed(self.song_id, self.player_id, i, start_time)
self.db.DeleteVotes(self.song_id, self.player_id)
def tell(self, string):
self.mplayer.stdin.write(string.encode("utf-8"))
self.mplayer.stdin.flush()
def skip(self):
self.send_signal(signal.SIGHUP)
def stop(self):
self.send_signal(signal.SIGINT)
def pause(self):
self.send_signal(signal.SIGUSR2)
def volume(self, val):
print("Setting volume to %d" % int(val))
if int(val) < 0:
val = 0
elif int(val) > 100:
val = 100
self.db.UpdatePlayer(self.player_id, {'volume': int(val)})
self.send_signal(signal.SIGUSR1)
def send_signal(self, sig):
if not self.player:
return
pid = int(self.player["local_id"])
if not pid:
return
print("Sending signal %d to %d" % (sig, pid))
os.kill(pid, sig)
| [
"signal.signal",
"os.kill",
"os.getpid",
"sys.exit",
"os.fork",
"time.time",
"os.setsid"
] | [((1307, 1316), 'os.fork', 'os.fork', ([], {}), '()\n', (1314, 1316), False, 'import os\n'), ((1661, 1672), 'os.getpid', 'os.getpid', ([], {}), '()\n', (1670, 1672), False, 'import os\n'), ((2164, 2175), 'time.time', 'time.time', ([], {}), '()\n', (2173, 2175), False, 'import time\n'), ((2746, 2781), 'signal.signal', 'signal.signal', (['signal.SIGHUP', '_skip'], {}), '(signal.SIGHUP, _skip)\n', (2759, 2781), False, 'import signal\n'), ((2790, 2825), 'signal.signal', 'signal.signal', (['signal.SIGINT', '_stop'], {}), '(signal.SIGINT, _stop)\n', (2803, 2825), False, 'import signal\n'), ((2834, 2872), 'signal.signal', 'signal.signal', (['signal.SIGUSR1', '_volume'], {}), '(signal.SIGUSR1, _volume)\n', (2847, 2872), False, 'import signal\n'), ((2881, 2918), 'signal.signal', 'signal.signal', (['signal.SIGUSR2', '_pause'], {}), '(signal.SIGUSR2, _pause)\n', (2894, 2918), False, 'import signal\n'), ((4140, 4157), 'os.kill', 'os.kill', (['pid', 'sig'], {}), '(pid, sig)\n', (4147, 4157), False, 'import os\n'), ((1350, 1361), 'os.setsid', 'os.setsid', ([], {}), '()\n', (1359, 1361), False, 'import os\n'), ((1406, 1417), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1414, 1417), False, 'import sys\n'), ((3120, 3131), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3128, 3131), False, 'import sys\n'), ((2409, 2420), 'os.getpid', 'os.getpid', ([], {}), '()\n', (2418, 2420), False, 'import os\n')] |
import threading
import queue
from loguru import logger
from typing import Callable, Any, Iterator, Iterable
class SimpleThreadsRunner:
"""
A simple ThreadsRunner. This runs multiple threads to do the I/O;
Performance is at least as good as Queue producer/consumer, which works in an analogous fashion.
Empty the queue after use.
"""
SENTINEL = object()
def __init__(self):
self._queue = queue.Queue()
self._lock = threading.RLock()
self._threads = []
def prepare_threads(self, num_workers: int, fn: Callable[..., Any]) -> None:
"""
Threads are created only function is called, and terminate before it returns.
They are there primarily to parallelize I/O
(i.e.fetching web pages, download picture, scroll elasticsearch).
"""
for i in range(num_workers):
t = threading.Thread(target=self.fetch, args=(fn,), name=f"child_thread_{i}")
t.setDaemon(True)
t.start()
self._threads.append(t)
def wait_threads(self):
"""
Tell all the threads to terminate (by sending a sentinel value) and
wait for them to do so.
"""
# Note that you need two loops, since you can't say which
# thread will get each sentinel
for _ in self._threads:
self._queue.put(self.SENTINEL) # sentinel
for t in self._threads:
t.join()
self._threads = []
def fetch(self, fn: Callable[..., Any]) -> None:
"""
Get a Data to fetch from the work _queue.
This is a handy method to run in a thread.
"""
while True:
try:
_data: Iterable = self._queue.get_nowait()
i = self._queue.qsize()
except Exception as e:
logger.error(e)
break
logger.info('Current Thread Name Running %s ...' % threading.currentThread().name)
try:
if _data is self.SENTINEL:
return
fn(_data)
except Exception as e:
raise f'function: {fn.__name__} execution: {e}'
self._queue.task_done()
logger.info(f"Tasks left:{i}")
def q_producer(self, _data):
self._queue.put(_data)
def get_qsize(self) -> int:
"""Get current size of queue, be aware this value is changed frequently
as multiple threads may produce/consume data to the queue"""
return self._queue.qsize()
def q_consumer(self, num_workers: int, fn: Callable[..., Any]):
"""
Function can be used separately with q_producer
"""
with self._lock:
try:
self.prepare_threads(num_workers, fn)
finally:
self.wait_threads()
def run_threads(self, num_workers: int, fn: Callable[..., Any], iter_data: Iterator[Any], batch_size: int = None):
"""Add batch_size params in case iter_data is huge number"""
for _ in iter_data:
self.q_producer(_)
if batch_size:
_qsize = self.get_qsize()
if _qsize >= batch_size:
self.q_consumer(num_workers, fn)
_qsize = self.get_qsize()
if _qsize != 0:
self.q_consumer(num_workers, fn)
| [
"threading.currentThread",
"loguru.logger.info",
"threading.RLock",
"loguru.logger.error",
"threading.Thread",
"queue.Queue"
] | [((427, 440), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (438, 440), False, 'import queue\n'), ((462, 479), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (477, 479), False, 'import threading\n'), ((878, 951), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.fetch', 'args': '(fn,)', 'name': 'f"""child_thread_{i}"""'}), "(target=self.fetch, args=(fn,), name=f'child_thread_{i}')\n", (894, 951), False, 'import threading\n'), ((2233, 2263), 'loguru.logger.info', 'logger.info', (['f"""Tasks left:{i}"""'], {}), "(f'Tasks left:{i}')\n", (2244, 2263), False, 'from loguru import logger\n'), ((1840, 1855), 'loguru.logger.error', 'logger.error', (['e'], {}), '(e)\n', (1852, 1855), False, 'from loguru import logger\n'), ((1941, 1966), 'threading.currentThread', 'threading.currentThread', ([], {}), '()\n', (1964, 1966), False, 'import threading\n')] |
from PIL import Image
import numpy as np
import os
def main():
img = Image(os.path.join('..', 'img', 'paras_prf_pic.jpeg'))
aray = np.array(img)
r, g, b = np.split(aray, 3, axis = 2)
r = r.reshape(-1)
g = g.reshape(-1)
b = b.reshape(-1)
bitmap = list(map(lambda x: 0.299*x[0]+0.587*x[1]+0.114*x[2], zip(r,g,b)))
bitmap = np.array(bitmap).reshape([aray.shape[0], aray.shape[1]])
bitmap = np.dot((bitmap > 128).astype(float),255)
im = Image.fromarray(bitmap.astype(np.uint8))
if __init__ == "__main__":
main()
| [
"numpy.array",
"numpy.split",
"os.path.join"
] | [((140, 153), 'numpy.array', 'np.array', (['img'], {}), '(img)\n', (148, 153), True, 'import numpy as np\n'), ((168, 193), 'numpy.split', 'np.split', (['aray', '(3)'], {'axis': '(2)'}), '(aray, 3, axis=2)\n', (176, 193), True, 'import numpy as np\n'), ((80, 127), 'os.path.join', 'os.path.join', (['""".."""', '"""img"""', '"""paras_prf_pic.jpeg"""'], {}), "('..', 'img', 'paras_prf_pic.jpeg')\n", (92, 127), False, 'import os\n'), ((354, 370), 'numpy.array', 'np.array', (['bitmap'], {}), '(bitmap)\n', (362, 370), True, 'import numpy as np\n')] |
import json
from webargs import fields as webargs_fields
from webargs.flaskparser import use_kwargs
from dataactbroker import exception_handler
def test_exception_handler(test_app):
exception_handler.add_exception_handlers(test_app.application)
@test_app.application.route("/endpoint/")
@use_kwargs({'param1': webargs_fields.Int(),
'param2': webargs_fields.String(required=True),
'param3': webargs_fields.Int(required=True)
})
def handle(param1, param2):
pass
result = test_app.get('/endpoint/?param1=not-a-string¶m3=3')
assert result.status_code == 400
result = json.loads(result.data.decode('UTF-8'))
assert result['message'] == ('param1: Not a valid integer. '
'param2: Missing data for required field.')
| [
"webargs.fields.String",
"webargs.fields.Int",
"dataactbroker.exception_handler.add_exception_handlers"
] | [((190, 252), 'dataactbroker.exception_handler.add_exception_handlers', 'exception_handler.add_exception_handlers', (['test_app.application'], {}), '(test_app.application)\n', (230, 252), False, 'from dataactbroker import exception_handler\n'), ((327, 347), 'webargs.fields.Int', 'webargs_fields.Int', ([], {}), '()\n', (345, 347), True, 'from webargs import fields as webargs_fields\n'), ((376, 412), 'webargs.fields.String', 'webargs_fields.String', ([], {'required': '(True)'}), '(required=True)\n', (397, 412), True, 'from webargs import fields as webargs_fields\n'), ((441, 474), 'webargs.fields.Int', 'webargs_fields.Int', ([], {'required': '(True)'}), '(required=True)\n', (459, 474), True, 'from webargs import fields as webargs_fields\n')] |
"""
Author: [<NAME>](https://github.com/russelljjarvis)
"""
import shelve
import streamlit as st
import os
import pandas as pd
import pickle
import streamlit as st
from holoviews import opts, dim
from collections import Iterable
import networkx
#import bokeh_chart
from auxillary_methods import author_to_coauthor_network, network # ,try_again
import holoviews as hv
from auxillary_methods import (
push_frame_to_screen#,
#plotly_sized,
) # , data_shade, draw_wstate_tree
import chord2
import shelve
def streamlit_maps():
data = pd.DataFrame(
{
"awesome cities": ["Chicago", "Minneapolis", "Louisville", "Topeka"],
"latitude": [41.868171, 44.979840, 38.257972, 39.030575],
"longitude": [-87.667458, -93.272474, -85.765187, -95.702548],
}
)
st.map(data)
def user_manual_fix_missing(list_of_dicts):
st.sidebar.title("Add new or replace author location in dataframe")
name = st.sidebar.text_input("Enter Author Name")
address = st.sidebar.text_input("Insitution Address")
longitude = st.sidebar.text_input("longitude")
latitude = st.sidebar.text_input("latitude")
if st.button("Add row"):
list_of_dicts.append(
{
"name": name,
"address": address,
"longitude": longitude,
"latitude": latitude,
}
)
st.write(pd.DataFrame(get_data()))
st.sidebar.markdown(get_table_download_link_csv(df), unsafe_allow_html=True)
#def disable_logo(plot, element):
# plot.state.toolbar.logo = None
hv.extension("bokeh", logo=False)
hv.output(size=300)
#hv.plotting.bokeh.ElementPlot.finalize_hooks.append(disable_logo)
# "<NAME>",
import geopandas
import plotly.graph_objects as go
import pandas as pd
import geopandas
import streamlit as st
import numpy as np
import pickle
from netgeovis2 import (
main_plot_routine,
identify_find_missing,
remove_missing_persons_from_big_net,
)
with open("both_sets_locations.p", "rb") as f:
both_sets_locations = pickle.load(f)
sirg_author_list = [
"<NAME>",
"<NAME>",
"<NAME>",
"<NAME>",
"<NAME>",
"<NAME>",
"<NAME>",
"<NAME>",
]
def get_table_download_link_csv(df):
csv = df.to_csv().encode()
b64 = base64.b64encode(csv).decode()
href = f'<a href="data:file/csv;base64,{b64}" download="authors.csv" target="_blank">Download csv file</a>'
return href
def big_plot_job():
if os.path.exists("missing_person.p"):
with open("missing_person.p", "rb") as f:
temp = pickle.load(f)
[
mg,
both_sets_locations,
missing_person_name,
missing_person_location,
both_sets_locations_missing,
sirg_author_list,
] = temp
else:
(
mg,
both_sets_locations,
missing_person_name,
missing_person_location,
both_sets_locations_missing,
sirg_author_list,
) = identify_find_missing()
# except:
# both_sets_locations,missing_person_name,missing_person_location,both_sets_locations_missing = identify_find_missing()
temp = [
mg,
both_sets_locations,
missing_person_name,
missing_person_location,
both_sets_locations_missing,
sirg_author_list,
]
with open("missing_person.p", "wb") as f:
pickle.dump(temp, f)
# both_sets_locations
# both_sets_locations.keys()
node_positions = list(both_sets_locations.values())
long_lat = [np[1] for np in node_positions if np[1] is not None]
lat = [coord[0] for coord in long_lat]
long = [coord[1] for coord in long_lat]
node_location_name = [np[0] for np in node_positions if np[1] is not None]
node_person = list([k for k, v in both_sets_locations.items() if v[0] is not None])
# if False:
if os.path.exists("big_g_locations.p"):
try:
with open("big_g_locations.p", "rb") as f:
g_locations = pickle.load(f)
both_sets_locations.update(g_locations)
missing_person_name = list(
set(missing_person_name) - set(g_locations.keys())
)
except:
pass
plt_unbundled, plt_bundled, ax3,second = main_plot_routine(
both_sets_locations, missing_person_name, node_location_name
)
# main_plot_routine(both_sets_locations, missing_person_name, node_location_name)
from PIL import Image
def main():
st.markdown("""--------------""")
st.title(
"""Geo Geographic Maps for whole SIRG network are time intensive to compute."""
)
image = Image.open("bundled_graph_static.png")
st.markdown(
"""Big image try scrolling down..."""
)
st.image(
image,
caption="a cached: Bundled Geographic Network map of greater SIRG network")
# use_column_width=False,
# width=None
#)
st.markdown(
"""Recomputing graphs and making an interactive version in case data was revised. In the meantime we will populate the screen while you wait with other stuff while we re-build them..."""
)
#identify_find_missing()
with open("mega_net.p", "rb") as f:
mg = pickle.load(f)
figure_size = 100
hv.output(size=figure_size)
graph = hv.Graph.from_networkx(mg, networkx.layout.fruchterman_reingold_layout)
graph.opts(
color_index="circle",
width=450,
height=450,
show_frame=False,
xaxis=None,
yaxis=None,
tools=["hover", "tap"],
node_size=10,
cmap=["blue", "orange"],
)
label = "Coauthorship Network for whole SIRG network: "
st.markdown(
"<h3 style='text-align: left; color: black;'>" + label + "</h3>",
unsafe_allow_html=True,
)
st.write(hv.render(graph, backend="bokeh"))
# st.markdown("""Geo Geographic Maps computing now, this will take time""")
#st.markdown(
# "<h1 style='text-align: left; color: black;'>"
# + str(
# "Geographic Maps for whole sirg network computing now, this will take time"
# )
# + "</h1>",
# unsafe_allow_html=True,
#)
st.markdown("""geo plots computing...""")
if os.path.exists("missing_person.p"):
with open("missing_person.p", "rb") as f:
temp = pickle.load(f)
[
mg,
both_sets_locations,
missing_person_name,
missing_person_location,
both_sets_locations_missing,
sirg_author_list,
] = temp
# list_of_dicts = [ list(v) for k,v in both_sets_locations_missing.items()]
# df = pd.DataFrame(list_of_dicts)
# st.dataframe(df)
big_plot_job()
# st.markdown(
# """[My other science information dashboard app](https://agile-reaches-20338.herokuapp.com/)"""
# )
# """
# [Source Code:](https://github.com/russelljjarvis/CoauthorNetVis)
# """
if __name__ == "__main__":
main()
| [
"streamlit.sidebar.title",
"os.path.exists",
"streamlit.markdown",
"streamlit.sidebar.text_input",
"PIL.Image.open",
"holoviews.extension",
"netgeovis2.main_plot_routine",
"streamlit.image",
"holoviews.Graph.from_networkx",
"pickle.dump",
"streamlit.button",
"holoviews.output",
"pickle.load"... | [((1597, 1630), 'holoviews.extension', 'hv.extension', (['"""bokeh"""'], {'logo': '(False)'}), "('bokeh', logo=False)\n", (1609, 1630), True, 'import holoviews as hv\n'), ((1631, 1650), 'holoviews.output', 'hv.output', ([], {'size': '(300)'}), '(size=300)\n', (1640, 1650), True, 'import holoviews as hv\n'), ((548, 760), 'pandas.DataFrame', 'pd.DataFrame', (["{'awesome cities': ['Chicago', 'Minneapolis', 'Louisville', 'Topeka'],\n 'latitude': [41.868171, 44.97984, 38.257972, 39.030575], 'longitude': [\n -87.667458, -93.272474, -85.765187, -95.702548]}"], {}), "({'awesome cities': ['Chicago', 'Minneapolis', 'Louisville',\n 'Topeka'], 'latitude': [41.868171, 44.97984, 38.257972, 39.030575],\n 'longitude': [-87.667458, -93.272474, -85.765187, -95.702548]})\n", (560, 760), True, 'import pandas as pd\n'), ((819, 831), 'streamlit.map', 'st.map', (['data'], {}), '(data)\n', (825, 831), True, 'import streamlit as st\n'), ((882, 949), 'streamlit.sidebar.title', 'st.sidebar.title', (['"""Add new or replace author location in dataframe"""'], {}), "('Add new or replace author location in dataframe')\n", (898, 949), True, 'import streamlit as st\n'), ((961, 1003), 'streamlit.sidebar.text_input', 'st.sidebar.text_input', (['"""Enter Author Name"""'], {}), "('Enter Author Name')\n", (982, 1003), True, 'import streamlit as st\n'), ((1018, 1061), 'streamlit.sidebar.text_input', 'st.sidebar.text_input', (['"""Insitution Address"""'], {}), "('Insitution Address')\n", (1039, 1061), True, 'import streamlit as st\n'), ((1078, 1112), 'streamlit.sidebar.text_input', 'st.sidebar.text_input', (['"""longitude"""'], {}), "('longitude')\n", (1099, 1112), True, 'import streamlit as st\n'), ((1128, 1161), 'streamlit.sidebar.text_input', 'st.sidebar.text_input', (['"""latitude"""'], {}), "('latitude')\n", (1149, 1161), True, 'import streamlit as st\n'), ((1169, 1189), 'streamlit.button', 'st.button', (['"""Add row"""'], {}), "('Add row')\n", (1178, 1189), True, 'import streamlit 
as st\n'), ((2068, 2082), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2079, 2082), False, 'import pickle\n'), ((2487, 2521), 'os.path.exists', 'os.path.exists', (['"""missing_person.p"""'], {}), "('missing_person.p')\n", (2501, 2521), False, 'import os\n'), ((3981, 4016), 'os.path.exists', 'os.path.exists', (['"""big_g_locations.p"""'], {}), "('big_g_locations.p')\n", (3995, 4016), False, 'import os\n'), ((4383, 4462), 'netgeovis2.main_plot_routine', 'main_plot_routine', (['both_sets_locations', 'missing_person_name', 'node_location_name'], {}), '(both_sets_locations, missing_person_name, node_location_name)\n', (4400, 4462), False, 'from netgeovis2 import main_plot_routine, identify_find_missing, remove_missing_persons_from_big_net\n'), ((4606, 4635), 'streamlit.markdown', 'st.markdown', (['"""--------------"""'], {}), "('--------------')\n", (4617, 4635), True, 'import streamlit as st\n'), ((4644, 4739), 'streamlit.title', 'st.title', (['"""Geo Geographic Maps for whole SIRG network are time intensive to compute."""'], {}), "(\n 'Geo Geographic Maps for whole SIRG network are time intensive to compute.'\n )\n", (4652, 4739), True, 'import streamlit as st\n'), ((4760, 4798), 'PIL.Image.open', 'Image.open', (['"""bundled_graph_static.png"""'], {}), "('bundled_graph_static.png')\n", (4770, 4798), False, 'from PIL import Image\n'), ((4803, 4849), 'streamlit.markdown', 'st.markdown', (['"""Big image try scrolling down..."""'], {}), "('Big image try scrolling down...')\n", (4814, 4849), True, 'import streamlit as st\n'), ((4877, 4973), 'streamlit.image', 'st.image', (['image'], {'caption': '"""a cached: Bundled Geographic Network map of greater SIRG network"""'}), "(image, caption=\n 'a cached: Bundled Geographic Network map of greater SIRG network')\n", (4885, 4973), True, 'import streamlit as st\n'), ((5051, 5256), 'streamlit.markdown', 'st.markdown', (['"""Recomputing graphs and making an interactive version in case data was revised. 
In the meantime we will populate the screen while you wait with other stuff while we re-build them..."""'], {}), "(\n 'Recomputing graphs and making an interactive version in case data was revised. In the meantime we will populate the screen while you wait with other stuff while we re-build them...'\n )\n", (5062, 5256), True, 'import streamlit as st\n'), ((5390, 5417), 'holoviews.output', 'hv.output', ([], {'size': 'figure_size'}), '(size=figure_size)\n', (5399, 5417), True, 'import holoviews as hv\n'), ((5430, 5501), 'holoviews.Graph.from_networkx', 'hv.Graph.from_networkx', (['mg', 'networkx.layout.fruchterman_reingold_layout'], {}), '(mg, networkx.layout.fruchterman_reingold_layout)\n', (5452, 5501), True, 'import holoviews as hv\n'), ((5810, 5915), 'streamlit.markdown', 'st.markdown', (['("<h3 style=\'text-align: left; color: black;\'>" + label + \'</h3>\')'], {'unsafe_allow_html': '(True)'}), '("<h3 style=\'text-align: left; color: black;\'>" + label +\n \'</h3>\', unsafe_allow_html=True)\n', (5821, 5915), True, 'import streamlit as st\n'), ((6318, 6355), 'streamlit.markdown', 'st.markdown', (['"""geo plots computing..."""'], {}), "('geo plots computing...')\n", (6329, 6355), True, 'import streamlit as st\n'), ((6369, 6403), 'os.path.exists', 'os.path.exists', (['"""missing_person.p"""'], {}), "('missing_person.p')\n", (6383, 6403), False, 'import os\n'), ((3047, 3070), 'netgeovis2.identify_find_missing', 'identify_find_missing', ([], {}), '()\n', (3068, 3070), False, 'from netgeovis2 import main_plot_routine, identify_find_missing, remove_missing_persons_from_big_net\n'), ((5349, 5363), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (5360, 5363), False, 'import pickle\n'), ((5948, 5981), 'holoviews.render', 'hv.render', (['graph'], {'backend': '"""bokeh"""'}), "(graph, backend='bokeh')\n", (5957, 5981), True, 'import holoviews as hv\n'), ((2592, 2606), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2603, 2606), False, 'import pickle\n'), ((3496, 
3516), 'pickle.dump', 'pickle.dump', (['temp', 'f'], {}), '(temp, f)\n', (3507, 3516), False, 'import pickle\n'), ((6474, 6488), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (6485, 6488), False, 'import pickle\n'), ((4116, 4130), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (4127, 4130), False, 'import pickle\n')] |
"""App signals.
"""
import logging
from django.db.models.signals import post_save
from django.dispatch import receiver
from ..azure_projects.models import Project
from .models import TrainingStatus
logger = logging.getLogger(__name__)
@receiver(
signal=post_save,
sender=Project,
dispatch_uid="training_status_project_created_listener",
)
def training_status_project_created_listener(**kwargs):
"""Project create change."""
instance = kwargs["instance"]
created = kwargs["created"]
if not created:
logger.info("Project not created. Pass...")
return
logger.info("Azure Project created. Create TrainingStatus object.")
TrainingStatus.objects.update_or_create(
project_id=instance.id,
defaults={
"status": "ok",
"log": "Status : Has not configured",
"performance": "{}",
},
)
| [
"logging.getLogger",
"django.dispatch.receiver"
] | [((211, 238), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (228, 238), False, 'import logging\n'), ((242, 346), 'django.dispatch.receiver', 'receiver', ([], {'signal': 'post_save', 'sender': 'Project', 'dispatch_uid': '"""training_status_project_created_listener"""'}), "(signal=post_save, sender=Project, dispatch_uid=\n 'training_status_project_created_listener')\n", (250, 346), False, 'from django.dispatch import receiver\n')] |
import pytest
from democrasite.users.models import User
from democrasite.users.tests.factories import UserFactory
@pytest.fixture(autouse=True)
def media_storage(settings, tmpdir):
settings.MEDIA_ROOT = tmpdir.strpath
@pytest.fixture(autouse=True)
def enable_db_access_for_all_tests(db): # pylint: disable=unused-argument
pass
@pytest.fixture
def user() -> User:
return UserFactory()
| [
"pytest.fixture",
"democrasite.users.tests.factories.UserFactory"
] | [((118, 146), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (132, 146), False, 'import pytest\n'), ((228, 256), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (242, 256), False, 'import pytest\n'), ((390, 403), 'democrasite.users.tests.factories.UserFactory', 'UserFactory', ([], {}), '()\n', (401, 403), False, 'from democrasite.users.tests.factories import UserFactory\n')] |
import sublime
import sublime_plugin
from .collections import DottedDict
from .css import load as load_css
from .css import unload as unload_css
from .handlers import LanguageHandler
from .logging import set_debug_logging, set_exception_logging
from .panels import destroy_output_panels
from .protocol import Response
from .protocol import WorkspaceFolder
from .registry import windows
from .rpc import method2attr
from .sessions import AbstractPlugin
from .sessions import register_plugin
from .sessions import Session
from .settings import client_configs
from .settings import load_settings
from .settings import unload_settings
from .settings import userprefs
from .transports import kill_all_subprocesses
from .types import ClientConfig
from .typing import Optional, List, Type, Callable, Dict, Tuple
import weakref
def _get_final_subclasses(derived: List[Type], results: List[Type]) -> None:
"""
This function should be removed: https://github.com/sublimelsp/LSP/issues/899
"""
for d in derived:
d_subclasses = d.__subclasses__()
if len(d_subclasses) > 0:
_get_final_subclasses(d_subclasses, results)
else:
results.append(d)
def _forcefully_register_plugins() -> None:
    """Register every concrete AbstractPlugin subclass and wrap every legacy
    LanguageHandler subclass in an ephemeral AbstractPlugin so both plugin
    styles participate in the session machinery.

    This function should be removed: https://github.com/sublimelsp/LSP/issues/899
    """
    plugin_classes = []  # type: List[Type[AbstractPlugin]]
    _get_final_subclasses(AbstractPlugin.__subclasses__(), plugin_classes)
    for plugin_class in plugin_classes:
        register_plugin(plugin_class)
    language_handler_classes = []  # type: List[Type[LanguageHandler]]
    _get_final_subclasses(LanguageHandler.__subclasses__(), language_handler_classes)
    for language_handler_class in language_handler_classes:
        # Create an ephemeral plugin that stores an instance of the LanguageHandler as a class instance. Custom requests
        # and notifications will work.

        class LanguageHandlerTransition(AbstractPlugin):

            # The class body executes once per loop iteration, so each ephemeral
            # class captures its own handler instance.
            handler = language_handler_class()

            @classmethod
            def name(cls) -> str:
                return cls.handler.name  # type: ignore

            @classmethod
            def configuration(cls) -> Tuple[sublime.Settings, str]:
                """Synthesize a settings object and resource path from the legacy config."""
                file_base_name = cls.name()
                if file_base_name.startswith("lsp-"):
                    file_base_name = "LSP-" + file_base_name[len("lsp-"):]
                settings = sublime.load_settings("{}.sublime-settings".format(file_base_name))
                cfg = cls.handler.config  # type: ignore
                settings.set("command", cfg.command)
                settings.set("settings", cfg.settings.get(None))
                # BUGFIX: init_options used to be referenced without ever being
                # assigned (NameError) when cfg.init_options was neither a
                # DottedDict nor a plain dict; default it to None instead.
                init_options = None
                if isinstance(cfg.init_options, DottedDict):
                    init_options = cfg.init_options.get()
                elif isinstance(cfg.init_options, dict):
                    init_options = cfg.init_options
                settings.set("initializationOptions", init_options)
                langs = []  # type: List[Dict[str, str]]
                for language in cfg.languages:
                    langs.append({
                        "languageId": language.id,
                        "document_selector": language.document_selector,
                        "feature_selector": language.feature_selector
                    })
                settings.set("languages", langs)
                return settings, "Packages/{0}/{0}.sublime-settings".format(file_base_name)

            @classmethod
            def can_start(cls, window: sublime.Window, initiating_view: sublime.View,
                          workspace_folders: List[WorkspaceFolder], configuration: ClientConfig) -> Optional[str]:
                # Honor the legacy optional on_start hook; a falsy return vetoes startup.
                if hasattr(cls.handler, 'on_start'):
                    if not cls.handler.on_start(window):  # type: ignore
                        return "{} cannot start".format(cls.name())
                return None

            def __init__(self, weaksession: 'weakref.ref[Session]') -> None:
                super().__init__(weaksession)
                if hasattr(self.handler, 'on_initialized'):
                    self.handler.on_initialized(self)  # type: ignore

            def on_notification(self, method: str, handler: Callable) -> None:
                # Route custom notifications through the rpc method-name mangling.
                setattr(self, method2attr(method), handler)

            def on_request(self, method: str, handler: Callable) -> None:
                # Route custom requests through the rpc method-name mangling.
                setattr(self, method2attr(method), handler)

            def send_response(self, response: Response) -> None:
                session = self.weaksession()
                if session is not None:  # the session may have been garbage-collected
                    session.send_response(response)

        register_plugin(LanguageHandlerTransition)
def plugin_loaded() -> None:
    """Sublime Text entry point: initialize settings, CSS, logging and configs."""
    load_settings()
    load_css()
    set_debug_logging(userprefs().log_debug)
    set_exception_logging(True)
    _forcefully_register_plugins()  # Remove this function: https://github.com/sublimelsp/LSP/issues/899
    client_configs.update_configs()
def plugin_unloaded() -> None:
    """Sublime Text exit point: tear down settings, CSS and output panels."""
    # Also needs to handle package being disabled or removed
    # https://github.com/sublimelsp/LSP/issues/375
    unload_css()
    unload_settings()
    # TODO: Move to __del__ methods
    for window in sublime.windows():
        destroy_output_panels(window)  # references and diagnostics panels
class Listener(sublime_plugin.EventListener):
    """Global event listener that keeps the WindowRegistry in sync with Sublime windows."""
    def _register_windows(self) -> None:
        # Ensure every currently open window has a WindowManager.
        for w in sublime.windows():
            windows.lookup(w)
    def __del__(self) -> None:
        # Drop all window managers when the listener itself is collected.
        for w in sublime.windows():
            windows.discard(w)
    def on_init(self, views: List[sublime.View]) -> None:
        # Register the windows of all views that existed before the plugin loaded.
        for view in views:
            window = view.window()
            if window:
                windows.lookup(window)
    def on_exit(self) -> None:
        # Sublime is shutting down; terminate every language server process.
        kill_all_subprocesses()
    def on_load_project_async(self, w: sublime.Window) -> None:
        windows.lookup(w).on_load_project_async()
    def on_new_window_async(self, w: sublime.Window) -> None:
        # Defer registration to the main loop iteration after window creation.
        sublime.set_timeout(lambda: windows.lookup(w))
    def on_pre_close_window(self, w: sublime.Window) -> None:
        windows.discard(w)
| [
"sublime.windows"
] | [((5225, 5242), 'sublime.windows', 'sublime.windows', ([], {}), '()\n', (5240, 5242), False, 'import sublime\n'), ((5425, 5442), 'sublime.windows', 'sublime.windows', ([], {}), '()\n', (5440, 5442), False, 'import sublime\n'), ((5523, 5540), 'sublime.windows', 'sublime.windows', ([], {}), '()\n', (5538, 5540), False, 'import sublime\n')] |
from pathlib import Path
# Minimal Django settings used by the test suite for allauth_ui.
DEBUG = True
USE_TZ = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "very-secret"
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": "db.sqlite3"}}
ROOT_URLCONF = "tests.urls"
DJANGO_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.messages",
    "django.contrib.sessions",
    "django.contrib.sites",
    "django.contrib.staticfiles",
]
THIRD_PARTY_APPS = [
    "django_extensions",
    "allauth_ui",
    "allauth",
    "allauth.account",
    "allauth.socialaccount",
    "allauth.socialaccount.providers.github",
    "allauth.socialaccount.providers.facebook",
    "allauth.socialaccount.providers.linkedin",
    "allauth.socialaccount.providers.digitalocean",
    "widget_tweaks",
    "django_browser_reload",
    "debug_toolbar",
]
LOCAL_APPS = ["tests"]
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
    "django_browser_reload.middleware.BrowserReloadMiddleware",
    "debug_toolbar.middleware.DebugToolbarMiddleware",
]
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ]
        },
    }
]
AUTHENTICATION_BACKENDS = [
    "django.contrib.auth.backends.ModelBackend",
    "allauth.account.auth_backends.AuthenticationBackend",
]
# Print outgoing mail to the console instead of sending it.
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
INTERNAL_IPS = ["127.0.0.1"]
ALLOWED_HOSTS = ["*"]
SITE_ID = 1
STATIC_URL = "/static/"
MEDIA_URL = "/media/"
MEDIA_ROOT = Path(__file__).parent / "media"
# django-allauth: email-only login with mandatory verification.
# (A duplicate ACCOUNT_AUTHENTICATION_METHOD assignment was removed.)
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = "email"
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "mandatory"
ACCOUNT_LOGIN_ATTEMPTS_LIMIT = 1000
| [
"pathlib.Path"
] | [((2327, 2341), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (2331, 2341), False, 'from pathlib import Path\n')] |
from numba import cuda
import numba
from numba import float32
@numba.cuda.jit("void(float32[:,:], float32[:,:], float32[:,:])")
def naive_matrix_mult(A, B, C):
    """CUDA kernel: C = A @ B, one thread per output element, all reads from
    global memory. Assumes square n x n matrices (n taken from A.shape[0]) --
    TODO confirm callers never pass non-square inputs."""
    n = A.shape[0]
    x, y = cuda.grid(2)
    # Threads launched outside the n x n output grid do nothing.
    if x >= n or y >= n:
        return
    # Accumulate the dot product of row y of A with column x of B directly
    # into the output cell.
    C[y, x] = 0
    for i in range(n):
        C[y, x] += A[y, i] * B[i, x]
@numba.cuda.jit("void(float32[:,:], float32[:,:], float32[:,:])")
def optimised_matrix_mult(A, B, C):
    """CUDA kernel: C = A @ B using 32x32 shared-memory tiles.
    NOTE(review): blocks_per_grid uses integer division, so this presumably
    assumes n is a multiple of 32 -- a remainder tile would be skipped; confirm
    against the launch configuration."""
    n = A.shape[0]
    threads_per_block = 32
    shared_mem_size = (threads_per_block, threads_per_block)
    blocks_per_grid = int(n / threads_per_block)
    # Declare shared memory
    sA = cuda.shared.array(shape=shared_mem_size, dtype=float32)
    sB = cuda.shared.array(shape=shared_mem_size, dtype=float32)
    tx = cuda.threadIdx.x
    ty = cuda.threadIdx.y
    x, y = cuda.grid(2)
    acc = 0
    for i in range(blocks_per_grid):
        if x < n and y < n:
            # Prefill cache
            sA[ty, tx] = A[y, tx + i * threads_per_block]
            sB[ty, tx] = B[ty + i * threads_per_block, x]
        # Synchronize all threads in the block
        cuda.syncthreads()
        if x < n and y < n:
            # Compute product
            for j in range(threads_per_block):
                acc += sA[ty, j] * sB[j, tx]
        # Wait until all threads finish the computation
        cuda.syncthreads()
    if x < n and y < n:
        C[y, x] = acc
if __name__ == '__main__':
    # Running this module directly delegates to pytest's test discovery.
    import pytest
    pytest.main()
| [
"numba.cuda.grid",
"numba.cuda.jit",
"pytest.main",
"numba.cuda.shared.array",
"numba.cuda.syncthreads"
] | [((65, 129), 'numba.cuda.jit', 'numba.cuda.jit', (['"""void(float32[:,:], float32[:,:], float32[:,:])"""'], {}), "('void(float32[:,:], float32[:,:], float32[:,:])')\n", (79, 129), False, 'import numba\n'), ((327, 391), 'numba.cuda.jit', 'numba.cuda.jit', (['"""void(float32[:,:], float32[:,:], float32[:,:])"""'], {}), "('void(float32[:,:], float32[:,:], float32[:,:])')\n", (341, 391), False, 'import numba\n'), ((194, 206), 'numba.cuda.grid', 'cuda.grid', (['(2)'], {}), '(2)\n', (203, 206), False, 'from numba import cuda\n'), ((623, 678), 'numba.cuda.shared.array', 'cuda.shared.array', ([], {'shape': 'shared_mem_size', 'dtype': 'float32'}), '(shape=shared_mem_size, dtype=float32)\n', (640, 678), False, 'from numba import cuda\n'), ((688, 743), 'numba.cuda.shared.array', 'cuda.shared.array', ([], {'shape': 'shared_mem_size', 'dtype': 'float32'}), '(shape=shared_mem_size, dtype=float32)\n', (705, 743), False, 'from numba import cuda\n'), ((808, 820), 'numba.cuda.grid', 'cuda.grid', (['(2)'], {}), '(2)\n', (817, 820), False, 'from numba import cuda\n'), ((1451, 1464), 'pytest.main', 'pytest.main', ([], {}), '()\n', (1462, 1464), False, 'import pytest\n'), ((1099, 1117), 'numba.cuda.syncthreads', 'cuda.syncthreads', ([], {}), '()\n', (1115, 1117), False, 'from numba import cuda\n'), ((1334, 1352), 'numba.cuda.syncthreads', 'cuda.syncthreads', ([], {}), '()\n', (1350, 1352), False, 'from numba import cuda\n')] |
from sqlalchemy.ext.declarative import as_declarative
from sqlalchemy import Column, Integer
@as_declarative()
class Base:
    """Declarative base shared by all ORM models."""
    # All mapped tables live in the "public" schema.
    __table_args__ = {'schema': 'public'}
    # Surrogate integer primary key inherited by every model.
    id = Column(Integer, primary_key=True, index=True, autoincrement=True)
| [
"sqlalchemy.Column",
"sqlalchemy.ext.declarative.as_declarative"
] | [((96, 112), 'sqlalchemy.ext.declarative.as_declarative', 'as_declarative', ([], {}), '()\n', (110, 112), False, 'from sqlalchemy.ext.declarative import as_declarative\n'), ((177, 242), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)', 'index': '(True)', 'autoincrement': '(True)'}), '(Integer, primary_key=True, index=True, autoincrement=True)\n', (183, 242), False, 'from sqlalchemy import Column, Integer\n')] |
from bson import ObjectId
import mock
from tests import base
from d1_common.env import D1_ENV_DICT
def setUpModule():
    """Boot the girder test server with the wholetale plugin enabled."""
    base.enabledPlugins.append('wholetale')
    base.startServer()
    # Imported here rather than at module scope -- presumably these plugin
    # modules are only importable once the server has started; verify.
    global JobStatus, Tale
    from girder.plugins.jobs.constants import JobStatus
    from girder.plugins.wholetale.models.tale import Tale
def tearDownModule():
    """Stop the girder test server started by setUpModule."""
    base.stopServer()
class FakeJob:
    """Minimal stand-in for a celery-style task whose ``delay`` accepts any
    arguments and simply hands back the stored ``job`` mapping."""
    job = dict()
    def delay(self, *_args, **_kwargs):
        """Ignore all arguments and return the shared ``job`` dict."""
        return self.job
class PublishTestCase(base.TestCase):
    """Exercises the /publish/dataone endpoint with the gwvolman task mocked out."""
    def setUp(self):
        super(PublishTestCase, self).setUp()
        # Two accounts: an admin and a regular user who will own the tale.
        users = (
            {
                'email': '<EMAIL>',
                'login': 'admin',
                'firstName': 'Root',
                'lastName': '<NAME>',
                'password': '<PASSWORD>',
            },
            {
                'email': '<EMAIL>',
                'login': 'joeregular',
                'firstName': 'Joe',
                'lastName': 'Regular',
                'password': '<PASSWORD>',
            },
        )
        self.admin, self.user = [
            self.model('user').createUser(**user) for user in users
        ]
        # A public tale owned by the regular user; the publish request targets it.
        self.tale = self.model('tale', 'wholetale').createTale(
            {'_id': ObjectId()},
            data=[],
            authors=self.user['firstName'] + ' ' + self.user['lastName'],
            creator=self.user,
            public=True,
            description="blah",
            title="Test",
            illustration='linkToImage',
        )
    def testPublish(self):
        # Patch both task entry points so no real celery task is dispatched.
        with mock.patch('gwvolman.tasks.publish.apply_async'), mock.patch(
            'gwvolman.tasks.publish.delay'
        ) as dl:
            dl.return_value = FakeJob()
            remoteMemberNode = 'remoteMemberURL'
            authToken = '<KEY>'
            resp = self.request(
                path='/publish/dataone',
                method='GET',
                user=self.user,
                params={
                    'taleId': str(self.tale['_id']),
                    'remoteMemberNode': remoteMemberNode,
                    'authToken': authToken,
                    'coordinatingNode': D1_ENV_DICT['dev']['base_url']
                },
            )
            self.assertStatusOk(resp)
            # The endpoint must forward exactly these kwargs to publish.delay();
            # the girder token is generated per-request, so drop it before comparing.
            job_call = dl.call_args_list[-1][-1]
            job_call.pop('girder_client_token')
            self.assertDictEqual(
                job_call,
                (
                    {
                        'dataone_auth_token': authToken,
                        'dataone_node': remoteMemberNode,
                        'tale': str(self.tale['_id']),
                        'user_id': str(self.user['_id']),
                        'coordinating_node': D1_ENV_DICT['dev']['base_url']
                    }
                ),
            )
    def tearDown(self):
        # Remove the users created in setUp before shutting the case down.
        self.model('user').remove(self.user)
        self.model('user').remove(self.admin)
        super(PublishTestCase, self).tearDown()
| [
"tests.base.startServer",
"mock.patch",
"tests.base.enabledPlugins.append",
"tests.base.stopServer",
"bson.ObjectId"
] | [((124, 163), 'tests.base.enabledPlugins.append', 'base.enabledPlugins.append', (['"""wholetale"""'], {}), "('wholetale')\n", (150, 163), False, 'from tests import base\n'), ((168, 186), 'tests.base.startServer', 'base.startServer', ([], {}), '()\n', (184, 186), False, 'from tests import base\n'), ((357, 374), 'tests.base.stopServer', 'base.stopServer', ([], {}), '()\n', (372, 374), False, 'from tests import base\n'), ((1550, 1598), 'mock.patch', 'mock.patch', (['"""gwvolman.tasks.publish.apply_async"""'], {}), "('gwvolman.tasks.publish.apply_async')\n", (1560, 1598), False, 'import mock\n'), ((1600, 1642), 'mock.patch', 'mock.patch', (['"""gwvolman.tasks.publish.delay"""'], {}), "('gwvolman.tasks.publish.delay')\n", (1610, 1642), False, 'import mock\n'), ((1237, 1247), 'bson.ObjectId', 'ObjectId', ([], {}), '()\n', (1245, 1247), False, 'from bson import ObjectId\n')] |
from tyr.lexer import *
import unittest
class TestConsolidate(unittest.TestCase):
    """Tests for Token.consolidate() on character tokens."""
    def setUp(self):
        from random import Random
        # Seeded RNG for reproducibility.
        # NOTE(review): self.random is not used by the visible test; confirm
        # whether other tests rely on it or it can be removed.
        self.random = Random(42)
    def test_consolidate_char(self):
        # Every ASCII letter (a-z then A-Z) must consolidate to itself.
        for i in range(2*26):
            char = chr(i + ord('a')) if i < 26 else chr(i - 26 + ord('A'))
            with self.subTest(char=char):
                self.assertEqual(Token("_CHAR", 1, char, char).consolidate(), char)
        # Escaped characters consolidate to their two-character source form.
        for char in ['\\n', '\\\\']:
            with self.subTest(char=char):
                self.assertEqual(Token("_CHAR", 2, char, char).consolidate(), char)
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
| [
"unittest.main",
"random.Random"
] | [((579, 594), 'unittest.main', 'unittest.main', ([], {}), '()\n', (592, 594), False, 'import unittest\n'), ((150, 160), 'random.Random', 'Random', (['(42)'], {}), '(42)\n', (156, 160), False, 'from random import Random\n')] |
import pickle
import pandas as pd
# Columns that identify the training method/configuration of a run.
method_columns = ['model_class', 'config', 'loss_function', 'q_dist', 'sample_from_q',
                  'detach', 'add_noise', 'noise_type', 'warm_up', 'is_loaded', 'method_name']
# Hyperparameter columns swept during model selection.
hparam_columns = ['grad_l1_penalty', 'grad_weight_decay',
                  'lamb', 'loss_function_param', 'noise_std', 'lr', 'weight_decay']
# Columns that describe the dataset / noise setting of a run.
data_columns = ['dataset', 'label_noise_level', 'label_noise_type', 'num_train_examples',
                'remove_prob', 'transform_function', 'data_augmentation']
# Bookkeeping columns dropped from the loaded results.
ignore_columns = ['device', 'batch_size', 'epochs', 'stopping_param', 'save_iter', 'vis_iter',
                  'clean_validation', 'pretrained_arg', 'load_from']
not_listed_columns = ['seed', 'log_dir']
# Display ordering of method names in tables/plots (lower sorts first).
method_order = {
    'CE': 0,
    'CE-noisy-grad-Gaussian': 0.1,
    'CE-noisy-grad-Laplace': 0.2,
    'MAE': 1,
    'FW': 2,
    'DMI': 3,
    'Penalize': 3.5,
    'Predict-Gaussian': 4,
    'Predict-Gaussian-sample': 4.1,
    'Predict-Laplace': 5,
    'Predict-Laplace-sample': 5.1,
    'Predict-Gaussian-loaded': 6,
    'Predict-Laplace-loaded': 7
}
def load_result_tables(list_of_datasets):
    """Load pickled results DataFrames, concatenate them, and fill defaults
    for columns that older runs may not have recorded.

    :param list_of_datasets: paths to .pkl files each containing a DataFrame
    :return: one concatenated DataFrame with normalized columns
    """
    datasets = []
    df = None
    for dataset_path in list_of_datasets:
        with open(dataset_path, 'rb') as f:
            df = pickle.load(f)
        datasets.append(df)
    # NOTE(review): this drop only affects the *last* loaded frame and its
    # result is immediately discarded by the concat below; kept as-is for
    # behavioral compatibility (dropping from the concatenated frame would
    # break the load_from/pretrained_arg fills further down).
    df = df.drop(labels=ignore_columns, axis=1)  # drop columns that do not matter
    df = pd.concat(datasets, sort=False).reset_index(drop=True)
    # Plain assignment instead of chained .fillna(inplace=True): equivalent
    # behavior, and safe under pandas copy-on-write.
    df['num_train_examples'] = df['num_train_examples'].fillna('N/A')
    df['transform_function'] = df['transform_function'].fillna('N/A')
    df['detach'] = df['detach'].fillna(1.0)
    df['load_from'] = df['load_from'].fillna('N/A')
    df['is_loaded'] = (df.load_from != 'N/A')
    df['pretrained_arg'] = df['pretrained_arg'].fillna('N/A')
    df['lr'] = df['lr'].fillna('1e-3')
    if 'warm_up' in df.columns:
        df['warm_up'] = df['warm_up'].fillna(0)
    else:
        df['warm_up'] = 0
    # BUG FIX: the original tested `'weight_decay' is df.columns`, which is
    # always False (identity check of a str against an Index), so recorded
    # weight_decay values were clobbered with 0.0. Use membership like the
    # warm_up branch above.
    if 'weight_decay' in df.columns:
        df['weight_decay'] = df['weight_decay'].fillna(0.0)
    else:
        df['weight_decay'] = 0.0
    # Filled in later by fill_short_names().
    df['method_name'] = 'unknown'
    return df
def infer_method_name(row):
    """Derive the short, human-readable method name for one result row.

    :param row: a row (e.g. pandas Series) with method-configuration fields
    :return: the method's display name, or 'unknown' for unrecognized classes
    """
    model = row.model_class
    if model == 'StandardClassifier':
        simple = {'dmi': 'DMI', 'fw': 'FW', 'mae': 'MAE'}
        if row.loss_function in simple:
            return simple[row.loss_function]
        # Anything else must be plain cross-entropy.
        assert row.loss_function == 'ce'
        if row.add_noise == 1.0:
            return 'CE-noisy-grad-{}'.format(row.noise_type)
        return 'CE'
    if model == 'PredictGradOutput':
        # Base name carries the q distribution; suffixes encode the variants.
        name = 'Predict-{}'.format(row.q_dist)
        if row.sample_from_q:
            name += '-sample'
        if row.loss_function != 'ce':
            name += '-{}'.format(row.loss_function)
        if row.detach == 0.0:
            name += '-nodetach'
        if row.is_loaded:
            name += '-loaded'
        if row.warm_up != 0:
            name += '-warm_up{}'.format(row['warm_up'])
        return name
    if model == 'PenalizeLastLayerFixedForm':
        return 'Penalize'
    return 'unknown'
def fill_short_names(df):
    """Populate the 'method_name' column of *df* from each row's config fields."""
    df['method_name'] = [infer_method_name(row) for _, row in df.iterrows()]
    return df
def get_agg_results(df):
    """ Takes a dataframe containing all results and computes aggregate results. """
    grouped = df.groupby(method_columns + hparam_columns + data_columns)
    # Sanity-check that every configuration has the expected number of seeds
    # before aggregating. These expectations are dataset-specific.
    total_size = 0
    for key, item in grouped:
        group = grouped.get_group(key)
        assert len(group) <= 5  # less than 5 seeds always
        assert len(set(group['seed'])) == len(group)  # all seeds are distinct
        if item.dataset.iloc[0] == 'mnist' and item.label_noise_type.iloc[0] == 'error':
            # For this setting, two method families were run with 3 seeds,
            # everything else with 5.
            if item.sample_from_q.iloc[0] == True:
                assert len(group) == 3
            elif item.model_class.iloc[0] == 'PenalizeLastLayerFixedForm':
                assert len(group) == 3
            else:
                assert len(group) == 5
        total_size += len(group)
    # Every row must belong to exactly one group.
    assert total_size == len(df)
    # Mean/std over seeds for the two accuracy metrics.
    agg_results = grouped.agg({'test_accuracy': ['mean', 'std'], 'val_accuracy': ['mean', 'std']})
    agg_results = agg_results.reset_index()
    # Flatten the MultiIndex columns: ('test_accuracy','mean') -> 'test_accuracy_mean'.
    agg_results.columns = ['_'.join(tup).rstrip('_') for tup in agg_results.columns.values]
    return agg_results
def do_model_selection_by_val_score(df):
    """For each (method, data) configuration, keep the hyperparameter setting
    with the highest mean validation accuracy."""
    def pick_best(group):
        # Row with the best validation score within this configuration.
        best_idx = group['val_accuracy_mean'].idxmax()
        return group.loc[best_idx]
    best = df.groupby(method_columns + data_columns).apply(pick_best)
    return best.reset_index(drop=True)
| [
"pickle.load",
"pandas.concat"
] | [((1322, 1336), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1333, 1336), False, 'import pickle\n'), ((1457, 1488), 'pandas.concat', 'pd.concat', (['datasets'], {'sort': '(False)'}), '(datasets, sort=False)\n', (1466, 1488), True, 'import pandas as pd\n')] |
import os
import json
from metis.CondorTask import CondorTask
from metis.Constants import Constants
import metis.Utils as Utils
import traceback
class CMSSWTask(CondorTask):
    """CondorTask specialization that runs cmsRun over a CMSSW pset and
    produces legacy CMS3-style metadata for the finished sample."""
    def __init__(self, **kwargs):
        """
        :kwarg pset_args: extra arguments to pass to cmsRun along with pset
        :kwarg is_tree_output: is the output file of the job a tree?
        :kwarg other_outputs: list of other output files to copy back (in addition to output_name)
        :kwarg publish_to_dis: publish the sample information to DIS upon completion
        :kwarg report_every: MessageLogger reporting every N events
        """
        self.pset = kwargs.get("pset", None)
        self.pset_args = kwargs.get("pset_args", "print")
        self.check_expectedevents = kwargs.get("check_expectedevents", True)
        self.is_data = kwargs.get("is_data", False)
        self.input_executable = kwargs.get("executable", self.get_metis_base() + "metis/executables/condor_cmssw_exe.sh")
        self.other_outputs = kwargs.get("other_outputs", [])
        self.output_is_tree = kwargs.get("is_tree_output", True)
        self.dont_check_tree = kwargs.get("dont_check_tree", False)
        self.dont_edit_pset = kwargs.get("dont_edit_pset", False)
        self.publish_to_dis = kwargs.get("publish_to_dis", False)
        self.report_every = kwargs.get("report_every", 1000)
        # Pass all of the kwargs to the parent class
        super(CMSSWTask, self).__init__(**kwargs)
        # If we didn't get a globaltag, use the one from DBS
        # NOTE: This is declared as something to backup and placed after the
        # self.load() so that we don't spam get_globaltag() as it makes a
        # DIS query each time. Would be smarter to remove need to back up
        # and put maybe a caching decorator for the config query in the
        # SamplesDBS class!
        if not self.read_only:
            if not self.global_tag:
                self.global_tag = self.sample.get_globaltag()
    def info_to_backup(self):
        """Attribute names persisted to the backup pickle."""
        # Declare which variables we want to backup to avoid recalculation
        return ["io_mapping", "executable_path", "pset_path",
                "package_path", "prepared_inputs",
                "job_submission_history", "global_tag", "queried_nevents"]
    def handle_done_output(self, out):
        """Mark a finished output DONE; for MC trees also cache the
        negative-event count for later use in the metadata."""
        out.set_status(Constants.DONE)
        self.logger.debug("This output ({0}) exists, skipping the processing".format(out))
        # If MC and file is done, calculate negative events to use later for metadata
        # NOTE Can probably speed this up if it's not an NLO sample
        if not self.is_data and self.output_is_tree:
            self.logger.debug("Calculating negative events for this file")
            try:
                out.get_nevents_negative()
            except Exception as e:
                # Best-effort: a broken file is reported but does not abort the task.
                self.logger.info("{}\nSomething wrong with this file. Delete it by hand. {}{}".format(
                    "-"*50, traceback.format_exc(), "-"*50,
                ))
    def finalize(self):
        """
        Take care of task-dependent things after
        jobs are completed
        """
        d_metadata = self.get_legacy_metadata()
        self.write_metadata(d_metadata)
        if self.publish_to_dis:
            self.update_dis(d_metadata)
    def submit_multiple_condor_jobs(self, v_ins, v_out, fake=False, optimizer=None):
        """Build per-job argument lists for every (inputs, output) pair and
        submit them to condor in a single batch.

        :param v_ins: list of input-file lists, one per job
        :param v_out: list of output files, one per job
        :param fake: if True, do not actually submit (dry run)
        :param optimizer: optional site optimizer providing DESIRED_Sites per job
        """
        outdir = self.output_dir
        outname_noext = self.output_name.rsplit(".", 1)[0]
        v_inputs_commasep = [",".join(map(lambda x: x.get_name(), ins)) for ins in v_ins]
        v_index = [out.get_index() for out in v_out]
        pset_full = os.path.abspath(self.pset_path)
        pset_basename = os.path.basename(self.pset_path)
        cmssw_ver = self.cmssw_version
        scramarch = self.scram_arch
        max_nevents_per_job = self.kwargs.get("max_nevents_per_job", -1)
        nevts = max_nevents_per_job
        v_firstevt = [-1 for out in v_out]
        v_expectedevents = [-1 for out in v_out]
        if self.check_expectedevents:
            v_expectedevents = [out.get_nevents() for out in v_out]
        if max_nevents_per_job > 0:
            v_expectedevents = [max_nevents_per_job for out in v_out]
        if self.split_within_files:
            # Jobs share one file list baked into the pset; each job processes
            # its own event slice, so the per-job input list is a placeholder.
            nevts = self.events_per_output
            v_firstevt = [1 + (out.get_index() - 1) * (self.events_per_output+1) for out in v_out]
            v_expectedevents = [-1 for out in v_out]
            v_inputs_commasep = ["dummyfile" for ins in v_ins]
        pset_args = self.pset_args
        executable = self.executable_path
        other_outputs = ",".join(self.other_outputs) or "None"
        # note that pset_args must be the last argument since it can have spaces
        # check executables/condor_cmssw_exe.sh to see why
        v_arguments = [[outdir, outname_noext, inputs_commasep,
                        index, pset_basename, cmssw_ver, scramarch,
                        nevts, firstevt, expectedevents, other_outputs, pset_args]
                       for (index,inputs_commasep,firstevt,expectedevents) in zip(v_index,v_inputs_commasep,v_firstevt,v_expectedevents)]
        if optimizer:
            # Classads used later to match running jobs back to this task.
            v_sites = optimizer.get_sites(self, v_ins, v_out)
            v_selection_pairs = [
                [
                    ["taskname", self.unique_name],
                    ["jobnum", index],
                    ["tag", self.tag],
                    ["metis_retries", len(self.job_submission_history.get(index,[]))],
                    ["DESIRED_Sites", sites],
                ]
                for index,sites in zip(v_index,v_sites)
            ]
        else:
            v_selection_pairs = [
                [
                    ["taskname", self.unique_name],
                    ["jobnum", index],
                    ["tag", self.tag],
                    ["metis_retries", len(self.job_submission_history.get(index,[]))],
                ]
                for index in v_index
            ]
        logdir_full = os.path.abspath("{0}/logs/".format(self.get_taskdir()))
        package_full = os.path.abspath(self.package_path)
        # The tarball is only shipped when the task actually has one.
        input_files = [package_full, pset_full] if self.tarfile else [pset_full]
        input_files += self.additional_input_files
        extra = self.kwargs.get("condor_submit_params", {})
        if self.dont_check_tree:
            extra["classads"] = extra.get("classads",[]) + [["metis_dontchecktree",1]]
        return Utils.condor_submit(
            executable=executable, arguments=v_arguments,
            inputfiles=input_files, logdir=logdir_full,
            selection_pairs=v_selection_pairs,
            multiple=True,
            fake=fake, **extra
        )
    # def submit_condor_job(self, ins, out, fake=False):
    #     outdir = self.output_dir
    #     outname_noext = self.output_name.rsplit(".", 1)[0]
    #     inputs_commasep = ",".join(map(lambda x: x.get_name(), ins))
    #     index = out.get_index()
    #     pset_full = os.path.abspath(self.pset_path)
    #     pset_basename = os.path.basename(self.pset_path)
    #     cmssw_ver = self.cmssw_version
    #     scramarch = self.scram_arch
    #     max_nevents_per_job = self.kwargs.get("max_nevents_per_job", -1)
    #     nevts = max_nevents_per_job
    #     firstevt = -1
    #     expectedevents = -1
    #     if self.check_expectedevents:
    #         expectedevents = out.get_nevents()
    #     if max_nevents_per_job > 0:
    #         expectedevents = max_nevents_per_job
    #     if self.split_within_files:
    #         nevts = self.events_per_output
    #         firstevt = 1 + (index - 1) * (self.events_per_output+1)
    #         expectedevents = -1
    #         inputs_commasep = "dummyfile"
    #     pset_args = self.pset_args
    #     executable = self.executable_path
    #     other_outputs = ",".join(self.other_outputs) or "None"
    #     # note that pset_args must be the last argument since it can have spaces
    #     # check executables/condor_cmssw_exe.sh to see why
    #     arguments = [outdir, outname_noext, inputs_commasep,
    #                  index, pset_basename, cmssw_ver, scramarch,
    #                  nevts, firstevt, expectedevents, other_outputs, pset_args]
    #     logdir_full = os.path.abspath("{0}/logs/".format(self.get_taskdir()))
    #     package_full = os.path.abspath(self.package_path)
    #     input_files = [package_full, pset_full] if self.tarfile else [pset_full]
    #     extra = self.kwargs.get("condor_submit_params", {})
    #     return Utils.condor_submit(
    #         executable=executable, arguments=arguments,
    #         inputfiles=input_files, logdir=logdir_full,
    #         selection_pairs=[["taskname", self.unique_name], ["jobnum", index], ["tag", self.tag]],
    #         fake=fake, **extra
    #     )
    def prepare_inputs(self):
        """Stage the executable, package tarball and (possibly edited) pset
        into the task directory before submission."""
        # need to take care of executable, tarfile, and pset
        self.executable_path = "{0}/executable.sh".format(self.get_taskdir())
        self.package_path = "{0}/package.tar.gz".format(self.get_taskdir())
        self.pset_path = "{0}/pset.py".format(self.get_taskdir())
        # see if the path was given relative to $METIS_BASE
        if not os.path.exists(self.input_executable):
            to_check = os.path.join(self.get_metis_base(),self.input_executable)
            if os.path.exists(to_check):
                self.input_executable = to_check
        # take care of executable. easy.
        Utils.do_cmd("cp {0} {1}".format(self.input_executable, self.executable_path))
        # add some stuff to end of pset (only tags and dataset name.
        # rest is done within the job in the executable)
        pset_location_in = self.pset
        pset_location_out = self.pset_path
        with open(pset_location_in, "r") as fhin:
            data_in = fhin.read()
        with open(pset_location_out, "w") as fhin:
            fhin.write(data_in)
            if not self.dont_edit_pset:
                # Appended pset fragment: injects tag/dataset/globaltag and a
                # helper to retarget all output modules.
                fhin.write("""
if hasattr(process,"eventMaker"):
    process.eventMaker.CMS3tag = cms.string('{tag}')
    process.eventMaker.datasetName = cms.string('{dsname}')
    process.out.dropMetaData = cms.untracked.string("NONE")
if hasattr(process,"GlobalTag"):
    process.GlobalTag.globaltag = "{gtag}"
if hasattr(process,"MessageLogger"):
    process.MessageLogger.cerr.FwkReport.reportEvery = {reportevery}
import os
major_ver = int(os.getenv("CMSSW_RELEASE_BASE",os.getenv("CMSSW_BASE","CMSSW_5")).split("CMSSW_",1)[1].split("_",1)[0])
if major_ver >= 8:
    process.add_(cms.Service("CondorStatusService", updateIntervalSeconds=cms.untracked.uint32(2700)))
def set_output_name(outputname):
    to_change = []
    for attr in dir(process):
        if not hasattr(process,attr): continue
        if (type(getattr(process,attr)) != cms.OutputModule) and (attr not in ["TFileService"]): continue
        to_change.append([process,attr])
    for i in range(len(to_change)):
        getattr(to_change[i][0],to_change[i][1]).fileName = outputname
\n\n""".format(tag=self.tag, dsname=self.get_sample().get_datasetname(), gtag=self.global_tag, reportevery=self.report_every)
                )
            if self.sparms:
                # Signal-parameter strings for SUSY scans.
                sparms = ['"{0}"'.format(sparm) for sparm in self.sparms]
                fhin.write("\nprocess.sParmMaker.vsparms = cms.untracked.vstring(\n{0}\n)\n\n".format(",\n".join(sparms)))
        # for LHE where we want to split within files,
        # we specify all the files at once, and then shove them in the pset
        # later on we will then tell each job the number of events to process
        # and the first event to start with (firstEvent)
        if self.split_within_files:
            if self.kwargs.get("condor_submit_params", {}).get("sites") == "T2_US_UCSD":
                # Local ceph access at UCSD; everywhere else goes through xrootd.
                fnames = ['"{0}"'.format(fo.get_name().replace("/ceph/cms","file:/ceph/cms")) for fo in self.get_inputs(flatten=True)]
            else:
                fnames = ['"{0}"'.format(fo.get_name().replace("/ceph/cms","").replace("/store/","root://cmsxrootd.fnal.gov//store/")) for fo in self.get_inputs(flatten=True)]
            fnames = sorted(list(set(fnames)))
            with open(pset_location_out, "a") as fhin:
                # hard limit at 255 input files since that's the max CMSSW allows in process.source
                fhin.write("\nif hasattr(process.source,\"fileNames\"): process.source.fileNames = cms.untracked.vstring([\n{0}\n][:255])\n\n".format(",\n".join(fnames)))
                fhin.write("\nif hasattr(process,\"RandomNumberGeneratorService\"): process.RandomNumberGeneratorService.generator.initialSeed = cms.untracked.uint32(int(__import__('random').getrandbits(28)))\n\n") # max accepted by CMSSW is 29 bits or so. Try higher and you'll see.
                fhin.write("\nif hasattr(process,\"RandomNumberGeneratorService\"): process.RandomNumberGeneratorService.externalLHEProducer.initialSeed = cms.untracked.uint32(int(__import__('random').getrandbits(17)))\n\n") # cmssw IOMC/RandomEngine/python/IOMC_cff.py
        # take care of package tar file. easy.
        Utils.do_cmd("cp {0} {1}".format(self.tarfile, self.package_path))
        self.prepared_inputs = True
    def get_legacy_metadata(self):
        """Assemble the legacy metadata dict: per-job input maps, event
        counts, sample info and final output location."""
        d_metadata = {}
        d_metadata["ijob_to_miniaod"] = {}
        d_metadata["ijob_to_nevents"] = {}
        done_nevents = 0
        for ins, out in self.get_io_mapping():
            if out.get_status() != Constants.DONE:
                continue
            d_metadata["ijob_to_miniaod"][out.get_index()] = list(map(lambda x: x.get_name(), ins))
            nevents = out.get_nevents()
            nevents_pos = out.get_nevents_positive() if self.output_is_tree else 0
            # Effective events = positive minus negative weights.
            nevents_eff = nevents_pos - (nevents - nevents_pos)
            d_metadata["ijob_to_nevents"][out.get_index()] = [nevents, nevents_eff]
            done_nevents += out.get_nevents()
        d_metadata["basedir"] = os.path.abspath(self.get_basedir())
        d_metadata["taskdir"] = os.path.abspath(self.get_taskdir())
        d_metadata["tag"] = self.tag
        d_metadata["dataset"] = self.get_sample().get_datasetname()
        d_metadata["gtag"] = self.global_tag
        d_metadata["pset"] = self.pset
        d_metadata["pset_args"] = self.pset_args
        d_metadata["cmsswver"] = self.cmssw_version
        # NOTE this makes a DIS query every single time, cache it somehow
        # for closed datasets? or only make metadata once at the end?
        d_metadata["nevents_DAS"] = done_nevents if not self.open_dataset else self.get_sample().get_nevents()
        d_metadata["nevents_merged"] = done_nevents
        d_metadata["finaldir"] = self.get_outputdir()
        d_metadata["efact"] = self.sample.info["efact"]
        d_metadata["kfact"] = self.sample.info["kfact"]
        d_metadata["xsec"] = self.sample.info["xsec"]
        return d_metadata
    def write_metadata(self, d_metadata):
        """Write metadata.json and copy the backup pickle into the final directory."""
        metadata_file = d_metadata["finaldir"] + "/metadata.json"
        with open(metadata_file, "w") as fhout:
            json.dump(d_metadata, fhout, sort_keys=True, indent=4)
        # self.logger.info("Dumped metadata to {0}".format(metadata_file))
        Utils.do_cmd("cp {0}/backup.pkl {1}/".format(self.get_taskdir(), d_metadata["finaldir"]))
        self.logger.info("Dumped metadata and backup pickle")
    def supplement_task_summary(self, task_summary):
        """
        To be overloaded by subclassers
        This allows putting extra stuff into the task summary
        """
        task_summary["pset"] = self.pset
        task_summary["pset_args"] = self.pset_args
        return task_summary
    def update_dis(self, d_metadata):
        """Push the finished sample's counts and location to DIS."""
        self.sample.info["nevents_in"] = d_metadata["nevents_DAS"]
        self.sample.info["nevents"] = d_metadata["nevents_merged"]
        self.sample.info["location"] = d_metadata["finaldir"]
        self.sample.info["tag"] = d_metadata["tag"]
        self.sample.info["gtag"] = d_metadata["gtag"]
        self.sample.do_update_dis()
if __name__ == "__main__":
    # Library module; nothing to run directly.
    pass
| [
"os.path.exists",
"traceback.format_exc",
"os.path.basename",
"metis.Utils.condor_submit",
"os.path.abspath",
"json.dump"
] | [((3649, 3680), 'os.path.abspath', 'os.path.abspath', (['self.pset_path'], {}), '(self.pset_path)\n', (3664, 3680), False, 'import os\n'), ((3705, 3737), 'os.path.basename', 'os.path.basename', (['self.pset_path'], {}), '(self.pset_path)\n', (3721, 3737), False, 'import os\n'), ((6180, 6214), 'os.path.abspath', 'os.path.abspath', (['self.package_path'], {}), '(self.package_path)\n', (6195, 6214), False, 'import os\n'), ((6542, 6730), 'metis.Utils.condor_submit', 'Utils.condor_submit', ([], {'executable': 'executable', 'arguments': 'v_arguments', 'inputfiles': 'input_files', 'logdir': 'logdir_full', 'selection_pairs': 'v_selection_pairs', 'multiple': '(True)', 'fake': 'fake'}), '(executable=executable, arguments=v_arguments,\n inputfiles=input_files, logdir=logdir_full, selection_pairs=\n v_selection_pairs, multiple=True, fake=fake, **extra)\n', (6561, 6730), True, 'import metis.Utils as Utils\n'), ((9393, 9430), 'os.path.exists', 'os.path.exists', (['self.input_executable'], {}), '(self.input_executable)\n', (9407, 9430), False, 'import os\n'), ((9528, 9552), 'os.path.exists', 'os.path.exists', (['to_check'], {}), '(to_check)\n', (9542, 9552), False, 'import os\n'), ((15264, 15318), 'json.dump', 'json.dump', (['d_metadata', 'fhout'], {'sort_keys': '(True)', 'indent': '(4)'}), '(d_metadata, fhout, sort_keys=True, indent=4)\n', (15273, 15318), False, 'import json\n'), ((2966, 2988), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (2986, 2988), False, 'import traceback\n')] |
from django.urls import path
from .views import (
video_list_view,
video_detail_view,
video_create_view,
video_edit_view,
video_delete_view
)
urlpatterns = [
path('create-view/', video_create_view, name='video-create'),
path('list-view/', video_list_view, name='video-list'),
path('detail-view/<str:host_id>/<str:db_id>/', video_detail_view, name='video-detail'),
path('edit-view/<str:host_id>/<str:db_id>/', video_edit_view, name='video-edit'),
path('delete-view/<str:host_id>/<str:db_id>/', video_delete_view, name='video-delete'),
] | [
"django.urls.path"
] | [((184, 244), 'django.urls.path', 'path', (['"""create-view/"""', 'video_create_view'], {'name': '"""video-create"""'}), "('create-view/', video_create_view, name='video-create')\n", (188, 244), False, 'from django.urls import path\n'), ((250, 304), 'django.urls.path', 'path', (['"""list-view/"""', 'video_list_view'], {'name': '"""video-list"""'}), "('list-view/', video_list_view, name='video-list')\n", (254, 304), False, 'from django.urls import path\n'), ((310, 401), 'django.urls.path', 'path', (['"""detail-view/<str:host_id>/<str:db_id>/"""', 'video_detail_view'], {'name': '"""video-detail"""'}), "('detail-view/<str:host_id>/<str:db_id>/', video_detail_view, name=\n 'video-detail')\n", (314, 401), False, 'from django.urls import path\n'), ((402, 487), 'django.urls.path', 'path', (['"""edit-view/<str:host_id>/<str:db_id>/"""', 'video_edit_view'], {'name': '"""video-edit"""'}), "('edit-view/<str:host_id>/<str:db_id>/', video_edit_view, name='video-edit'\n )\n", (406, 487), False, 'from django.urls import path\n'), ((488, 579), 'django.urls.path', 'path', (['"""delete-view/<str:host_id>/<str:db_id>/"""', 'video_delete_view'], {'name': '"""video-delete"""'}), "('delete-view/<str:host_id>/<str:db_id>/', video_delete_view, name=\n 'video-delete')\n", (492, 579), False, 'from django.urls import path\n')] |
from setuptools import setup
# Third-party packages the site needs at runtime.
_REQUIREMENTS = [
    'wheel',
    'uwsgi',
    'flask',
    'flask_restful',
    'flask_httpauth',
    'python_dotenv',
    'simplejson',
    'paho-mqtt',
]

# Packaging metadata for the mqtt/flask demo site.
setup(
    name='mqttflask_app',
    version='0.1',
    packages=['site'],
    install_requires=_REQUIREMENTS,
    license='MIT',
    description='A website for messing around with mqtt',
)
| [
"setuptools.setup"
] | [((30, 298), 'setuptools.setup', 'setup', ([], {'name': '"""mqttflask_app"""', 'version': '"""0.1"""', 'packages': "['site']", 'install_requires': "['wheel', 'uwsgi', 'flask', 'flask_restful', 'flask_httpauth',\n 'python_dotenv', 'simplejson', 'paho-mqtt']", 'license': '"""MIT"""', 'description': '"""A website for messing around with mqtt"""'}), "(name='mqttflask_app', version='0.1', packages=['site'],\n install_requires=['wheel', 'uwsgi', 'flask', 'flask_restful',\n 'flask_httpauth', 'python_dotenv', 'simplejson', 'paho-mqtt'], license=\n 'MIT', description='A website for messing around with mqtt')\n", (35, 298), False, 'from setuptools import setup\n')] |
import configparser
class Configer(object):
    """Thin convenience wrapper around :class:`configparser.ConfigParser`.

    Values are looked up with dotted keys of the form ``section.property``
    (or just ``property`` for the ``DEFAULT`` section).  Comma-separated
    values are split and stripped; a single value is returned bare, while
    multiple values come back as a list.
    """

    def __init__(self, config_file=None):
        """Read *config_file* (defaults to ``app.properties``)."""
        self._config = configparser.ConfigParser()
        self._config.read(config_file if config_file is not None else 'app.properties')

    def get(self, property):
        """Return the value for a ``section.property`` (or ``property``) key.

        Raises AttributeError when the key has more than one dot.
        """
        parts = property.split('.')
        if len(parts) > 2:
            raise AttributeError("property only support as: <section>.<property>")
        if len(parts) == 2:
            section, prop = parts
        else:
            section, prop = 'DEFAULT', parts[0]
        # Split comma-separated values and trim surrounding whitespace.
        values = [item.strip() for item in self._config.get(section, prop).split(',')]
        return values if len(values) > 1 else values[0]
| [
"configparser.ConfigParser"
] | [((182, 209), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (207, 209), False, 'import configparser\n')] |
import io
import time
import pytest
import swiftclient
@pytest.fixture
def test_data(mock_swift):
    """Yield per-test fixture values plus the mocked swift connection.

    Container/object names embed the current unix time so consecutive
    runs do not collide with leftovers from earlier runs.
    """
    containername = f"functional-tests-container-{int(time.time())}"
    objectname = f"functional-tests-object-{int(time.time())}"
    yield {
        "test_data": b"42" * 10,
        # NOTE(review): presumably the MD5 etag the backend reports for the
        # payload above -- verify against the mock if it ever drifts.
        "etag": "2704306ec982238d85d4b235c925d58e",
        "containername": containername,
        "containername_2": f"{containername}_second",
        "containername_3": f"{containername}_third",
        "objectname": objectname,
        "objectname_2": f"{objectname}_second",
        "connection": mock_swift,
    }
class TestRunner:
    """Bundle a swift connection with the per-test fixture data.

    Attribute access is routed through the ``data`` dict first, so e.g.
    ``runner.containername`` returns ``data["containername"]``; only keys
    missing from the dict fall back to real instance attributes.
    """

    def __init__(self, connection, data):
        self.conn = connection
        self.data = data

    def __getattribute__(self, key):
        lookup = super().__getattribute__
        try:
            # Fixture data shadows instance attributes of the same name.
            return lookup("data")[key]
        except (KeyError, AttributeError):
            return lookup(key)

    def check_account_headers(self, headers):
        """Assert every expected account header is present and non-empty."""
        expected = (
            "content-length",
            "x-account-object-count",
            "x-timestamp",
            "x-trans-id",
            "date",
            "x-account-bytes-used",
            "x-account-container-count",
            "content-type",
            "accept-ranges",
        )
        for name in expected:
            assert name in headers
            assert bool(headers[name]) is True

    def check_container_headers(self, headers):
        """Assert every expected container header is present and non-empty."""
        expected = (
            "content-length",
            "x-container-object-count",
            "x-timestamp",
            "x-trans-id",
            "date",
            "x-container-bytes-used",
            "content-type",
            "accept-ranges",
        )
        for name in expected:
            assert name in headers
            assert headers.get(name) is not None
@pytest.fixture
def test_cls(test_data):
    """Yield a ready-to-use TestRunner backed by the ``test_data`` fixture.

    Setup pre-creates two containers and two objects; teardown deletes
    every object/container the tests may have produced, best effort.
    """
    conn = test_data.pop("connection")
    testclass = TestRunner(conn, test_data)
    testclass.conn.put_container(testclass.containername)
    testclass.conn.put_container(testclass.containername_2)
    testclass.conn.put_object(
        testclass.containername, testclass.objectname, testclass.test_data
    )
    testclass.conn.put_object(
        testclass.containername, testclass.objectname_2, testclass.test_data
    )
    # Drop the calls recorded during setup so each test asserts only the
    # traffic it generated itself.
    testclass.conn._connection._retry.reset_mock()
    yield testclass
    # --- teardown: ignore already-deleted / never-created resources ---
    for obj in [testclass.objectname, testclass.objectname_2]:
        try:
            testclass.conn.delete_object(testclass.containername, obj)
        except swiftclient.ClientException:
            pass
    for container in [
        testclass.containername,
        testclass.containername_2,
        testclass.containername_3,
        testclass.containername + "_segments",
    ]:
        try:
            testclass.conn.delete_container(container)
        except swiftclient.ClientException:
            pass
| [
"time.time"
] | [((155, 166), 'time.time', 'time.time', ([], {}), '()\n', (164, 166), False, 'import time\n'), ((218, 229), 'time.time', 'time.time', ([], {}), '()\n', (227, 229), False, 'import time\n')] |
import subprocess
import PIL
from PIL import Image
import numpy as np
import os
import shutil
import re
# Absolute directory containing this script; temporary PNG frames are
# written into a "temp_imgs" folder next to it by default.
script_path = os.path.dirname(os.path.realpath(__file__))
temp_img_dir_path = os.path.join(script_path, 'temp_imgs')
def arr_to_mp4(arr, output_path, framerate=30, resolution_str=None, temp_dir=temp_img_dir_path):
    '''
    Encode an array of frames into an mp4 via the ffmpeg CLI.

    arr shape should be (frames, height, width, 3); values are cast to uint8.
    output_path: destination mp4 path (must not already exist, ffmpeg will not overwrite).
    framerate: frames per second of the output clip.
    resolution_str: optional "WIDTHxHEIGHT" output size; ignored if malformed.
    temp_dir: scratch directory for the intermediate PNG frames; it is
        created if missing and removed afterwards, even if encoding fails.
    '''
    # fullmatch (not match) so trailing junk like "256x256x" is rejected
    # instead of being handed to ffmpeg's -s option.
    use_res = resolution_str is not None and re.fullmatch(r'\d+x\d+', resolution_str) is not None
    os.makedirs(temp_dir, exist_ok=True)
    frames = arr.astype('uint8')
    try:
        for idx in range(frames.shape[0]):
            Image.fromarray(frames[idx]).save(os.path.join(temp_dir, '%d.png' % idx))
        # Build argv as a list: safe even when paths contain spaces.
        cmd = ['ffmpeg', '-framerate', str(framerate),
               '-i', os.path.join(temp_dir, '%d.png'),
               '-pix_fmt', 'yuv420p']
        if use_res:
            cmd += ['-s', resolution_str]
        cmd.append(output_path)
        subprocess.call(cmd)
    finally:
        # Always clean up the frame dump, even if ffmpeg failed.
        shutil.rmtree(temp_dir)
if __name__ == "__main__":
    # Smoke test: 120 frames of random RGB noise at 30 fps -> 4 second clip.
    arr = np.random.randint(0, 255, (120, 256, 256, 3), dtype="uint8")
    arr_to_mp4(arr, 'out1.mp4', resolution_str="256x256")
    # produces out1.mp4 which is 4 seconds long of image noise
| [
"PIL.Image.fromarray",
"os.path.join",
"re.match",
"os.path.realpath",
"numpy.random.randint",
"os.mkdir",
"shutil.rmtree"
] | [((182, 220), 'os.path.join', 'os.path.join', (['script_path', '"""temp_imgs"""'], {}), "(script_path, 'temp_imgs')\n", (194, 220), False, 'import os\n'), ((134, 160), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (150, 160), False, 'import os\n'), ((1134, 1157), 'shutil.rmtree', 'shutil.rmtree', (['temp_dir'], {}), '(temp_dir)\n', (1147, 1157), False, 'import shutil\n'), ((1196, 1256), 'numpy.random.randint', 'np.random.randint', (['(0)', '(255)', '(120, 256, 256, 3)'], {'dtype': '"""uint8"""'}), "(0, 255, (120, 256, 256, 3), dtype='uint8')\n", (1213, 1256), True, 'import numpy as np\n'), ((455, 492), 're.match', 're.match', (['"""\\\\d+x\\\\d+"""', 'resolution_str'], {}), "('\\\\d+x\\\\d+', resolution_str)\n", (463, 492), False, 'import re\n'), ((559, 577), 'os.mkdir', 'os.mkdir', (['temp_dir'], {}), '(temp_dir)\n', (567, 577), False, 'import os\n'), ((724, 747), 'PIL.Image.fromarray', 'Image.fromarray', (['imgarr'], {}), '(imgarr)\n', (739, 747), False, 'from PIL import Image\n')] |
from interactiongrader import Answer
from interactiongrader import ChangeType
from fuzzywuzzy import fuzz
def test_calculate_ranges():
    """A fresh Answer starts with an empty sentence and weights FLIP at 0.75."""
    answer = Answer()
    change_ranges = answer.calculate_ranges()
    assert answer.sentence == ''
    assert change_ranges[ChangeType.FLIP] == 0.75
def test_random_change_type():
    """random_change_type must always yield a member of ChangeType."""
    picked = Answer().random_change_type()
    assert picked in ChangeType
def test_misspell():
    """misspell returns a distinct but fuzzily-close variant of the sentence."""
    answer = Answer('Sample test')
    variant = answer.misspell()
    similarity = fuzz.ratio(variant, answer.sentence)
    # The stored sentence is untouched; only the returned copy differs.
    assert answer.sentence == 'Sample test'
    assert answer.sentence != variant
    assert similarity >= answer.minimum_fuzzy_score
def test_is_misspelling():
    """is_misspelling accepts close variants of the stored answer."""
    cases = [
        ('<NAME>', '<NAME>'),
        ('<NAME>', '<NAME>'),
        ('<NAME>', '<NAME>'),
    ]
    for stored, candidate in cases:
        assert Answer(stored).is_misspelling(candidate)
| [
"fuzzywuzzy.fuzz.ratio",
"interactiongrader.Answer"
] | [((146, 154), 'interactiongrader.Answer', 'Answer', ([], {}), '()\n', (152, 154), False, 'from interactiongrader import Answer\n'), ((307, 315), 'interactiongrader.Answer', 'Answer', ([], {}), '()\n', (313, 315), False, 'from interactiongrader import Answer\n'), ((419, 440), 'interactiongrader.Answer', 'Answer', (['"""Sample test"""'], {}), "('Sample test')\n", (425, 440), False, 'from interactiongrader import Answer\n'), ((485, 521), 'fuzzywuzzy.fuzz.ratio', 'fuzz.ratio', (['misspelled', 'ans.sentence'], {}), '(misspelled, ans.sentence)\n', (495, 521), False, 'from fuzzywuzzy import fuzz\n'), ((684, 700), 'interactiongrader.Answer', 'Answer', (['"""<NAME>"""'], {}), "('<NAME>')\n", (690, 700), False, 'from interactiongrader import Answer\n'), ((752, 768), 'interactiongrader.Answer', 'Answer', (['"""<NAME>"""'], {}), "('<NAME>')\n", (758, 768), False, 'from interactiongrader import Answer\n'), ((820, 836), 'interactiongrader.Answer', 'Answer', (['"""<NAME>"""'], {}), "('<NAME>')\n", (826, 836), False, 'from interactiongrader import Answer\n')] |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'rsrc/ui/mainwindow.ui'
#
# Created: Tue May 6 19:08:29 2014
# by: PyQt4 UI code generator 4.6.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_MainWindow(object):
    """Qt Designer form for the Stats Viewer main window.

    Auto-generated by pyuic4 from ``rsrc/ui/mainwindow.ui`` (see the header
    comment above): do not hand-edit widget construction here — regenerate
    from the .ui file instead.  ``setupUi`` builds the widget tree onto a
    QMainWindow; ``retranslateUi`` installs all user-visible strings.
    """
    def setupUi(self, MainWindow):
        """Create and lay out every widget, menu, toolbar and action on *MainWindow*."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(843, 747)
        self.centralwidget = QtGui.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.verticalLayout = QtGui.QVBoxLayout(self.centralwidget)
        self.verticalLayout.setObjectName("verticalLayout")
        # --- top filter row: report selector, display-length slider, end time ---
        self.filterLayout = QtGui.QHBoxLayout()
        self.filterLayout.setObjectName("filterLayout")
        self.dataFilterLayout = QtGui.QFormLayout()
        self.dataFilterLayout.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
        self.dataFilterLayout.setObjectName("dataFilterLayout")
        self.lblSelectAReport = QtGui.QLabel(self.centralwidget)
        self.lblSelectAReport.setObjectName("lblSelectAReport")
        self.dataFilterLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.lblSelectAReport)
        self.cbReport = QtGui.QComboBox(self.centralwidget)
        self.cbReport.setEnabled(True)
        self.cbReport.setObjectName("cbReport")
        self.dataFilterLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.cbReport)
        self.displayNHoursLabel = QtGui.QLabel(self.centralwidget)
        self.displayNHoursLabel.setObjectName("displayNHoursLabel")
        self.dataFilterLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.displayNHoursLabel)
        # Slider and spinbox share the 1..168 hour range (one week).
        self.lenghtLayout = QtGui.QHBoxLayout()
        self.lenghtLayout.setObjectName("lenghtLayout")
        self.slLength = QtGui.QSlider(self.centralwidget)
        self.slLength.setMinimum(1)
        self.slLength.setMaximum(168)
        self.slLength.setProperty("value", 24)
        self.slLength.setOrientation(QtCore.Qt.Horizontal)
        self.slLength.setObjectName("slLength")
        self.lenghtLayout.addWidget(self.slLength)
        self.spLength = QtGui.QSpinBox(self.centralwidget)
        self.spLength.setMinimum(1)
        self.spLength.setMaximum(168)
        self.spLength.setSingleStep(1)
        self.spLength.setProperty("value", 24)
        self.spLength.setObjectName("spLength")
        self.lenghtLayout.addWidget(self.spLength)
        self.dataFilterLayout.setLayout(1, QtGui.QFormLayout.FieldRole, self.lenghtLayout)
        self.lblUntil = QtGui.QLabel(self.centralwidget)
        self.lblUntil.setObjectName("lblUntil")
        self.dataFilterLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.lblUntil)
        # "Now" checkbox disables the explicit end-date editor by default.
        self.endLayout = QtGui.QHBoxLayout()
        self.endLayout.setObjectName("endLayout")
        self.chkNow = QtGui.QCheckBox(self.centralwidget)
        self.chkNow.setChecked(True)
        self.chkNow.setObjectName("chkNow")
        self.endLayout.addWidget(self.chkNow)
        self.dtEndDate = QtGui.QDateTimeEdit(self.centralwidget)
        self.dtEndDate.setEnabled(False)
        self.dtEndDate.setObjectName("dtEndDate")
        self.endLayout.addWidget(self.dtEndDate)
        self.dataFilterLayout.setLayout(2, QtGui.QFormLayout.FieldRole, self.endLayout)
        self.filterLayout.addLayout(self.dataFilterLayout)
        spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.filterLayout.addItem(spacerItem)
        self.verticalLayout.addLayout(self.filterLayout)
        # --- options row: jobs-usage and RN-usage group boxes ---
        self.optionsLayout = QtGui.QHBoxLayout()
        self.optionsLayout.setObjectName("optionsLayout")
        self.reportGroupBox = QtGui.QVBoxLayout()
        self.reportGroupBox.setObjectName("reportGroupBox")
        self.gbJobUsage = QtGui.QGroupBox(self.centralwidget)
        self.gbJobUsage.setMaximumSize(QtCore.QSize(300, 16777215))
        self.gbJobUsage.setObjectName("gbJobUsage")
        self.formLayout = QtGui.QFormLayout(self.gbJobUsage)
        self.formLayout.setObjectName("formLayout")
        self.cbTrack = QtGui.QComboBox(self.gbJobUsage)
        self.cbTrack.setObjectName("cbTrack")
        self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.cbTrack)
        self.label = QtGui.QLabel(self.gbJobUsage)
        self.label.setObjectName("label")
        self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.label)
        self.groupByLabel = QtGui.QLabel(self.gbJobUsage)
        self.groupByLabel.setObjectName("groupByLabel")
        self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.groupByLabel)
        self.cbGroupBy = QtGui.QComboBox(self.gbJobUsage)
        self.cbGroupBy.setObjectName("cbGroupBy")
        self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.cbGroupBy)
        self.reportGroupBox.addWidget(self.gbJobUsage)
        self.gbRnUsage = QtGui.QGroupBox(self.centralwidget)
        self.gbRnUsage.setMaximumSize(QtCore.QSize(300, 16777215))
        self.gbRnUsage.setFlat(False)
        self.gbRnUsage.setObjectName("gbRnUsage")
        self.horizontalLayout_12 = QtGui.QHBoxLayout(self.gbRnUsage)
        self.horizontalLayout_12.setObjectName("horizontalLayout_12")
        self.chkWorking = QtGui.QCheckBox(self.gbRnUsage)
        self.chkWorking.setChecked(True)
        self.chkWorking.setObjectName("chkWorking")
        self.horizontalLayout_12.addWidget(self.chkWorking)
        self.chkPaused = QtGui.QCheckBox(self.gbRnUsage)
        self.chkPaused.setChecked(True)
        self.chkPaused.setObjectName("chkPaused")
        self.horizontalLayout_12.addWidget(self.chkPaused)
        self.chkIdle = QtGui.QCheckBox(self.gbRnUsage)
        self.chkIdle.setChecked(True)
        self.chkIdle.setObjectName("chkIdle")
        self.horizontalLayout_12.addWidget(self.chkIdle)
        self.chkOffline = QtGui.QCheckBox(self.gbRnUsage)
        self.chkOffline.setChecked(True)
        self.chkOffline.setObjectName("chkOffline")
        self.horizontalLayout_12.addWidget(self.chkOffline)
        self.reportGroupBox.addWidget(self.gbRnUsage)
        spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.reportGroupBox.addItem(spacerItem1)
        self.optionsLayout.addLayout(self.reportGroupBox)
        # --- display options group: resolution, graph/scale type, style ---
        self.displayGroupBox = QtGui.QGroupBox(self.centralwidget)
        self.displayGroupBox.setMaximumSize(QtCore.QSize(269, 16777215))
        self.displayGroupBox.setObjectName("displayGroupBox")
        self.formLayout_4 = QtGui.QFormLayout(self.displayGroupBox)
        self.formLayout_4.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
        self.formLayout_4.setObjectName("formLayout_4")
        self.lblResolution = QtGui.QLabel(self.displayGroupBox)
        self.lblResolution.setObjectName("lblResolution")
        self.formLayout_4.setWidget(0, QtGui.QFormLayout.LabelRole, self.lblResolution)
        self.resolutionLayout = QtGui.QHBoxLayout()
        self.resolutionLayout.setObjectName("resolutionLayout")
        self.slResolution = QtGui.QSlider(self.displayGroupBox)
        self.slResolution.setMinimum(8)
        self.slResolution.setMaximum(60)
        self.slResolution.setSingleStep(1)
        self.slResolution.setProperty("value", 30)
        self.slResolution.setOrientation(QtCore.Qt.Horizontal)
        self.slResolution.setObjectName("slResolution")
        self.resolutionLayout.addWidget(self.slResolution)
        self.spResolution = QtGui.QSpinBox(self.displayGroupBox)
        self.spResolution.setMinimum(8)
        self.spResolution.setMaximum(60)
        self.spResolution.setSingleStep(1)
        self.spResolution.setProperty("value", 30)
        self.spResolution.setObjectName("spResolution")
        self.resolutionLayout.addWidget(self.spResolution)
        self.formLayout_4.setLayout(0, QtGui.QFormLayout.FieldRole, self.resolutionLayout)
        self.lblGraphType = QtGui.QLabel(self.displayGroupBox)
        self.lblGraphType.setObjectName("lblGraphType")
        self.formLayout_4.setWidget(1, QtGui.QFormLayout.LabelRole, self.lblGraphType)
        # Combo items are created empty here; their texts are set in retranslateUi.
        self.cbGraphType = QtGui.QComboBox(self.displayGroupBox)
        self.cbGraphType.setObjectName("cbGraphType")
        self.cbGraphType.addItem("")
        self.cbGraphType.addItem("")
        self.formLayout_4.setWidget(1, QtGui.QFormLayout.FieldRole, self.cbGraphType)
        self.lblScaleType = QtGui.QLabel(self.displayGroupBox)
        self.lblScaleType.setObjectName("lblScaleType")
        self.formLayout_4.setWidget(2, QtGui.QFormLayout.LabelRole, self.lblScaleType)
        self.cbScaleType = QtGui.QComboBox(self.displayGroupBox)
        self.cbScaleType.setObjectName("cbScaleType")
        self.cbScaleType.addItem("")
        self.cbScaleType.addItem("")
        self.formLayout_4.setWidget(2, QtGui.QFormLayout.FieldRole, self.cbScaleType)
        self.lblStyle = QtGui.QLabel(self.displayGroupBox)
        self.lblStyle.setObjectName("lblStyle")
        self.formLayout_4.setWidget(3, QtGui.QFormLayout.LabelRole, self.lblStyle)
        self.cbGraphStyle = QtGui.QComboBox(self.displayGroupBox)
        self.cbGraphStyle.setObjectName("cbGraphStyle")
        self.cbGraphStyle.addItem("")
        self.cbGraphStyle.addItem("")
        self.cbGraphStyle.addItem("")
        self.cbGraphStyle.addItem("")
        self.cbGraphStyle.addItem("")
        self.cbGraphStyle.addItem("")
        self.cbGraphStyle.addItem("")
        self.formLayout_4.setWidget(3, QtGui.QFormLayout.FieldRole, self.cbGraphStyle)
        self.lblScaleRound = QtGui.QLabel(self.displayGroupBox)
        self.lblScaleRound.setObjectName("lblScaleRound")
        self.formLayout_4.setWidget(4, QtGui.QFormLayout.LabelRole, self.lblScaleRound)
        self.cbScaleRound = QtGui.QComboBox(self.displayGroupBox)
        self.cbScaleRound.setObjectName("cbScaleRound")
        self.cbScaleRound.addItem("")
        self.cbScaleRound.addItem("")
        self.cbScaleRound.addItem("")
        self.cbScaleRound.addItem("")
        self.formLayout_4.setWidget(4, QtGui.QFormLayout.FieldRole, self.cbScaleRound)
        self.spScaleResolution = QtGui.QSpinBox(self.displayGroupBox)
        self.spScaleResolution.setMinimum(8)
        self.spScaleResolution.setMaximum(40)
        self.spScaleResolution.setProperty("value", 20)
        self.spScaleResolution.setObjectName("spScaleResolution")
        self.formLayout_4.setWidget(5, QtGui.QFormLayout.FieldRole, self.spScaleResolution)
        self.lblScaleResolution = QtGui.QLabel(self.displayGroupBox)
        self.lblScaleResolution.setObjectName("lblScaleResolution")
        self.formLayout_4.setWidget(5, QtGui.QFormLayout.LabelRole, self.lblScaleResolution)
        self.optionsLayout.addWidget(self.displayGroupBox)
        spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.optionsLayout.addItem(spacerItem2)
        self.verticalLayout.addLayout(self.optionsLayout)
        # --- central graph view plus a small read-only log pane ---
        # NOTE(review): QtWebKit is imported at the bottom of this module; that is
        # fine at runtime because setupUi only runs after the module is loaded.
        self.webView = QtWebKit.QWebView(self.centralwidget)
        self.webView.setUrl(QtCore.QUrl("about:blank"))
        self.webView.setObjectName("webView")
        self.verticalLayout.addWidget(self.webView)
        self.log = QtGui.QPlainTextEdit(self.centralwidget)
        self.log.setMaximumSize(QtCore.QSize(16777215, 60))
        self.log.setUndoRedoEnabled(False)
        self.log.setReadOnly(True)
        self.log.setObjectName("log")
        self.verticalLayout.addWidget(self.log)
        MainWindow.setCentralWidget(self.centralwidget)
        # --- menus, status bar, tool bar and actions ---
        self.menubar = QtGui.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 843, 22))
        self.menubar.setObjectName("menubar")
        self.menuFile = QtGui.QMenu(self.menubar)
        self.menuFile.setEnabled(False)
        self.menuFile.setObjectName("menuFile")
        self.menuSnapshots = QtGui.QMenu(self.menubar)
        self.menuSnapshots.setEnabled(False)
        self.menuSnapshots.setObjectName("menuSnapshots")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtGui.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.toolBar = QtGui.QToolBar(MainWindow)
        self.toolBar.setMovable(False)
        self.toolBar.setIconSize(QtCore.QSize(24, 24))
        self.toolBar.setToolButtonStyle(QtCore.Qt.ToolButtonIconOnly)
        self.toolBar.setFloatable(False)
        self.toolBar.setObjectName("toolBar")
        MainWindow.addToolBar(QtCore.Qt.TopToolBarArea, self.toolBar)
        self.actionSave_as = QtGui.QAction(MainWindow)
        self.actionSave_as.setObjectName("actionSave_as")
        self.actionSnapshot = QtGui.QAction(MainWindow)
        self.actionSnapshot.setObjectName("actionSnapshot")
        self.actionCreate = QtGui.QAction(MainWindow)
        self.actionCreate.setObjectName("actionCreate")
        self.actionDelete = QtGui.QAction(MainWindow)
        self.actionDelete.setObjectName("actionDelete")
        self.actionClear_all_snapshots = QtGui.QAction(MainWindow)
        self.actionClear_all_snapshots.setObjectName("actionClear_all_snapshots")
        self.actionGenerate = QtGui.QAction(MainWindow)
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap("../refresh.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionGenerate.setIcon(icon)
        self.actionGenerate.setIconVisibleInMenu(True)
        self.actionGenerate.setObjectName("actionGenerate")
        self.actionReset = QtGui.QAction(MainWindow)
        self.actionReset.setEnabled(True)
        self.actionReset.setObjectName("actionReset")
        self.actionExport_as = QtGui.QAction(MainWindow)
        self.actionExport_as.setObjectName("actionExport_as")
        self.menuFile.addAction(self.actionExport_as)
        self.menuSnapshots.addAction(self.actionCreate)
        self.menuSnapshots.addAction(self.actionDelete)
        self.menuSnapshots.addAction(self.actionClear_all_snapshots)
        self.menubar.addAction(self.menuFile.menuAction())
        self.menubar.addAction(self.menuSnapshots.menuAction())
        self.toolBar.addAction(self.actionGenerate)
        self.toolBar.addAction(self.actionReset)
        self.retranslateUi(MainWindow)
        self.cbScaleRound.setCurrentIndex(3)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Install every user-visible string (pyuic-generated translation hook)."""
        MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "Stats Viewer", None, QtGui.QApplication.UnicodeUTF8))
        self.lblSelectAReport.setText(QtGui.QApplication.translate("MainWindow", "Select a report", None, QtGui.QApplication.UnicodeUTF8))
        self.displayNHoursLabel.setText(QtGui.QApplication.translate("MainWindow", "Display N hours", None, QtGui.QApplication.UnicodeUTF8))
        self.lblUntil.setText(QtGui.QApplication.translate("MainWindow", "Until", None, QtGui.QApplication.UnicodeUTF8))
        self.chkNow.setText(QtGui.QApplication.translate("MainWindow", "Now", None, QtGui.QApplication.UnicodeUTF8))
        self.gbJobUsage.setTitle(QtGui.QApplication.translate("MainWindow", "Jobs usage options", None, QtGui.QApplication.UnicodeUTF8))
        self.label.setText(QtGui.QApplication.translate("MainWindow", "Track Value", None, QtGui.QApplication.UnicodeUTF8))
        self.groupByLabel.setText(QtGui.QApplication.translate("MainWindow", "Group by", None, QtGui.QApplication.UnicodeUTF8))
        self.gbRnUsage.setTitle(QtGui.QApplication.translate("MainWindow", "RN usage options", None, QtGui.QApplication.UnicodeUTF8))
        self.chkWorking.setText(QtGui.QApplication.translate("MainWindow", "Working", None, QtGui.QApplication.UnicodeUTF8))
        self.chkPaused.setText(QtGui.QApplication.translate("MainWindow", "Paused", None, QtGui.QApplication.UnicodeUTF8))
        self.chkIdle.setText(QtGui.QApplication.translate("MainWindow", "Idle", None, QtGui.QApplication.UnicodeUTF8))
        self.chkOffline.setText(QtGui.QApplication.translate("MainWindow", "Offline", None, QtGui.QApplication.UnicodeUTF8))
        self.displayGroupBox.setTitle(QtGui.QApplication.translate("MainWindow", "Display options", None, QtGui.QApplication.UnicodeUTF8))
        self.lblResolution.setText(QtGui.QApplication.translate("MainWindow", "Resolution", None, QtGui.QApplication.UnicodeUTF8))
        self.lblGraphType.setText(QtGui.QApplication.translate("MainWindow", "Graph type", None, QtGui.QApplication.UnicodeUTF8))
        self.cbGraphType.setItemText(0, QtGui.QApplication.translate("MainWindow", "Line", None, QtGui.QApplication.UnicodeUTF8))
        self.cbGraphType.setItemText(1, QtGui.QApplication.translate("MainWindow", "Stacked", None, QtGui.QApplication.UnicodeUTF8))
        self.lblScaleType.setText(QtGui.QApplication.translate("MainWindow", "Scale type", None, QtGui.QApplication.UnicodeUTF8))
        self.cbScaleType.setItemText(0, QtGui.QApplication.translate("MainWindow", "Standard", None, QtGui.QApplication.UnicodeUTF8))
        self.cbScaleType.setItemText(1, QtGui.QApplication.translate("MainWindow", "Logarithmic", None, QtGui.QApplication.UnicodeUTF8))
        self.lblStyle.setText(QtGui.QApplication.translate("MainWindow", "Style", None, QtGui.QApplication.UnicodeUTF8))
        self.cbGraphStyle.setItemText(0, QtGui.QApplication.translate("MainWindow", "RedBlue", None, QtGui.QApplication.UnicodeUTF8))
        self.cbGraphStyle.setItemText(1, QtGui.QApplication.translate("MainWindow", "Default", None, QtGui.QApplication.UnicodeUTF8))
        self.cbGraphStyle.setItemText(2, QtGui.QApplication.translate("MainWindow", "Blue", None, QtGui.QApplication.UnicodeUTF8))
        self.cbGraphStyle.setItemText(3, QtGui.QApplication.translate("MainWindow", "Light", None, QtGui.QApplication.UnicodeUTF8))
        self.cbGraphStyle.setItemText(4, QtGui.QApplication.translate("MainWindow", "Clean", None, QtGui.QApplication.UnicodeUTF8))
        self.cbGraphStyle.setItemText(5, QtGui.QApplication.translate("MainWindow", "DarkColorized", None, QtGui.QApplication.UnicodeUTF8))
        self.cbGraphStyle.setItemText(6, QtGui.QApplication.translate("MainWindow", "DarkGreenBlue", None, QtGui.QApplication.UnicodeUTF8))
        self.lblScaleRound.setText(QtGui.QApplication.translate("MainWindow", "Round time (min)", None, QtGui.QApplication.UnicodeUTF8))
        self.cbScaleRound.setItemText(0, QtGui.QApplication.translate("MainWindow", "1", None, QtGui.QApplication.UnicodeUTF8))
        self.cbScaleRound.setItemText(1, QtGui.QApplication.translate("MainWindow", "10", None, QtGui.QApplication.UnicodeUTF8))
        self.cbScaleRound.setItemText(2, QtGui.QApplication.translate("MainWindow", "30", None, QtGui.QApplication.UnicodeUTF8))
        self.cbScaleRound.setItemText(3, QtGui.QApplication.translate("MainWindow", "60", None, QtGui.QApplication.UnicodeUTF8))
        self.lblScaleResolution.setText(QtGui.QApplication.translate("MainWindow", "Resolution in scale", None, QtGui.QApplication.UnicodeUTF8))
        self.menuFile.setTitle(QtGui.QApplication.translate("MainWindow", "&Graph", None, QtGui.QApplication.UnicodeUTF8))
        self.menuSnapshots.setTitle(QtGui.QApplication.translate("MainWindow", "&Snapshots", None, QtGui.QApplication.UnicodeUTF8))
        self.toolBar.setWindowTitle(QtGui.QApplication.translate("MainWindow", "toolBar", None, QtGui.QApplication.UnicodeUTF8))
        self.actionSave_as.setText(QtGui.QApplication.translate("MainWindow", "Save as...", None, QtGui.QApplication.UnicodeUTF8))
        self.actionSnapshot.setText(QtGui.QApplication.translate("MainWindow", "Snapshot", None, QtGui.QApplication.UnicodeUTF8))
        self.actionCreate.setText(QtGui.QApplication.translate("MainWindow", "&Create", None, QtGui.QApplication.UnicodeUTF8))
        self.actionCreate.setShortcut(QtGui.QApplication.translate("MainWindow", "Ctrl+T", None, QtGui.QApplication.UnicodeUTF8))
        self.actionDelete.setText(QtGui.QApplication.translate("MainWindow", "Delete", None, QtGui.QApplication.UnicodeUTF8))
        self.actionClear_all_snapshots.setText(QtGui.QApplication.translate("MainWindow", "Clear all snapshots", None, QtGui.QApplication.UnicodeUTF8))
        self.actionGenerate.setText(QtGui.QApplication.translate("MainWindow", "Refresh", None, QtGui.QApplication.UnicodeUTF8))
        self.actionGenerate.setToolTip(QtGui.QApplication.translate("MainWindow", "Create graph with current parameters", None, QtGui.QApplication.UnicodeUTF8))
        self.actionGenerate.setStatusTip(QtGui.QApplication.translate("MainWindow", "Create graph with current parameters", None, QtGui.QApplication.UnicodeUTF8))
        self.actionGenerate.setShortcut(QtGui.QApplication.translate("MainWindow", "F5", None, QtGui.QApplication.UnicodeUTF8))
        self.actionReset.setText(QtGui.QApplication.translate("MainWindow", "Reset", None, QtGui.QApplication.UnicodeUTF8))
        self.actionReset.setToolTip(QtGui.QApplication.translate("MainWindow", "Set parameter values to default", None, QtGui.QApplication.UnicodeUTF8))
        self.actionExport_as.setText(QtGui.QApplication.translate("MainWindow", "&Export to...", None, QtGui.QApplication.UnicodeUTF8))
from PyQt4 import QtWebKit
| [
"PyQt4.QtGui.QLabel",
"PyQt4.QtGui.QStatusBar",
"PyQt4.QtGui.QApplication.translate",
"PyQt4.QtGui.QMenuBar",
"PyQt4.QtGui.QSlider",
"PyQt4.QtCore.QSize",
"PyQt4.QtGui.QWidget",
"PyQt4.QtGui.QAction",
"PyQt4.QtGui.QMenu",
"PyQt4.QtCore.QMetaObject.connectSlotsByName",
"PyQt4.QtGui.QComboBox",
... | [((448, 473), 'PyQt4.QtGui.QWidget', 'QtGui.QWidget', (['MainWindow'], {}), '(MainWindow)\n', (461, 473), False, 'from PyQt4 import QtCore, QtGui\n'), ((562, 599), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', (['self.centralwidget'], {}), '(self.centralwidget)\n', (579, 599), False, 'from PyQt4 import QtCore, QtGui\n'), ((688, 707), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (705, 707), False, 'from PyQt4 import QtCore, QtGui\n'), ((796, 815), 'PyQt4.QtGui.QFormLayout', 'QtGui.QFormLayout', ([], {}), '()\n', (813, 815), False, 'from PyQt4 import QtCore, QtGui\n'), ((1004, 1036), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1016, 1036), False, 'from PyQt4 import QtCore, QtGui\n'), ((1220, 1255), 'PyQt4.QtGui.QComboBox', 'QtGui.QComboBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1235, 1255), False, 'from PyQt4 import QtCore, QtGui\n'), ((1464, 1496), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1476, 1496), False, 'from PyQt4 import QtCore, QtGui\n'), ((1690, 1709), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (1707, 1709), False, 'from PyQt4 import QtCore, QtGui\n'), ((1790, 1823), 'PyQt4.QtGui.QSlider', 'QtGui.QSlider', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1803, 1823), False, 'from PyQt4 import QtCore, QtGui\n'), ((2127, 2161), 'PyQt4.QtGui.QSpinBox', 'QtGui.QSpinBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2141, 2161), False, 'from PyQt4 import QtCore, QtGui\n'), ((2536, 2568), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2548, 2568), False, 'from PyQt4 import QtCore, QtGui\n'), ((2729, 2748), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (2746, 2748), False, 'from PyQt4 import QtCore, QtGui\n'), ((2821, 2856), 'PyQt4.QtGui.QCheckBox', 'QtGui.QCheckBox', (['self.centralwidget'], {}), 
'(self.centralwidget)\n', (2836, 2856), False, 'from PyQt4 import QtCore, QtGui\n'), ((3009, 3048), 'PyQt4.QtGui.QDateTimeEdit', 'QtGui.QDateTimeEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (3028, 3048), False, 'from PyQt4 import QtCore, QtGui\n'), ((3357, 3443), 'PyQt4.QtGui.QSpacerItem', 'QtGui.QSpacerItem', (['(40)', '(20)', 'QtGui.QSizePolicy.Expanding', 'QtGui.QSizePolicy.Minimum'], {}), '(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.\n Minimum)\n', (3374, 3443), False, 'from PyQt4 import QtCore, QtGui\n'), ((3571, 3590), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (3588, 3590), False, 'from PyQt4 import QtCore, QtGui\n'), ((3679, 3698), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', ([], {}), '()\n', (3696, 3698), False, 'from PyQt4 import QtCore, QtGui\n'), ((3785, 3820), 'PyQt4.QtGui.QGroupBox', 'QtGui.QGroupBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (3800, 3820), False, 'from PyQt4 import QtCore, QtGui\n'), ((3967, 4001), 'PyQt4.QtGui.QFormLayout', 'QtGui.QFormLayout', (['self.gbJobUsage'], {}), '(self.gbJobUsage)\n', (3984, 4001), False, 'from PyQt4 import QtCore, QtGui\n'), ((4077, 4109), 'PyQt4.QtGui.QComboBox', 'QtGui.QComboBox', (['self.gbJobUsage'], {}), '(self.gbJobUsage)\n', (4092, 4109), False, 'from PyQt4 import QtCore, QtGui\n'), ((4257, 4286), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.gbJobUsage'], {}), '(self.gbJobUsage)\n', (4269, 4286), False, 'from PyQt4 import QtCore, QtGui\n'), ((4435, 4464), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.gbJobUsage'], {}), '(self.gbJobUsage)\n', (4447, 4464), False, 'from PyQt4 import QtCore, QtGui\n'), ((4631, 4663), 'PyQt4.QtGui.QComboBox', 'QtGui.QComboBox', (['self.gbJobUsage'], {}), '(self.gbJobUsage)\n', (4646, 4663), False, 'from PyQt4 import QtCore, QtGui\n'), ((4876, 4911), 'PyQt4.QtGui.QGroupBox', 'QtGui.QGroupBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (4891, 4911), False, 'from PyQt4 import QtCore, 
QtGui\n'), ((5102, 5135), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', (['self.gbRnUsage'], {}), '(self.gbRnUsage)\n', (5119, 5135), False, 'from PyQt4 import QtCore, QtGui\n'), ((5232, 5263), 'PyQt4.QtGui.QCheckBox', 'QtGui.QCheckBox', (['self.gbRnUsage'], {}), '(self.gbRnUsage)\n', (5247, 5263), False, 'from PyQt4 import QtCore, QtGui\n'), ((5442, 5473), 'PyQt4.QtGui.QCheckBox', 'QtGui.QCheckBox', (['self.gbRnUsage'], {}), '(self.gbRnUsage)\n', (5457, 5473), False, 'from PyQt4 import QtCore, QtGui\n'), ((5646, 5677), 'PyQt4.QtGui.QCheckBox', 'QtGui.QCheckBox', (['self.gbRnUsage'], {}), '(self.gbRnUsage)\n', (5661, 5677), False, 'from PyQt4 import QtCore, QtGui\n'), ((5845, 5876), 'PyQt4.QtGui.QCheckBox', 'QtGui.QCheckBox', (['self.gbRnUsage'], {}), '(self.gbRnUsage)\n', (5860, 5876), False, 'from PyQt4 import QtCore, QtGui\n'), ((6106, 6192), 'PyQt4.QtGui.QSpacerItem', 'QtGui.QSpacerItem', (['(20)', '(40)', 'QtGui.QSizePolicy.Minimum', 'QtGui.QSizePolicy.Expanding'], {}), '(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.\n Expanding)\n', (6123, 6192), False, 'from PyQt4 import QtCore, QtGui\n'), ((6326, 6361), 'PyQt4.QtGui.QGroupBox', 'QtGui.QGroupBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (6341, 6361), False, 'from PyQt4 import QtCore, QtGui\n'), ((6525, 6564), 'PyQt4.QtGui.QFormLayout', 'QtGui.QFormLayout', (['self.displayGroupBox'], {}), '(self.displayGroupBox)\n', (6542, 6564), False, 'from PyQt4 import QtCore, QtGui\n'), ((6738, 6772), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.displayGroupBox'], {}), '(self.displayGroupBox)\n', (6750, 6772), False, 'from PyQt4 import QtCore, QtGui\n'), ((6951, 6970), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (6968, 6970), False, 'from PyQt4 import QtCore, QtGui\n'), ((7063, 7098), 'PyQt4.QtGui.QSlider', 'QtGui.QSlider', (['self.displayGroupBox'], {}), '(self.displayGroupBox)\n', (7076, 7098), False, 'from PyQt4 import QtCore, QtGui\n'), ((7480, 7516), 
'PyQt4.QtGui.QSpinBox', 'QtGui.QSpinBox', (['self.displayGroupBox'], {}), '(self.displayGroupBox)\n', (7494, 7516), False, 'from PyQt4 import QtCore, QtGui\n'), ((7926, 7960), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.displayGroupBox'], {}), '(self.displayGroupBox)\n', (7938, 7960), False, 'from PyQt4 import QtCore, QtGui\n'), ((8131, 8168), 'PyQt4.QtGui.QComboBox', 'QtGui.QComboBox', (['self.displayGroupBox'], {}), '(self.displayGroupBox)\n', (8146, 8168), False, 'from PyQt4 import QtCore, QtGui\n'), ((8411, 8445), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.displayGroupBox'], {}), '(self.displayGroupBox)\n', (8423, 8445), False, 'from PyQt4 import QtCore, QtGui\n'), ((8616, 8653), 'PyQt4.QtGui.QComboBox', 'QtGui.QComboBox', (['self.displayGroupBox'], {}), '(self.displayGroupBox)\n', (8631, 8653), False, 'from PyQt4 import QtCore, QtGui\n'), ((8892, 8926), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.displayGroupBox'], {}), '(self.displayGroupBox)\n', (8904, 8926), False, 'from PyQt4 import QtCore, QtGui\n'), ((9086, 9123), 'PyQt4.QtGui.QComboBox', 'QtGui.QComboBox', (['self.displayGroupBox'], {}), '(self.displayGroupBox)\n', (9101, 9123), False, 'from PyQt4 import QtCore, QtGui\n'), ((9562, 9596), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.displayGroupBox'], {}), '(self.displayGroupBox)\n', (9574, 9596), False, 'from PyQt4 import QtCore, QtGui\n'), ((9771, 9808), 'PyQt4.QtGui.QComboBox', 'QtGui.QComboBox', (['self.displayGroupBox'], {}), '(self.displayGroupBox)\n', (9786, 9808), False, 'from PyQt4 import QtCore, QtGui\n'), ((10137, 10173), 'PyQt4.QtGui.QSpinBox', 'QtGui.QSpinBox', (['self.displayGroupBox'], {}), '(self.displayGroupBox)\n', (10151, 10173), False, 'from PyQt4 import QtCore, QtGui\n'), ((10513, 10547), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.displayGroupBox'], {}), '(self.displayGroupBox)\n', (10525, 10547), False, 'from PyQt4 import QtCore, QtGui\n'), ((10790, 10876), 'PyQt4.QtGui.QSpacerItem', 'QtGui.QSpacerItem', (['(40)', '(20)', 
'QtGui.QSizePolicy.Expanding', 'QtGui.QSizePolicy.Minimum'], {}), '(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.\n Minimum)\n', (10807, 10876), False, 'from PyQt4 import QtCore, QtGui\n'), ((11001, 11038), 'PyQt4.QtWebKit.QWebView', 'QtWebKit.QWebView', (['self.centralwidget'], {}), '(self.centralwidget)\n', (11018, 11038), False, 'from PyQt4 import QtWebKit\n'), ((11212, 11252), 'PyQt4.QtGui.QPlainTextEdit', 'QtGui.QPlainTextEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (11232, 11252), False, 'from PyQt4 import QtCore, QtGui\n'), ((11556, 11582), 'PyQt4.QtGui.QMenuBar', 'QtGui.QMenuBar', (['MainWindow'], {}), '(MainWindow)\n', (11570, 11582), False, 'from PyQt4 import QtCore, QtGui\n'), ((11715, 11740), 'PyQt4.QtGui.QMenu', 'QtGui.QMenu', (['self.menubar'], {}), '(self.menubar)\n', (11726, 11740), False, 'from PyQt4 import QtCore, QtGui\n'), ((11858, 11883), 'PyQt4.QtGui.QMenu', 'QtGui.QMenu', (['self.menubar'], {}), '(self.menubar)\n', (11869, 11883), False, 'from PyQt4 import QtCore, QtGui\n'), ((12056, 12084), 'PyQt4.QtGui.QStatusBar', 'QtGui.QStatusBar', (['MainWindow'], {}), '(MainWindow)\n', (12072, 12084), False, 'from PyQt4 import QtCore, QtGui\n'), ((12206, 12232), 'PyQt4.QtGui.QToolBar', 'QtGui.QToolBar', (['MainWindow'], {}), '(MainWindow)\n', (12220, 12232), False, 'from PyQt4 import QtCore, QtGui\n'), ((12583, 12608), 'PyQt4.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (12596, 12608), False, 'from PyQt4 import QtCore, QtGui\n'), ((12697, 12722), 'PyQt4.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (12710, 12722), False, 'from PyQt4 import QtCore, QtGui\n'), ((12811, 12836), 'PyQt4.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (12824, 12836), False, 'from PyQt4 import QtCore, QtGui\n'), ((12921, 12946), 'PyQt4.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (12934, 12946), False, 'from PyQt4 import QtCore, QtGui\n'), 
((13044, 13069), 'PyQt4.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (13057, 13069), False, 'from PyQt4 import QtCore, QtGui\n'), ((13182, 13207), 'PyQt4.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (13195, 13207), False, 'from PyQt4 import QtCore, QtGui\n'), ((13223, 13236), 'PyQt4.QtGui.QIcon', 'QtGui.QIcon', ([], {}), '()\n', (13234, 13236), False, 'from PyQt4 import QtCore, QtGui\n'), ((13514, 13539), 'PyQt4.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (13527, 13539), False, 'from PyQt4 import QtCore, QtGui\n'), ((13667, 13692), 'PyQt4.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (13680, 13692), False, 'from PyQt4 import QtCore, QtGui\n'), ((14307, 14356), 'PyQt4.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['MainWindow'], {}), '(MainWindow)\n', (14344, 14356), False, 'from PyQt4 import QtCore, QtGui\n'), ((3860, 3887), 'PyQt4.QtCore.QSize', 'QtCore.QSize', (['(300)', '(16777215)'], {}), '(300, 16777215)\n', (3872, 3887), False, 'from PyQt4 import QtCore, QtGui\n'), ((4950, 4977), 'PyQt4.QtCore.QSize', 'QtCore.QSize', (['(300)', '(16777215)'], {}), '(300, 16777215)\n', (4962, 4977), False, 'from PyQt4 import QtCore, QtGui\n'), ((6406, 6433), 'PyQt4.QtCore.QSize', 'QtCore.QSize', (['(269)', '(16777215)'], {}), '(269, 16777215)\n', (6418, 6433), False, 'from PyQt4 import QtCore, QtGui\n'), ((11067, 11093), 'PyQt4.QtCore.QUrl', 'QtCore.QUrl', (['"""about:blank"""'], {}), "('about:blank')\n", (11078, 11093), False, 'from PyQt4 import QtCore, QtGui\n'), ((11285, 11311), 'PyQt4.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(60)'], {}), '(16777215, 60)\n', (11297, 11311), False, 'from PyQt4 import QtCore, QtGui\n'), ((11616, 11643), 'PyQt4.QtCore.QRect', 'QtCore.QRect', (['(0)', '(0)', '(843)', '(22)'], {}), '(0, 0, 843, 22)\n', (11628, 11643), False, 'from PyQt4 import QtCore, QtGui\n'), ((12305, 12325), 
'PyQt4.QtCore.QSize', 'QtCore.QSize', (['(24)', '(24)'], {}), '(24, 24)\n', (12317, 12325), False, 'from PyQt4 import QtCore, QtGui\n'), ((13260, 13291), 'PyQt4.QtGui.QPixmap', 'QtGui.QPixmap', (['"""../refresh.png"""'], {}), "('../refresh.png')\n", (13273, 13291), False, 'from PyQt4 import QtCore, QtGui\n'), ((14433, 14534), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Stats Viewer"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Stats Viewer', None, QtGui.\n QApplication.UnicodeUTF8)\n", (14461, 14534), False, 'from PyQt4 import QtCore, QtGui\n'), ((14569, 14673), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Select a report"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Select a report', None, QtGui.\n QApplication.UnicodeUTF8)\n", (14597, 14673), False, 'from PyQt4 import QtCore, QtGui\n'), ((14710, 14814), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Display N hours"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Display N hours', None, QtGui.\n QApplication.UnicodeUTF8)\n", (14738, 14814), False, 'from PyQt4 import QtCore, QtGui\n'), ((14841, 14935), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Until"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Until', None, QtGui.\n QApplication.UnicodeUTF8)\n", (14869, 14935), False, 'from PyQt4 import QtCore, QtGui\n'), ((14960, 15052), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Now"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Now', None, QtGui.QApplication.\n UnicodeUTF8)\n", (14988, 15052), False, 'from PyQt4 import QtCore, QtGui\n'), ((15082, 15188), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Jobs usage 
options"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Jobs usage options', None,\n QtGui.QApplication.UnicodeUTF8)\n", (15110, 15188), False, 'from PyQt4 import QtCore, QtGui\n'), ((15213, 15313), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Track Value"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Track Value', None, QtGui.\n QApplication.UnicodeUTF8)\n", (15241, 15313), False, 'from PyQt4 import QtCore, QtGui\n'), ((15344, 15441), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Group by"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Group by', None, QtGui.\n QApplication.UnicodeUTF8)\n", (15372, 15441), False, 'from PyQt4 import QtCore, QtGui\n'), ((15470, 15575), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""RN usage options"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'RN usage options', None, QtGui.\n QApplication.UnicodeUTF8)\n", (15498, 15575), False, 'from PyQt4 import QtCore, QtGui\n'), ((15604, 15700), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Working"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Working', None, QtGui.\n QApplication.UnicodeUTF8)\n", (15632, 15700), False, 'from PyQt4 import QtCore, QtGui\n'), ((15728, 15823), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Paused"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Paused', None, QtGui.\n QApplication.UnicodeUTF8)\n", (15756, 15823), False, 'from PyQt4 import QtCore, QtGui\n'), ((15849, 15942), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Idle"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Idle', None, QtGui.QApplication\n 
.UnicodeUTF8)\n", (15877, 15942), False, 'from PyQt4 import QtCore, QtGui\n'), ((15971, 16067), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Offline"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Offline', None, QtGui.\n QApplication.UnicodeUTF8)\n", (15999, 16067), False, 'from PyQt4 import QtCore, QtGui\n'), ((16102, 16206), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Display options"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Display options', None, QtGui.\n QApplication.UnicodeUTF8)\n", (16130, 16206), False, 'from PyQt4 import QtCore, QtGui\n'), ((16238, 16337), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Resolution"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Resolution', None, QtGui.\n QApplication.UnicodeUTF8)\n", (16266, 16337), False, 'from PyQt4 import QtCore, QtGui\n'), ((16368, 16467), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Graph type"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Graph type', None, QtGui.\n QApplication.UnicodeUTF8)\n", (16396, 16467), False, 'from PyQt4 import QtCore, QtGui\n'), ((16504, 16597), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Line"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Line', None, QtGui.QApplication\n .UnicodeUTF8)\n", (16532, 16597), False, 'from PyQt4 import QtCore, QtGui\n'), ((16634, 16730), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Stacked"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Stacked', None, QtGui.\n QApplication.UnicodeUTF8)\n", (16662, 16730), False, 'from PyQt4 import QtCore, QtGui\n'), ((16761, 16860), 'PyQt4.QtGui.QApplication.translate', 
'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Scale type"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Scale type', None, QtGui.\n QApplication.UnicodeUTF8)\n", (16789, 16860), False, 'from PyQt4 import QtCore, QtGui\n'), ((16897, 16994), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Standard"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Standard', None, QtGui.\n QApplication.UnicodeUTF8)\n", (16925, 16994), False, 'from PyQt4 import QtCore, QtGui\n'), ((17031, 17131), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Logarithmic"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Logarithmic', None, QtGui.\n QApplication.UnicodeUTF8)\n", (17059, 17131), False, 'from PyQt4 import QtCore, QtGui\n'), ((17158, 17252), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Style"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Style', None, QtGui.\n QApplication.UnicodeUTF8)\n", (17186, 17252), False, 'from PyQt4 import QtCore, QtGui\n'), ((17290, 17386), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""RedBlue"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'RedBlue', None, QtGui.\n QApplication.UnicodeUTF8)\n", (17318, 17386), False, 'from PyQt4 import QtCore, QtGui\n'), ((17424, 17520), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Default"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Default', None, QtGui.\n QApplication.UnicodeUTF8)\n", (17452, 17520), False, 'from PyQt4 import QtCore, QtGui\n'), ((17558, 17651), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Blue"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Blue', None, 
QtGui.QApplication\n .UnicodeUTF8)\n", (17586, 17651), False, 'from PyQt4 import QtCore, QtGui\n'), ((17689, 17783), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Light"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Light', None, QtGui.\n QApplication.UnicodeUTF8)\n", (17717, 17783), False, 'from PyQt4 import QtCore, QtGui\n'), ((17821, 17915), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Clean"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Clean', None, QtGui.\n QApplication.UnicodeUTF8)\n", (17849, 17915), False, 'from PyQt4 import QtCore, QtGui\n'), ((17953, 18055), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""DarkColorized"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'DarkColorized', None, QtGui.\n QApplication.UnicodeUTF8)\n", (17981, 18055), False, 'from PyQt4 import QtCore, QtGui\n'), ((18093, 18195), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""DarkGreenBlue"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'DarkGreenBlue', None, QtGui.\n QApplication.UnicodeUTF8)\n", (18121, 18195), False, 'from PyQt4 import QtCore, QtGui\n'), ((18227, 18332), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Round time (min)"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Round time (min)', None, QtGui.\n QApplication.UnicodeUTF8)\n", (18255, 18332), False, 'from PyQt4 import QtCore, QtGui\n'), ((18370, 18460), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""1"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', '1', None, QtGui.QApplication.\n UnicodeUTF8)\n", (18398, 18460), False, 'from PyQt4 import QtCore, QtGui\n'), ((18498, 18589), 
'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""10"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', '10', None, QtGui.QApplication.\n UnicodeUTF8)\n", (18526, 18589), False, 'from PyQt4 import QtCore, QtGui\n'), ((18627, 18718), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""30"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', '30', None, QtGui.QApplication.\n UnicodeUTF8)\n", (18655, 18718), False, 'from PyQt4 import QtCore, QtGui\n'), ((18756, 18847), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""60"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', '60', None, QtGui.QApplication.\n UnicodeUTF8)\n", (18784, 18847), False, 'from PyQt4 import QtCore, QtGui\n'), ((18884, 18991), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Resolution in scale"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Resolution in scale', None,\n QtGui.QApplication.UnicodeUTF8)\n", (18912, 18991), False, 'from PyQt4 import QtCore, QtGui\n'), ((19020, 19115), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""&Graph"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', '&Graph', None, QtGui.\n QApplication.UnicodeUTF8)\n", (19048, 19115), False, 'from PyQt4 import QtCore, QtGui\n'), ((19148, 19247), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""&Snapshots"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', '&Snapshots', None, QtGui.\n QApplication.UnicodeUTF8)\n", (19176, 19247), False, 'from PyQt4 import QtCore, QtGui\n'), ((19280, 19376), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""toolBar"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), 
"('MainWindow', 'toolBar', None, QtGui.\n QApplication.UnicodeUTF8)\n", (19308, 19376), False, 'from PyQt4 import QtCore, QtGui\n'), ((19408, 19507), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Save as..."""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Save as...', None, QtGui.\n QApplication.UnicodeUTF8)\n", (19436, 19507), False, 'from PyQt4 import QtCore, QtGui\n'), ((19540, 19637), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Snapshot"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Snapshot', None, QtGui.\n QApplication.UnicodeUTF8)\n", (19568, 19637), False, 'from PyQt4 import QtCore, QtGui\n'), ((19668, 19764), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""&Create"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', '&Create', None, QtGui.\n QApplication.UnicodeUTF8)\n", (19696, 19764), False, 'from PyQt4 import QtCore, QtGui\n'), ((19799, 19894), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Ctrl+T"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Ctrl+T', None, QtGui.\n QApplication.UnicodeUTF8)\n", (19827, 19894), False, 'from PyQt4 import QtCore, QtGui\n'), ((19925, 20020), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Delete"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Delete', None, QtGui.\n QApplication.UnicodeUTF8)\n", (19953, 20020), False, 'from PyQt4 import QtCore, QtGui\n'), ((20064, 20171), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Clear all snapshots"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Clear all snapshots', None,\n QtGui.QApplication.UnicodeUTF8)\n", (20092, 20171), False, 'from PyQt4 import QtCore, 
QtGui\n'), ((20205, 20301), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Refresh"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Refresh', None, QtGui.\n QApplication.UnicodeUTF8)\n", (20233, 20301), False, 'from PyQt4 import QtCore, QtGui\n'), ((20337, 20466), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Create graph with current parameters"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow',\n 'Create graph with current parameters', None, QtGui.QApplication.\n UnicodeUTF8)\n", (20365, 20466), False, 'from PyQt4 import QtCore, QtGui\n'), ((20500, 20629), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Create graph with current parameters"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow',\n 'Create graph with current parameters', None, QtGui.QApplication.\n UnicodeUTF8)\n", (20528, 20629), False, 'from PyQt4 import QtCore, QtGui\n'), ((20662, 20753), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""F5"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'F5', None, QtGui.QApplication.\n UnicodeUTF8)\n", (20690, 20753), False, 'from PyQt4 import QtCore, QtGui\n'), ((20783, 20877), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Reset"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', 'Reset', None, QtGui.\n QApplication.UnicodeUTF8)\n", (20811, 20877), False, 'from PyQt4 import QtCore, QtGui\n'), ((20910, 21029), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""Set parameter values to default"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow',\n 'Set parameter values to default', None, QtGui.QApplication.UnicodeUTF8)\n", (20938, 21029), False, 'from PyQt4 import QtCore, 
QtGui\n'), ((21064, 21166), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MainWindow"""', '"""&Export to..."""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MainWindow', '&Export to...', None, QtGui.\n QApplication.UnicodeUTF8)\n", (21092, 21166), False, 'from PyQt4 import QtCore, QtGui\n')] |
from selenium import webdriver
#from webdriver_manager.chrome import ChromeDriverManager
from selenium.webdriver.chrome.options import Options
import time, send_mail
# Configure Chrome to run headless so the script works on hosts without a display.
chrome_options = webdriver.ChromeOptions()
chrome_options.add_argument('--headless')
# NOTE(review): --no-sandbox and --disable-dev-shm-usage are commonly needed in
# container/CI environments — confirm they are required for this deployment.
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument('--disable-dev-shm-usage')
# Shared module-level driver used by navigate_friday() below.
wd = webdriver.Chrome('chromedriver', chrome_options=chrome_options)
#wd = webdriver.Chrome(ChromeDriverManager().install())
def _add_schedule(login_email, login_password):
    """Log in to Conexa Saude with the given credentials and add a 09:00-11:00 slot.

    Drives the shared module-level ``wd`` Chrome driver.  The fixed
    ``time.sleep`` pauses give the single-page app time to render between
    navigation steps.  Any selenium exception (missing element, timeout,
    navigation failure) propagates to the caller.
    """
    wd.get('https://app.conexasaude.com.br/')
    time.sleep(8)
    email = wd.find_element_by_xpath('//*[@id="email"]')
    email.send_keys(login_email)
    senha = wd.find_element_by_xpath('//*[@id="senha"]')
    senha.send_keys(login_password)
    botao_entrar = wd.find_element_by_xpath('//*[@id="submit"]')
    botao_entrar.click()
    time.sleep(8)
    wd.get('https://app.conexasaude.com.br/horarios')
    time.sleep(8)
    # Open the "add schedule" dialog.
    botao_adicionar_horario = wd.find_element_by_xpath(
        '/html/body/div[8]/div[5]/div/div[4]/div/div/div[2]/button')
    botao_adicionar_horario.click()
    time.sleep(3)
    # Fill in the start (09:00) and end (11:00) times.
    campo_horario_inicio = wd.find_element_by_xpath(
        '/html/body/div[8]/div[5]/div/div[4]/div/div/div[1]/div/div[1]/div/input')
    campo_horario_inicio.send_keys('0900')
    time.sleep(1)
    campo_horario_termino = wd.find_element_by_xpath(
        '/html/body/div[8]/div[5]/div/div[4]/div/div/div[1]/div/div[2]/div/input')
    campo_horario_termino.send_keys('1100')
    time.sleep(1)
    # Confirm the new slot.
    botao_adicionar_horario = wd.find_element_by_xpath(
        '/html/body/div[8]/div[5]/div/div[4]/div/div/div[2]/button')
    botao_adicionar_horario.click()
    time.sleep(3)


def navigate_friday():
    """Add the Friday 09:00-11:00 schedule slot on Conexa Saude.

    Tries once with the primary credentials; on any failure, waits three
    minutes and retries once with the fallback credentials.  If the retry
    also fails, the exception is e-mailed via ``send_mail.sendmail``.

    The original implementation duplicated the whole login/schedule
    sequence inside a bare ``except:``; the sequence now lives in
    ``_add_schedule`` and only ``Exception`` is caught, so
    ``SystemExit``/``KeyboardInterrupt`` are no longer swallowed.
    """
    try:
        _add_schedule('<EMAIL>', '<PASSWORD>')
    except Exception:
        time.sleep(180)  # let a transient outage pass before retrying
        try:
            _add_schedule('<EMAIL>', '1234@Unimed')
        except Exception as err:
            send_mail.sendmail(err, '<EMAIL>')
| [
"selenium.webdriver.Chrome",
"selenium.webdriver.ChromeOptions",
"time.sleep",
"send_mail.sendmail"
] | [((191, 216), 'selenium.webdriver.ChromeOptions', 'webdriver.ChromeOptions', ([], {}), '()\n', (214, 216), False, 'from selenium import webdriver\n'), ((367, 430), 'selenium.webdriver.Chrome', 'webdriver.Chrome', (['"""chromedriver"""'], {'chrome_options': 'chrome_options'}), "('chromedriver', chrome_options=chrome_options)\n", (383, 430), False, 'from selenium import webdriver\n'), ((584, 597), 'time.sleep', 'time.sleep', (['(8)'], {}), '(8)\n', (594, 597), False, 'import time, send_mail\n'), ((906, 919), 'time.sleep', 'time.sleep', (['(8)'], {}), '(8)\n', (916, 919), False, 'import time, send_mail\n'), ((988, 1001), 'time.sleep', 'time.sleep', (['(8)'], {}), '(8)\n', (998, 1001), False, 'import time, send_mail\n'), ((1173, 1186), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1183, 1186), False, 'import time, send_mail\n'), ((1376, 1389), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1386, 1389), False, 'import time, send_mail\n'), ((1581, 1594), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1591, 1594), False, 'import time, send_mail\n'), ((1766, 1779), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1776, 1779), False, 'import time, send_mail\n'), ((1804, 1819), 'time.sleep', 'time.sleep', (['(180)'], {}), '(180)\n', (1814, 1819), False, 'import time, send_mail\n'), ((1902, 1915), 'time.sleep', 'time.sleep', (['(8)'], {}), '(8)\n', (1912, 1915), False, 'import time, send_mail\n'), ((2253, 2266), 'time.sleep', 'time.sleep', (['(8)'], {}), '(8)\n', (2263, 2266), False, 'import time, send_mail\n'), ((2343, 2356), 'time.sleep', 'time.sleep', (['(8)'], {}), '(8)\n', (2353, 2356), False, 'import time, send_mail\n'), ((2558, 2571), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (2568, 2571), False, 'import time, send_mail\n'), ((2791, 2804), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2801, 2804), False, 'import time, send_mail\n'), ((3026, 3039), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3036, 3039), False, 
'import time, send_mail\n'), ((3241, 3254), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (3251, 3254), False, 'import time, send_mail\n'), ((3304, 3338), 'send_mail.sendmail', 'send_mail.sendmail', (['err', '"""<EMAIL>"""'], {}), "(err, '<EMAIL>')\n", (3322, 3338), False, 'import time, send_mail\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Rename audit-trail fields on the ``bug`` and ``stage`` models.

    ``bug.person`` becomes ``create_person``; ``stage.person`` and
    ``stage.change_time`` are renamed to the ``update_*`` convention.
    Data is preserved — these are pure column renames.
    """
    dependencies = [
        ('bugs', '0013_auto_20151123_1415'),
    ]
    operations = [
        # Bug: the recorded person is the bug's creator.
        migrations.RenameField(
            model_name='bug',
            old_name='person',
            new_name='create_person',
        ),
        # Stage: the recorded person/time refer to the latest update.
        migrations.RenameField(
            model_name='stage',
            old_name='person',
            new_name='update_person',
        ),
        migrations.RenameField(
            model_name='stage',
            old_name='change_time',
            new_name='update_time',
        ),
    ]
| [
"django.db.migrations.RenameField"
] | [((248, 338), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""bug"""', 'old_name': '"""person"""', 'new_name': '"""create_person"""'}), "(model_name='bug', old_name='person', new_name=\n 'create_person')\n", (270, 338), False, 'from django.db import models, migrations\n'), ((390, 482), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""stage"""', 'old_name': '"""person"""', 'new_name': '"""update_person"""'}), "(model_name='stage', old_name='person', new_name=\n 'update_person')\n", (412, 482), False, 'from django.db import models, migrations\n'), ((534, 629), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""stage"""', 'old_name': '"""change_time"""', 'new_name': '"""update_time"""'}), "(model_name='stage', old_name='change_time', new_name\n ='update_time')\n", (556, 629), False, 'from django.db import models, migrations\n')] |
#!/usr/bin/env python
from setuptools import setup
from setuptools.command.test import test as BaseTestCommand
from djangocms_local_navigation import __version__
install_requires = [
'django-cms>=3.0',
'beautifulsoup4',
]
tests_require = [
'djangocms-text-ckeditor',
]
class TestCommand(BaseTestCommand):
    """``python setup.py test`` hook that runs the suite with Django's runner."""

    def run_tests(self):
        # Imports are deferred so merely importing setup.py never pulls in Django.
        import sys
        import django
        from django.conf import settings
        from django.test.runner import DiscoverRunner
        from tests import settings as test_settings

        # Copy every UPPER_CASE name from the test settings module into the
        # Django configuration.
        overrides = {}
        for name in dir(test_settings):
            if name.isupper():
                overrides[name] = getattr(test_settings, name)
        settings.configure(**overrides)
        django.setup()

        runner = DiscoverRunner(verbosity=1)
        failure_count = runner.run_tests(['tests'])
        if failure_count:
            sys.exit(failure_count)
# Distribution metadata for the djangocms-local-navigation package.
setup(
    name='djangocms-local-navigation',
    version=__version__,
    packages=['djangocms_local_navigation'],
    author='<NAME>',
    author_email='<EMAIL>',
    description="Display menus based on the HTML structure of the pages",
    long_description=open('README.md').read(),
    url='https://github.com/liip/djangocms-local-navigation',
    # Runtime and test-only requirements are declared at the top of this file.
    install_requires=install_requires,
    tests_require=tests_require,
    license='BSD',
    include_package_data=True,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Framework :: Django',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    # `python setup.py test` is routed through the Django-aware TestCommand.
    cmdclass={
        'test': TestCommand,
    }
)
| [
"django.test.runner.DiscoverRunner",
"django.setup",
"sys.exit"
] | [((720, 734), 'django.setup', 'django.setup', ([], {}), '()\n', (732, 734), False, 'import django\n'), ((757, 784), 'django.test.runner.DiscoverRunner', 'DiscoverRunner', ([], {'verbosity': '(1)'}), '(verbosity=1)\n', (771, 784), False, 'from django.test.runner import DiscoverRunner\n'), ((871, 889), 'sys.exit', 'sys.exit', (['failures'], {}), '(failures)\n', (879, 889), False, 'import sys\n')] |
from django.http import HttpResponse, HttpResponseRedirect
from django.urls import reverse
from django.db.models import Q
from .models import (
Deck,
Grave,
Hand,
Duel,
Trigger,
Lock,
)
from pprint import pprint
from .battle_det import battle_det,battle_det_return_org_ai
from .duel import DuelObj
from time import time
def lock_lock(room_number, lock,request):
    """Try to acquire the per-room duel lock.

    Returns the string "OK" when the lock was acquired (lock_N set True and
    time_N stamped).  When the room is already locked and the lock is fresh
    (< 20 s old), returns an HttpResponse instead: "waiting" for human-vs-human
    duels, or a rendered battle page for AI duels (the caller must therefore
    check the return value against "OK" before proceeding).

    :param room_number: duel room id, expected 1, 2 or 3
    :param lock: the shared Lock model instance holding lock_N / time_N fields
    :param request: the Django request (used for user / guest-cookie auth)
    """
    duel = Duel.objects.filter(id=room_number).get()
    # ID1/ID2 are guest-session ids; -1 disables guest matching for that seat.
    if duel.guest_flag is False:
        ID1 = -1
    else:
        ID1 = duel.guest_id
    if duel.guest_flag2 is False:
        ID2 = -1
    else:
        ID2 = duel.guest_id2
    if "ID" in request.COOKIES :
        ID = request.COOKIES["ID"]
    else:
        ID = ""
    if room_number == 1:
        # Lock considered held only while fresh; a stale lock (>= 20 s) is
        # silently re-acquired below.
        if lock.lock_1 is True and time() - lock.time_1 < 20:
            if duel.is_ai is False:
                return HttpResponse("waiting")
            # AI duel: render the current battle state instead of blocking.
            duelobj = DuelObj(room_number)
            duelobj.duel = duel
            duelobj.room_number = room_number
            duelobj.in_execute = False
            decks = Deck.objects.all()
            graves = Grave.objects.all()
            hands = Hand.objects.all()
            user_1 = duel.user_1
            user_2 = duel.user_2
            if request.user != user_1 and request.user != user_2:
                # Allow guests identified by cookie; everyone else is rejected.
                if (ID1 == ID and duel.guest_flag) or (ID2 == ID and duel.guest_flag2):
                    pass
                else:
                    return HttpResponse("error")
            # NOTE(review): if neither branch below matches, `user` /
            # `other_user` are unbound and init_all raises — presumably
            # unreachable after the auth check above, but worth confirming.
            if request.user == user_1 or (ID1 == ID and duel.guest_flag):
                duelobj.user = 1
                user = 1
                other_user = 2
            if request.user == user_2 or (ID2 == ID and duel.guest_flag2):
                duelobj.user = 2
                user = 2
                other_user = 1
            duelobj.init_all(user, other_user, room_number)
            return battle_det_return_org_ai(
                duelobj, decks, graves, hands, user, other_user, choices, room_number
            )
        else:
            # Acquire (or steal a stale) lock for room 1.
            lock.lock_1 = True
            lock.time_1 = time()
            lock.save()
    elif room_number == 2:
        # NOTE(review): unlike room 1, rooms 2 and 3 do not honor the
        # guest-cookie fallback in the auth/user-selection checks below —
        # possibly an inconsistency; confirm intended behavior.
        if lock.lock_2 is True and time() - lock.time_2 < 20:
            if duel.is_ai is False:
                return HttpResponse("waiting")
            duelobj = DuelObj(room_number)
            duelobj.duel = duel
            duelobj.room_number = room_number
            duelobj.in_execute = False
            decks = Deck.objects.all()
            graves = Grave.objects.all()
            hands = Hand.objects.all()
            user_1 = duel.user_1
            user_2 = duel.user_2
            if request.user != user_1 and request.user != user_2:
                return HttpResponse("error")
            if request.user == user_1:
                duelobj.user = 1
                user = 1
                other_user = 2
            if request.user == user_2:
                duelobj.user = 2
                user = 2
                other_user = 1
            duelobj.init_all(user, other_user, room_number)
            return battle_det_return_org_ai(
                duelobj, decks, graves, hands, user, other_user, choices, room_number
            )
        else:
            lock.lock_2 = True
            lock.time_2 = time()
            lock.save()
    elif room_number == 3:
        if lock.lock_3 is True and time() - lock.time_3 < 20:
            if duel.is_ai is False:
                return HttpResponse("waiting")
            duelobj = DuelObj(room_number)
            duelobj.duel = duel
            duelobj.room_number = room_number
            duelobj.in_execute = False
            decks = Deck.objects.all()
            graves = Grave.objects.all()
            hands = Hand.objects.all()
            user_1 = duel.user_1
            user_2 = duel.user_2
            if request.user != user_1 and request.user != user_2:
                return HttpResponse("error")
            if request.user == user_1:
                duelobj.user = 1
                user = 1
                other_user = 2
            if request.user == user_2:
                duelobj.user = 2
                user = 2
                other_user = 1
            duelobj.init_all(user, other_user, room_number)
            return battle_det_return_org_ai(
                duelobj, decks, graves, hands, user, other_user, choices, room_number
            )
        else:
            lock.lock_3 = True
            lock.time_3 = time()
            lock.save()
    # Lock acquired (or room_number outside 1-3, in which case nothing was
    # locked — callers only pass 1-3 as far as this view shows).
    return "OK"
def choices(request):
    """Django view: execute a player's chosen trigger in a duel room.

    POST parameters: ``room_number`` (int, 1-3) and ``trigger_id``.
    Acquires the per-room lock via ``lock_lock``, authenticates the caller
    (logged-in user or guest identified by the "ID" cookie), fires the
    trigger through ``choices_det`` and re-renders the battle; any failure
    releases the lock and returns an "error" response.
    """
    room_number = int(request.POST["room_number"])
    trigger_id = request.POST["trigger_id"]
    lock = Lock.objects.get()
    lock_flag = lock_lock(room_number, lock,request)
    duel = Duel.objects.filter(id=room_number).get()
    # Guest-seat ids; -1 disables guest matching for that seat.
    if duel.guest_flag is False:
        ID1 = -1
    else:
        ID1 = duel.guest_id
    if duel.guest_flag2 is False:
        ID2 = -1
    else:
        ID2 = duel.guest_id2
    if "ID" in request.COOKIES :
        ID = request.COOKIES["ID"]
    else:
        ID = ""
    if lock_flag != "OK":
        # Lock not acquired: humans wait, AI duels render the current state.
        if duel.is_ai == False:
            return HttpResponse("waiting")
        else:
            duelobj = DuelObj(room_number)
            duelobj.duel = duel
            duelobj.room_number = room_number
            duelobj.in_execute = False
            decks = Deck.objects.all()
            graves = Grave.objects.all()
            hands = Hand.objects.all()
            user_1 = duel.user_1
            user_2 = duel.user_2
            if request.user != user_1 and request.user != user_2:
                if (ID1 == ID and duel.guest_flag) or (ID2 == ID and duel.guest_flag2):
                    pass
                else:
                    return HttpResponse("error")
            if request.user == user_1 or(ID1 == ID and duel.guest_flag is True):
                duelobj.user = 1
                user = 1
                other_user = 2
            if request.user == user_2 or(ID2 == ID and duel.guest_flag2 is True):
                duelobj.user = 2
                user = 2
                other_user = 1
            duelobj.init_all(user, other_user, room_number)
            return battle_det_return_org_ai(
                duelobj, decks, graves, hands, user, other_user, choices, room_number
            )
    # Spectators (neither player nor recognized guest) are redirected to the
    # watch page after releasing the lock.
    if duel.user_1 != request.user and duel.user_2 != request.user:
        if (ID1 == ID and duel.guest_flag) or (ID2 == ID and duel.guest_flag2):
            pass
        else:
            free_lock(room_number, lock)
            return HttpResponseRedirect(reverse("tcgcreator:watch_battle"))
    if duel.user_1 == request.user or ( ID1 == ID and duel.guest_flag is True):
        user = 1
        other_user = 2
    elif duel.user_2 == request.user or (ID2 == ID and duel.guest_flag2 is True):
        user = 2
        other_user = 1
    duelobj = DuelObj(room_number)
    duelobj.duel = duel
    duelobj.user = user
    duelobj.room_number = room_number
    decks = Deck.objects.all()
    graves = Grave.objects.all()
    hands = Hand.objects.all()
    duelobj.init_all(user, other_user, room_number)
    # Apply continuously-active effects before resolving the chosen trigger.
    duelobj.check_eternal_effect(
        decks, graves, hands, duel.phase, duel.user_turn, user, other_user
    )
    if duel.in_cost is True:
        free_lock(room_number, lock)
        return HttpResponse("error")
    if duel.user_1 == request.user or ( ID1 == ID and duel.guest_flag is True):
        # Only the appointed player may act.
        if duel.appoint != 1:
            free_lock(room_number, lock)
            return HttpResponse("error")
        duelobj.user = 1
        user = 1
        other_user = 2
        if choices_det(duelobj, trigger_id, request, user) != -1:
            duelobj.duel.mute = False
            duelobj.save_all(user, other_user, room_number)
            free_lock(room_number, lock)
            return battle_det(request, duelobj)
        else:
            free_lock(room_number, lock)
            return HttpResponse("error")
    elif duel.user_2 == request.user or (ID2 == ID and duel.guest_flag2 is True):
        if duel.appoint != 2:
            free_lock(room_number, lock)
            return HttpResponse("error")
        duelobj.user = 2
        user = 2
        other_user = 1
        if choices_det(duelobj, trigger_id, request, user) != -1:
            duelobj.duel.mute = False
            duelobj.save_all(user, other_user, room_number)
            free_lock(room_number, lock)
            return battle_det(request, duelobj)
        else:
            free_lock(room_number, lock)
            return HttpResponse("error")
    free_lock(room_number, lock)
    return HttpResponse("error")
def choices_det(duelobj, trigger_id, request, user):
    """Fetch the trigger and fire it if it may launch right now.

    Returns whatever ``invoke_trigger`` returns on success, or -1 when the
    trigger is missing or not currently launchable for ``user``.
    """
    other_user = 2 if user == 1 else 1
    trigger = Trigger.objects.all().get(id=trigger_id)
    launchable = trigger is not None and duelobj.check_launch_trigger(
        trigger, duelobj.duel.phase, duelobj.duel.user_turn, user, other_user, user
    )
    if not launchable:
        return -1
    return duelobj.invoke_trigger(trigger, "", "", "", duelobj.user, "")
def free_lock(room_number, lock):
    """Release the lock flag for the given duel room and persist it.

    Room numbers other than 1-3 are ignored (nothing is changed or saved),
    matching the original if/elif chain.
    """
    flag_attr = {1: "lock_1", 2: "lock_2", 3: "lock_3"}.get(room_number)
    if flag_attr is not None:
        setattr(lock, flag_attr, False)
        lock.save()
| [
"django.http.HttpResponse",
"time.time",
"django.urls.reverse"
] | [((8580, 8601), 'django.http.HttpResponse', 'HttpResponse', (['"""error"""'], {}), "('error')\n", (8592, 8601), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((7296, 7317), 'django.http.HttpResponse', 'HttpResponse', (['"""error"""'], {}), "('error')\n", (7308, 7317), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((2080, 2086), 'time.time', 'time', ([], {}), '()\n', (2084, 2086), False, 'from time import time\n'), ((5113, 5136), 'django.http.HttpResponse', 'HttpResponse', (['"""waiting"""'], {}), "('waiting')\n", (5125, 5136), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((7489, 7510), 'django.http.HttpResponse', 'HttpResponse', (['"""error"""'], {}), "('error')\n", (7501, 7510), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((7905, 7926), 'django.http.HttpResponse', 'HttpResponse', (['"""error"""'], {}), "('error')\n", (7917, 7926), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((858, 881), 'django.http.HttpResponse', 'HttpResponse', (['"""waiting"""'], {}), "('waiting')\n", (870, 881), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((3271, 3277), 'time.time', 'time', ([], {}), '()\n', (3275, 3277), False, 'from time import time\n'), ((6553, 6587), 'django.urls.reverse', 'reverse', (['"""tcgcreator:watch_battle"""'], {}), "('tcgcreator:watch_battle')\n", (6560, 6587), False, 'from django.urls import reverse\n'), ((8100, 8121), 'django.http.HttpResponse', 'HttpResponse', (['"""error"""'], {}), "('error')\n", (8112, 8121), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((8514, 8535), 'django.http.HttpResponse', 'HttpResponse', (['"""error"""'], {}), "('error')\n", (8526, 8535), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((772, 778), 'time.time', 'time', ([], {}), '()\n', (776, 778), False, 'from time import time\n'), ((1455, 1476), 
'django.http.HttpResponse', 'HttpResponse', (['"""error"""'], {}), "('error')\n", (1467, 1476), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((2259, 2282), 'django.http.HttpResponse', 'HttpResponse', (['"""waiting"""'], {}), "('waiting')\n", (2271, 2282), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((2717, 2738), 'django.http.HttpResponse', 'HttpResponse', (['"""error"""'], {}), "('error')\n", (2729, 2738), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((4462, 4468), 'time.time', 'time', ([], {}), '()\n', (4466, 4468), False, 'from time import time\n'), ((5724, 5745), 'django.http.HttpResponse', 'HttpResponse', (['"""error"""'], {}), "('error')\n", (5736, 5745), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((2173, 2179), 'time.time', 'time', ([], {}), '()\n', (2177, 2179), False, 'from time import time\n'), ((3450, 3473), 'django.http.HttpResponse', 'HttpResponse', (['"""waiting"""'], {}), "('waiting')\n", (3462, 3473), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((3908, 3929), 'django.http.HttpResponse', 'HttpResponse', (['"""error"""'], {}), "('error')\n", (3920, 3929), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((3364, 3370), 'time.time', 'time', ([], {}), '()\n', (3368, 3370), False, 'from time import time\n')] |
# Demo: read an integer and print its square root rounded up and down.
# (User-facing strings are intentionally in Portuguese.)
from math import sqrt, floor, ceil
#import math
num = int(input("Digite um número: "))
#raiz = math.sqrt(num)
raiz = sqrt(num) # uses sqrt from the targeted `from math import` above
#print (f"A raiz de {num} é igual a {math.ceil(raiz)}") # rounds up
#print (f"A raiz de {num} é igual a {math.floor(raiz)}") # rounds down
print (f"A raiz de {num} é igual a {ceil(raiz)}") # rounds up
print (f"A raiz de {num} é igual a {floor(raiz)}") # rounds down
| [
"math.ceil",
"math.sqrt",
"math.floor"
] | [((118, 127), 'math.sqrt', 'sqrt', (['num'], {}), '(num)\n', (122, 127), False, 'from math import sqrt, floor, ceil\n'), ((350, 360), 'math.ceil', 'ceil', (['raiz'], {}), '(raiz)\n', (354, 360), False, 'from math import sqrt, floor, ceil\n'), ((421, 432), 'math.floor', 'floor', (['raiz'], {}), '(raiz)\n', (426, 432), False, 'from math import sqrt, floor, ceil\n')] |
# Preprocess transiXOR device I-V sweep data into train/eval minidb databases
# for the PiNN (physics-inspired neural network) training pipeline.
import sys
sys.path.append('../')
import caffe2_paths
import numpy as np
import glob
from itertools import product
import pinn.preproc as preproc
import pinn.data_reader as data_reader
import matplotlib.pyplot as plt
import pickle
import os
# ----------------- Preprocessing --------------------
# Bias grids: vds skips the region around 0 V; vbg/vtg are uniform sweeps.
vds = np.concatenate((np.linspace(-0.1, -0.01, 10),np.linspace(0.01, 0.3, 30)))
# print(vds)
vbg = np.linspace(-0.1, 0.3, 41)
# print(vbg)
vtg = np.linspace(-0.1, 0.3, 41)
# print(vtg)
id_file = glob.glob('./transiXOR_data/current_D9.npy')
db_path = 'db/'
id_data = np.load(id_file[0])
# !!CAUTION!! If use batch direct weighted L1 loss,
# make sure no zero label in the training data
# Future version will address this issue internally.
# selected_vds_idx = [1, 5, 9, 12, 15, 17, 18, 19, 20]
# vds = vds[selected_vds_idx]
# id_data = id_data[selected_vds_idx,:,:]
# Drop vds index 10 (the slice containing zero-current labels).
id_data = np.concatenate((id_data[0:10,:,:],id_data[11:,:,:]))
## Check whether zero label exit
assert np.min(np.abs(id_data).flatten()) > 1e-9, "Zero exist in labels"
# vds, vbg, vtg, id
print('original data shape: '
	+ str(id_data.shape) + '; '
	+ str(id_data.shape[0] * id_data.shape[1] * id_data.shape[2])
)
# Flatten the 3-D bias grid into per-sample columns (same order as id_data).
iter_lst = list(product(vds, vbg, vtg))
vds_train = np.expand_dims(np.array([e[0] for e in iter_lst], dtype=np.float32), axis=1)
vbg_train = np.array([e[1] for e in iter_lst], dtype=np.float32)
vtg_train = np.array([e[2] for e in iter_lst], dtype=np.float32)
id_train = np.expand_dims(id_data.flatten(), axis=1).astype(np.float32)
vg_train = np.column_stack((vtg_train, vbg_train))
print('--- Original shape: ')
print(vg_train.shape)
print(vds_train.shape)
print(id_train.shape)
## Using the fact that vtg and vbg are interchangeable
## CAUTION: This invariance may not be true for experimental data
vg_train = np.sum(vg_train, axis=1, keepdims=True)
## random select train/eval = 0.9/0.1
# FIXME(review): this assigns the int 42 to the attribute, it does NOT seed
# the RNG — the intended call is np.random.seed(42). The shuffle below is
# therefore not reproducible.
np.random.seed = 42
data_arrays = [vg_train, vds_train, id_train]
permu = np.random.permutation(len(data_arrays[0]))
num_eval = int(len(data_arrays[0])*0.1)
data_arrays = [e[permu] for e in data_arrays]
data_arrays_eval = [e[0:num_eval] for e in data_arrays]
data_arrays_train = [e[num_eval:] for e in data_arrays]
## Odd for train, even for eval
# vg_eval = vg_train[::2]; vg_train = vg_train[1::2]
# vds_eval = vds_train[::2]; vds_train = vds_train[1::2]
# id_eval = id_train[::2]; id_train = id_train[1::2]
# data_arrays_train = [vg_train, vds_train, id_train]
# data_arrays_eval = [vg_eval, vds_eval, id_eval]
## Check shape of train and eval dataset
print('--- Train/Eval shape: ')
print(
	data_arrays_train[0].shape,
	data_arrays_train[1].shape,
	data_arrays_train[2].shape
)
print(
	data_arrays_eval[0].shape,
	data_arrays_eval[1].shape,
	data_arrays_eval[2].shape
)
# Normalization metadata is computed from the training split only.
scale, vg_shift = preproc.compute_dc_meta(*data_arrays_train)
preproc_param = {
	'scale' : scale,
	'vg_shift' : vg_shift,
}
print(preproc_param)
## Saving the preproc param
preproc_data_arrays_train = preproc.dc_iv_preproc(
	data_arrays_train[0], data_arrays_train[1], data_arrays_train[2],
	preproc_param['scale'],
	preproc_param['vg_shift']
)
preproc_data_arrays_eval = preproc.dc_iv_preproc(
	data_arrays_eval[0], data_arrays_eval[1], data_arrays_eval[2],
	preproc_param['scale'],
	preproc_param['vg_shift']
)
# Only expand the dim if the number of dimension is 1
preproc_data_arrays_train = [np.expand_dims(
	x, axis=1) if x.ndim == 1 else x for x in preproc_data_arrays_train]
preproc_data_arrays_eval = [np.expand_dims(
	x, axis=1) if x.ndim == 1 else x for x in preproc_data_arrays_eval]
# Write to database
if os.path.isfile(db_path+'train.minidb'):
	print("XXX Delete the old train database...")
	os.remove(db_path+'train.minidb')
if os.path.isfile(db_path+'eval.minidb'):
	print("XXX Delete the old eval database...")
	os.remove(db_path+'eval.minidb')
data_reader.write_db('minidb', db_path+'train.minidb', preproc_data_arrays_train)
data_reader.write_db('minidb', db_path+'eval.minidb', preproc_data_arrays_eval)
pickle.dump(preproc_param, open(db_path+'preproc_param.p', 'wb'))
| [
"pinn.data_reader.write_db",
"numpy.abs",
"itertools.product",
"numpy.column_stack",
"os.path.isfile",
"numpy.array",
"numpy.linspace",
"numpy.sum",
"numpy.concatenate",
"pinn.preproc.dc_iv_preproc",
"numpy.expand_dims",
"numpy.load",
"sys.path.append",
"pinn.preproc.compute_dc_meta",
"g... | [((11, 33), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (26, 33), False, 'import sys\n'), ((396, 422), 'numpy.linspace', 'np.linspace', (['(-0.1)', '(0.3)', '(41)'], {}), '(-0.1, 0.3, 41)\n', (407, 422), True, 'import numpy as np\n'), ((442, 468), 'numpy.linspace', 'np.linspace', (['(-0.1)', '(0.3)', '(41)'], {}), '(-0.1, 0.3, 41)\n', (453, 468), True, 'import numpy as np\n'), ((492, 536), 'glob.glob', 'glob.glob', (['"""./transiXOR_data/current_D9.npy"""'], {}), "('./transiXOR_data/current_D9.npy')\n", (501, 536), False, 'import glob\n'), ((564, 583), 'numpy.load', 'np.load', (['id_file[0]'], {}), '(id_file[0])\n', (571, 583), True, 'import numpy as np\n'), ((898, 955), 'numpy.concatenate', 'np.concatenate', (['(id_data[0:10, :, :], id_data[11:, :, :])'], {}), '((id_data[0:10, :, :], id_data[11:, :, :]))\n', (912, 955), True, 'import numpy as np\n'), ((1346, 1398), 'numpy.array', 'np.array', (['[e[1] for e in iter_lst]'], {'dtype': 'np.float32'}), '([e[1] for e in iter_lst], dtype=np.float32)\n', (1354, 1398), True, 'import numpy as np\n'), ((1411, 1463), 'numpy.array', 'np.array', (['[e[2] for e in iter_lst]'], {'dtype': 'np.float32'}), '([e[2] for e in iter_lst], dtype=np.float32)\n', (1419, 1463), True, 'import numpy as np\n'), ((1547, 1586), 'numpy.column_stack', 'np.column_stack', (['(vtg_train, vbg_train)'], {}), '((vtg_train, vbg_train))\n', (1562, 1586), True, 'import numpy as np\n'), ((1817, 1856), 'numpy.sum', 'np.sum', (['vg_train'], {'axis': '(1)', 'keepdims': '(True)'}), '(vg_train, axis=1, keepdims=True)\n', (1823, 1856), True, 'import numpy as np\n'), ((2793, 2836), 'pinn.preproc.compute_dc_meta', 'preproc.compute_dc_meta', (['*data_arrays_train'], {}), '(*data_arrays_train)\n', (2816, 2836), True, 'import pinn.preproc as preproc\n'), ((2979, 3121), 'pinn.preproc.dc_iv_preproc', 'preproc.dc_iv_preproc', (['data_arrays_train[0]', 'data_arrays_train[1]', 'data_arrays_train[2]', "preproc_param['scale']", 
"preproc_param['vg_shift']"], {}), "(data_arrays_train[0], data_arrays_train[1],\n data_arrays_train[2], preproc_param['scale'], preproc_param['vg_shift'])\n", (3000, 3121), True, 'import pinn.preproc as preproc\n'), ((3152, 3291), 'pinn.preproc.dc_iv_preproc', 'preproc.dc_iv_preproc', (['data_arrays_eval[0]', 'data_arrays_eval[1]', 'data_arrays_eval[2]', "preproc_param['scale']", "preproc_param['vg_shift']"], {}), "(data_arrays_eval[0], data_arrays_eval[1],\n data_arrays_eval[2], preproc_param['scale'], preproc_param['vg_shift'])\n", (3173, 3291), True, 'import pinn.preproc as preproc\n'), ((3600, 3640), 'os.path.isfile', 'os.path.isfile', (["(db_path + 'train.minidb')"], {}), "(db_path + 'train.minidb')\n", (3614, 3640), False, 'import os\n'), ((3725, 3764), 'os.path.isfile', 'os.path.isfile', (["(db_path + 'eval.minidb')"], {}), "(db_path + 'eval.minidb')\n", (3739, 3764), False, 'import os\n'), ((3844, 3931), 'pinn.data_reader.write_db', 'data_reader.write_db', (['"""minidb"""', "(db_path + 'train.minidb')", 'preproc_data_arrays_train'], {}), "('minidb', db_path + 'train.minidb',\n preproc_data_arrays_train)\n", (3864, 3931), True, 'import pinn.data_reader as data_reader\n'), ((3926, 4011), 'pinn.data_reader.write_db', 'data_reader.write_db', (['"""minidb"""', "(db_path + 'eval.minidb')", 'preproc_data_arrays_eval'], {}), "('minidb', db_path + 'eval.minidb',\n preproc_data_arrays_eval)\n", (3946, 4011), True, 'import pinn.data_reader as data_reader\n'), ((1221, 1243), 'itertools.product', 'product', (['vds', 'vbg', 'vtg'], {}), '(vds, vbg, vtg)\n', (1228, 1243), False, 'from itertools import product\n'), ((1272, 1324), 'numpy.array', 'np.array', (['[e[0] for e in iter_lst]'], {'dtype': 'np.float32'}), '([e[0] for e in iter_lst], dtype=np.float32)\n', (1280, 1324), True, 'import numpy as np\n'), ((3688, 3723), 'os.remove', 'os.remove', (["(db_path + 'train.minidb')"], {}), "(db_path + 'train.minidb')\n", (3697, 3723), False, 'import os\n'), ((3811, 3845), 
'os.remove', 'os.remove', (["(db_path + 'eval.minidb')"], {}), "(db_path + 'eval.minidb')\n", (3820, 3845), False, 'import os\n'), ((319, 347), 'numpy.linspace', 'np.linspace', (['(-0.1)', '(-0.01)', '(10)'], {}), '(-0.1, -0.01, 10)\n', (330, 347), True, 'import numpy as np\n'), ((348, 374), 'numpy.linspace', 'np.linspace', (['(0.01)', '(0.3)', '(30)'], {}), '(0.01, 0.3, 30)\n', (359, 374), True, 'import numpy as np\n'), ((3378, 3403), 'numpy.expand_dims', 'np.expand_dims', (['x'], {'axis': '(1)'}), '(x, axis=1)\n', (3392, 3403), True, 'import numpy as np\n'), ((3492, 3517), 'numpy.expand_dims', 'np.expand_dims', (['x'], {'axis': '(1)'}), '(x, axis=1)\n', (3506, 3517), True, 'import numpy as np\n'), ((999, 1014), 'numpy.abs', 'np.abs', (['id_data'], {}), '(id_data)\n', (1005, 1014), True, 'import numpy as np\n')] |
import random
import base64
import typing
_lowercase = "abcdefghijklmnopqrstuvwxyz"
_uppercase = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
_numbers = "0123456789"
_specials = "_"
def gen_password(
n: int,
lower_letters: str = _lowercase,
upper_letters: str = _uppercase,
number_letters: str = _numbers,
special_letters: str = _specials,
min_lower: int = 1,
min_upper: int = 1,
min_number: int = 1,
min_specials: int = 1,
):
""" generate password for any resource
>>> len(gen_password(20))
20
:param int n: password total length
:param str lower_letters: all lowercase letters
:param str upper_letters: all uppercase letters
:param str number_letters: all number letters
:param str special_letters: all special letters
:param int min_lower: minimal number of lowercase letters
:param int min_upper: minimal number of uppercase letters
:param int min_number: minimal number of number letters
:param int min_specials: minimal number of special letters
:return:
"""
all_letters = "".join(
[lower_letters, upper_letters, number_letters, special_letters]
)
minimal_total = min_lower + min_upper + min_number + min_specials
if n < minimal_total:
raise ValueError(
(
"the length of password must be larger than "
"total minimal letters number"
)
)
minimal_letters = "".join(
[
gen_string(lower_letters, min_lower),
gen_string(upper_letters, min_upper),
gen_string(number_letters, min_number),
gen_string(special_letters, min_specials),
]
)
additional_letters = random.sample(all_letters, n - minimal_total)
results = list(minimal_letters) + additional_letters
random.shuffle(results)
return "".join(results)
def gen_string(letters: str, length: int):
    """Return a string of ``length`` characters drawn uniformly from ``letters``."""
    picks = (random.choice(letters) for _ in range(length))
    return "".join(picks)
def first(l: typing.List[typing.Any]) -> typing.Any:
    """Return the first element of ``l``, or ``None`` when the list is empty."""
    return l[0] if l else None
def b64encode(s: str) -> str:
    """ base64 encode

    :param str s: input string
    :return: base64 string
    """
    raw_bytes = s.encode()
    encoded = base64.b64encode(raw_bytes)
    return encoded.decode()
def b64decode(s: str) -> str:
    """ base64 decode

    :param str s: base64 string
    :return: output string
    """
    decoded = base64.b64decode(s.encode())
    return decoded.decode()
| [
"random.sample",
"random.choice",
"random.shuffle"
] | [((1711, 1756), 'random.sample', 'random.sample', (['all_letters', '(n - minimal_total)'], {}), '(all_letters, n - minimal_total)\n', (1724, 1756), False, 'import random\n'), ((1818, 1841), 'random.shuffle', 'random.shuffle', (['results'], {}), '(results)\n', (1832, 1841), False, 'import random\n'), ((1935, 1957), 'random.choice', 'random.choice', (['letters'], {}), '(letters)\n', (1948, 1957), False, 'import random\n')] |
import click
import os
import yaml
import json
from bioblend import galaxy
from requests import ConnectionError as RequestsConnectionError
from bioblend import ConnectionError as BioblendConnectionError
from yaml import SafeLoader
from gxwf import utils
def invocations(id_):
    """Print a colorized job-state summary for Galaxy workflow invocations.

    :param id_: workflow id or a user-defined alias (resolved via the stored
        alias map); falsy value lists invocations across all workflows.
    """
    gi, cnfg, aliases = utils._login()
    if id_:
        id_ = aliases.get(id_, id_) # if the user provided an alias, return the id; else assume they provided a raw id
        invocations = gi.workflows.get_invocations(id_) # will be deprecated, use line below in future
        # invocations = gi.invocations.get_invocations(workflow_id=id_)
    else: # get all invocations - whether this is actually useful or not I don't know, but you get to see a lot of pretty colours
        invocations = gi.invocations.get_invocations()
    for n in range(len(invocations)):
        click.echo(click.style("\nInvocation {}".format(n+1), bold=True))
        invoc_id = invocations[n]['id']
        step_no = 1
        # One dot per job, colored by job state.
        state_colors = {'ok': 'green', 'running': 'yellow', 'error': 'red', 'paused': 'cyan', 'deleted': 'magenta', 'deleted_new': 'magenta', 'new': 'cyan', 'queued': 'yellow'}
        for state in state_colors:
            for k in range(gi.invocations.get_invocation_summary(invoc_id)['states'].get(state, 0)):
                click.echo(click.style(u'\u2B24' + ' Job {} ({})'.format(k+step_no, state), fg=state_colors[state]))
            # NOTE(review): `k` is the inner loop variable — if the very first
            # state has zero jobs, `k` is unbound here (NameError); if a later
            # state has zero jobs, the stale `k` skews step_no. Verify.
            step_no += k + 1
| [
"gxwf.utils._login"
] | [((305, 319), 'gxwf.utils._login', 'utils._login', ([], {}), '()\n', (317, 319), False, 'from gxwf import utils\n')] |
# Audit the nginx ssl_ciphers list: look up each cipher on ciphersuite.info
# and log its security rating (Recommended/Secure/Weak/...) plus IANA name.
from pathlib import Path

import requests
from bs4 import BeautifulSoup
from loguru import logger as log

nginx_header = "../build-templates/proxy/confs/production.conf"

with open(nginx_header) as headers:
    for line in headers.readlines():
        line = line.strip()
        if not line.startswith("ssl_ciphers"):
            continue
        # Strip the nginx directive wrapper: ssl_ciphers "A:B:...";
        line = line.removeprefix('ssl_ciphers "')
        line = line.removesuffix('";')
        ciphers = line.split(":")
        for cipher in ciphers:
            # "!" entries are exclusions, not ciphers to look up.
            if cipher.startswith("!"):
                continue
            page = requests.get(
                f"https://ciphersuite.info/search/?q={cipher}", timeout=30
            )
            soup = BeautifulSoup(page.content, "html5lib")

            # Results are organized on a ul class='prettylist'
            ul = soup.find("ul", attrs={"class": "prettylist"})
            # Find all the <a> into the ul
            a_list = ul.findChildren("a")
            for a in a_list:
                # The <a> content has a lot of things, in particular
                # multiple spaces to be removed:
                text = " ".join(a.text.split())
                # After removing all the multiple spaces
                # only two words space-separated will remain:
                # text_split[0] == the level (Recommended, Secure, Weak, etc)
                # text_split[1] == the cipher name
                text_split = text.split(" ")

                # The href points to the detail page where the IANA name can be found
                # that is the same used by ssllab
                # Convert the href to a path to take the last part
                iana_name = Path(a["href"]).name
                if text_split[1] == cipher:
                    level = text_split[0]
                    if level == "Recommended":
                        log.info(f"{cipher: <30} {iana_name: <45} {level}")
                    elif level == "Secure":
                        log.info(f"{cipher: <30} {iana_name: <45} {level}")
                    elif level == "Weak":
                        log.warning(f"{cipher: <30} {iana_name: <45} {level}")
                    else:
                        log.error(f"{cipher: <30} {iana_name: <45} {level}")
                    break
            else:
                # for/else: no search result matched this cipher name.
                log.error("{} not found on ciphersuite", cipher)
| [
"loguru.logger.info",
"pathlib.Path",
"loguru.logger.warning",
"requests.get",
"bs4.BeautifulSoup",
"loguru.logger.error"
] | [((579, 651), 'requests.get', 'requests.get', (['f"""https://ciphersuite.info/search/?q={cipher}"""'], {'timeout': '(30)'}), "(f'https://ciphersuite.info/search/?q={cipher}', timeout=30)\n", (591, 651), False, 'import requests\n'), ((701, 740), 'bs4.BeautifulSoup', 'BeautifulSoup', (['page.content', '"""html5lib"""'], {}), "(page.content, 'html5lib')\n", (714, 740), False, 'from bs4 import BeautifulSoup\n'), ((2299, 2347), 'loguru.logger.error', 'log.error', (['"""{} not found on ciphersuite"""', 'cipher'], {}), "('{} not found on ciphersuite', cipher)\n", (2308, 2347), True, 'from loguru import logger as log\n'), ((1664, 1679), 'pathlib.Path', 'Path', (["a['href']"], {}), "(a['href'])\n", (1668, 1679), False, 'from pathlib import Path\n'), ((1842, 1893), 'loguru.logger.info', 'log.info', (['f"""{cipher: <30} {iana_name: <45} {level}"""'], {}), "(f'{cipher: <30} {iana_name: <45} {level}')\n", (1850, 1893), True, 'from loguru import logger as log\n'), ((1962, 2013), 'loguru.logger.info', 'log.info', (['f"""{cipher: <30} {iana_name: <45} {level}"""'], {}), "(f'{cipher: <30} {iana_name: <45} {level}')\n", (1970, 2013), True, 'from loguru import logger as log\n'), ((2080, 2134), 'loguru.logger.warning', 'log.warning', (['f"""{cipher: <30} {iana_name: <45} {level}"""'], {}), "(f'{cipher: <30} {iana_name: <45} {level}')\n", (2091, 2134), True, 'from loguru import logger as log\n'), ((2185, 2237), 'loguru.logger.error', 'log.error', (['f"""{cipher: <30} {iana_name: <45} {level}"""'], {}), "(f'{cipher: <30} {iana_name: <45} {level}')\n", (2194, 2237), True, 'from loguru import logger as log\n')] |
#!/usr/bin/env python
# coding: utf-8
import os
#from torchvision import datasets
import torch
from torch.utils.data import Dataset, DataLoader
from pathlib import Path
import glob
import torchvision.transforms as transforms
from PIL import Image
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
# the following import is required for training to be robust to truncated images
from tqdm import tqdm
from PIL import ImageFile
import numpy as np
from data import CustomDataset
import torchvision.models as models
import torch.nn as nn
if __name__ =='__main__':
ImageFile.LOAD_TRUNCATED_IMAGES = True
use_cuda = torch.cuda.is_available()
transform_train = transforms.Compose([
transforms.CenterCrop((224,224)),
transforms.RandomHorizontalFlip(),
transforms.RandomPerspective(),
transforms.RandomRotation(30),
transforms.ToTensor(),
transforms.RandomErasing(),
transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
])
transform_valid = transforms.Compose([
transforms.CenterCrop((224,224)),
transforms.ToTensor(),
transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
])
train_data = CustomDataset('dogImages/train', transform_train)
valid_data = CustomDataset('dogImages/valid', transform_valid)
test_data = CustomDataset('dogImages/test', transform_valid)
batch_size = 64
num_workers = 5
dataloader_train = DataLoader(train_data, batch_size=batch_size,
shuffle=True, num_workers=num_workers)
dataloader_valid= DataLoader(valid_data, batch_size=batch_size,
shuffle=False, num_workers=num_workers)
dataloader_test = DataLoader(test_data, batch_size=batch_size,
shuffle=False, num_workers=num_workers)
# define the CNN architecture
    # CNN classifier built from scratch for 133 dog breeds; expects
    # 3x224x224 inputs (see transform_train's CenterCrop above).
    class Net(nn.Module):
        ### TODO: choose an architecture, and complete the class
        def __init__(self):
            super(Net, self).__init__()
            ## Define layers of a CNN
            self.conv1 = nn.Conv2d(in_channels=3, out_channels=32, kernel_size=3, padding=1) # in: 3x224x224 out: 32x112x112
            self.conv2 = nn.Conv2d(32, 64, 3, padding=1) # in: 32x112x112 out:64x56x56
            self.conv3 = nn.Conv2d(64, 128, 3, padding=1) # in: 64x56x56 out: 128x28x28
            self.pool1 = nn.MaxPool2d(kernel_size=2, stride=2)
            # Classifier head: flattened 128x28x28 feature map -> 512 -> 133 breeds.
            self.fc1 = nn.Linear(in_features=128 * 28 * 28, out_features=512)
            self.fc2 = nn.Linear(512, 133)
            self.dropout = nn.Dropout(p=0.5)

        def forward(self, x):
            ## Define forward behavior
            # Three conv->relu->maxpool stages halve spatial dims each time.
            x = self.pool1(F.relu(self.conv1(x)))
            x = self.pool1(F.relu(self.conv2(x)))
            x = self.pool1(F.relu(self.conv3(x)))
            x = self.dropout(x)
            # flatten image input
            x = x.view(-1, 128 * 28 * 28)
            # add dropout layer
            x = F.relu(self.fc1(x))
            x = self.dropout(x)
            # Raw logits (no softmax): CrossEntropyLoss applies log-softmax.
            x = self.fc2(x)
            return x
#-#-# You do NOT have to modify the code below this line. #-#-#
# instantiate the CNN
model_scratch = Net()
# move tensors to GPU if CUDA is available
if use_cuda:
model_scratch.cuda()
### TODO: select loss function
criterion_scratch = nn.CrossEntropyLoss()
### TODO: select optimizer
optimizer_scratch = optim.SGD(model_scratch.parameters(), lr=0.05)
    def train(n_epochs, loaders, model, optimizer, criterion, use_cuda, save_path):
        """returns trained model

        Trains for n_epochs over loaders['train'], evaluates on
        loaders['valid'] each epoch, and checkpoints the state dict to
        save_path whenever validation loss improves.
        NOTE(review): assumes CUDA — .cuda() is called unconditionally,
        despite the use_cuda flag; confirm this only runs on GPU hosts.
        """
        # initialize tracker for minimum validation loss
        valid_loss_min = np.Inf
        print("CUDA:", use_cuda)
        for epoch in tqdm(range(1, n_epochs+1),desc='Epochs'):
            # initialize variables to monitor training and validation loss
            train_loss = 0.0
            valid_loss = 0.0

            ###################
            # train the model #
            ###################
            model.train()
            for batch_idx, (data, target) in enumerate(tqdm(loaders['train'], desc='Train')):
                # move to GPU
                data, target = data.cuda(), target.cuda()
                ## find the loss and update the model parameters accordingly
                ## record the average training loss, using something like
                ## train_loss = train_loss + ((1 / (batch_idx + 1)) * (loss.data - train_loss))
                optimizer.zero_grad()
                output = model(data)
                loss = criterion(output, target)
                loss.backward()
                optimizer.step()
                # Running-mean update of the epoch's average training loss.
                train_loss += ((1 / (batch_idx + 1)) * (loss.data - train_loss))

            ######################
            # validate the model #
            ######################
            model.eval()
            for batch_idx, (data, target) in enumerate(tqdm(loaders['valid'], desc='Valid')):
                # move to GPU
                data, target = data.cuda(), target.cuda()
                ## update the average validation loss
                output = model(data)
                loss = criterion(output, target)
                valid_loss += ((1 / (batch_idx + 1)) * (loss.data - valid_loss))

            # print training/validation statistics
            print('Epoch: {} \tTraining Loss: {:.6f} \tValidation Loss: {:.6f}'.format(
                epoch,
                train_loss,
                valid_loss
                ))

            ## TODO: save the model if validation loss has decreased
            if valid_loss < valid_loss_min:
                valid_loss_min = valid_loss
                torch.save(model.state_dict(), save_path)
        # return trained model
        # NOTE(review): returns the model in its final-epoch state, not the
        # best checkpoint — callers reload save_path to get the best weights.
        return model
loaders_scratch = {'train': dataloader_train,
'valid': dataloader_valid,
'test': dataloader_test}
# train the model
# model_scratch = train(100,
# loaders_scratch,
# model_scratch,
# optimizer_scratch,
# criterion_scratch,
# use_cuda,
# 'model_scratch.pt')
# model_scratch.load_state_dict(torch.load('model_scratch.pt'))
def test(loaders, model, criterion, use_cuda):
# monitor test loss and accuracy
test_loss = 0.
correct = 0.
total = 0.
model.eval()
for batch_idx, (data, target) in enumerate(tqdm(loaders['test'], desc='Testing')):
# move to GPU
data, target = data.cuda(), target.cuda()
# forward pass: compute predicted outputs by passing inputs to the model
output = model(data)
# calculate the loss
loss = criterion(output, target)
# update average test loss
test_loss = test_loss + ((1 / (batch_idx + 1)) * (loss.data - test_loss))
# convert output probabilities to predicted class
pred = output.data.max(1, keepdim=True)[1]
# compare predictions to true label
correct += np.sum(np.squeeze(pred.eq(target.data.view_as(pred))).cpu().numpy())
total += data.size(0)
print('Test Loss: {:.6f}\n'.format(test_loss))
print('\nTest Accuracy: %2d%% (%2d/%2d)' % (
100. * correct / total, correct, total))
# call test function
#test(loaders_scratch, model_scratch, criterion_scratch, use_cuda)
## TODO: Specify data loaders
# transform = transforms.Compose([
# transforms.CenterCrop((224,224)),
# transforms.ToTensor(),
# transforms.Normalize(mean=[0.485, 0.456, 0.406],
# std=[0.229, 0.224, 0.225])
# ])
# train_data = CustomDataset('dogImages/train', transform)
# valid_data = CustomDataset('dogImages/valid', transform)
# test_data = CustomDataset('dogImages/test', transform)
batch_size = 64
num_workers = 5
# dataloader_train = DataLoader(train_data, batch_size=batch_size,
# shuffle=True, num_workers=num_workers)
# dataloader_valid= DataLoader(valid_data, batch_size=batch_size,
# shuffle=True, num_workers=num_workers)
# dataloader_test = DataLoader(test_data, batch_size=batch_size,
# shuffle=True, num_workers=num_workers)
loaders_transfer = {'train': dataloader_train,
'valid': dataloader_valid,
'test': dataloader_test}
## TODO: Specify model architecture
model_transfer = models.vgg16(pretrained=True)
# Prevent weights from being updated
for param in model_transfer.features.parameters():
param.requires_grad = False
print("in_features: ", model_transfer.classifier[6].in_features)
print("out_features: ", model_transfer.classifier[6].out_features)
n_inputs = model_transfer.classifier[6].in_features
last_layer = nn.Linear(n_inputs, 133) # This has required_grad=True by default
model_transfer.classifier[6] = last_layer
print("out_features: ", model_transfer.classifier[6].out_features)
model_transfer = model_transfer.cuda()
criterion_transfer = nn.CrossEntropyLoss()
optimizer_transfer = optim.SGD(model_transfer.parameters(), lr=0.05)
model_transfer.load_state_dict(torch.load('model_transfer.pt'))
# train the model
model_transfer = train(5,
loaders_transfer,
model_transfer,
optimizer_transfer,
criterion_transfer,
use_cuda,
'model_transfer.pt')
# load the model that got the best validation accuracy (uncomment the line below)
model_transfer.load_state_dict(torch.load('model_transfer.pt'))
test(loaders_transfer, model_transfer, criterion_transfer, use_cuda)
| [
"torchvision.transforms.CenterCrop",
"torchvision.transforms.RandomPerspective",
"data.CustomDataset",
"torch.nn.Dropout",
"torch.nn.CrossEntropyLoss",
"torchvision.transforms.RandomRotation",
"torch.load",
"tqdm.tqdm",
"torchvision.transforms.RandomHorizontalFlip",
"torch.nn.Conv2d",
"torchvisi... | [((662, 687), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (685, 687), False, 'import torch\n'), ((1317, 1366), 'data.CustomDataset', 'CustomDataset', (['"""dogImages/train"""', 'transform_train'], {}), "('dogImages/train', transform_train)\n", (1330, 1366), False, 'from data import CustomDataset\n'), ((1384, 1433), 'data.CustomDataset', 'CustomDataset', (['"""dogImages/valid"""', 'transform_valid'], {}), "('dogImages/valid', transform_valid)\n", (1397, 1433), False, 'from data import CustomDataset\n'), ((1450, 1498), 'data.CustomDataset', 'CustomDataset', (['"""dogImages/test"""', 'transform_valid'], {}), "('dogImages/test', transform_valid)\n", (1463, 1498), False, 'from data import CustomDataset\n'), ((1564, 1653), 'torch.utils.data.DataLoader', 'DataLoader', (['train_data'], {'batch_size': 'batch_size', 'shuffle': '(True)', 'num_workers': 'num_workers'}), '(train_data, batch_size=batch_size, shuffle=True, num_workers=\n num_workers)\n', (1574, 1653), False, 'from torch.utils.data import Dataset, DataLoader\n'), ((1699, 1789), 'torch.utils.data.DataLoader', 'DataLoader', (['valid_data'], {'batch_size': 'batch_size', 'shuffle': '(False)', 'num_workers': 'num_workers'}), '(valid_data, batch_size=batch_size, shuffle=False, num_workers=\n num_workers)\n', (1709, 1789), False, 'from torch.utils.data import Dataset, DataLoader\n'), ((1837, 1926), 'torch.utils.data.DataLoader', 'DataLoader', (['test_data'], {'batch_size': 'batch_size', 'shuffle': '(False)', 'num_workers': 'num_workers'}), '(test_data, batch_size=batch_size, shuffle=False, num_workers=\n num_workers)\n', (1847, 1926), False, 'from torch.utils.data import Dataset, DataLoader\n'), ((3474, 3495), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (3493, 3495), True, 'import torch.nn as nn\n'), ((8898, 8927), 'torchvision.models.vgg16', 'models.vgg16', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (8910, 8927), True, 'import 
torchvision.models as models\n'), ((9285, 9309), 'torch.nn.Linear', 'nn.Linear', (['n_inputs', '(133)'], {}), '(n_inputs, 133)\n', (9294, 9309), True, 'import torch.nn as nn\n'), ((9546, 9567), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (9565, 9567), True, 'import torch.nn as nn\n'), ((9677, 9708), 'torch.load', 'torch.load', (['"""model_transfer.pt"""'], {}), "('model_transfer.pt')\n", (9687, 9708), False, 'import torch\n'), ((10151, 10182), 'torch.load', 'torch.load', (['"""model_transfer.pt"""'], {}), "('model_transfer.pt')\n", (10161, 10182), False, 'import torch\n'), ((740, 773), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', (['(224, 224)'], {}), '((224, 224))\n', (761, 773), True, 'import torchvision.transforms as transforms\n'), ((782, 815), 'torchvision.transforms.RandomHorizontalFlip', 'transforms.RandomHorizontalFlip', ([], {}), '()\n', (813, 815), True, 'import torchvision.transforms as transforms\n'), ((825, 855), 'torchvision.transforms.RandomPerspective', 'transforms.RandomPerspective', ([], {}), '()\n', (853, 855), True, 'import torchvision.transforms as transforms\n'), ((865, 894), 'torchvision.transforms.RandomRotation', 'transforms.RandomRotation', (['(30)'], {}), '(30)\n', (890, 894), True, 'import torchvision.transforms as transforms\n'), ((904, 925), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (923, 925), True, 'import torchvision.transforms as transforms\n'), ((935, 961), 'torchvision.transforms.RandomErasing', 'transforms.RandomErasing', ([], {}), '()\n', (959, 961), True, 'import torchvision.transforms as transforms\n'), ((971, 1046), 'torchvision.transforms.Normalize', 'transforms.Normalize', ([], {'mean': '[0.485, 0.456, 0.406]', 'std': '[0.229, 0.224, 0.225]'}), '(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n', (991, 1046), True, 'import torchvision.transforms as transforms\n'), ((1126, 1159), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', 
(['(224, 224)'], {}), '((224, 224))\n', (1147, 1159), True, 'import torchvision.transforms as transforms\n'), ((1168, 1189), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1187, 1189), True, 'import torchvision.transforms as transforms\n'), ((1199, 1274), 'torchvision.transforms.Normalize', 'transforms.Normalize', ([], {'mean': '[0.485, 0.456, 0.406]', 'std': '[0.229, 0.224, 0.225]'}), '(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n', (1219, 1274), True, 'import torchvision.transforms as transforms\n'), ((2210, 2277), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': '(3)', 'out_channels': '(32)', 'kernel_size': '(3)', 'padding': '(1)'}), '(in_channels=3, out_channels=32, kernel_size=3, padding=1)\n', (2219, 2277), True, 'import torch.nn as nn\n'), ((2335, 2366), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(64)', '(3)'], {'padding': '(1)'}), '(32, 64, 3, padding=1)\n', (2344, 2366), True, 'import torch.nn as nn\n'), ((2422, 2454), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(128)', '(3)'], {'padding': '(1)'}), '(64, 128, 3, padding=1)\n', (2431, 2454), True, 'import torch.nn as nn\n'), ((2510, 2547), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(2)', 'stride': '(2)'}), '(kernel_size=2, stride=2)\n', (2522, 2547), True, 'import torch.nn as nn\n'), ((2571, 2625), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': '(128 * 28 * 28)', 'out_features': '(512)'}), '(in_features=128 * 28 * 28, out_features=512)\n', (2580, 2625), True, 'import torch.nn as nn\n'), ((2649, 2668), 'torch.nn.Linear', 'nn.Linear', (['(512)', '(133)'], {}), '(512, 133)\n', (2658, 2668), True, 'import torch.nn as nn\n'), ((2696, 2713), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': '(0.5)'}), '(p=0.5)\n', (2706, 2713), True, 'import torch.nn as nn\n'), ((6736, 6773), 'tqdm.tqdm', 'tqdm', (["loaders['test']"], {'desc': '"""Testing"""'}), "(loaders['test'], desc='Testing')\n", (6740, 6773), False, 'from tqdm import tqdm\n'), ((4230, 4266), 
'tqdm.tqdm', 'tqdm', (["loaders['train']"], {'desc': '"""Train"""'}), "(loaders['train'], desc='Train')\n", (4234, 4266), False, 'from tqdm import tqdm\n'), ((5080, 5116), 'tqdm.tqdm', 'tqdm', (["loaders['valid']"], {'desc': '"""Valid"""'}), "(loaders['valid'], desc='Valid')\n", (5084, 5116), False, 'from tqdm import tqdm\n')] |
# -*- coding: utf-8 -*-
import scrapy
from floss.items import WikipediaRowItem
from floss.items import WikipediaSoftwareItem
class WikipediaSpider(scrapy.Spider):
name = "wikipedia"
allowed_domains = ["wikipedia.org"]
start_urls = (
"https://fr.wikipedia.org/wiki/Correspondance_entre_logiciels_libres_et_logiciels_propriétaires",
)
def parse(self, response):
# On construit d'abord des listes de logiciels en fonction des catégories et des sous-catégories
for categorie in response.xpath("//table/tr[position() > 2]"):
ligne = WikipediaRowItem()
ligne['categorie'] = "".join(categorie.xpath("../preceding-sibling::h3[1]/span[@class='mw-headline']//text()").extract())
ligne['fonctionnalite'] = "".join(categorie.xpath("th//text()").extract())
ligne['logiciels_proprietaires'] = categorie.xpath("td[1]/ul/li/a[1]/text()").extract()
ligne['logiciels_libres_windows'] = categorie.xpath("td[2]/ul/li/a[1]/text()").extract()
ligne['logiciels_libres_linux'] = categorie.xpath("td[3]/ul/li/a[1]/text()").extract()
ligne['logiciels_libres_mac'] = categorie.xpath("td[4]/ul/li/a[1]/text()").extract()
ligne['logiciels_libres_bsd'] = categorie.xpath("td[5]/ul/li/a[1]/text()").extract()
yield ligne
# On construit ensuite les fiches d'identité de tous les logiciels (GNU/Linux uniquement)
for lien in response.xpath("//table/tr[position() > 2]/td[3]/ul/li/a[1]"):
logiciel = WikipediaSoftwareItem()
url_brute = str(lien.xpath("@href").extract()) # La conversion est obligatoire, sinon ça renvoie une liste
url_traitee = url_brute[3:len(url_brute)-2] # On enlève les [u'...']
logiciel['nom'] = "".join(lien.xpath("text()").extract())
url = response.urljoin(url_traitee)
yield scrapy.Request(url, callback=self.parse_software, meta={'logiciel': logiciel}, dont_filter=True)
def parse_software(self, response):
logiciel = response.meta.get('logiciel', None)
# On doit transformer les URLs relatives en URLs absolues pour que Scrapy puisse les aspirer
liste_urls_relatives = response.xpath("//table[@class='infobox_v2']/tr/td/a[@class='image']/img/@src").extract()
liste_urls_absolues = []
for url_relative in liste_urls_relatives:
liste_urls_absolues.append('https:' + url_relative)
logiciel['image_urls'] = liste_urls_absolues
logiciel['developpeurs'] = response.xpath("//table[@class='infobox_v2']/tr/th/a[@href='/wiki/D%C3%A9veloppeur']/../../td//text()").extract()
logiciel['version'] = "".join(response.xpath("//table[@class='infobox_v2']/tr/th/a[@href='/wiki/Version_d%27un_logiciel']/../../td//text()").extract())
logiciel['technologies'] = response.xpath("//table[@class='infobox_v2']/tr/th/a[@title='Langage de programmation']/../../td/a/text()").extract()
logiciel['langues'] = response.xpath("//table[@class='infobox_v2']/tr/th/a[@title='Internationalisation (informatique)']/../../td/a/text()").extract()
logiciel['licences'] = response.xpath("//table[@class='infobox_v2']/tr/th/a[@title='Licence de logiciel']/../../td/a/text()").extract()
logiciel['site'] = "".join(response.xpath("//table[@class='infobox_v2']/tr/th/a[@title='Site web']/../../td/a[1]/text()").extract())
logiciel['description'] = "".join(response.xpath("//div[@id='mw-content-text']/p[1]//text()").extract())
yield logiciel
| [
"floss.items.WikipediaRowItem",
"scrapy.Request",
"floss.items.WikipediaSoftwareItem"
] | [((587, 605), 'floss.items.WikipediaRowItem', 'WikipediaRowItem', ([], {}), '()\n', (603, 605), False, 'from floss.items import WikipediaRowItem\n'), ((1549, 1572), 'floss.items.WikipediaSoftwareItem', 'WikipediaSoftwareItem', ([], {}), '()\n', (1570, 1572), False, 'from floss.items import WikipediaSoftwareItem\n'), ((1909, 2009), 'scrapy.Request', 'scrapy.Request', (['url'], {'callback': 'self.parse_software', 'meta': "{'logiciel': logiciel}", 'dont_filter': '(True)'}), "(url, callback=self.parse_software, meta={'logiciel':\n logiciel}, dont_filter=True)\n", (1923, 2009), False, 'import scrapy\n')] |
from robot import RoboLogger
from robot import Message
from robot import MQTTEngine
from robot.common.singleton import Singleton
import asyncio
from collections import deque
import traceback
from uuid import uuid4, UUID
log = RoboLogger()
class InboundMessageProcessor(metaclass=Singleton):
"""
Description:
Class, singleton, to manage inbound messages
"""
def __init__(
self,
event_loop: asyncio.BaseEventLoop,
mqtt_configuration: dict) -> None:
"""
Description :
Constructor.
Args:
mqtt_configuration : dict, configuration dict that tells where to
connect, which topics to listen to, etc.
event_loop : event loop for the runner
"""
try:
# Type and value checking
if not isinstance(mqtt_configuration, dict):
raise TypeError('mqtt_configuration has to be a dictionnary')
if not isinstance(event_loop, asyncio.BaseEventLoop):
raise TypeError(f'Constructor requires event_loop to be of '
f'asyncio.BaseEventLoop() class')
self.event_loop = event_loop
self.__mqtt_engine = MQTTEngine(
mqtt_configuration=mqtt_configuration,
event_loop=self.event_loop)
self.in_msg_q = self.__mqtt_engine.in_msg_q
self.recipient_map = {} # {taskman_node_id: deque()}
self.__running = False
except Exception:
raise (f'Problem in init : traceback = {traceback.print_exc()}')
async def run(self) -> None:
"""
Description:
starts the runner
"""
try:
# Launch the MQTT engine
self.__mqtt_engine.run()
self.__running = True
while self.__running:
# Get q'd message from engine
msg = await self.__mqtt_engine.in_msg_q.get()
# Get the proper Q for the target
q = deque(self.recipient_map[msg.dst_node_id])
q.extend(msg)
except:
pass
def register(
self,
node_id: UUID,
in_msg_q: asyncio.Queue) -> None:
"""
Description:
Used to register a new itemto the recipient map, so that inbound
messages are sent to the right queue for processing
Args:
node_id : id of the node that registers (uuid)
in_msg_q : queue that needs to be registered
"""
if not isinstance(node_id, UUID):
raise TypeError('node_id has to be of type UUID')
if not isinstance(in_msg_q, asyncio.Queue):
raise TypeError('in_msg_q has to be of type asyncio.Queue')
self.recipient_map[node_id] = in_msg_q
@staticmethod
def get_instance():
return Singleton._instances[__class__]
def graceful_shutdown(self) -> None:
"""
Description:
Used to gracefully shut down the message processor
"""
self.__running = False
| [
"robot.RoboLogger",
"traceback.print_exc",
"collections.deque",
"robot.MQTTEngine"
] | [((228, 240), 'robot.RoboLogger', 'RoboLogger', ([], {}), '()\n', (238, 240), False, 'from robot import RoboLogger\n'), ((1252, 1329), 'robot.MQTTEngine', 'MQTTEngine', ([], {'mqtt_configuration': 'mqtt_configuration', 'event_loop': 'self.event_loop'}), '(mqtt_configuration=mqtt_configuration, event_loop=self.event_loop)\n', (1262, 1329), False, 'from robot import MQTTEngine\n'), ((2065, 2107), 'collections.deque', 'deque', (['self.recipient_map[msg.dst_node_id]'], {}), '(self.recipient_map[msg.dst_node_id])\n', (2070, 2107), False, 'from collections import deque\n'), ((1598, 1619), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (1617, 1619), False, 'import traceback\n')] |
import streamlit as st
import pandas as pd
import numpy as np
import json
import pandas as pd
from pathlib import Path
from datetime import datetime,timedelta
import matplotlib.pyplot as plt
from plotly_calplot import calplot
import plotly.express as px
from utils import (
load_config,
save_config,
pretty_print_json,
save_json_to_file,
polar_datetime_to_python_datetime_str,
xml_to_dict,
)
from accesslink import AccessLink
from datetime import datetime
from pathlib import Path
import os
CONFIG_FILENAME = "config.yml"
DATA_DIR = Path("./data")
class PolarData():
"""Example application for Polar Open AccessLink v3."""
def __init__(self):
self.config = load_config(CONFIG_FILENAME)
if "access_token" not in self.config:
print("Authorization is required. Run authorization.py first.")
return
self.accesslink = AccessLink(
client_id=self.config["client_id"],
client_secret=self.config["client_secret"],
)
self.running = True
self.check_available_data()
def get_user_information(self):
user_info = self.accesslink.users.get_information(
user_id=self.config["user_id"], access_token=self.config["access_token"]
)
pretty_print_json(user_info)
USER_DIR = DATA_DIR / "user_data"
if not USER_DIR.is_dir():
os.mkdir(USER_DIR)
save_json_to_file(
user_info,
USER_DIR / f'user_data_{datetime.today().strftime("%Y-%m-%d")}.json',
)
def check_available_data(self):
available_data = self.accesslink.pull_notifications.list()
if not available_data:
print("No new data available.")
return
for item in available_data["available-user-data"]:
if item["data-type"] == "EXERCISE":
self.get_exercises()
elif item["data-type"] == "ACTIVITY_SUMMARY":
self.get_daily_activity()
elif item["data-type"] == "PHYSICAL_INFORMATION":
self.get_physical_info()
def revoke_access_token(self):
self.accesslink.users.delete(
user_id=self.config["user_id"], access_token=self.config["access_token"]
)
del self.config["access_token"]
del self.config["user_id"]
save_config(self.config, CONFIG_FILENAME)
print("Access token was successfully revoked.")
def get_exercises(self):
transaction = self.accesslink.training_data.create_transaction(
user_id=self.config["user_id"], access_token=self.config["access_token"]
)
if not transaction:
print("No new exercises available.")
return
resource_urls = transaction.list_exercises()["exercises"]
EXERCISE_DIR = DATA_DIR / "exercise"
if not EXERCISE_DIR.is_dir():
os.mkdir(EXERCISE_DIR)
for url in resource_urls:
exercise_summary = transaction.get_exercise_summary(url)
gpx_data = transaction.get_gpx(url)
tcx_data = transaction.get_tcx(url)
hr_data = transaction.get_heart_rate_zones(url)
samples_data = transaction.get_available_samples(url)
sample_data = transaction.get_samples(url)
time = polar_datetime_to_python_datetime_str(
str(exercise_summary["start-time"])
)
save_json_to_file(
exercise_summary, EXERCISE_DIR / f"summary_data_{time}.json"
)
if (
gpx_data
): # not empty dict. If there is no data, this variable will have '{}' value
save_json_to_file(
xml_to_dict(gpx_data), EXERCISE_DIR / f"gpx_data_{time}.json"
)
if tcx_data:
save_json_to_file(
xml_to_dict(tcx_data), EXERCISE_DIR / f"tcx_data_{time}.json"
)
if hr_data:
save_json_to_file(hr_data, EXERCISE_DIR / f"hr_data_{time}.json")
if samples_data:
save_json_to_file(
samples_data, EXERCISE_DIR / f"samples_data_{time}.json"
)
if sample_data:
save_json_to_file(
sample_data, EXERCISE_DIR / f"sample_data_{time}.json"
)
transaction.commit()
def get_daily_activity(self):
transaction = self.accesslink.daily_activity.create_transaction(
user_id=self.config["user_id"], access_token=self.config["access_token"]
)
if not transaction:
print("No new daily activity available.")
return
resource_urls = transaction.list_activities()["activity-log"]
ACTIVITY_DIR = DATA_DIR / "activity"
if not ACTIVITY_DIR.is_dir():
os.mkdir(ACTIVITY_DIR)
for url in resource_urls:
activity_summary = transaction.get_activity_summary(url)
save_json_to_file(
activity_summary,
ACTIVITY_DIR
/ f'daily_activity_data_{str(activity_summary["date"])}.json',
)
transaction.commit()
def get_physical_info(self):
transaction = self.accesslink.physical_info.create_transaction(
user_id=self.config["user_id"], access_token=self.config["access_token"]
)
if not transaction:
print("No new physical information available.")
return
PHYSICAL_DIR = DATA_DIR / "physical"
resource_urls = transaction.list_physical_infos()["physical-informations"]
if not PHYSICAL_DIR.is_dir():
os.mkdir(PHYSICAL_DIR)
try:
for url in resource_urls:
physical_info = transaction.get_physical_info(url)
time = polar_datetime_to_python_datetime_str(
str(physical_info["created"])
)
save_json_to_file(
physical_info, PHYSICAL_DIR / f"physical_data{time}.json"
)
transaction.commit()
except FileNotFoundError:
print("Missing directory")
def polar_datetime_to_python_datetime_str(polar_dt):
new_dt = polar_dt.replace("T", " ")
date_time_obj = datetime.strptime(new_dt, "%Y-%m-%d %H:%M:%S.%f")
return date_time_obj.strftime("%Y-%m-%d+%H_%M_%S_%f")
def polar_time_conversion(polar_t):
return timedelta(seconds=int(float(polar_t.replace("PT", "").replace("S", "")))) /timedelta(minutes=1)
def load_data():
DIR = Path(r"data/user_data")
files = sorted(DIR.glob("training-session*.json"))
dfs = list()
for file in files:
with open(file, "r") as f:
data = json.load(f)
if "kiloCalories" in data.keys():
dfs.append(
pd.DataFrame(
[
[
data["kiloCalories"],
polar_datetime_to_python_datetime_str(
data["exercises"][0]["startTime"]
).split("+")[0],
data["exercises"][0]["sport"],
polar_time_conversion(data["exercises"][0]["duration"]),
]
],
columns=["Calories", "Date", "Sport", "Duration"],
)
)
df = pd.concat(dfs)
df["Date"] = pd.to_datetime(df["Date"])
return df
st.set_page_config(layout="wide")
col1, col2 = st.columns([4, 1])
data = load_data()
option =st.sidebar.selectbox('Year',data.Date.dt.year.unique(),index=len(data.Date.dt.year.unique())-1)
total_daily_calories = (
data.loc[data.Date.dt.year == option].groupby("Date").sum().reset_index()
)
fig = calplot(
total_daily_calories,
x="Date",
y="Calories",
name="Calories",
colorscale="purples",
month_lines_width=2,
month_lines_color="#d9d9d9",
)
fig.update_layout(height=250)
with col1:
st.title("Calories burned per day")
st.plotly_chart(fig,use_container_width=True)
st.title("Weekly summaries")
st.header('Training time')
data['Week'] = data['Date'].dt.isocalendar().week
week_data=data.loc[data.Date.dt.year == option].groupby(['Week']).sum().reset_index()
total_daily_training=data.loc[data.Date.dt.year == option].groupby('Date').sum().reset_index()
fig2=px.bar(week_data,x='Week',y='Duration')
fig2.update_layout(height=250)
st.plotly_chart(fig2,use_container_width=True)
st.header('Total calories')
fig3=px.bar(week_data,x='Week',y='Calories')
fig3.update_layout(height=250)
st.plotly_chart(fig3,use_container_width=True)
with col2:
st.header('YTD Stats')
#YTD Stats
ytd_training_time = pd.to_timedelta(data.loc[data.Date.dt.year == option,'Duration'].sum(),unit='m')
seconds = ytd_training_time.seconds
hours = seconds//3600
minutes = (seconds//60)%60
delta_training_time = data.loc[data.Date.dt.year == option,'Duration'].iloc[-1]
st.metric(label='Training time',value=f'{hours} hrs {minutes} mins',delta=f'{delta_training_time:.0f} mins')
ytd_calories = data.loc[data.Date.dt.year == option,'Calories'].sum()
delta_calories = int(data.loc[data.Date.dt.year == option,'Calories'].iloc[-1])
st.metric(label="Calories burned", value=ytd_calories, delta=delta_calories)
ytd_sessions = data.loc[data.Date.dt.year == option,'Duration'].count()
st.metric(label="Sessions", value=ytd_sessions)
st.subheader('Sports')
ytd_by_sport = data.loc[data.Date.dt.year == option].groupby(['Sport']).sum().reset_index()
fig4=px.pie(ytd_by_sport,values='Duration',names='Sport')
fig4.update_layout(margin=dict(l=0, r=0, t=0, b=0),height=600)
fig4.update_layout(legend=dict(
orientation="h",
yanchor="bottom",
y=0.9,
xanchor="right",
x=1
))
st.plotly_chart(fig4,use_container_width=True) | [
"utils.polar_datetime_to_python_datetime_str",
"streamlit.metric",
"datetime.datetime.today",
"datetime.timedelta",
"utils.pretty_print_json",
"streamlit.header",
"pandas.to_datetime",
"accesslink.AccessLink",
"plotly.express.pie",
"utils.save_config",
"streamlit.title",
"utils.save_json_to_fi... | [((560, 574), 'pathlib.Path', 'Path', (['"""./data"""'], {}), "('./data')\n", (564, 574), False, 'from pathlib import Path\n'), ((7657, 7690), 'streamlit.set_page_config', 'st.set_page_config', ([], {'layout': '"""wide"""'}), "(layout='wide')\n", (7675, 7690), True, 'import streamlit as st\n'), ((7704, 7722), 'streamlit.columns', 'st.columns', (['[4, 1]'], {}), '([4, 1])\n', (7714, 7722), True, 'import streamlit as st\n'), ((7959, 8105), 'plotly_calplot.calplot', 'calplot', (['total_daily_calories'], {'x': '"""Date"""', 'y': '"""Calories"""', 'name': '"""Calories"""', 'colorscale': '"""purples"""', 'month_lines_width': '(2)', 'month_lines_color': '"""#d9d9d9"""'}), "(total_daily_calories, x='Date', y='Calories', name='Calories',\n colorscale='purples', month_lines_width=2, month_lines_color='#d9d9d9')\n", (7966, 8105), False, 'from plotly_calplot import calplot\n'), ((6374, 6423), 'datetime.datetime.strptime', 'datetime.strptime', (['new_dt', '"""%Y-%m-%d %H:%M:%S.%f"""'], {}), "(new_dt, '%Y-%m-%d %H:%M:%S.%f')\n", (6391, 6423), False, 'from datetime import datetime\n'), ((6654, 6676), 'pathlib.Path', 'Path', (['"""data/user_data"""'], {}), "('data/user_data')\n", (6658, 6676), False, 'from pathlib import Path\n'), ((7582, 7596), 'pandas.concat', 'pd.concat', (['dfs'], {}), '(dfs)\n', (7591, 7596), True, 'import pandas as pd\n'), ((7614, 7640), 'pandas.to_datetime', 'pd.to_datetime', (["df['Date']"], {}), "(df['Date'])\n", (7628, 7640), True, 'import pandas as pd\n'), ((8178, 8213), 'streamlit.title', 'st.title', (['"""Calories burned per day"""'], {}), "('Calories burned per day')\n", (8186, 8213), True, 'import streamlit as st\n'), ((8218, 8264), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig'], {'use_container_width': '(True)'}), '(fig, use_container_width=True)\n', (8233, 8264), True, 'import streamlit as st\n'), ((8268, 8296), 'streamlit.title', 'st.title', (['"""Weekly summaries"""'], {}), "('Weekly summaries')\n", (8276, 
8296), True, 'import streamlit as st\n'), ((8301, 8327), 'streamlit.header', 'st.header', (['"""Training time"""'], {}), "('Training time')\n", (8310, 8327), True, 'import streamlit as st\n'), ((8580, 8621), 'plotly.express.bar', 'px.bar', (['week_data'], {'x': '"""Week"""', 'y': '"""Duration"""'}), "(week_data, x='Week', y='Duration')\n", (8586, 8621), True, 'import plotly.express as px\n'), ((8659, 8706), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig2'], {'use_container_width': '(True)'}), '(fig2, use_container_width=True)\n', (8674, 8706), True, 'import streamlit as st\n'), ((8710, 8737), 'streamlit.header', 'st.header', (['"""Total calories"""'], {}), "('Total calories')\n", (8719, 8737), True, 'import streamlit as st\n'), ((8747, 8788), 'plotly.express.bar', 'px.bar', (['week_data'], {'x': '"""Week"""', 'y': '"""Calories"""'}), "(week_data, x='Week', y='Calories')\n", (8753, 8788), True, 'import plotly.express as px\n'), ((8826, 8873), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig3'], {'use_container_width': '(True)'}), '(fig3, use_container_width=True)\n', (8841, 8873), True, 'import streamlit as st\n'), ((8888, 8910), 'streamlit.header', 'st.header', (['"""YTD Stats"""'], {}), "('YTD Stats')\n", (8897, 8910), True, 'import streamlit as st\n'), ((9218, 9333), 'streamlit.metric', 'st.metric', ([], {'label': '"""Training time"""', 'value': 'f"""{hours} hrs {minutes} mins"""', 'delta': 'f"""{delta_training_time:.0f} mins"""'}), "(label='Training time', value=f'{hours} hrs {minutes} mins', delta\n =f'{delta_training_time:.0f} mins')\n", (9227, 9333), True, 'import streamlit as st\n'), ((9489, 9565), 'streamlit.metric', 'st.metric', ([], {'label': '"""Calories burned"""', 'value': 'ytd_calories', 'delta': 'delta_calories'}), "(label='Calories burned', value=ytd_calories, delta=delta_calories)\n", (9498, 9565), True, 'import streamlit as st\n'), ((9646, 9693), 'streamlit.metric', 'st.metric', ([], {'label': '"""Sessions"""', 'value': 'ytd_sessions'}), 
"(label='Sessions', value=ytd_sessions)\n", (9655, 9693), True, 'import streamlit as st\n'), ((9698, 9720), 'streamlit.subheader', 'st.subheader', (['"""Sports"""'], {}), "('Sports')\n", (9710, 9720), True, 'import streamlit as st\n'), ((9826, 9880), 'plotly.express.pie', 'px.pie', (['ytd_by_sport'], {'values': '"""Duration"""', 'names': '"""Sport"""'}), "(ytd_by_sport, values='Duration', names='Sport')\n", (9832, 9880), True, 'import plotly.express as px\n'), ((10072, 10119), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig4'], {'use_container_width': '(True)'}), '(fig4, use_container_width=True)\n', (10087, 10119), True, 'import streamlit as st\n'), ((701, 729), 'utils.load_config', 'load_config', (['CONFIG_FILENAME'], {}), '(CONFIG_FILENAME)\n', (712, 729), False, 'from utils import load_config, save_config, pretty_print_json, save_json_to_file, polar_datetime_to_python_datetime_str, xml_to_dict\n'), ((899, 994), 'accesslink.AccessLink', 'AccessLink', ([], {'client_id': "self.config['client_id']", 'client_secret': "self.config['client_secret']"}), "(client_id=self.config['client_id'], client_secret=self.config[\n 'client_secret'])\n", (909, 994), False, 'from accesslink import AccessLink\n'), ((1291, 1319), 'utils.pretty_print_json', 'pretty_print_json', (['user_info'], {}), '(user_info)\n', (1308, 1319), False, 'from utils import load_config, save_config, pretty_print_json, save_json_to_file, polar_datetime_to_python_datetime_str, xml_to_dict\n'), ((2369, 2410), 'utils.save_config', 'save_config', (['self.config', 'CONFIG_FILENAME'], {}), '(self.config, CONFIG_FILENAME)\n', (2380, 2410), False, 'from utils import load_config, save_config, pretty_print_json, save_json_to_file, polar_datetime_to_python_datetime_str, xml_to_dict\n'), ((6605, 6625), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(1)'}), '(minutes=1)\n', (6614, 6625), False, 'from datetime import datetime, timedelta\n'), ((1408, 1426), 'os.mkdir', 'os.mkdir', (['USER_DIR'], {}), 
'(USER_DIR)\n', (1416, 1426), False, 'import os\n'), ((2924, 2946), 'os.mkdir', 'os.mkdir', (['EXERCISE_DIR'], {}), '(EXERCISE_DIR)\n', (2932, 2946), False, 'import os\n'), ((3464, 3543), 'utils.save_json_to_file', 'save_json_to_file', (['exercise_summary', "(EXERCISE_DIR / f'summary_data_{time}.json')"], {}), "(exercise_summary, EXERCISE_DIR / f'summary_data_{time}.json')\n", (3481, 3543), False, 'from utils import load_config, save_config, pretty_print_json, save_json_to_file, polar_datetime_to_python_datetime_str, xml_to_dict\n'), ((4922, 4944), 'os.mkdir', 'os.mkdir', (['ACTIVITY_DIR'], {}), '(ACTIVITY_DIR)\n', (4930, 4944), False, 'import os\n'), ((5751, 5773), 'os.mkdir', 'os.mkdir', (['PHYSICAL_DIR'], {}), '(PHYSICAL_DIR)\n', (5759, 5773), False, 'import os\n'), ((6827, 6839), 'json.load', 'json.load', (['f'], {}), '(f)\n', (6836, 6839), False, 'import json\n'), ((4041, 4106), 'utils.save_json_to_file', 'save_json_to_file', (['hr_data', "(EXERCISE_DIR / f'hr_data_{time}.json')"], {}), "(hr_data, EXERCISE_DIR / f'hr_data_{time}.json')\n", (4058, 4106), False, 'from utils import load_config, save_config, pretty_print_json, save_json_to_file, polar_datetime_to_python_datetime_str, xml_to_dict\n'), ((4152, 4227), 'utils.save_json_to_file', 'save_json_to_file', (['samples_data', "(EXERCISE_DIR / f'samples_data_{time}.json')"], {}), "(samples_data, EXERCISE_DIR / f'samples_data_{time}.json')\n", (4169, 4227), False, 'from utils import load_config, save_config, pretty_print_json, save_json_to_file, polar_datetime_to_python_datetime_str, xml_to_dict\n'), ((4310, 4383), 'utils.save_json_to_file', 'save_json_to_file', (['sample_data', "(EXERCISE_DIR / f'sample_data_{time}.json')"], {}), "(sample_data, EXERCISE_DIR / f'sample_data_{time}.json')\n", (4327, 4383), False, 'from utils import load_config, save_config, pretty_print_json, save_json_to_file, polar_datetime_to_python_datetime_str, xml_to_dict\n'), ((6039, 6115), 'utils.save_json_to_file', 'save_json_to_file', 
(['physical_info', "(PHYSICAL_DIR / f'physical_data{time}.json')"], {}), "(physical_info, PHYSICAL_DIR / f'physical_data{time}.json')\n", (6056, 6115), False, 'from utils import load_config, save_config, pretty_print_json, save_json_to_file, polar_datetime_to_python_datetime_str, xml_to_dict\n'), ((3761, 3782), 'utils.xml_to_dict', 'xml_to_dict', (['gpx_data'], {}), '(gpx_data)\n', (3772, 3782), False, 'from utils import load_config, save_config, pretty_print_json, save_json_to_file, polar_datetime_to_python_datetime_str, xml_to_dict\n'), ((3921, 3942), 'utils.xml_to_dict', 'xml_to_dict', (['tcx_data'], {}), '(tcx_data)\n', (3932, 3942), False, 'from utils import load_config, save_config, pretty_print_json, save_json_to_file, polar_datetime_to_python_datetime_str, xml_to_dict\n'), ((1513, 1529), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (1527, 1529), False, 'from datetime import datetime\n'), ((7090, 7162), 'utils.polar_datetime_to_python_datetime_str', 'polar_datetime_to_python_datetime_str', (["data['exercises'][0]['startTime']"], {}), "(data['exercises'][0]['startTime'])\n", (7127, 7162), False, 'from utils import load_config, save_config, pretty_print_json, save_json_to_file, polar_datetime_to_python_datetime_str, xml_to_dict\n')] |
# -*- coding: utf-8 -*-
# Copyright 2019 Open End AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import bson, email, py, os, time, uuid
from dateutil.relativedelta import relativedelta
from datetime import datetime, date, timedelta
from decimal import Decimal
try:
from itertools import izip_longest #py2
except ImportError:
from itertools import zip_longest as izip_longest #py3
from pytransact import commit, contextbroker, exceptions, mongo, queryops
from pytransact.exceptions import ClientError
from pytransact.testsupport import BLMTests, Time
from pytransact.object import model
import pytransact.runtime as ri
from accounting import config, mail, sie_import
import blm
import OpenSSL.crypto
from accounting import luhn
import copy
import pytest
import random
import string
import codecs
from accounting import bankgiro, plusgiro
from accounting.test.blmsupport import PermissionTests
import sys
# Python 2/3 text-coercion shims used throughout this module.
# PYT3 flags which interpreter we run under; py23txtu coerces a value to
# the native text type, py23txtuc decodes bytes with an explicit codec.
if sys.version_info < (3, 0):  # py2
    PYT3 = False

    def py23txtu(t):
        """Coerce *t* to unicode text."""
        return unicode(t)

    def py23txtuc(t, c):
        """Decode *t* to unicode text using codec *c*."""
        return unicode(t, c)
else:  # py3
    PYT3 = True

    def py23txtu(t):
        """Identity: str is already text on py3."""
        return t

    def py23txtuc(t, c):
        """Decode bytes *t* to str using codec *c*."""
        return str(t, c)
class TestRoles(BLMTests):
    """Role membership semantics for regular users vs. API users."""
    # All roles an org can grant.
    roles = [ 'admins', 'accountants', 'storekeepers', 'ticketcheckers', 'members',
              'invoicesenders' ]
    def test_user(self):
        # A plain user gains roles only once added to the org's user group.
        org = blm.accounting.Org()
        user = blm.accounting.User()
        assert not blm.accounting.currentUserHasRole(org, *self.roles, user=user)
        org.ug[0].users = [user]
        assert blm.accounting.currentUserHasRole(org, *self.roles, user=user)
    def test_apiuser(self):
        org = blm.accounting.Org()
        apiu = blm.accounting.APIUser()
        roles = ['admins', 'accountants', 'ticketcheckers', 'members', ]
        # UG membership alone never grants an API user these roles...
        assert not blm.accounting.currentUserHasRole(org, *roles, user=apiu)
        org.ug[0].users = [apiu]
        assert not blm.accounting.currentUserHasRole(org, *roles, user=apiu)
        # ...but invoicesender/storekeeper are implicit for API users.
        for role in ['invoicesenders', 'storekeepers']:
            assert blm.accounting.currentUserHasRole(org, role, user=apiu)

        # Explicitly assigned roles do apply, but only in the API user's
        # own org, not in an unrelated one.
        apiu.roles = ['admins']
        assert blm.accounting.currentUserHasRole(org, 'admins', user=apiu)
        org2 = blm.accounting.Org()
        assert not blm.accounting.currentUserHasRole(org2, 'invoicesenders', user=apiu)
class TestClient(BLMTests):
    """Basic lifecycle and permission tests for the Client TOC."""

    def test_create(self):
        # Creation with a name must succeed without raising.
        blm.accounting.Client(name=['foo'])

    def test_relation(self):
        # A client can be a member of a user group.
        toi = blm.accounting.Client()
        blm.accounting.UG(users=[toi])

    def test_stop_privilege_escalation(self):
        # A client acting as itself must not be able to flip its own
        # super flag.
        toi = blm.accounting.Client()
        self.commit()
        self.ctx.setUser(toi)
        py.test.raises(exceptions.ClientError, toi, super=[True])
class TestUser(BLMTests):
    """Lifecycle and permission tests for the User TOC."""
    def test_create(self):
        user = blm.accounting.User(name=['foo'], emailAddress=['<EMAIL>'])
    def test_relation(self):
        # A user can be a member of a user group.
        user = blm.accounting.User()
        ug = blm.accounting.UG(users=[user])
    def test_stop_privilege_escalation(self):
        # A user acting as itself must not be able to set its own
        # super flag.
        user = blm.accounting.User()
        self.commit()
        self.ctx.setUser(user)
        py.test.raises(exceptions.ClientError, user, super=[True])
    def test_openid_permissions(self):
        user1 = blm.accounting.User()
        self.commit()
        user1 = blm.accounting.User._query(id=user1).run()[0]
        self.ctx.setUser(user1)
        # A user may change its own openid attribute...
        user1(openid=['foo'])
        assert user1.openid == ['foo']
        user2 = blm.accounting.User()
        self.commit()
        self.ctx.setUser(user2)
        # ...but not another user's.
        py.test.raises(ClientError, user1, openid=['bar'])
class TestAPIUser(BLMTests):
    """API user creation and key management."""

    def test_create(self):
        # A fresh API user gets a key that parses as a UUID.
        apiuser = blm.accounting.APIUser()
        assert uuid.UUID(apiuser.key[0])

    def test_newkey(self):
        # newkey() must replace the current key with a different one.
        apiuser = blm.accounting.APIUser()
        old_key = apiuser.key[0]
        apiuser.newkey()
        assert apiuser.key[0] != old_key

    def test_createAPIUser(self):
        # createAPIUser returns the key and exposes it on the org.
        org = blm.accounting.Org()
        apikey = blm.accounting.createAPIUser(org=[org])
        assert org.apikey == apikey
        with py.test.raises(exceptions.ClientError):
            # max one api user per org for now
            blm.accounting.createAPIUser(org=[org])
class TestInvitation(BLMTests):
    """Creating, sending, and accepting org membership invitations."""
    def test_create(self, monkeypatch):
        # Neutralize outgoing mail.
        monkeypatch.setattr(mail, 'sendmail', lambda *a, **kw: None)
        org = blm.accounting.Org(name=['foo inc.'], orgnum=['123456-7890'])
        invite = blm.accounting.Invitation(org=[org], emailTo=['<EMAIL>'])
        assert invite.emailTo == ['<EMAIL>']
        assert invite.inviteCode[0]
        assert invite.org == [org]
        assert invite.accepted == [False]
        assert invite.groups
    def test_invite(self, monkeypatch):
        user = blm.accounting.User(name=[u'<NAME>'], emailAddress=['<EMAIL>'])
        config.config.set('accounting', 'smtp_domain', 'test')
        calls = []
        # Record the mail that org.invite() composes instead of sending it.
        def makemail(body, **headers):
            calls.append((body, headers))
            return 1, 2, 3
        def sendmail(*args, **kw):
            assert args == (1, 2, 3)
            assert kw['identity'] == str(user.id[0])
            calls.append('sendmail')
        monkeypatch.setattr(mail, 'makemail', makemail)
        monkeypatch.setattr(mail, 'sendmail', sendmail)
        self.ctx.setUser(user)
        org = blm.accounting.Org(name=[u'Räksmörgåsar AB'], orgnum=['123456-7890'])
        result = org.invite(['<EMAIL>'], ['storekeeper'])
        invite = blm.accounting.Invitation._query(org=org).run()
        assert result == invite
        assert len(invite) == 1
        assert invite[0].groups == ['storekeepers']
        (body, headers), sendmail = calls
        assert sendmail == 'sendmail'
        assert invite[0].inviteCode[0] in body
        # Bounces are routed back via the org id as envelope sender.
        assert headers['envfrom'] == str(org.id[0])
        if PYT3:
            # email.formataddr will convert (in blm.accounting.Invitation) to string values
            # suitable for RFC 2822 headers.
            # Thus we need to check against those strings.
            assert headers['Reply-to'] == '=?utf-8?b?R8O2c3RhIELDtnNzYQ==?= <<EMAIL>>'
            assert headers['From'] == '=?utf-8?b?R8O2c3RhIELDtnNzYQ==?= <noreply@test>'
        else:
            assert headers['Reply-to'] == u'Gösta Bössa <<EMAIL>>'
            assert headers['From'] == u'Gösta Bössa <noreply@test>'
        assert u'Räksmörgåsar AB' in body
    def test_accept(self, monkeypatch):
        monkeypatch.setattr(mail, 'sendmail', lambda *a, **kw: None)
        org = blm.accounting.Org(name=['ACME'])
        inv1 = blm.accounting.Invitation(org=[org], emailTo=['<EMAIL>'],
                                         groups=['admins'])
        inv2 = blm.accounting.Invitation(org=[org], emailTo=['<EMAIL>'],
                                         groups=['admins'])
        user1 = blm.accounting.User()
        for inv in inv1, inv2:
            for _ in range(2): # reentrant when called with same user
                inv.accept([user1])
                assert user1 in org.members
                assert user1 in org.admins
                assert inv.accepted == [True]
                assert inv.acceptedBy == [user1]
                assert org.ug[0] in user1.allowRead
        # When accepting multiple invitations to the same Org,
        # do not add org to user's UG more than once
        assert user1.ugs[:].count(org.ug[0]) == 1
        assert user1.allowRead[:].count(org.ug[0]) == 1
        # can't use invitation by other user, though
        user2 = blm.accounting.User()
        py.test.raises(exceptions.ClientError, inv.accept, [user2])
class TestPaymentProvider(BLMTests):
    """Generic PaymentProvider behaviour shared by all provider types."""
    def test_series_sanitation(self):
        # Empty series strings are normalized away on create and update.
        org = blm.accounting.Org()
        ppd = blm.accounting.PaymentProvider(org=org, series=[''])
        assert ppd.series == []
        ppd = blm.accounting.PaymentProvider(org=org, series=['P'])
        assert ppd.series == ['P']
        ppd(series=[''])
        assert ppd.series == []
    def test_delete(self):
        import members
        import blm.members
        org = blm.accounting.Org()
        ppd1 = blm.accounting.PaymentProvider(org=org)
        ppd2 = blm.accounting.PaymentProvider(org=org)
        payment1 = blm.members.Payment(paymentProvider=ppd1)
        payment2 = blm.members.Payment(paymentProvider=ppd2)
        self.commit()
        ppd1, = blm.accounting.PaymentProvider._query(id=ppd1.id).run()
        ppd1._delete()
        self.commit()
        # Deleting a provider must detach it from its payments while
        # leaving payments of other providers untouched.
        payments = blm.members.Payment._query().run()
        payments.sort(key=lambda toi: toi.id)
        assert payments[1].paymentProvider == [ppd2]
        assert payments[0].paymentProvider == []
class TestPlusgiroProvider(BLMTests):
    """Plusgiro provider: subscription gating and number normalization."""

    def test_require_subscriber(self):
        # Non-subscriber orgs may not create plusgiro providers.
        org = blm.accounting.Org()
        py.test.raises(ClientError, blm.accounting.PlusgiroProvider, org=org)

    def test_normalize_pgnum(self):
        org = blm.accounting.Org(subscriptionLevel=['subscriber'])
        # Already-normalized numbers pass through unchanged.
        provider = blm.accounting.PlusgiroProvider(org=org, pgnum=['1234566'])
        assert provider.pgnum == ['1234566']
        provider(pgnum=['2345676'])
        assert provider.pgnum == ['2345676']
        # Dashes are stripped on create as well as on update.
        provider = blm.accounting.PlusgiroProvider(org=org, pgnum=['123456-6'])
        assert provider.pgnum == ['1234566']
        provider(pgnum=['234567-6'])
        assert provider.pgnum == ['2345676']
        # Embedded spaces are stripped too.
        provider = blm.accounting.PlusgiroProvider(org=org, pgnum=['12 34 56 - 6'])
        assert provider.pgnum == ['1234566']
class TestBankgiroProvider(BLMTests):
    """Bankgiro provider: subscription gating and number normalization."""

    def test_require_subscriber(self):
        # Non-subscriber orgs may not create bankgiro providers.
        org = blm.accounting.Org()
        py.test.raises(ClientError, blm.accounting.BankgiroProvider, org=org)

    def test_normalize_bgnum(self):
        org = blm.accounting.Org(subscriptionLevel=['subscriber'])
        # Already-normalized numbers pass through unchanged.
        provider = blm.accounting.BankgiroProvider(org=org, bgnum=['1234566'])
        assert provider.bgnum == ['1234566']
        provider(bgnum=['2345676'])
        assert provider.bgnum == ['2345676']
        # Dashes are stripped on create as well as on update.
        provider = blm.accounting.BankgiroProvider(org=org, bgnum=['123-4566'])
        assert provider.bgnum == ['1234566']
        provider(bgnum=['234-5676'])
        assert provider.bgnum == ['2345676']
        # Spaces around the dash are stripped too.
        provider = blm.accounting.BankgiroProvider(org=org, bgnum=['123 - 4566'])
        assert provider.bgnum == ['1234566']
class TestPaysonProvider(BLMTests):
    """Payson provider creation requires a subscribing org."""

    def test_create(self):
        organization = blm.accounting.Org(subscriptionLevel=['subscriber'])
        blm.accounting.PaysonProvider(org=organization, apiUserId=['foo'], apiPassword=['<PASSWORD>'], receiverEmail=['baz'])

    def test_require_subscriber(self):
        # Without a subscription, creation must be rejected.
        organization = blm.accounting.Org()
        py.test.raises(ClientError, blm.accounting.PaysonProvider, org=organization, apiUserId=['foo'], apiPassword=['<PASSWORD>'], receiverEmail=['baz'])
class TestSeqrProvider(BLMTests):
    """SEQR provider creation requires a subscribing org."""

    def test_create(self):
        organization = blm.accounting.Org(subscriptionLevel=['subscriber'])
        blm.accounting.SeqrProvider(org=organization, principalId=['foo'], password=['<PASSWORD>'])

    def test_require_subscriber(self):
        # Without a subscription, creation must be rejected.
        organization = blm.accounting.Org()
        py.test.raises(ClientError, blm.accounting.SeqrProvider,
                       org=organization, principalId=['foo'], password=['<PASSWORD>'])
class TestStripeProvider(BLMTests):
    """Stripe provider creation requires a subscribing org."""

    def test_create(self):
        organization = blm.accounting.Org(subscriptionLevel=['subscriber'])
        blm.accounting.StripeProvider(org=organization, access_token=['stripe'])

    def test_require_subscriber(self):
        # Without a subscription, creation must be rejected.
        organization = blm.accounting.Org()
        py.test.raises(ClientError, blm.accounting.StripeProvider,
                       org=organization, access_token=['foo'])
class TestSwishProvider(BLMTests):
    """Swish provider: key/cert handling, CSR generation, expiry parsing."""
    # Paths to the test certificate/key fixtures shipped in the repo's
    # top-level test directory.
    certs = os.path.join(
        os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'test')
    cert = os.path.join(certs, 'swish.crt.pem')
    pkey = os.path.join(certs, 'swish.key.pem')
    def setup_method(self, method):
        super(TestSwishProvider, self).setup_method(method)
        self.org = blm.accounting.Org(subscriptionLevel=['subscriber'],
                                      name=[u'<NAME>'],
                                      email=[u'<EMAIL>'],
                                      orgnum=['1234567890'])
    def test_create(self):
        # Explicitly supplied cert and key are stored verbatim.
        with open(self.cert) as f:
            cert = f.read()
        with open(self.pkey) as f:
            pkey = f.read()
        provider = blm.accounting.SwishProvider(org=self.org,
                                                swish_id=self.org.orgnum,
                                                cert=cert, pkey=pkey)
        assert provider.cert[0] == cert
        assert provider.pkey[0] == pkey
    def test_normalize_id(self):
        # Spaces in the swish id are stripped.
        provider = blm.accounting.SwishProvider(org=self.org,
                                                swish_id='123 339 93 26')
        assert provider.swish_id == ['1233399326']
    def test_create_with_automatic_pkey_generation(self):
        # When no key is given, a valid PEM private key is generated.
        provider = blm.accounting.SwishProvider(org=self.org,
                                                swish_id=self.org.orgnum)
        assert OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM,
                                              provider.pkey[0])
    def test_use_test_pkey_with_test_id(self):
        # The well-known test swish id gets the bundled test key rather
        # than a freshly generated one.
        provider = blm.accounting.SwishProvider(org=self.org,
                                                swish_id='1231181189')
        key = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM,
                                             provider.pkey[0])
        pem = OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM,
                                             key)
        with open(self.pkey) as f:
            test_key = OpenSSL.crypto.load_privatekey(
                OpenSSL.crypto.FILETYPE_PEM, f.read())
        test_pem = OpenSSL.crypto.dump_privatekey(
            OpenSSL.crypto.FILETYPE_PEM, test_key)
        assert pem == test_pem
    def test_cert_sanity_checking(self):
        # Updating with garbage cert data must be rejected.
        provider = blm.accounting.SwishProvider(org=self.org,
                                                swish_id='1231181189')
        with open(self.cert) as f:
            provider(cert=[f.read()]) # don't explode
        py.test.raises(ClientError, provider, cert=['not a valid certificate'])
    def test_csr(self):
        # The generated CSR carries the org's name/email and is signed
        # with the provider's own private key.
        provider = blm.accounting.SwishProvider(org=self.org,
                                                swish_id=self.org.orgnum)
        csr = OpenSSL.crypto.load_certificate_request(
            OpenSSL.crypto.FILETYPE_PEM, provider.csr[0])
        assert csr.get_subject().organizationName == u'Räksmörgåsar AB'
        assert csr.get_subject().emailAddress == u'<EMAIL>'
        pkey = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM,
                                              provider.pkey[0])
        # dump_publickey is only available in newer pyOpenSSL versions.
        if hasattr(OpenSSL.crypto, 'dump_publickey'):
            pems = [OpenSSL.crypto.dump_publickey(OpenSSL.crypto.FILETYPE_PEM, k)
                    for k in [csr.get_pubkey(), pkey]]
            assert pems[0] == pems[1]
    def test_require_subscriber(self):
        # Non-subscriber orgs may not create swish providers.
        org = blm.accounting.Org()
        py.test.raises(ClientError, blm.accounting.SwishProvider,
                       org=org, swish_id=org.orgnum)
    def test_not_after(self):
        # cert_expires is derived from the certificate's notAfter field
        # (epoch seconds of the fixture cert's expiry).
        with open(self.cert) as f:
            provider = blm.accounting.SwishProvider(
                org=self.org, swish_id='1231181189', cert=[f.read()])
        assert provider.cert_expires == [1589438114]
class TestOrg(BLMTests):
    """Org creation, membership/role management, data propagation, OCR
    counters and member removal."""
    def test_create(self):
        # The creating user automatically becomes a member; the orgnum
        # is normalized with a dash.
        user = blm.accounting.User()
        self.ctx.setUser(user)
        org = blm.accounting.Org(name=['foo inc.'], orgnum=['1234567890'])
        ug, = org.ug
        assert ug.users == [user]
        assert org.members == [user]
        assert org.orgnum == ['123456-7890']
    def test_members(self):
        # Only real users count as members; clients and API users in the
        # UG are excluded.
        client = blm.accounting.Client()
        apiuser = blm.accounting.APIUser()
        user = blm.accounting.User()
        org = blm.accounting.Org(name=['foo inc.'], orgnum=['1234567890'])
        org.ug[0].users = [client, user, apiuser]
        assert org.members == [user]
    def test_permissions(self):
        # permissions/userpermissions report the current user's roles.
        user = blm.accounting.User(name=['u1'])
        user2 = blm.accounting.User(name=['u2'])
        org = blm.accounting.Org()
        org.ug[0](users = [user, user2])
        # No user set: no permissions visible.
        assert org.permissions == []
        with ri.setuid(user):
            assert org.permissions == ['members']
        org.admins = [user]
        assert org.permissions == []
        userlist = [{
            'id': str(user.id[0]),
            'name': 'u1',
            'ugs': [str(ug.id[0]) for ug in user.ugs],
            'roles': ['admin', 'member']
        }, {
            'id': str(user2.id[0]),
            'name': 'u2',
            'ugs': [str(ug.id[0]) for ug in user2.ugs],
            'roles': ['member']
        }]
        for i in org.userpermissions:
            assert i in userlist
        with ri.setuid(user):
            assert set(org.permissions) == {'admins', 'members'}
    def test_updateMemberRoles(self):
        org = blm.accounting.Org()
        admin = blm.accounting.User(name=['admin'], ugs=org.ug)
        user1 = blm.accounting.User(name=['u1'], ugs=org.ug)
        user2 = blm.accounting.User(name=['u2'], ugs=org.ug)
        org.admins = [admin]
        self.commit()
        org, = blm.accounting.Org._query().run()
        assert set(org.members) == {admin, user1, user2} # sanity
        # An admin may grant any valid combination of roles.
        with ri.setuid(admin):
            blm.accounting.updateMemberRoles(
                org=[org],
                roleData=[{'id': user1.id[0], 'roles': ['admin', 'accountant',
                                                        'payer',
                                                        'storekeeper',
                                                        'ticketchecker']},
                          {'id': user2.id[0], 'roles': ['member']}])
        self.commit()
        assert set(org.admins) == {admin, user1}
        assert set(org.members) == {admin, user1, user2}
        assert org.payers == [user1]
        assert org.accountants == [user1]
        assert org.storekeepers == [user1]
        assert org.ticketcheckers == [user1]
        assert set(org.members) == {admin, user1, user2}
        assert org.ticketchecker_ug[0].users == [user1]
        # Unknown role names are rejected.
        with ri.setuid(admin):
            py.test.raises(exceptions.ClientError, blm.accounting.updateMemberRoles,
                           org=[org],
                           roleData=[{'id': user1.id[0], 'roles': ['nosuchrole']}])
        with ri.setuid(user2):
            # only admins can change roles
            py.test.raises(exceptions.ClientError, blm.accounting.updateMemberRoles,
                           org=[org],
                           roleData=[{'id': user2.id[0], 'roles': ['admin']}])
        # Dropping membership-granting roles removes the user from the
        # org's member UG; re-granting one adds them back.
        blm.accounting.updateMemberRoles(
            org=[org],
            roleData=[{'id': user1.id[0], 'roles': ['ticketchecker']}])
        assert user1 not in org.members
        assert user1 not in org.ug[0].users
        with py.test.raises(exceptions.ClientError):
            # must have at least one role
            blm.accounting.updateMemberRoles(
                org=[org],
                roleData=[{'id': user1.id[0], 'roles': []}])
        blm.accounting.updateMemberRoles(
            org=[org],
            roleData=[{'id': user1.id[0], 'roles': ['accountant']}])
        assert user1 in org.members
        with ri.setuid(admin):
            with py.test.raises(exceptions.ClientError):
                # at least one admin is required in an org
                blm.accounting.updateMemberRoles(
                    org=[org],
                    roleData=[{'id': admin.id[0], 'roles': ['member']}])
    def test_openend_ab_is_unique(self):
        # The Open End orgnum may only exist once, on create and update.
        blm.accounting.Org(name=['Open End'], orgnum=['556609-2473'])
        py.test.raises(exceptions.ClientError, blm.accounting.Org, name=['<NAME>.'], orgnum=['556609-2473'])
        org2 = blm.accounting.Org(name=['<NAME>.'], orgnum=['111111-1111'])
        py.test.raises(exceptions.ClientError, org2, orgnum=['556609-2473'])
    def test_datapropagation(self):
        # Org name/orgnum changes propagate to the *current* accounting
        # only, not to previous years.
        org = blm.accounting.Org()
        acc_prev = blm.accounting.Accounting(org=[org], start=['2010-01-01'])
        acc_curr = blm.accounting.Accounting(org=[org], start=['2011-01-01'])
        assert org.current_accounting == [acc_curr]
        org(name=['<NAME>.'])
        assert org.name == ['<NAME>.'] # don't forget to invoke ._update()
        assert acc_curr.orgname == ['<NAME>.']
        assert acc_prev.orgname == []
        org(orgnum=['123456-7890'])
        assert org.orgnum == ['123456-7890']
        assert acc_curr.orgnum == ['123456-7890']
        assert acc_prev.orgnum == []
    def test_subscribe(self):
        org = blm.accounting.Org()
        acc = blm.accounting.Accounting(org=org)
        blm.accounting.subscribe([org], ['subscriber'])
        assert org.subscriptionLevel == ['subscriber']
        # 'pg' level is deprecated
        #blm.accounting.subscribe([org], ['pg'])
        #assert org.subscriptionLevel == ['subscriber', 'pg']
    def test_get_ocr(self, monkeypatch):
        # OCR numbers embed the year digit and a running counter; the
        # counter resets when the (mocked) year changes.
        year = '2013'
        def strftime(s):
            assert s == '%Y'
            return year
        monkeypatch.setattr(time, 'strftime', strftime)
        org = blm.accounting.Org()
        assert org.ocrYearReset == [3]
        assert org.ocrCounter == [1]
        ocr, = org.get_ocr()
        assert ocr == '10355'
        assert org.ocrCounter == [2]
        ocr, = org.get_ocr()
        assert ocr == '11353'
        year = '2014'
        ocr, = org.get_ocr()
        assert ocr == '10454'
        assert org.ocrYearReset == [4]
        assert org.ocrCounter == [2]
    def test_get_ocr_rapidly(self):
        # Hammer get_ocr() from many forked processes in parallel to
        # verify the counter increments atomically without losing ticks.
        org = blm.accounting.Org()
        self.commit()
        pids = []
        parallel = 20
        for i in range(parallel):
            pid = os.fork()
            if not pid:
                # Child: run a single get_ocr commit in a fresh context,
                # then exit without running pytest teardown.
                try:
                    self.pushnewctx()
                    op = commit.CallToi(org.id[0], 'get_ocr', [])
                    interested = str(i)
                    self.ctx.runCommit([op], interested=interested)
                    result, error = commit.wait_for_commit(self.database,
                                                           interested)
                    print(result)
                finally:
                    os._exit(0)
            else:
                pids.append(pid)
        for pid in pids:
            os.waitpid(pid, 0)
        self.sync()
        org._clear()
        # Every child must have consumed exactly one counter tick.
        assert org.ocrCounter == [1 + parallel]
    def test_removeMembers(self):
        org = blm.accounting.Org(name=['foo inc.'], orgnum=['1234567890'])
        user1 = blm.accounting.User(name=['user1'], ugs=org.ug)
        user2 = blm.accounting.User(name=['user2'], ugs=org.ug)
        org.admins = [user1, user2]
        self.commit()
        hacker = blm.accounting.User(name=['hacker'])
        self.ctx.setUser(hacker)
        # can't modify orgs we're not a member of
        py.test.raises(ClientError, blm.accounting.removeMembers, [org], [user1])
        self.ctx.setUser(user2)
        org, = blm.accounting.Org._query(id=org.id[0]).run()
        user1, = blm.accounting.User._query(name='user1').run()
        user2, = blm.accounting.User._query(name='user2').run()
        result = blm.accounting.removeMembers([org], [user1])
        assert len(result)
        assert result == org.userpermissions
        assert org.ug[0].users == [user2]
        assert user1 not in org.admins
        # can't remove the last member
        py.test.raises(ClientError, blm.accounting.removeMembers, [org], [user2])
        user3 = blm.accounting.User(name=['user3'], ugs=org.ug)
        org.admins.append(user3)
        result = blm.accounting.removeMembers([org], [user2])
        assert result == [] # Empty when removing self
        assert len(org.userpermissions)
    def test_manual_payment_provider(self):
        # get_manual_payment_provider() lazily creates one provider and
        # is idempotent afterwards.
        org = blm.accounting.Org()
        mpp = org.get_manual_payment_provider()
        assert [mpp] == org.manual_payment_provider
        mpp2 = org.get_manual_payment_provider()
        assert [mpp2] == org.manual_payment_provider
        assert mpp is mpp2
class TestOrgRemoval(BLMTests):
    """Trial-org expiry warnings/removal and org disabling."""
    # Time spans in seconds, used to step the mocked clock.
    day = 3600 * 24
    week = day * 7
    def setup_method(self, method):
        super(TestOrgRemoval, self).setup_method(method)
        # Org created at epoch 0 so trial arithmetic is simple.
        self.org = blm.accounting.Org(created=0)
    def test_expireTrialOrgs(self):
        # Well within the trial period: no warnings, no removal.
        with Time(0) as time:
            time += self.day * 100
            blm.accounting.expireTrialOrg([self.org])
        # Just inside the warning window: first warning recorded.
        with Time(0) as first_warning:
            first_warning += self.org.TRIAL_PERIOD - (
                self.org.TRIAL_WARNING_INTERVAL *
                self.org.TRIAL_WARNING_COUNT) + 1
            blm.accounting.expireTrialOrg([self.org])
        assert self.org.removalWarnings == [first_warning]
        # Just before expiry: second warning recorded.
        with Time(0) as second_warning:
            second_warning += self.org.TRIAL_PERIOD - 1
            blm.accounting.expireTrialOrg([self.org])
        assert self.org.removalWarnings == [first_warning, second_warning]
        # Expired less than a week: grace period, still not deleted.
        with Time(0) as time:
            time += self.org.TRIAL_PERIOD + self.day * 6
            blm.accounting.expireTrialOrg([self.org])
        assert self.org.removalWarnings == [first_warning, second_warning]
        # More than a week past expiry: the org is deleted.
        with Time(0) as time:
            time += self.org.TRIAL_PERIOD + self.day * 7 + 1
            blm.accounting.expireTrialOrg([self.org])
        assert self.org._deleted
    def test_disable_org(self):
        # disable() clears the subscription and empties the user group.
        self.org.subscriptionLevel = ['subscriber']
        user = blm.accounting.User(name=['u1'], ugs=self.org.ug)
        self.commit()
        org, = blm.accounting.Org._query().run()
        assert org.subscriptionLevel == ['subscriber'] # sanity
        assert len(org.ug[0].users) == 1 # sanity
        org.disable()
        assert org.subscriptionLevel == []
        assert len(org.ug[0].users) == 0
class TestPGOrder(BLMTests):
    """Plusgiro account ordering: creation, BLM entry point, and the
    setup email sent to the plusgiro operator."""
    def test_create(self):
        # The current user is recorded as the order's creator.
        user = blm.accounting.User()
        self.ctx.setUser(user)
        org = blm.accounting.Org()
        pgorder = blm.accounting.PGOrder(
            org=[org],
            contact=['Mr. Foo'],
            contactPhone=['1234567'],
            contactEmail=['<EMAIL>'],
            pgnum=['12345-6'])
        assert pgorder.createdBy == [user]
    def test_orderPG(self):
        user = blm.accounting.User()
        self.ctx.setUser(user)
        org = blm.accounting.Org()
        self.commit()
        org, = blm.accounting.Org._query().run()
        # Ordering requires a subscription.
        py.test.raises(ClientError, blm.accounting.orderPG, [org], ['Mr. Foo'],
                       ['1234566'], ['<EMAIL>'],
                       ['12345-5'], ['1000'], ['B'])
        self.commit()
        org, = blm.accounting.Org._query().run()
        org.subscriptionLevel = ['subscriber']
        blm.accounting.orderPG([org], ['Mr. Foo'], ['1234566'], ['<EMAIL>'],
                               ['12345-5'], ['1000'], ['B'])
        self.commit()
        org, = blm.accounting.Org._query().run()
        pgorder, = blm.accounting.PGOrder._query().run()
        assert pgorder.org == [org]
        assert pgorder.contact == ['Mr. Foo']
        assert pgorder.contactPhone == ['1234566']
        assert pgorder.contactEmail == ['<EMAIL>']
        assert pgorder.pgnum == ['12345-5']
        # A matching provider is created; the real pgnum is stashed in
        # pgnum_real until the account is confirmed.
        pg, = blm.accounting.PlusgiroProvider._query(org=org).run()
        assert pg.account == ['1000']
        assert pg.series == ['B']
        assert pg.pgnum == []
        assert pg.pgnum_real == ['123455']
    def test_send(self, monkeypatch):
        # send() mails the order details to the configured recipients.
        config.config.set('plusgiro', 'setup_email_from', 'bounce@<EMAIL>')
        config.config.set('plusgiro', 'setup_email_to', 'plusgiro@test, cc@test')
        calls = []
        def sendmail(*args):
            calls.append(args)
        monkeypatch.setattr(mail, 'sendmail', sendmail)
        org = blm.accounting.Org(
            orgnum=['223344-6677']
            )
        pgorder = blm.accounting.PGOrder(
            org=[org],
            contact=['Mr. Foo'],
            contactPhone=['1234567'],
            contactEmail=['<EMAIL>'],
            pgnum=['12345-6'])
        pgorder.send()
        assert pgorder.sent == [True]
        (fromaddr, all_recipients, body), = calls
        assert fromaddr == 'bounce@test'
        assert all_recipients == ['<EMAIL>', 'cc@test']
        print(body)
        assert '\nTo: <<EMAIL>>, <cc@test>\n' in body
        assert '\nFrom: <<EMAIL>>\n' in body
        msg = email.message_from_string(body)
        body = msg.get_payload(decode=True)
        if PYT3:
            # get_payload(decode=True) returns bytes on py3.
            body = body.decode()
        assert '223344-6677' in body
        assert 'foo@<EMAIL>' in body
    def test_send_sent(self, monkeypatch):
        # Already-sent orders must not be mailed again.
        #TODO: visar
        org = blm.accounting.Org()
        pgorder = blm.accounting.PGOrder(
            org=[org],
            contact=['Mr. Foo'],
            contactPhone=['1234567'],
            contactEmail=['<EMAIL>'],
            pgnum=['12345-6'],
            sent=[True])
        calls = []
        def sendmail(*args):
            calls.append(args)
        monkeypatch.setattr(mail, 'sendmail', sendmail)
        pgorder.send()
        assert calls == []
class TestCurrentAccounting(BLMTests):
    """Org.current_accounting must always track the accounting with the
    latest start date, across creation order, deletion, and edits."""
    def setup_method(self, method):
        super(TestCurrentAccounting, self).setup_method(method)
        self.org = blm.accounting.Org()
    def test_empty(self):
        assert self.org.current_accounting == []
    def test_set_one(self):
        accounting = blm.accounting.Accounting(org=self.org)
        assert self.org.current_accounting == [accounting]
    def test_multiple_ascending(self):
        # Each newer year takes over as current.
        accounting2010 = blm.accounting.Accounting(org=self.org,
                                                   start='2010-01-01')
        assert self.org.current_accounting == [accounting2010]
        accounting2011 = blm.accounting.Accounting(org=self.org,
                                                   start='2011-01-01')
        assert self.org.current_accounting == [accounting2011]
        accounting2012 = blm.accounting.Accounting(org=self.org,
                                                   start='2012-01-01')
        assert self.org.current_accounting == [accounting2012]
    def test_multiple_descending(self):
        # Adding older years never displaces the latest one.
        accounting2012 = blm.accounting.Accounting(org=self.org,
                                                   start='2012-01-01')
        assert self.org.current_accounting == [accounting2012]
        accounting2011 = blm.accounting.Accounting(org=self.org,
                                                   start='2011-01-01')
        assert self.org.current_accounting == [accounting2012]
        accounting2010 = blm.accounting.Accounting(org=self.org,
                                                   start='2010-01-01')
        assert self.org.current_accounting == [accounting2012]
    def test_delete(self):
        # Deleting the current accounting falls back to the previous one.
        accounting2010 = blm.accounting.Accounting(org=self.org,
                                                   start='2010-01-01')
        assert self.org.current_accounting == [accounting2010]
        accounting2011 = blm.accounting.Accounting(org=self.org,
                                                   start='2011-01-01')
        assert self.org.current_accounting == [accounting2011]
        self.commit()
        self.org, = blm.TO._query(id=self.org.id).run()
        accounting2011, = blm.TO._query(id=accounting2011.id).run()
        accounting2011._delete()
        assert self.org.current_accounting == [accounting2010]
    def test_start_year_edited(self):
        # Editing an old accounting's start date can promote it.
        accounting2010 = blm.accounting.Accounting(org=self.org,
                                                   start='2010-01-01')
        assert self.org.current_accounting == [accounting2010]
        accounting2011 = blm.accounting.Accounting(org=self.org,
                                                   start='2011-01-01')
        assert self.org.current_accounting == [accounting2011]
        self.commit()
        accounting2010, = blm.TO._query(id=accounting2010.id).run()
        accounting2010(start=['2012-01-01'])
        self.org.current_accounting == [accounting2010]
class TestAccounting(BLMTests):
    """Accounting creation, SIE import, fiscal-year defaults, year
    initialisation from the previous year, and series bookkeeping."""
    def test_create(self):
        # A new accounting gets the seven standard dimensions, with the
        # project dimension flagged automatically.
        acc = blm.accounting.Accounting()
        dimensions = blm.accounting.Dimension._query(accounting=acc).run()
        assert len(dimensions) == 7
        proj, = blm.accounting.Dimension._query(name=['Projekt']).run()
        assert proj.project[0] # automatically set to project dimension
        assert acc.accounting == [acc]
        assert acc.years['0'] == [acc.start[0], acc.end[0]]
    def test_name(self):
        # The display name is derived from the start/end dates.
        acc = blm.accounting.Accounting()
        acc(start=['2010-01-01'], end=['2010-12-31'])
        assert acc.name == ['2010-01-01 - 2010-12-31']
    def test_org(self):
        # Org name/number are copied onto the accounting at creation.
        org = blm.accounting.Org(name=['foo'], orgnum=['123456-7890'])
        acc = blm.accounting.Accounting(org=[org])
        assert acc.orgname == ['foo']
        assert acc.orgnum == ['123456-7890']
    def test_accountingImport(self):
        # Importing the SIE fixture picks up the orgnum from the file.
        org = blm.accounting.Org()
        data = model.BlobVal(open(os.path.join(os.path.dirname(__file__),
                                                 'accountingImport.si')))
        acc, = blm.accounting.accountingImport(org=[org], data=[data])
        assert acc.orgnum == ['555555-5555']
    def test_start_end(self):
        # The first accounting defaults to a one-year period starting
        # today; the next one starts where the previous ends.
        org = blm.accounting.Org(name=['foo'], orgnum=['bar'])
        acc = blm.accounting.Accounting(org=[org])
        assert acc.start == [time.strftime('%Y-%m-%d')]
        assert acc.end == [(datetime.now() + relativedelta(
                    years=+1, days=-1)).strftime('%Y-%m-%d')]
        acc2 = blm.accounting.Accounting(org=[org])
        assert acc2.start == [(datetime.now() + relativedelta(
                    years=+1)).strftime('%Y-%m-%d')]
        assert acc2.end == [(datetime.now() + relativedelta(
                    years=+2, days=-1)).strftime('%Y-%m-%d')]
    def test_previous(self):
        # previous links consecutive fiscal years by date.
        org = blm.accounting.Org()
        acc1 = blm.accounting.Accounting(
            org=[org], start=['2009-01-01'], end=['2009-12-31'])
        acc2 = blm.accounting.Accounting(
            org=[org], start=['2010-01-01'], end=['2010-12-31'])
        acc3 = blm.accounting.Accounting(
            org=[org], start=['2011-01-01'], end=['2011-12-31'])
        assert acc1.previous == []
        assert acc2.previous == [acc1]
        assert acc3.previous == [acc2]
    def mkAccount(self, accounting, number, **kw):
        # Helper: create an Account with a defaulted name.
        kw.setdefault('name', 'Account %s' % number)
        return blm.accounting.Account(number=[number], accounting=[accounting],
                                      **kw)
    def test_initialise(self):
        # initialise() copies accounts from the previous year: balance
        # accounts (T/S) carry their closing balance forward as opening
        # balance, result accounts (I/K) start from zero.  Accounts that
        # already exist in the new year keep their own data.
        org = blm.accounting.Org()
        prev = blm.accounting.Accounting(
            org=[org], start=['2009-01-01'], end=['2009-12-31'])
        acc = self.mkAccount(prev, '1000', name=u'<NAME>', type='T', opening_balance='10')
        assert acc.balance == [Decimal('10.00')] # sanity
        acc = self.mkAccount(prev, '1001', name=u'<NAME>', type='T', opening_balance='11')
        acc = self.mkAccount(prev, '2000', name=u'<NAME>', type='S', opening_balance='27')
        assert acc.balance == [Decimal('27.00')] # sanity
        acc = self.mkAccount(prev, '3000', name=u'Intäkt 1', type='I', opening_balance='10')
        assert acc.balance == [Decimal('10.00')] # sanity
        acc = self.mkAccount(prev, '4000', name=u'Kostnad 1', type='K', opening_balance='10')
        assert acc.balance == [Decimal('10.00')] # sanity
        curr = blm.accounting.Accounting(
            org=[org], start=['2010-01-01'], end=['2010-12-31'])
        # Pre-existing account in the new year, with a transaction on it.
        acc = self.mkAccount(curr, '1000', name=u'<NAME>', opening_balance='12')
        ser = blm.accounting.VerificationSeries(accounting=curr, name=['A'])
        ver = blm.accounting.Verification(accounting=curr, series=ser)
        blm.accounting.Transaction(verification=ver, account=acc, version=[0], amount='5')
        curr.initialise()
        accounts = blm.accounting.Account._query(accounting=curr).run()
        accounts = dict((toi.number[0], toi) for toi in accounts)
        assert len(accounts) == 5
        assert accounts['1000'].name == [u'<NAME>']
        assert accounts['1000'].opening_balance == [Decimal('10')]
        assert accounts['1000'].balance == [Decimal('15')]
        assert accounts['1001'].name == [u'<NAME>']
        assert accounts['1001'].opening_balance == [Decimal('11')]
        assert accounts['1001'].balance == [Decimal('11')]
        assert accounts['2000'].name == [u'Skuld 1']
        assert accounts['2000'].opening_balance == [Decimal('27')]
        assert accounts['2000'].balance == [Decimal('27')]
        assert accounts['3000'].balance == [Decimal(0)]
        assert accounts['4000'].balance == [Decimal(0)]
    def test_ensureSeries(self):
        # ensureSeries() creates series 'A' once, collects provider
        # series, and is a no-op when everything already exists.
        org = blm.accounting.Org()
        acc = blm.accounting.Accounting(org=org)
        a, = acc.ensureSeries()
        assert a.name == ['A']
        assert not acc.ensureSeries()
        assert blm.accounting.VerificationSeries._query().run() == [a]
        blm.accounting.PaymentProvider(org=org, series=['X']) # runs ensureSeries()
        x, = blm.accounting.VerificationSeries._query(name='X').run()
        assert not acc.ensureSeries()
        blm.accounting.PaymentProvider(org=org, series=['X'])
        assert not acc.ensureSeries()
        series = blm.accounting.VerificationSeries._query().run()
        series.sort(key=lambda toi: toi.name[0])
        assert series == [a, x]
        # An accounting that already has a series gets no extra 'A'.
        org = blm.accounting.Org()
        acc = blm.accounting.Accounting(org=org)
        b = blm.accounting.VerificationSeries(accounting=acc, name='B')
        assert not acc.ensureSeries()
        series = blm.accounting.VerificationSeries._query(
            accounting=acc).run()
        assert series == [b]
class TestAccountingObject(BLMTests):
    """An AccountingObject can be created inside a dimension."""

    def test_create(self):
        accounting = blm.accounting.Accounting()
        dimension = blm.accounting.Dimension(number=['27'], name=['Customer'],
                                             accounting=[accounting])
        blm.accounting.AccountingObject(number=['27'], name=['Nisse'],
                                        dimension=[dimension])
class TestDimension(BLMTests):
    """Dimensions: creation, sub-dimension hierarchy, and inheritance of
    the project flag."""
    def setup_method(self, method):
        super(TestDimension, self).setup_method(method)
        self.accounting = blm.accounting.Accounting()
    def test_create(self):
        blm.accounting.Dimension(number=['27'], name=['Customer'],
                                 accounting=[self.accounting])
    def test_hierarchy(self):
        # A dimension may have sub-dimensions via subdim_of.
        parent = blm.accounting.Dimension(number=['27'], name=['Customer'],
                                          accounting=[self.accounting])
        child = blm.accounting.Dimension(number=['28'], name=['Customer'],
                                         subdim_of=[parent],
                                         accounting=[self.accounting])
    def test_project_hierarchy(self):
        # Sub-dimensions of a project dimension are projects as well.
        parent = blm.accounting.Dimension(number=['27'], name=['Customer'],
                                          project=[True],
                                          accounting=[self.accounting])
        child = blm.accounting.Dimension(number=['28'], name=['Customer'],
                                         subdim_of=[parent],
                                         accounting=[self.accounting])
        assert child.project[0]
def account_default_type_test(toc, **kw):
    """Shared check: *toc*'s account type defaults follow the first digit.

    1xxx -> 'T', 2xxx -> 'S', 3xxx -> 'I', 4xxx-7xxx -> 'K'; an
    explicitly supplied type is never overwritten by the default.
    Extra keyword args are forwarded to every *toc* call.
    """
    expectations = [('1234', 'T'), ('2345', 'S'), ('3456', 'I'),
                    ('4999', 'K'), ('5999', 'K'), ('6999', 'K'), ('7999', 'K')]
    for number, expected in expectations:
        account = toc(number=[number], **kw)
        assert account.type == [expected]
    # An explicit type must win over the digit-based default.
    account = toc(number=['3456'], type=['T'], **kw)
    assert account.type == ['T']
class TestBaseAccount(BLMTests):
    """BaseAccount naming and default-type behaviour."""

    def setup_method(self, method):
        super(TestBaseAccount, self).setup_method(method)

    def test_create(self):
        # Accounts created without a name get the placeholder name.
        account = blm.accounting.BaseAccount(number=['1234'])
        assert account.name == ['* UNNAMED *']

    def test_default_type(self):
        # Shared digit-based default-type check.
        account_default_type_test(blm.accounting.BaseAccount)
class TestAccountTemplate(BLMTests):
    """AccountTemplate default types and root-only write access."""
    def test_default_type(self):
        account_default_type_test(blm.accounting.AccountTemplate)
    def test_root_only(self):
        # Only root (no user set) may create or modify templates, even
        # for a user who has read access.
        user = blm.accounting.User()
        template = blm.accounting.AccountTemplate(number=['1111'],
                                                  allowRead=[user])
        self.commit()
        template._clear()
        self.ctx.setUser(user)
        with py.test.raises(ClientError):
            blm.accounting.AccountTemplate(number=['2222'])
        self.ctx.setUser(user)
        with py.test.raises(ClientError):
            template(name=['foo'])
        self.ctx.setUser(None)
        template(name=['foo'])
class TestChartOfAccounts(BLMTests):
    """Tests for ChartOfAccounts and instantiating accountings from it."""
    def test_simple(self):
        coa = blm.accounting.ChartOfAccounts(name=['test'])
        assert coa.name == ['test']
    def mkChartOfAccounts(self):
        # Helper: build a chart with nine account templates 1111..9999.
        accts = []
        for i in range(1,10):
            accts.append(blm.accounting.AccountTemplate(
                    name=['acct %d' % i],
                    number=[str(1111 * i)]
                    ))
        coa = blm.accounting.ChartOfAccounts(
            name=['test'],
            accounts=accts
            )
        return coa, accts
    def test_populate(self):
        # populate() should copy templates into real Account tois.
        coa, accts = self.mkChartOfAccounts()
        accounting = blm.accounting.Accounting()
        accounts = coa.populate([accounting])
        assert len(accounts) == len(accts)
        assert accounts[0] != accts[0]
        assert type(accounts[0]) == blm.accounting.Account
        # Names and numbers must match the templates, order independent.
        accname = sorted((a.name[0], a.number[0]) for a in accounts)
        expname = sorted((a.name[0], a.number[0]) for a in accts)
        assert accname == expname
    def test_accountingFromTemplate(self):
        coa, accts = self.mkChartOfAccounts()
        org = blm.accounting.Org()
        accounting, = blm.accounting.accountingFromTemplate([coa], [org])
        assert accounting.org == [org]
        accounts = blm.accounting.Account._query(accounting=accounting).run()
        assert len(accounts) == len(accts)
        # A default verification series 'A' should have been created too.
        series = blm.accounting.VerificationSeries._query(
            accounting=accounting).run()
        assert len(series) == 1
        assert series[0].name == ['A']
    def test_root_only(self):
        # Only the root (None) user may create or modify charts.
        user = blm.accounting.User()
        coa = blm.accounting.ChartOfAccounts(name=['The chart!'],
                                             allowRead=[user])
        self.commit()
        coa._clear()
        self.ctx.setUser(user)
        with py.test.raises(ClientError):
            blm.accounting.ChartOfAccounts(name=['Fake chart'])
        self.ctx.setUser(user)
        with py.test.raises(ClientError):
            coa(name=['foo'])
        self.ctx.setUser(None)
        coa(name=['foo'])
class TestAccount(BLMTests):
    """Tests for the Account toi: defaults, VAT lookup and balances."""
    def setup_method(self, method):
        super(TestAccount, self).setup_method(method)
        self.accounting = blm.accounting.Accounting()
        self.series = blm.accounting.VerificationSeries(
            accounting=[self.accounting], name=['A'])
        self.ver = blm.accounting.Verification(
            series=[self.series], number=['1'], accounting=[self.accounting])
    def test_create(self):
        acc = blm.accounting.Account(number=['1234'],
                                     accounting=[self.accounting])
        assert acc.name == ['* UNNAMED *']
    def test_default_type(self):
        account_default_type_test(blm.accounting.Account, accounting=[self.accounting])
    def test_vat_percentage(self):
        # The account's vatPercentage follows its vatCode's rate.
        blm.accounting.VatCode(code='10', xmlCode='gorp') # 25%
        blm.accounting.VatCode(code='11', xmlCode='gorp') # 12%
        acc = blm.accounting.Account(number=['1234'],
                                     accounting=[self.accounting],
                                     vatCode=['10'])
        self.commit()
        acc, = blm.accounting.Account._query().run()
        assert acc.vatPercentage == [Decimal('25.00')]
        acc(vatCode='11')
        assert acc.vatPercentage == [Decimal('12.00')]
    def test_balance(self):
        # Balance is the opening balance plus the sum of transactions.
        account = blm.accounting.Account(number=['1234'],
                                         opening_balance=['42.00'],
                                         opening_quantity=['10'],
                                         accounting=[self.accounting])
        assert account.balance == [Decimal('42.00')]
        account.transactions = [
            blm.accounting.Transaction(verification=[self.ver], account=[account],
                                       version=self.ver.version,
                                       amount=['10.00'],
                                       quantity=['5']),
            blm.accounting.Transaction(verification=[self.ver], account=[account],
                                       version=self.ver.version,
                                       amount=['-5.00'], quantity=['-2'])
            ]
        assert account.balance == [Decimal('47.00')] # 42 + 10 - 5
        assert account.balance_quantity == [Decimal('13')] # 10 + 5 - 2
    def test_recalc_balance_when_opening_balance_changes(self):
        account = blm.accounting.Account(number=['1234'],
                                         opening_balance=['0.00'],
                                         opening_quantity=['0'],
                                         accounting=[self.accounting])
        account.transactions = [
            blm.accounting.Transaction(verification=[self.ver], account=[account],
                                       version=self.ver.version,
                                       amount=['10.00'],
                                       quantity=['5']),
            ]
        self.commit()
        account, = blm.accounting.Account._query().run()
        # Changing the opening balance must trigger a balance recalc.
        account(opening_balance=['5.00'])
        assert account.balance == [Decimal('15.00')]
    def test_fromtemplate(self, monkeypatch):
        # fromtemplate copies a template account into a new Account toi.
        accounting = blm.accounting.Accounting()
        acc = blm.accounting.BaseAccount(number=['1234'])
        accft = blm.accounting.Account.fromtemplate(acc, accounting=[accounting])
        assert accft is not acc
        assert accft.number == ['1234']
        assert accft.accounting == [accounting]
class TestVerificationSeries(BLMTests):
    """Tests for VerificationSeries: name uniqueness, deletability and
    next-verification bookkeeping."""
    def test_name_unique(self):
        # Names must be unique per accounting, not globally.
        acc1 = blm.accounting.Accounting()
        acc2 = blm.accounting.Accounting()
        vs1 = blm.accounting.VerificationSeries(name=['A'], accounting=[acc1])
        vs2 = blm.accounting.VerificationSeries(name=['B'], accounting=[acc1])
        py.test.raises(ClientError, blm.accounting.VerificationSeries,
                       name=['A'], accounting=[acc1])
        vs3 = blm.accounting.VerificationSeries(name=['A'], accounting=[acc2])
        vs2(name=['C'])
        py.test.raises(ClientError, vs2, name=['A'])
    def test_pgseries_undeletable(self):
        # A series referenced by a PaymentProvider must not be deletable.
        org = blm.accounting.Org()
        acc = blm.accounting.Accounting(org=org)
        ser = blm.accounting.VerificationSeries(name=['A'], accounting=[acc])
        pp = blm.accounting.PaymentProvider(org=org, series=['A'])
        py.test.raises(ClientError, ser._delete)
        pp.series = ['B']
        ser._delete()
        assert ser._deleted
        # Deleting the accounting cascades to its series.
        ser = blm.accounting.VerificationSeries(name=['B'], accounting=[acc])
        acc._delete()
        assert ser._deleted
    def test_next_verification_data(self):
        # Suggests number 1 and the accounting start date when empty, then
        # last number + 1 and the latest transaction date.
        acc = blm.accounting.Accounting(start=['2010-01-01'])
        accid = acc.id[0]
        series = blm.accounting.VerificationSeries(name=['A'], accounting=[acc])
        result = blm.accounting.next_verification_data([series])
        assert result == {
            'accounting': accid,
            'number': 1,
            'transaction_date': '2010-01-01'
            }
        blm.accounting.Verification(accounting=[acc], series=[series],
                                    number=[1], transaction_date=['2010-01-04'])
        result = blm.accounting.next_verification_data([series])
        assert result == {
            'accounting': accid,
            'number': 2,
            'transaction_date': '2010-01-04'
            }
        blm.accounting.Verification(accounting=[acc], series=[series],
                                    number=[27], transaction_date=['2011-03-14'])
        result = blm.accounting.next_verification_data([series])
        assert result == {
            'accounting': accid,
            'number': 28,
            'transaction_date': '2011-03-14'
            }
class TestVerification(BLMTests):
    """Tests for Verification tois and the create/edit verification BLM
    methods, including validation and signature handling."""
    def setup_method(self, method):
        super(TestVerification, self).setup_method(method)
        self.user = blm.accounting.User(name=['<NAME>'])
        self.ctx.setUser(self.user)
        self.org = blm.accounting.Org(accountants=[self.user])
        # The payment provider implicitly creates series 'A', queried below.
        self.ppd = blm.accounting.PaymentProvider(org=self.org, account=['1000'], series=['A'])
        self.accounting = blm.accounting.Accounting(org=self.org)
        self.series, = blm.accounting.VerificationSeries._query().run()
        self.account = blm.accounting.Account(
            accounting=self.accounting, number=['1000'])
    def mkVerification(self, **kw):
        # Helper: create a verification in the default series/accounting.
        kw.setdefault('series', self.series)
        kw.setdefault('accounting', self.accounting)
        return blm.accounting.Verification(**kw)
    def test_create(self):
        ver = self.mkVerification()
        assert ver.number == [1] # default to 1
        assert ver.transaction_date == [time.strftime('%Y-%m-%d')]
        assert ver.registration_date == [time.strftime('%Y-%m-%d')]
        assert ver.signature == [str(i) for i in self.user.id] # map(str, self.user.id)
        assert ver.signature_name == self.user.name
        assert ver.series == [self.series]
        assert self.series.canBeDeleted == [False]
        # badly formatted transaction date
        py.test.raises(exceptions.ClientError, blm.accounting.Verification,
                       series=[self.series], accounting=[self.accounting],
                       transaction_date=['foo'])
        # badly formatted registration date
        py.test.raises(exceptions.ClientError, blm.accounting.Verification,
                       series=[self.series], accounting=[self.accounting],
                       registration_date=['foo'])
        # number should be a positive integer
        py.test.raises(exceptions.ClientError, blm.accounting.Verification,
                       series=[self.series], accounting=[self.accounting],
                       number=[0])
        # number conflict within the series
        py.test.raises(exceptions.ClientError, blm.accounting.Verification,
                       series=[self.series], accounting=[self.accounting],
                       number=[1])
    def test_update_signature_and_regdate(self):
        # Editing a verification re-signs it with the editing user and
        # updates the registration date.
        newUser = blm.accounting.User(name=['<NAME>'])
        self.org.accountants.add(newUser)
        self.accounting.allowRead = self.accounting.allowRead + [newUser]
        self.mkVerification()
        self.commit()
        ver, = blm.accounting.Verification._query().run()
        orig_date = ver.registration_date[0]
        self.ctx.setUser(newUser)
        with Time() as time:
            time += 3600 * 24 * 2 # a new date
            ver(text=['foo']) # provoke a change
            assert ver.signature == [str(i) for i in newUser.id] # map(str, newUser.id)
            assert ver.signature_name == newUser.name
            assert ver.registration_date[0] != orig_date
            assert ver.registration_date == [time.strftime('%Y-%m-%d')]
    def test_block_changes(self):
        # Number and series may not be changed on an existing verification.
        self.mkVerification()
        self.commit()
        ver, = blm.accounting.Verification._query().run()
        series2 = blm.accounting.VerificationSeries(
            accounting=self.accounting, name='B')
        py.test.raises(exceptions.ClientError, ver, number=[12])
        py.test.raises(exceptions.ClientError, ver, series=[series2])
    def test_saveVerification(self):
        # createVerification builds a verification with transactions;
        # amounts are given in hundredths (10000 -> 100.00).
        data = {
            'verification': {'accounting': str(self.accounting.id[0]),
                             'series': str(self.series.id[0]),
                             'number': 1,
                             'transaction_date': '2010-01-01'},
            'transactions': [
                {'account': str(self.account.id[0]),
                 'amount': 10000,
                 'text': 'Transaction text',
                 'version': 0},
                {'account': str(self.account.id[0]),
                 'amount': -10000,
                 'text': 'Transaction text 2',
                 'version': 0}
                ]
            }
        result, = blm.accounting.createVerification([data])
        ver, = blm.accounting.Verification._query().run()
        assert result['number'] == ver.number[0] == 1
        assert result['id'] == ver.id[0]
        assert ver.transaction_date == ['2010-01-01']
        trans1, = blm.accounting.Transaction._query(text='Transaction text').run()
        assert trans1.verification == [ver]
        assert trans1.version == [0]
        assert trans1.text == ['Transaction text']
        assert trans1.amount == [Decimal('100.00')]
        trans2, = blm.accounting.Transaction._query(text='Transaction text 2').run()
        assert trans2.verification == [ver]
        assert trans2.version == [0]
        assert trans2.text == ['Transaction text 2']
        assert trans2.amount == [Decimal('-100.00')]
        # editVerification updates listed transactions and drops the ones
        # missing from the new data (trans2 below).
        data = {
            'verification': {'id': str(ver.id[0]),
                             'accounting': str(self.accounting.id[0]),
                             'series': str(self.series.id[0]),
                             'version': 1,
                             'transaction_date': '2010-01-02'},
            'transactions': [
                {'id': str(trans1.id[0]),
                 'account': str(self.account.id[0]),
                 'amount': 20000,
                 'text': 'Changed transaction text',
                 'version': 1},
                {'account': str(self.account.id[0]),
                 'amount': -20000,
                 'text': 'Transaction text 3',
                 'version': 1}
                ]
            }
        result, = blm.accounting.editVerification([data])
        ver, = blm.accounting.Verification._query().run()
        assert result['number'] == ver.number[0] == 1
        assert result['id'] == ver.id[0]
        assert ver.version == [1]
        assert ver.transaction_date == ['2010-01-02']
        trans1, = blm.accounting.Transaction._query(id=trans1.id[0]).run()
        assert trans1.verification == [ver]
        assert trans1.version == [1]
        assert trans1.text == ['Changed transaction text']
        assert trans1.amount == [Decimal('200.00')]
        assert not blm.accounting.Transaction._query(id=trans2.id[0]).run()
        trans3, = blm.accounting.Transaction._query(text='Transaction text 3').run()
        assert trans3.verification == [ver]
        assert trans3.version == [1]
        assert trans3.text == ['Transaction text 3']
        assert trans3.amount == [Decimal('-200.00')]
    def test_saveVerification_unbalanced(self):
        # Unbalanced verification should fail
        data = {
            'verification': {'accounting': str(self.accounting.id[0]),
                             'series': str(self.series.id[0]),
                             'number': 1,
                             'transaction_date': '2010-01-01'},
            'transactions': [
                {'account': str(self.account.id[0]),
                 'amount': 10000,
                 'text': 'Transaction text',
                 'version': 0},
                {'account': str(self.account.id[0]),
                 'amount': -25000,
                 'text': 'Transaction text 2',
                 'version': 0}
                ]
            }
        try:
            result, = blm.accounting.createVerification([data])
        except ValueError:
            pass
        else:
            raise AssertionError('Saving unbalanced verification should have raised an error.')
        # Nothing may have been created.
        vers = blm.accounting.Verification._query().run()
        assert len(vers) == 0
        trans = blm.accounting.Transaction._query().run()
        assert len(trans) == 0
    def test_saveVerification_with_incomplete_edit_data(self):
        # Edit data with empty 'verification' fields on transactions must
        # still resolve to the right verification.
        data = {
            'verification': {'accounting': str(self.accounting.id[0]),
                             'series': str(self.series.id[0]),
                             'number': 1,
                             'transaction_date': '2010-01-01'},
            'transactions': [
                {'account': str(self.account.id[0]),
                 'amount': 10000,
                 'text': 'Transaction text',
                 'version': 0},
                {'account': str(self.account.id[0]),
                 'amount': -10000,
                 'text': 'Transaction text 2',
                 'version': 0}
                ]
            }
        result, = blm.accounting.createVerification([data])
        self.commit()
        ver, = blm.accounting.Verification._query().run()
        trans1, = blm.accounting.Transaction._query(text='Transaction text').run()
        trans2, = blm.accounting.Transaction._query(text='Transaction text 2').run()
        data = {
            'verification': {'id': str(ver.id[0]),
                             'accounting': str(self.accounting.id[0]),
                             'series': str(self.series.id[0]),
                             'version': 1,
                             'transaction_date': '2010-01-02'},
            'transactions': [
                {'id': str(trans1.id[0]),
                 'account': str(self.account.id[0]),
                 'amount': 20000,
                 'text': 'Changed transaction text',
                 'verification': '',
                 'version': 1},
                {'account': str(self.account.id[0]),
                 'amount': -20000,
                 'text': 'Transaction text 3',
                 'verification': '',
                 'version': 1}
                ]
            }
        result, = blm.accounting.editVerification([data])
        ver, = blm.accounting.Verification._query().run()
        assert result['number'] == ver.number[0] == 1
        assert result['id'] == ver.id[0]
        assert ver.version == [1]
        assert ver.transaction_date == ['2010-01-02']
        trans1, = blm.accounting.Transaction._query(id=trans1.id[0]).run()
        assert trans1.verification == [ver]
        assert trans1.version == [1]
        assert trans1.text == ['Changed transaction text']
        assert trans1.amount == [Decimal('200.00')]
        assert not blm.accounting.Transaction._query(id=trans2.id[0]).run()
        trans3, = blm.accounting.Transaction._query(text='Transaction text 3').run()
        assert trans3.verification == [ver]
        assert trans3.version == [1]
        assert trans3.text == ['Transaction text 3']
        assert trans3.amount == [Decimal('-200.00')]
class TestBalance(BLMTests):
    """Balance is an abstract base; direct instantiation must fail."""

    def test_abstract_class(self):
        # Only subclasses of Balance may be instantiated directly.
        with py.test.raises(AssertionError):
            blm.accounting.Balance()
class TestObjectBalanceBudget(BLMTests):
    """Tests for creating ObjectBalanceBudget tois."""

    def setup_method(self, method):
        super(TestObjectBalanceBudget, self).setup_method(method)
        self.acc = blm.accounting.Accounting()
        self.account = blm.accounting.Account(accounting=[self.acc], number=['1234'])
        self.dim = blm.accounting.Dimension(name=['bar'], number=['23'], accounting=[self.acc])
        self.aobj = blm.accounting.AccountingObject(name=['foo'], number=['42'], dimension=[self.dim])

    def test_create(self):
        # A full YYYYMM period is accepted ...
        budget = blm.accounting.ObjectBalanceBudget(
            period=['201003'],
            accounting_object=[self.aobj],
            account_balance=[self.account])
        # ... and so is an empty period.
        budget = blm.accounting.ObjectBalanceBudget(
            period=[''],
            accounting_object=[self.aobj],
            account_balance=[self.account])
        # period should be either YYYYMM or empty
        py.test.raises(Exception, blm.accounting.ObjectBalanceBudget,
                       account_balance=[self.account],
                       period=['2010'],
                       accounting_object=[self.aobj])
class TestBalanceBudget(BLMTests):
    """Tests for creating BalanceBudget tois."""

    def setup_method(self, method):
        super(TestBalanceBudget, self).setup_method(method)
        self.acc = blm.accounting.Accounting()
        self.account = blm.accounting.Account(accounting=[self.acc], number=['1234'])

    def test_create(self):
        # A budget only needs an account balance and a YYYYMM period.
        blm.accounting.BalanceBudget(account_balance=[self.account], period=['201003'])
class TestAccountBalance(BLMTests):
    """Tests for AccountBalance tois."""

    def setup_method(self, method):
        super(TestAccountBalance, self).setup_method(method)
        self.accounting = blm.accounting.Accounting()
        self.account = blm.accounting.Account(number=['1234'],
                                              accounting=[self.accounting])

    def test_create(self):
        # The balance is registered on the account, keyed by year.
        balance = blm.accounting.AccountBalance(year=[-3], account=[self.account])
        assert self.account.account_balances['-3'] == balance
class TestTransaction(BLMTests):
    """Tests for Transaction tois: defaults, balance maintenance and
    the transaction index."""
    def setup_method(self, method):
        super(TestTransaction, self).setup_method(method)
        self.org = blm.accounting.Org()
        self.accounting = blm.accounting.Accounting(org=[self.org])
        self.series = blm.accounting.VerificationSeries(
            accounting=self.accounting, name=['A'])
        self.ver = blm.accounting.Verification(series=[self.series], number=['1'],
                                               transaction_date=['2012-01-02'],
                                               accounting=[self.accounting])
    def test_create(self):
        user = blm.accounting.User()
        self.org.accountants = [user]
        self.ctx.setUser(user)
        accounting = self.accounting
        series = self.series
        accAsset = blm.accounting.Account(number=['1000'], type=['T'],
                                          opening_balance=['10'],
                                          opening_quantity=['2'],
                                          accounting=[accounting])
        accIncome = blm.accounting.Account(number=['3234'], type=['I'],
                                           accounting=[accounting],
                                           opening_balance=['20'],
                                           opening_quantity=['5'])
        # Defaults: zero amount/quantity, date from the verification,
        # signed by the current user.
        trans = blm.accounting.Transaction(account=[accIncome],
                                           verification=[self.ver],
                                           version=self.ver.version)
        assert trans.transtype == ['normal']
        assert trans.amount == [Decimal('0')]
        assert trans.amount.precision == 2
        assert trans.quantity == [Decimal('0')]
        assert trans.transaction_date == ['2012-01-02']
        assert trans.signature == [str(i) for i in user.id] # map(str, user.id)
        # New transactions update the account balance and quantity.
        trans = blm.accounting.Transaction(account=[accAsset],
                                           verification=[self.ver],
                                           version=self.ver.version,
                                           transaction_date=['2012-04-05'],
                                           amount=['40'], quantity=['4'])
        assert trans.transaction_date == ['2012-04-05']
        assert accAsset.balance == [Decimal('50')]
        assert accAsset.balance_quantity == [Decimal('6')]
        trans = blm.accounting.Transaction(account=[accIncome],
                                           verification=[self.ver],
                                           version=self.ver.version,
                                           transaction_date=['2012-04-05'],
                                           amount=['30'], quantity=['2'])
        assert accIncome.balance == [Decimal('50')]
        assert accIncome.balance_quantity == [Decimal('7')]
        # update accounting objects
        dimension, = blm.accounting.Dimension._query(number=['1']).run()
        accounting_object = blm.accounting.AccountingObject(number=['101'],
                                                            name=['foo'],
                                                            dimension=[dimension])
        assert not accAsset.object_balance_budgets # sanity
        # Test that ObjectBalanceBudget is created
        trans = blm.accounting.Transaction(account=[accAsset],
                                           verification=[self.ver],
                                           version=self.ver.version,
                                           transaction_date=['2012-04-05'],
                                           accounting_objects=[accounting_object],
                                           amount=['30'], quantity=['2'])
        obb = accAsset.object_balance_budgets[0]
        assert obb.balance == [Decimal('30')]
        assert obb.balance_quantity == [Decimal('2')]
        # Test that ObjectBalanceBudget is updated
        trans = blm.accounting.Transaction(account=[accAsset],
                                           verification=[self.ver],
                                           version=self.ver.version,
                                           transaction_date=['2012-04-05'],
                                           accounting_objects=[accounting_object],
                                           amount=['10'], quantity=['3'])
        assert obb.balance == [Decimal('40')]
        assert obb.balance_quantity == [Decimal('5')]
        trans = blm.accounting.Transaction(account=[accIncome],
                                           verification=[self.ver],
                                           version=self.ver.version,
                                           transaction_date=['2012-04-05'],
                                           accounting_objects=[accounting_object],
                                           amount=['20'], quantity=['7'])
        obb = accIncome.object_balance_budgets[0]
        assert obb.balance == [Decimal('20')]
        assert obb.balance_quantity == [Decimal('7')]
    def test_transactionIndex(self):
        # The index should list non-empty transaction texts per org.
        account = blm.accounting.Account(number=['9999'],
                                         accounting=[self.accounting])
        for text in ('foo', 'bar', 'baz', ''):
            blm.accounting.Transaction(account=account, verification=[self.ver],
                                       version=self.ver.version, text=[text])
        self.commit()
        direct_query = {'filter': [{'property': 'org',
                                    'value': str(self.org.id[0])}]}
        result = blm.accounting.transactionIndex([direct_query])
        if PYT3:
            result.sort(key=lambda a: a['text'])
        else:
            result.sort()
        assert result == [{'text': 'bar', 'org': str(self.org.id[0])},
                          {'text': 'baz', 'org': str(self.org.id[0])},
                          {'text': 'foo', 'org': str(self.org.id[0])}]
    def test_strip_text(self):
        # Surrounding whitespace in the text is stripped on create and edit.
        account = blm.accounting.Account(number=['9999'],
                                         accounting=[self.accounting])
        t = blm.accounting.Transaction(account=account, verification=[self.ver],
                                       version=self.ver.version, text=['foo'])
        assert t.text == ['foo']
        t = blm.accounting.Transaction(account=account, verification=[self.ver],
                                       version=self.ver.version, text=[' foo '])
        assert t.text == ['foo']
        t(text=[' bar '])
        assert t.text == ['bar']
    def test_transaction_update_recalculates_balance(self):
        account1 = blm.accounting.Account(number=['9999'],
                                          accounting=[self.accounting])
        account2 = blm.accounting.Account(number=['5000'],
                                          accounting=[self.accounting])
        t1 = blm.accounting.Transaction(account=account1, verification=[self.ver],
                                        version=self.ver.version, amount=['10.00'])
        t2 = blm.accounting.Transaction(account=account1, verification=[self.ver],
                                        version=self.ver.version, amount=['20.00'])
        self.commit()
        account1, = blm.accounting.Account._query(number=['9999']).run()
        assert account1.balance == [Decimal('30.00')] # sanity
        t1, = blm.accounting.Transaction._query(id=t1.id[0]).run()
        # Changing the amount updates the account balance ...
        t1(amount=['30.00'])
        assert account1.balance == [Decimal('50.00')]
        # ... and moving the transaction removes it from the old account.
        t1(account=account2)
        assert account1.balance == [Decimal('20.00')]
    def test_transaction_delete_recalculates_balance(self):
        account1 = blm.accounting.Account(number=['9999'],
                                          accounting=[self.accounting])
        t1 = blm.accounting.Transaction(account=account1, verification=[self.ver],
                                        version=self.ver.version, amount=['10.00'])
        t2 = blm.accounting.Transaction(account=account1, verification=[self.ver],
                                        version=self.ver.version, amount=['20.00'])
        t3 = blm.accounting.Transaction(account=account1, verification=[self.ver],
                                        version=self.ver.version, amount=['15.00'])
        self.commit()
        account1, = blm.accounting.Account._query(number=['9999']).run()
        assert account1.balance == [Decimal('45.00')] # sanity
        t1, = blm.accounting.Transaction._query(id=t1.id[0]).run()
        t1._delete()
        t2, = blm.accounting.Transaction._query(id=t2.id[0]).run()
        t2._delete()
        assert account1.balance == [Decimal('15.00')]
class TestLogVerification(BLMTests):
    """Tests that verification edits log the previous state (bson encoded)
    keyed by version in Verification.log."""
    def setup_method(self, method):
        super(TestLogVerification, self).setup_method(method)
        org = blm.accounting.Org()
        accounting = blm.accounting.Accounting(org=[org])
        series = blm.accounting.VerificationSeries(
            accounting=accounting, name=['A'])
        acc1 = blm.accounting.Account(number=['1000'], accounting=[accounting])
        acc2 = blm.accounting.Account(number=['2000'], accounting=[accounting])
        ver = blm.accounting.Verification(accounting=accounting,
                                          series=series,
                                          text=['original'],
                                          transaction_date=['2010-01-01'])
        blm.accounting.Transaction(verification=[ver],
                                   version=ver.version,
                                   account=[acc1],
                                   amount=['10.00'])
        self.commit()
        self.ver, = blm.accounting.Verification._query().run()
        self.trans, = blm.accounting.Transaction._query().run()
        self.acc1, self.acc2 = sorted(blm.accounting.Account._query().run(),
                                      key=lambda t: t.number[0])
    def loadAttrs(self, toi):
        # Touch all logged attributes so _attrData is fully populated.
        for attr in toi.logattrs:
            getattr(toi, attr).value
    def test_log_verification_edit(self):
        # Editing bumps the version and logs the pre-edit attribute data.
        self.loadAttrs(self.ver)
        orgver = self.ver._attrData.copy()
        self.ver(transaction_date=['2010-01-02'],
                 text=['changed'])
        self.commit()
        ver, = blm.accounting.Verification._query().run()
        trans, = blm.accounting.Transaction._query().run()
        assert ver.version == [1]
        assert trans.transaction_date == ['2010-01-02']
        assert trans.version == [0]
        log = ver.log['0']
        assert len(log) == 1
        verlog = bson.BSON(log[0]).decode()
        assert verlog == orgver
    def test_log_transaction_add(self):
        # Adding a transaction logs just its id (it had no prior state).
        self.ver(text=['changed'])
        assert len(self.ver.log['0']) == 1 # sanity
        self.commit()
        ver, = blm.accounting.Verification._query().run()
        trans = blm.accounting.Transaction(verification=self.ver,
                                           version=[1],
                                           account=[self.acc1],
                                           amount=['40.00'])
        self.commit()
        ver, = blm.accounting.Verification._query().run()
        log = ver.log['0']
        assert len(log) == 2
        try:
            translog = bson.BSON(log[1]).decode()
        except UnicodeEncodeError:
            print(repr(dict(ver.log)))
            raise
        assert translog == {'id': list(trans.id)}
    def test_log_transaction_change(self):
        # Changing a transaction logs its full previous state.
        self.ver(text=['changed'])
        assert len(self.ver.log['0']) == 1 # sanity
        self.commit()
        trans, = blm.accounting.Transaction._query().run()
        acc2 = blm.accounting.Account._query(number='2000').run()
        trans(version=[1], amount=['40.00'], account=acc2)
        self.commit()
        ver, = blm.accounting.Verification._query().run()
        log = self.ver.log['0']
        assert len(log) == 2
        translog = bson.BSON(log[1]).decode()
        assert translog == {'id': list(self.trans.id),
                            'version': [0],
                            'amount': [Decimal('10.00')],
                            'quantity': [Decimal('0')],
                            'text': [''],
                            'signature': [],
                            'transaction_date': ['2010-01-01'],
                            'account': ['1000']}
        assert self.trans.version == [1] # make sure toi._update() has been called
    def test_log_transaction_delete(self):
        # Deleting a transaction also logs its full previous state.
        self.ver(text=['changed'])
        assert len(self.ver.log['0']) == 1 # sanity
        self.commit()
        self.trans._delete()
        self.commit()
        ver, = blm.accounting.Verification._query().run()
        log = ver.log['0']
        assert len(log) == 2
        translog = bson.BSON(log[1]).decode()
        assert translog == {'id': list(self.trans.id),
                            'version': [0],
                            'amount': [Decimal('10.00')],
                            'quantity': [Decimal('0')],
                            'text': [''],
                            'signature': [],
                            'transaction_date': ['2010-01-01'],
                            'account': ['1000']}
    def test_version_mismatch(self):
        # Writing with a stale/unexpected version must be rejected.
        py.test.raises(exceptions.ClientError, self.ver, version=5)
class TestAdminPermissions(BLMTests):
    """Permission tests: org admins may edit the org and manage payment
    providers and PG orders; plain members and outsiders may not."""
    def setup_method(self, method):
        super(TestAdminPermissions, self).setup_method(method)
        self.admin = blm.accounting.User()
        self.member = blm.accounting.User()
        self.other = blm.accounting.User()
        # Creating the org as self.admin makes that user its admin.
        self.ctx.setUser(self.admin)
        self.org = blm.accounting.Org(subscriptionLevel=['subscriber'])
        self.ctx.setUser(None)
        self.org.ug[0].users = [self.admin, self.member]
        self.accounting = blm.accounting.Accounting(org=[self.org])
        self.commit()
        # Re-fetch everything so the tests work on committed tois.
        self.admin = blm.accounting.User._query(id=self.admin).run()[0]
        self.member = blm.accounting.User._query(id=self.member).run()[0]
        self.other = blm.accounting.User._query(id=self.other).run()[0]
        self.org = blm.accounting.Org._query(id=self.org).run()[0]
        self.accounting = blm.accounting.Accounting._query(id=self.accounting).run()[0]
    def test_admin(self):
        assert self.org.admins == [self.admin]
    def test_edit_org(self):
        self.ctx.setUser(self.admin)
        self.org.name = ['Hepp!']
        self.ctx.setUser(self.member)
        py.test.raises(ClientError, setattr, self.org, 'name', ['Hupp!'])
        self.ctx.setUser(self.other)
        py.test.raises(ClientError, setattr, self.org, 'name', ['Hipp!'])
    def test_payment_providers(self):
        # Only admins may create, edit or delete payment providers.
        for cls in (blm.accounting.PaymentProvider,
                    blm.accounting.SimulatorProvider,
                    blm.accounting.PlusgiroProvider,
                    #blm.accounting.PaysonProvider
                    ):
            self.ctx.setUser(self.admin)
            pp = cls(org=[self.org])
            self.commit()
            pp(account=['1000'])
            self.ctx.setUser(self.member)
            pp, = cls._query(id=pp).run()
            py.test.raises(ClientError, cls, org=[self.org])
            py.test.raises(ClientError, pp, account=['2000'])
            py.test.raises(ClientError, pp._delete)
    def test_pg_order(self):
        self.ctx.setUser(self.admin)
        pgo = blm.accounting.PGOrder(org=[self.org], contact=['a'],
                                     contactPhone=['b'], contactEmail=['c'],
                                     pgnum=['d'])
        self.commit()
        pgo(sent=[True])
        # Even the admin may not change pgnum once the order is sent.
        py.test.raises(ClientError, pgo, pgnum=['x'])
        self.ctx.setUser(self.member)
        py.test.raises(ClientError, blm.accounting.PGOrder,
                       org=[self.org], contact=['a'],
                       contactPhone=['b'], contactEmail=['c'],
                       pgnum=['d'])
        py.test.raises(ClientError, pgo, sent=[False])
        py.test.raises(ClientError, pgo, pgnum=['y'])
    def test_propagate_name_and_orgnum_to_current_accounting(self):
        # Org name/orgnum changes are copied to the current accounting.
        self.ctx.setUser(self.admin)
        self.org(name=['Foo'], orgnum=['123456-7890'])
        assert self.org.current_accounting == [self.accounting]
        assert self.accounting.orgname == ['Foo']
        assert self.accounting.orgnum == ['123456-7890']
class TestPayerPermissions(PermissionTests):
    """Permission tests: org payers may manage supplier invoices."""
    def setup_method(self, method):
        super(TestPayerPermissions, self).setup_method(method)
        self.payer = blm.accounting.User()
        self.member = blm.accounting.User()
        self.other = blm.accounting.User()
        self.org = blm.accounting.Org()
        self.org.ug[0].users = [self.payer, self.member]
        self.org.payers = [self.payer]
        self.commit()
        # Re-fetch everything so the tests work on committed tois.
        self.payer, = blm.accounting.User._query(id=self.payer).run()
        self.member, = blm.accounting.User._query(id=self.member).run()
        self.other, = blm.accounting.User._query(id=self.other).run()
        self.org, = blm.accounting.Org._query(id=self.org).run()
    def test_supplier_invoice(self):
        # self.check (from PermissionTests) verifies create/edit rights.
        self.check(blm.accounting.SupplierInvoice,
                   params=dict(org=self.org,
                               recipient='Foo Inc.',),
                   edit=dict(transferAddress='Foo Street 1'),
                   allow=[self.payer],
                   deny=[self.member])
class TestAccountantPermissions(PermissionTests):
    def setup_method(self, method):
        """Create an org with an accountant, a plain member and an outsider."""
        super(TestAccountantPermissions, self).setup_method(method)
        self.accountant = blm.accounting.User()
        self.member = blm.accounting.User()
        self.other = blm.accounting.User()
        self.org = blm.accounting.Org()
        self.org.ug[0].users = [self.accountant, self.member]
        self.org.accountants = [self.accountant]
        self.commit()
        # Re-fetch everything so the tests work on committed tois.
        self.accountant = blm.accounting.User._query(id=self.accountant).run()[0]
        self.member = blm.accounting.User._query(id=self.member).run()[0]
        self.other = blm.accounting.User._query(id=self.other).run()[0]
        self.org = blm.accounting.Org._query(id=self.org).run()[0]
    def test_accounting(self):
        """Only accountants may create/edit Accounting tois."""
        self.check(blm.accounting.Accounting,
                   params=dict(org=self.org),
                   edit=dict(start='2014-01-01'),
                   allow=[self.accountant], deny=[self.member])
    def test_create_accounting(self):
        """Accountants may create accountings; plain members may not."""
        self.ctx.setUser(self.accountant)
        self.accounting = blm.accounting.Accounting(org=[self.org])
        self.ctx.setUser(self.member)
        py.test.raises(ClientError, blm.accounting.Accounting, org=[self.org])
    def test_edit_accounting(self):
        """Accountants may edit a committed accounting; members may not."""
        accounting = blm.accounting.Accounting(org=[self.org])
        self.commit()
        accounting, = blm.accounting.Accounting._query(id=accounting.id).run()
        self.ctx.setUser(self.accountant)
        accounting(start=['2010-01-01'])
        self.ctx.setUser(self.member)
        py.test.raises(ClientError, accounting, start=['2011-01-01'])
    def test_edit_and_create_dimensions(self):
        """Only accountants may create/edit Dimension tois."""
        accounting = blm.accounting.Accounting(org=[self.org])
        self.check(blm.accounting.Dimension,
                   params=dict(number='42', name='meaning of life',
                               accounting=accounting),
                   edit=dict(number='43'),
                   allow=self.accountant, deny=self.member)
    def test_accounting_object(self):
        """Only accountants may create/edit AccountingObject tois."""
        accounting = blm.accounting.Accounting(org=[self.org])
        dim = blm.accounting.Dimension(number='42', name='meaning of life',
                                       accounting=accounting)
        self.check(blm.accounting.AccountingObject,
                   params=dict(number='1', name='ao', dimension=dim),
                   edit=dict(number='2'),
                   allow=self.accountant, deny=self.member)
    def test_edit_and_create_series(self):
        """Only accountants may create/edit verification series."""
        accounting = blm.accounting.Accounting(org=[self.org])
        self.commit()
        self.ctx.setUser(self.accountant)
        seriesA = blm.accounting.VerificationSeries(accounting=[accounting],
                                                    name=['A'])
        self.commit()
        seriesA, = blm.accounting.VerificationSeries._query(id=seriesA).run()
        seriesA(description=['foo'])
        self.ctx.setUser(self.member)
        py.test.raises(ClientError, blm.accounting.VerificationSeries,
                       accounting=[accounting], name=['B'])
        py.test.raises(ClientError, seriesA, description=['bar'])
    def test_edit_and_create_account_and_account_balance(self):
        """Only accountants may create/edit accounts and account balances."""
        accounting = blm.accounting.Accounting(org=[self.org])
        self.commit()
        self.ctx.setUser(self.accountant)
        account1000 = blm.accounting.Account(accounting=[accounting],
                                             number=['1000'])
        self.commit()
        account1000, = blm.accounting.Account._query(number='1000').run()
        account1000(name=['the account'])
        # A plain member may neither create nor edit accounts.
        self.ctx.setUser(self.member)
        py.test.raises(ClientError, blm.accounting.Account,
                       accounting=[accounting], number=['2000'])
        py.test.raises(ClientError, account1000,
                       name=['the account with the new name'])
        with py.test.raises(ClientError):
            blm.accounting.AccountBalance(account=account1000, year=[-1])
        self.ctx.setUser(self.accountant)
        ab = blm.accounting.AccountBalance(account=account1000, year=[-1])
        self.commit()
        ab._clear()
        self.ctx.setUser(self.member)
        with py.test.raises(ClientError):
            ab(year=[-2])
        self.ctx.setUser(self.accountant)
        ab(year=[-2])
    def test_balance_budget_and_object_balance_budget(self):
        """Only accountants may create/edit (Object)BalanceBudgets."""
        accounting = blm.accounting.Accounting(org=[self.org])
        account = blm.accounting.Account(accounting=[accounting],
                                         number=['1000'])
        dim = blm.accounting.Dimension(accounting=[accounting], number=['1'],
                                       name=['dim'])
        ao = blm.accounting.AccountingObject(dimension=[dim], number=['1'],
                                             name=['ao'])
        # ObjectBalanceBudget
        self.ctx.setUser(self.member)
        with py.test.raises(ClientError):
            blm.accounting.ObjectBalanceBudget(account_balance=[account],
                                               accounting_object=[ao],
                                               period=[''])
        self.ctx.setUser(self.accountant)
        obb = blm.accounting.ObjectBalanceBudget(account_balance=[account],
                                                 accounting_object=[ao],
                                                 period=[''])
        self.commit()
        obb._clear()
        # Member may not edit, accountant may.
        self.ctx.setUser(self.member)
        with py.test.raises(ClientError):
            obb(period=['201401'])
        self.ctx.setUser(self.accountant)
        obb(period=['201402'])
        # BalanceBudget
        self.ctx.setUser(self.member)
        with py.test.raises(ClientError):
            blm.accounting.BalanceBudget(account_balance=[account],
                                         period=['201401'])
        self.ctx.setUser(self.accountant)
        bb = blm.accounting.BalanceBudget(account_balance=[account],
                                          period=['201401'])
        self.commit()
        bb._clear()
        # Member may not edit, accountant may.
        self.ctx.setUser(self.member)
        with py.test.raises(ClientError):
            bb(period=['201402'])
        self.ctx.setUser(self.accountant)
        bb(period=['201403'])
    def test_edit_and_create_verification(self):
        """Only accountants may create/edit Verifications and Transactions."""
        accounting = blm.accounting.Accounting(org=[self.org])
        series = blm.accounting.VerificationSeries(accounting=[accounting],
                                                   name=['A'])
        account = blm.accounting.Account(accounting=[accounting],
                                         number=['1000'])
        self.commit()
        # Re-fetch after commit so we operate on persisted tois.
        accounting, = blm.accounting.Accounting._query().run()
        series, = blm.accounting.VerificationSeries._query().run()
        account, = blm.accounting.Account._query().run()
        self.ctx.setUser(self.accountant)
        ver = blm.accounting.Verification(accounting=[accounting],
                                          series=[series])
        trans = blm.accounting.Transaction(verification=[ver],
                                           account=[account],
                                           version=[0])
        self.commit()
        ver(transaction_date=['2010-01-01'])
        trans(amount=['10.00'])
        # A plain member may neither create nor edit either toi type.
        self.ctx.setUser(self.member)
        py.test.raises(ClientError, blm.accounting.Verification,
                       accounting=[accounting], series=[series])
        py.test.raises(ClientError, blm.accounting.Transaction,
                       verification=[ver], account=[account], version=[0])
        py.test.raises(ClientError, ver, transaction_date=['2011-01-01'])
        py.test.raises(ClientError, trans, amount=['20.00'])
class TestInvoiceSenderPermissions(BLMTests):
    """Permission checks for the 'invoicesenders' APIUser role."""

    def setup_method(self, method):
        super(TestInvoiceSenderPermissions, self).setup_method(method)
        self.org = blm.accounting.Org()
        self.invoicesender = blm.accounting.APIUser(roles='invoicesenders')
        self.member = blm.accounting.User()
        # self.other is deliberately NOT a member of the org.
        self.other = blm.accounting.User()
        self.org.ug[0].users = [self.invoicesender, self.member]
        self.commit()
        # Re-fetch the org so we work with the persisted toi.
        self.org = blm.accounting.Org._query(id=self.org).run()[0]
    def test_ocr_counter(self):
        """Only org members (incl. invoice senders) may draw OCR numbers."""
        self.ctx.setUser(self.other)
        with py.test.raises(ClientError):
            self.org.get_ocr()
        self.ctx.setUser(self.member)
        self.org.get_ocr()
        self.ctx.setUser(self.invoicesender)
        self.org.get_ocr()
class TestVatCode(BLMTests):
    """Tests for VatCode creation, percentage lookup and access control."""

    def setup_method(self, method):
        super(TestVatCode, self).setup_method(method)
        # Remember the class level table so tests may replace it safely.
        self.vat_table = blm.accounting.VatCode.vat_table

    def teardown_method(self, method):
        # Bug fix: test_set_percentage replaces the *class* attribute
        # VatCode.vat_table, and it was never restored, leaking the override
        # into all subsequently run tests.  Restore the table saved in
        # setup_method.
        blm.accounting.VatCode.vat_table = self.vat_table
        # Call up the chain only if a base class defines teardown_method.
        teardown = getattr(super(TestVatCode, self), 'teardown_method', None)
        if teardown is not None:
            teardown(method)

    def test_create(self):
        """Root can create a VatCode and edit its code."""
        vc = blm.accounting.VatCode(code=['10'], xmlCode=['mngol'],
                                    description=['meaning of life'])
        self.commit()
        vc._clear()
        vc(code=['66'])

    def test_set_percentage(self):
        """percentage is resolved from vat_table by code; unknown codes get []."""
        blm.accounting.VatCode.vat_table = {
            '10': '10',
            '11': '10',
            '12': '20'}
        vc10 = blm.accounting.VatCode(code=['10'], xmlCode=['10'])
        vc11 = blm.accounting.VatCode(code=['11'], xmlCode=['10'])
        vc12 = blm.accounting.VatCode(code=['12'], xmlCode=['10'])
        vc13 = blm.accounting.VatCode(code=['13'], xmlCode=['10'])
        assert vc10.percentage == [Decimal('10')]
        assert vc11.percentage == [Decimal('10')]
        assert vc12.percentage == [Decimal('20')]
        assert vc13.percentage == []  # '13' is not in the table

    def test_root_only(self):
        """Only the root/super user may create or edit VatCodes."""
        vc = blm.accounting.VatCode(code=['10'], xmlCode=['mngol'])
        self.commit()
        vc._clear()
        user = blm.accounting.User()
        self.ctx.setUser(user)
        with py.test.raises(ClientError):
            blm.accounting.VatCode(code=['42'], xmlCode=['mngol'],
                                   description=['meaning of life'])
        with py.test.raises(ClientError):
            vc(code=['17'])
class TestReadPermissions(BLMTests):
    """Verify the allowRead set on every accounting toi type.

    Org-scoped tois must be readable by the org's user groups; global
    template tois (AccountTemplate, ChartOfAccounts, VatCode) must be
    readable by the 'public' group.
    """

    def setup_method(self, method):
        super(TestReadPermissions, self).setup_method(method)
        self.public = blm.accounting.UG(name=['public'])
        self.user = blm.accounting.User()
        self.ctx.setUser(self.user)
    def test_user_setup(self):
        """A fresh user reads itself and belongs to the public group."""
        assert self.user.allowRead == [self.user]
        assert self.user.ugs == [self.public]
    def test_ug(self):
        """A UG is readable by itself."""
        ug = blm.accounting.UG()
        assert ug.allowRead == [ug]
    def test_no_more_public_ugs(self):
        assert self.ctx.user
        # Only super user may create UGs with names
        py.test.raises(Exception, blm.accounting.UG, name=['public'])
    def test_org(self):
        """An org is readable by its member and ticket checker groups."""
        org = blm.accounting.Org()
        assert org.ug[0].users == [self.user]
        assert org.ug[0] in self.user.allowRead
        assert org.ticketchecker_ug[0] in self.user.allowRead
        assert set(org.allowRead) == set(org.ug + org.ticketchecker_ug)
    def get_org(self):
        # Helper: a fresh org with self.user as accountant.
        return blm.accounting.Org(name=['Acme Corporation'],
                                  email=['<EMAIL>'],
                                  accountants=[self.user])
    def test_paymentproviderdata(self):
        org = self.get_org()
        ppd = blm.accounting.PaymentProvider(org=org)
        assert ppd.allowRead == org.ug
    def test_pgorder(self):
        org = self.get_org()
        pgorder = blm.accounting.PGOrder(
            org=[org],
            contact=['Mr. Foo'],
            contactPhone=['1234567'],
            contactEmail=['<EMAIL>'],
            pgnum=['12345-6'])
        assert pgorder.allowRead == org.ug
    def test_invitation(self, monkeypatch):
        org = self.get_org()
        # Stub out mail delivery; we only care about permissions here.
        monkeypatch.setattr(mail, 'sendmail', lambda *args, **kw: None)
        invitation, = org.invite(['foo<EMAIL>'])
        assert invitation.allowRead == org.ug
    def test_accounting(self):
        org = self.get_org()
        acc1 = blm.accounting.Accounting(org=[org])
        assert acc1.allowRead == org.ug
    def get_accounting(self):
        # Helper: an accounting whose read set mirrors the org's UG.
        org = self.get_org()
        assert org.ug
        acc = blm.accounting.Accounting(org=[org])
        assert acc.allowRead == org.ug
        return acc
    def test_dimension(self):
        acc = self.get_accounting()
        dim = blm.accounting.Dimension(number=['1'], name=['A'],
                                       accounting=[acc])
        assert dim.allowRead == acc.allowRead
        for dim in blm.accounting.Dimension._query(accounting=acc).run():
            assert dim.allowRead == acc.allowRead
    def test_accounting_object(self):
        acc = self.get_accounting()
        dim = blm.accounting.Dimension(number=['1'], name=['A'],
                                       accounting=[acc])
        ao = blm.accounting.AccountingObject(number=['1'], name=['A'],
                                             dimension=[dim])
        assert ao.allowRead == dim.allowRead
    def test_account(self):
        acc = self.get_accounting()
        account = blm.accounting.Account(number=['1234'], accounting=[acc])
        assert account.allowRead == acc.allowRead
    def get_account(self):
        # Helper: an account inheriting its accounting's read set.
        acc = self.get_accounting()
        account = blm.accounting.Account(number=['1234'], accounting=[acc])
        assert account.allowRead == acc.allowRead
        return account
    def test_verification_series(self):
        acc = self.get_accounting()
        vs = blm.accounting.VerificationSeries(name=['A'], accounting=[acc])
        assert vs.allowRead == acc.allowRead
    def test_verification(self):
        acc = self.get_accounting()
        series = blm.accounting.VerificationSeries(accounting=acc, name=['A'])
        ver = blm.accounting.Verification(series=[series], number=[1],
                                          accounting=[acc])
        assert ver.allowRead == acc.allowRead
    def test_transaction(self):
        acc = self.get_accounting()
        series = blm.accounting.VerificationSeries(accounting=acc, name=['A'])
        ver = blm.accounting.Verification(series=[series], number=[1],
                                          accounting=[acc])
        account = blm.accounting.Account(number=['1234'], accounting=[acc])
        trans = blm.accounting.Transaction(verification=[ver],
                                           version=ver.version,
                                           account=[account])
        assert trans.allowRead == ver.allowRead == account.allowRead
    def test_account_balance(self):
        account = self.get_account()
        ab = blm.accounting.AccountBalance(year=[-1], account=[account])
        assert ab.allowRead == account.allowRead
    def test_object_balance_budget(self):
        account = self.get_account()
        dim = blm.accounting.Dimension(name=['bar'], number=['23'], accounting=account.accounting)
        aobj = blm.accounting.AccountingObject(name=['foo'], number=['42'], dimension=[dim])
        obb = blm.accounting.ObjectBalanceBudget(period=[''],
                                                 accounting_object=[aobj],
                                                 account_balance=[account])
        assert obb.allowRead == account.allowRead
    def test_balance_budget(self):
        account = self.get_account()
        bb = blm.accounting.BalanceBudget(period=['201003'],
                                          account_balance=[account])
        assert bb.allowRead == account.allowRead
    def test_account_template(self):
        # Template data is global: readable by the public group.
        self.ctx.setUser(None)
        at = blm.accounting.AccountTemplate(number=['1234'])
        assert at.allowRead == [self.public]
    def test_chart_of_accounts(self):
        self.ctx.setUser(None)
        chart = blm.accounting.ChartOfAccounts(name=['foo'])
        assert chart.allowRead == [self.public]
    def test_vatcode(self):
        self.ctx.setUser(None)
        vatCode = blm.accounting.VatCode(code=['66'], xmlCode=['Awsm'],
                                         description=['Awesome'])
        assert vatCode.allowRead == [self.public]
class TestCascadingDelete(BLMTests):
    """Deleting an Accounting must cascade to every dependent toi type."""

    def test_delete_all(self):
        # Import a full SIE fixture so every dependent toi type exists.
        fname = os.path.join(os.path.dirname(__file__), 'sie', 'delete.si')
        importer = sie_import.SIEImporter()
        importer.parseFile(fname)
        toid = importer.accounting.id[0]
        self.commit()
        accounting, = blm.accounting.Accounting._query(id=toid).run()
        accounting._delete()
        self.commit()
        # Nothing accounting-related may survive the delete.
        assert not blm.accounting.Account._query().run()
        assert not blm.accounting.AccountBalance._query().run()
        assert not blm.accounting.Accounting._query().run()
        assert not blm.accounting.AccountingObject._query().run()
        assert not blm.accounting.BalanceBudget._query().run()
        assert not blm.accounting.Dimension._query().run()
        assert not blm.accounting.ObjectBalanceBudget._query().run()
        assert not blm.accounting.Transaction._query().run()
        assert not blm.accounting.Verification._query().run()
        assert not blm.accounting.VerificationSeries._query().run()
class SIETests(BLMTests):
    """Base class for tests that import SIE files and deep-compare accountings."""

    def import_sie(self, fname, org=()):
        """Import SIE file *fname* from the 'sie' test data directory.

        Returns the resulting Accounting toi, re-fetched after commit.
        Note: default for *org* changed from a mutable ``[]`` to an
        immutable ``()``; it is only ever read via ``list(org)``.
        """
        sie = os.path.join(os.path.dirname(__file__), 'sie', fname)
        importer = sie_import.SIEImporter(list(org))
        importer.ignoretransactions = False
        importer.parseFile(sie)
        self.commit()
        return blm.accounting.Accounting._query(id=importer.accounting).run()[0]

    def compare(self, acc1, acc2, require_accounting_objects=True,
                require_account_balances=True,
                require_object_balance_budgets=True):
        """Assert that two accountings are structurally equal copies.

        Walks dimensions, accounting objects, verifications, accounts,
        account balances, object balance budgets and series, asserting
        field-by-field equality while verifying no toi is shared between
        the two accountings.  The ``require_*`` flags assert that the
        respective optional sub-structures actually existed.
        """
        # compare basic attribute at root level
        assert acc1.contact == acc2.contact
        assert acc1.currency == acc2.currency
        assert acc1.end == acc2.end
        assert acc1.industry_code == acc2.industry_code
        assert acc1.layout == acc2.layout
        assert acc1.mail_address == acc2.mail_address
        assert acc1.orgname == acc2.orgname
        assert acc1.orgnum == acc2.orgnum
        assert acc1.orgtype == acc2.orgtype
        assert acc1.start == acc2.start
        assert acc1.taxation_year == acc2.taxation_year
        assert acc1.telephone == acc2.telephone
        assert acc1.zip_city == acc2.zip_city
        assert acc1.closed == acc2.closed
        # Sort everything by number so both sides pair up positionally.
        key = lambda t: t.number[0]
        acc1_accounts = blm.accounting.Account._query(accounting=acc1).run()
        acc2_accounts = blm.accounting.Account._query(accounting=acc2).run()
        acc1_accounts.sort(key=key)
        acc2_accounts.sort(key=key)
        acc1_dimensions = blm.accounting.Dimension._query(accounting=acc1).run()
        acc2_dimensions = blm.accounting.Dimension._query(accounting=acc2).run()
        acc1_dimensions.sort(key=key)
        acc2_dimensions.sort(key=key)
        acc1_verifications = blm.accounting.Verification._query(accounting=acc1).run()
        acc2_verifications = blm.accounting.Verification._query(accounting=acc2).run()
        acc1_verifications.sort(key=key)
        acc2_verifications.sort(key=key)
        acc1_series = blm.accounting.VerificationSeries._query(
            accounting=acc1).run()
        acc2_series = blm.accounting.VerificationSeries._query(
            accounting=acc2).run()
        # Bug fix: acc1_series was never sorted (acc2_series was sorted
        # twice), so the series were compared in database query order.
        acc1_series.sort(key=lambda t: t.name[0])
        acc2_series.sort(key=lambda t: t.name[0])
        for dim1, dim2 in izip_longest(acc1_dimensions, acc2_dimensions):
            assert dim1.accounting == [acc1]
            assert dim2.accounting == [acc2]
            assert dim1.number == dim2.number
            assert dim1.name == dim2.name
            assert dim1.project == dim2.project
            for pdim1, pdim2 in izip_longest(dim1.subdim_of, dim2.subdim_of):
                assert pdim1 != pdim2 # do not just copy the toiref
                assert pdim1.accounting == [acc1]
                assert pdim2.accounting == [acc2]
                assert pdim1.number == pdim2.number
                assert pdim1.name == pdim2.name
                assert pdim1.project == pdim2.project
            dim1_aos = blm.accounting.AccountingObject._query(
                dimension=dim1).run()
            dim2_aos = blm.accounting.AccountingObject._query(
                dimension=dim2).run()
            dim1_aos.sort(key=key)
            dim2_aos.sort(key=key)
            for ao1, ao2 in izip_longest(dim1_aos, dim2_aos):
                assert ao1 != ao2
                assert ao1.dimension == [dim1]
                assert ao2.dimension == [dim2]
                assert ao1.number == ao2.number
                assert ao1.name == ao2.name
        for ver1, ver2 in izip_longest(acc1_verifications, acc2_verifications):
            assert ver1.accounting == [acc1]
            assert ver2.accounting == [acc2]
            assert ver1.series[0].name == ver2.series[0].name
            assert ver1.number == ver2.number
            assert ver1.transaction_date == ver2.transaction_date
            assert ver1.text == ver2.text
            # NOTE: pdim1/pdim2 leak out of the dimension loops above;
            # this asserts that at least one sub-dimension pair was seen.
            assert pdim1 and pdim2
            if require_accounting_objects:
                assert ao1 and ao2
        for account1, account2 in izip_longest(acc1_accounts, acc2_accounts):
            assert account1.accounting == [acc1]
            assert account2.accounting == [acc2]
            assert account1.number == account2.number
            assert account1.name == account2.name
            assert account1.type == account2.type
            assert account1.unit == account2.unit
            assert account1.sru == account2.sru
            assert account1.opening_balance == account2.opening_balance
            assert account1.vatCode == account2.vatCode
            assert account1.vatPercentage == account2.vatPercentage
            acc1_abs = [ab for (year, ab) in sorted(account1.account_balances.value.items())]
            acc2_abs = [ab for (year, ab) in sorted(account2.account_balances.value.items())]
            for ab1, ab2 in izip_longest(acc1_abs, acc2_abs):
                assert ab1.account == [account1]
                assert ab2.account == [account2]
                assert ab1.year == ab2.year
                assert ab1.opening_balance == ab2.opening_balance
                assert ab1.opening_quantity == ab2.opening_quantity
                assert ab1.balance == ab2.balance
                assert ab1.balance_quantity == ab2.balance_quantity
                assert ab1.budget == ab2.budget
                assert ab1.budget_quantity == ab2.budget_quantity
                ab1_obbs = blm.accounting.ObjectBalanceBudget._query(
                    account_balance=ab1).run()
                ab2_obbs = blm.accounting.ObjectBalanceBudget._query(
                    account_balance=ab2).run()
                # xxx sorting?
                for obb1, obb2 in izip_longest(ab1_obbs, ab2_obbs):
                    assert obb1.account_balance == [ab1]
                    assert obb2.account_balance == [ab2]
                    # xxx account balances
                    assert obb1.opening_balance == obb2.opening_balance
                    assert obb1.opening_quantity == obb2.opening_quantity
                    assert obb1.balance == obb2.balance
                    assert obb1.balance_quantity == obb2.balance_quantity
                    assert obb1.budget == obb2.budget
                    assert obb1.budget_quantity == obb2.budget_quantity
                    assert obb1.period == [''] # xxx ????
            if require_account_balances:
                assert ab1 and ab2
            if require_object_balance_budgets:
                # Bug fix: was 'assert obb1, obb2' -- the comma made obb2
                # the assertion *message*, so obb2 was never checked.
                assert obb1 and obb2
            assert account1 and account2
        for series1, series2 in izip_longest(acc1_series, acc2_series):
            assert series1.accounting == [acc1]
            assert series2.accounting == [acc2]
            assert series1.name == series2.name
            assert series1.description == series2.description
        assert series1 and series2
class TestNewAccountingYear(SIETests):
    """Tests for creating the next accounting year from an existing one."""

    def setup_method(self, method):
        super(TestNewAccountingYear, self).setup_method(method)
        self.tmpdir = py.path.local.make_numbered_dir('pytest-')
    def test_newAccountingFromLastYear(self):
        """newAccountingFromLastYear must copy the most recent year."""
        org = blm.accounting.Org()
        pp = blm.accounting.PaymentProvider(org=[org], account=['1000'],
                                            series=['A'])
        # Three years; layout encodes the year index for identification.
        for y in range(3):
            acc = blm.accounting.Accounting(org=[org], start=['201%d-01-01' % y],
                                            layout=[str(y)])
        # NOTE(review): 'copy' shadows the module-level copy module here.
        copy, = blm.accounting.newAccountingFromLastYear([org])
        # whiteboxy, abuse layout to check that we copied the latest year
        assert copy.layout == ['2']
    def test_from_open(self):
        """Accounting.new() must produce the expected next-year accounting."""
        original = self.import_sie('new_year_source.si')
        expect = self.import_sie('new_year_expected.si')
        org = blm.accounting.Org(
            name=['org ' + original.orgname[0]],
            orgnum=['42' + original.orgnum[0]],
            phone=['42' + original.telephone[0]],
        )
        original.org = [org]
        copy, = original.new()
        self.commit()
        # Re-fetch everything after commit before comparing.
        original = blm.accounting.Accounting._query(id=original).run()[0]
        expect = blm.accounting.Accounting._query(id=expect).run()[0]
        copy = blm.accounting.Accounting._query(id=copy).run()[0]
        #import pdb;pdb.set_trace()
        self.compare(copy, expect)
class TestSupplierInvoiceProvider(BLMTests):
    """Tests for SupplierInvoiceProvider.generateTransferAddress."""

    def setup_method(self, method):
        super(TestSupplierInvoiceProvider, self).setup_method(method)
        self.org = blm.accounting.Org()

    def test_generateTransferAddress(self):
        """Addresses are 6 digits, luhn-valid, distinct per account, stable."""
        provider = blm.accounting.SupplierInvoiceProvider(
            org=self.org, series='A', account='3000', bank_account='4000'
        )

        def valid_address(address):
            # A positive six digit number with a correct luhn check digit.
            return (len(address) == 6
                    and luhn.luhn_checksum(address) == 0
                    and int(address) > 0)

        first = provider.generateTransferAddress(
            clearingnumber='4321', bankaccount='567894321')
        assert valid_address(first)
        second = provider.generateTransferAddress(
            clearingnumber='4567', bankaccount='9887654321')
        assert first != second
        assert valid_address(second)
        # The same input must always yield the same address.
        repeat = provider.generateTransferAddress(
            clearingnumber='4321', bankaccount='567894321')
        assert repeat == first
class TestSupplierInvoice(BLMTests):
    def setup_method(self, method):
        """Build an org with a full supplier invoice fixture.

        Creates four accounts, a verification series, a supplier invoice
        provider and a bankgiro provider, plus four invoice dicts
        (amounts are in oere, i.e. 1/100 SEK) used by the tests.
        """
        super(TestSupplierInvoice, self).setup_method(method)
        self.org = blm.accounting.Org(subscriptionLevel='subscriber', orgnum='5164005810')
        self.payer = blm.accounting.User()
        self.org.ug[0].users = [self.payer]
        self.org.payers = [self.payer]
        self.accounting = blm.accounting.Accounting(org=self.org)
        self.account1000 = blm.accounting.Account(accounting=self.accounting, number='1000')
        self.account2000 = blm.accounting.Account(accounting=self.accounting, number='2000')
        self.account3000 = blm.accounting.Account(accounting=self.accounting, number='3000')
        self.account4000 = blm.accounting.Account(accounting=self.accounting, number='4000')
        self.series = blm.accounting.VerificationSeries(accounting=self.accounting, name='A')
        self.provider = blm.accounting.SupplierInvoiceProvider(org=self.org, series='A', account='3000', bank_account='4000', plusgiro_sending_bank_account='44580231')
        self.bankgiroprovider = blm.accounting.BankgiroProvider(org=self.org, bgnum=['1234566'])
        # Minimal bankgiro invoice without registration verification.
        self.invoice0 = {
            u'amount': 664000,
            u'invoiceIdentifierType': u'message',
            u'transferMethod': u'bgnum',
            u'message': u'Leverans',
            u'recipient': u'Mottagare AB',
            u'bgnum': u'8888885',
            u'regVerificationLines': None,
            u'regVerificationVersion': None,
        }
        # Bankgiro invoice with OCR and registration verification lines.
        self.invoice1 = {
            u'bankaccount': u'',
            u'invoiceNumber': u'',
            u'invoiceDate': u'2017-03-08',
            u'amount': 98000,
            u'transferDate': u'2017-05-06', #unicode(datetime.now().strftime('%Y-%m-%d'))
            u'invoiceType': u'debit',
            u'pgnum': u'',
            u'invoiceIdentifierType': u'ocr',
            u'transferMethod': u'bgnum',
            u'message': u'',
            u'ocr': u'56897456986',
            u'recipient': u'Mottagar1 AB',
            u'dueDate': u'2018-03-25',
            u'bgnum': u'8888885',
            u'regVerificationVersion': 1,
            u'regVerificationLines': [
                {
                    u'text': u'purchases going up',
                    u'account': py23txtu(self.account2000.id[0]),
                    u'amount': 5000,
                    u'version': 1
                },
                {
                    u'amount': -5000,
                    u'account': py23txtu(self.account3000.id[0]),
                    u'text': u'Supplier debt credit account going up',
                    u'version': 1
                }
            ]
        }
        # Bankgiro invoice identified by invoice number.
        self.invoice2 = {
            u'bankaccount': u'',
            u'invoiceNumber': u'12356986799',
            u'invoiceDate': u'2017-04-08',
            u'amount': 21000,
            u'transferDate': u'2017-05-03',
            u'invoiceType': u'debit',
            u'pgnum': u'',
            u'invoiceIdentifierType': u'invoiceNumber',
            u'transferMethod': u'bgnum',
            u'message': u'Leverans två',
            u'ocr': u'',
            u'recipient': u'Mottagar2 AB',
            u'dueDate': u'2018-04-25',
            u'bgnum': u'8888885',
            u'regVerificationVersion': 1,
            u'regVerificationLines': [
                {
                    u'text': u'asdfasdf',
                    u'account': py23txtu(self.account1000.id[0]),
                    u'amount': 4000,
                    u'version': 1
                },
                {
                    u'amount': -4000,
                    u'account': py23txtu(self.account3000.id[0]),
                    u'text': u'n\xe5gotannat',
                    u'version': 1
                }
            ]
        }
        # PGnum
        self.invoice3 = {
            u'bankclearing': u'3144',
            u'bankaccount': u'7805569',
            u'invoiceNumber': u'',
            u'invoiceDate': u'',
            u'amount': 100000,
            u'transferDate': u'2011-11-30',
            u'invoiceType': u'debit',
            u'pgnum': u'8377004',
            u'invoiceIdentifierType': u'ocr',
            u'transferMethod': u'pgnum',
            u'message': u'Stipendium',
            u'ocr': u'1234567899',
            u'recipient': u'Mottagar1 AB',
            u'dueDate': u'2018-03-25',
            u'bgnum': u'2374825',
            u'regVerificationVersion': 1,
            u'regVerificationLines': [
                {
                    u'text': u'purchases going up',
                    u'account': py23txtu(self.account2000.id[0]),
                    u'amount': 100000,
                    u'version': 0
                },
                {
                    u'amount': -100000,
                    u'account': py23txtu(self.account3000.id[0]),
                    u'text': u'Supplier debt credit account going up',
                    u'version': 0
                }
            ]
        }
def test_sorting(self):
x = blm.accounting.SupplierInvoice(
org=self.org,
recipient='a',
transferMethod='bankaccount',
invoiceIdentifierType='message',
dateInvoiceRegistered=1
)
y = blm.accounting.SupplierInvoice(
org=self.org,
recipient='b',
transferMethod='bankaccount',
invoiceIdentifierType='message',
dateInvoiceRegistered=2
)
# We sort secondarily on dateInvoiceRegistered
ts = [x, y]
ts.sort(key=blm.accounting.SupplierInvoice.sort_transferDate_key)
assert ts == [x, y]
x.transferDate=['2017-02-27']
ts = [x, y]
ts.sort(key=blm.accounting.SupplierInvoice.sort_transferDate_key)
assert ts == [y, x]
x.transferDate=[]
y.transferDate=['2017-02-28']
ts = [x, y]
ts.sort(key=blm.accounting.SupplierInvoice.sort_transferDate_key)
assert ts == [x,y]
x.transferDate=['2017-02-27']
ts = [x, y]
ts.sort(key=blm.accounting.SupplierInvoice.sort_transferDate_key)
assert ts == [x,y]
x.transferDate=['2017-02-29']
ts = [x, y]
ts.sort(key=blm.accounting.SupplierInvoice.sort_transferDate_key)
assert ts == [y,x]
y.transferDate=['2017-02-29']
ts = [x, y]
ts.sort(key=blm.accounting.SupplierInvoice.sort_transferDate_key)
assert ts == [x,y]
# x.dateInvoiceRegistered = y.dateInvoiceRegistered
# ts = [x, y]
# ts = ts.sort(key=blm.accounting.SupplierInvoice.sort_transferDate)
# assert blm.accounting.SupplierInvoice.sort_transferDate(x, y) == 0
    def test_saveSupplierInvoice(self):
        """saveSupplierInvoice: create, then update in place via toid.

        Walks the state machine: incomplete (no reg. verification) ->
        registered (debt account line present) -> paid.  Amounts in the
        invoice dicts are oere; stored amounts are Decimal SEK.
        """
        invoice1 = {
            u'amount': 664000,
            u'invoiceIdentifierType': u'message',
            u'transferMethod': u'bgnum',
            u'message': u'Leverans',
            u'recipient': u'Mottagar Corp',
            u'dueDate': u'2017-03-25',
            u'bgnum': u'8888885',
            u'regVerificationLines': None,
            u'regVerificationVersion': None,
        }
        result, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[invoice1])
        s1 = result['supplierInvoice']
        assert s1.amount[0] == Decimal('6640.00')
        assert s1.invoiceState[0] == 'incomplete'
        stoid = str(s1.id[0])
        # Modify the same SupplierInvoice, add a registrationVerification.
        invoice1 = {
            u'invoiceDate': u'2017-03-08',
            u'amount': 654000,
            u'transferDate': u'2017-03-24',
            u'invoiceType': u'debit',
            u'invoiceIdentifierType': u'message',
            u'transferMethod': u'bgnum',
            u'message': u'Leverans',
            u'recipient': u'Mottagar Corp',
            u'dueDate': u'2017-03-25',
            u'bgnum': u'8888885',
            u'regVerificationVersion': 1,
            u'regVerificationLines': [
                {
                    u'text': u'asdfasdf',
                    u'account': py23txtu(self.account1000.id[0]),
                    u'amount': 5000,
                    u'version': 1
                },
                {
                    u'amount': -5000,
                    u'account': py23txtu(self.account2000.id[0]),
                    u'text': u'n\xe5gotannat',
                    u'version': 1
                }
            ]
        }
        result, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[invoice1], toid=[stoid])
        s1 = result['supplierInvoice']
        vId = result['saveVerResult']['id']
        assert s1.registrationVerification[0] == vId
        assert s1.invoiceDate[0] == '2017-03-08'
        assert s1.amount[0] == Decimal('6540.00')
        assert s1.recipient[0] == 'Mottagar Corp'
        rv, = blm.accounting.Verification._query(id=vId).run()
        # The verification points back at the invoice it registers.
        assert rv.externalRef[0] == str(s1.id[0])
        t1, t2 = rv.transactions
        assert t1.amount[0] == Decimal('50.00')
        assert t2.text[0] == u'n\xe5gotannat'
        # No regVerificationLine adding to SupplierInvoiceProvider.account (supplier debt account)
        # so SI is still incomplete.
        assert s1.invoiceState[0] == 'incomplete'
        # Modify the same SupplierInvoice.
        stoid = str(s1.id[0])
        invoice2 = {
            u'invoiceDate': u'',
            u'amount': 4500,
            u'transferDate': u'2017-04-24',
            u'invoiceType': u'debit',
            u'pgnum': u'',
            u'invoiceIdentifierType': u'message',
            u'transferMethod': u'bgnum',
            u'message': u'Leveransen aer skickad',
            u'ocr': u'',
            u'recipient': u'Mot<NAME>',
            u'dueDate': u'2017-04-25',
            u'bgnum': u'8888885',
            u'regVerificationVersion': 2,
            u'regVerificationLines': [
                {
                    u'text': u'asdf racker',
                    u'account': py23txtu(self.account1000.id[0]),
                    u'amount': 4500,
                    u'version': 2
                },
                {
                    u'amount': -4200,
                    u'account': py23txtu(self.account3000.id[0]),
                    u'text': u'n\xe5gotannat, som sagt',
                    u'version': 2
                },{
                    u'amount': -300,
                    u'account': py23txtu(self.account3000.id[0]),
                    u'text': u'n\xe5goting tredje',
                    u'version': 1
                }
            ]
        }
        invoice3 = copy.deepcopy(invoice2)
        result, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[invoice2], toid=[stoid])
        s2 = result['supplierInvoice']
        vId2 = result['saveVerResult']['id']
        assert s2.registrationVerification[0] == vId2
        assert s2.amount[0] == Decimal('45.00')
        assert s2.invoiceDate == []
        rv, = blm.accounting.Verification._query(id=vId2).run()
        assert rv.externalRef[0] == str(s2.id[0])
        t1, t2, t3 = rv.transactions
        assert t2.amount[0] == Decimal('-42.00')
        assert t3.amount[0] == Decimal('-3.00')
        assert t3.text[0] == u'n\xe5goting tredje'
        # We have accounted against SupplierInvoiceProvider.account (supplier debt account)
        # so the SI should have reached registered status.
        assert s2.invoiceState[0] == 'registered'
        invoice3['invoiceState'] = 'paid'
        result, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[invoice3], toid=[stoid])
        si = result['supplierInvoice']
        assert si.invoiceState[0] == 'paid'
        # transaction_date should be an ISO date string (YYYY-MM-DD).
        assert len(si.transaction_date[0]) == 10
    def test_prepareVerification(self):
        """prepareVerification output is valid input for createVerification.

        Amounts are given in oere (5000) and stored as Decimal SEK ('50.00').
        """
        r1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[copy.deepcopy(self.invoice0)])
        si1 = r1['supplierInvoice']
        regVerLines = [
            {
                u'text': u'Transaction text',
                u'account': self.account1000.id[0],
                u'version': 0,
                u'amount': 5000
            },{
                u'text': u'Transaction text 2',
                u'amount': -5000,
                u'version': 0,
                u'account': self.account3000.id[0]
            }
        ]
        regVerVersion = 1
        regVerId = None
        siToid = str(si1.id[0])
        siRegDate = datetime.fromtimestamp(si1.dateInvoiceRegistered[0]).date().isoformat()
        result1 = blm.accounting.prepareVerification(self.org, regVerId, regVerLines, regVerVersion, siToid, siRegDate)
        # Output of prepareVerification should be correct input for saveVerification.
        sVr1, = blm.accounting.createVerification([result1])
        ver1, = blm.accounting.Verification._query(id=sVr1['id']).run()
        assert ver1.number[0] == 1
        assert ver1.transaction_date == [date.today().isoformat()]
        trans1, = blm.accounting.Transaction._query(text='Transaction text').run()
        assert trans1.verification == [ver1]
        assert trans1.version == [0]
        assert trans1.text == ['Transaction text']
        assert trans1.amount == [Decimal('50.00')]
        trans2, = blm.accounting.Transaction._query(text='Transaction text 2').run()
        assert trans2.verification == [ver1]
        assert trans2.version == [0]
        assert trans2.text == ['Transaction text 2']
        assert trans2.amount == [Decimal('-50.00')]
    def test_deleteSupplierInvoice(self):
        """Delete succeeds for 'incomplete' invoices, fails once registered."""
        result, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[copy.deepcopy(self.invoice0)])
        s0 = result['supplierInvoice']
        assert s0.invoiceState[0] == 'incomplete'
        # Delete should succeed as state 'incomplete', no registrationVerification.
        result = blm.accounting.deleteSupplierInvoice(org=[self.org], supInvList=[s0])
        assert len(result['deleted']) == 1
        assert len(result['untouched']) == 0
        self.commit()
        assert len(blm.accounting.SupplierInvoice._query().run()) == 0
        # For SI with registrationVerification / state = registered the delete should fail.
        i1 = copy.deepcopy(self.invoice1)
        result, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i1])
        s1 = result['supplierInvoice']
        assert s1.invoiceState[0] == 'registered'
        # Delete should fail b/c there is a registrationVerification.
        result = blm.accounting.deleteSupplierInvoice(org=[self.org], supInvList=[s1])
        assert len(result['deleted']) == 0
        assert len(result['untouched']) == 1
        self.commit()
        s1, = blm.accounting.SupplierInvoice._query().run()
        # Lets test nullifying the reg ver and deleting.
        # i1 was modified by saveSupplierInvoice by popping its regVerificationLines and regVerificationVersion.
        i1['regVerificationLines'] = []
        i1['regVerificationVersion'] = 1
        self.commit()
        result, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i1], toid=[str(s1.id[0])])
        s1 = result['supplierInvoice']
        # As the regVer has no transactions, the SI should have state incomplete
        assert s1.invoiceState[0] == 'incomplete'
        result = blm.accounting.deleteSupplierInvoice(org=[self.org], supInvList=[s1])
        assert len(result['deleted']) == 1
        assert len(result['untouched']) == 0
        self.commit()
        assert len(blm.accounting.SupplierInvoice._query().run()) == 0
    def test_predictSupplierInvoice(self):
        """predictSupplierInvoice extrapolates the next monthly invoice.

        Three monthly invoices for one recipient establish a pattern
        (a fourth, differently shaped invoice acts as noise); the
        prediction should carry the pattern's fields and dates in the
        following month.  The prediction must itself be saveable.
        """
        i1 = copy.deepcopy(self.invoice0)
        i1['invoiceDate'] = u'2017-01-01'
        i1['transferDate'] = u'2017-01-20'
        i1['dueDate'] = u'2017-01-25'
        result, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i1])
        s1 = result['supplierInvoice']
        i2 = copy.deepcopy(self.invoice0)
        i2['invoiceDate'] = u'2017-02-01'
        i2['transferDate'] = u'2017-02-20'
        i2['dueDate'] = u'2017-02-25'
        result, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i2])
        s2 = result['supplierInvoice']
        i3 = copy.deepcopy(self.invoice0)
        i3['invoiceDate'] = u'2017-03-01'
        i3['transferDate'] = u'2017-03-20'
        i3['dueDate'] = u'2017-03-25'
        result, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3])
        s3 = result['supplierInvoice']
        # A fourth invoice with different method/message as noise.
        i4 = copy.deepcopy(self.invoice0)
        i4['invoiceDate'] = u'2017-04-10'
        i4['transferDate'] = u'2017-04-25'
        i4['dueDate'] = u'2017-04-30'
        i4['message'] = u'Odd delivery'
        i4['transferMethod'] = u'bankaccount'
        i4['bankclearing'] = u'8899'
        i4['bankaccount'] = u'987654321'
        result, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i4])
        s4 = result['supplierInvoice']
        prediction, = blm.accounting.predictSupplierInvoice(org=[self.org], recipient=[self.invoice0['recipient']])
        assert prediction['recipient'] == self.invoice0['recipient']
        assert prediction['transferMethod'] == self.invoice0['transferMethod']
        assert prediction['bgnum'] == self.invoice0['bgnum']
        assert prediction['invoiceIdentifierType'] == self.invoice0['invoiceIdentifierType']
        assert prediction['message'] == self.invoice0['message']
        # Check that dates are in next month, but allow for tweaking.
        assert prediction['invoiceDate'].startswith('2017-05')
        assert prediction['transferDate'].startswith('2017-05')
        assert prediction['dueDate'].startswith('2017-05')
        if 'regVerificationLinesPrediction' in prediction:
            regverlines = prediction.pop('regVerificationLinesPrediction')
            prediction['regVerificationLines'] = regverlines
        # The prediction must be directly saveable...
        result, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[prediction])
        sp1 = result['supplierInvoice']
        # ...and a new prediction afterwards should stay consistent.
        p2, = blm.accounting.predictSupplierInvoice(org=[self.org], recipient=[self.invoice0['recipient']])
        assert p2['recipient'] == self.invoice0['recipient']
        assert p2['transferMethod'] == self.invoice0['transferMethod']
        assert p2['bgnum'] == self.invoice0['bgnum']
        assert p2['invoiceIdentifierType'] == self.invoice0['invoiceIdentifierType']
        assert p2['message'] == self.invoice0['message']
    def test_createTransferVerification(self):
        """createTransferVerification books all paid SIs against the bank account."""
        invoice1 = self.invoice1
        invoice2 = self.invoice2
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[invoice1])
        si1 = result1['supplierInvoice']
        result2, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[invoice2])
        si2 = result2['supplierInvoice']
        #supInvList = [str(si1.id[0]), str(si2.id[0])] # This should work to be useable from client
        supInvList = [si1, si2]
        # Mark as paid
        blm.accounting.setSIState(org=[self.org], supInvList=supInvList, newstate=['paid'])
        result, = blm.accounting.createTransferVerification(org=[self.org], supInvList=supInvList)
        accounted = result['accounted']
        verId = result['verifications'][0]['id']
        # Check all SIs where accounted.
        assert set(accounted) - set(supInvList) == set()
        for si in supInvList:
            assert si.accounted[0] is True
        transferVerification, = blm.accounting.Verification._query(id=verId).run()
        # Bank account (4000) is credited with the summed amounts.
        banktrans, = blm.accounting.Transaction._query(verification=verId, account=self.account4000.id[0]).run()
        assert banktrans.amount[0] == Decimal('-90') # (-5000 Ore + -4000 Ore)/100 = -90SEK
def test_toid20(self):
testtois = (self.account1000, self.account2000, self.account3000, self.account4000)
for toi in testtois:
toid20 = bankgiro.encode_toid20(toi)
assert len(toid20) == 20
for c in toid20:
assert c in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567'
decoded = bankgiro.decode_toid20(toid20)
assert decoded == str(toi.id[0])
for toi in testtois:
assert str(toi.id[0]) == bankgiro.decode_toid20(bankgiro.encode_toid20(toi))
def test_findToi(self):
tois = (self.account1000, self.account2000, self.account3000, self.account4000)
toid20s = [bankgiro.encode_toid20(t) for t in tois] # map(bankgiro.encode_toid20, tois)
# If user sends payment orders to Bankgirot (not via eutaxia) the
# information_to_sender field will contain other things than
# toid20 encoded toid. We need to protect against that.
garbage = ['hej', 'räksmörgås', '', 'ALHPTK4UQPYAHCOINEUI', '""', '\\', '\n']
testtoid20s = garbage + toid20s
counter = 0
found = []
for s in testtoid20s:
t = bankgiro.findToi(s)
if t is not None:
found.append(t)
# Check that we found all of the valid tois and caught all the errors
assert len(found) == len(tois)
    def test_bg_transferdate(self):
        """bg_transferdate formats a future transfer date as YYMMDD and
        collapses today/past/weekend-adjacent dates to 'GENAST'
        (immediately).  Exercised by freezing the clock with Time()."""
        si1 = blm.accounting.SupplierInvoice(
            org=self.org,
            recipient='one',
            amount=1,
        )
        self.commit()
        # No transferDate
        assert bankgiro.bg_transferdate(si1) == 'GENAST'
        si1.transferDate = ['2017-09-20'] # a wednesday
        with Time(int(time.mktime(date(2017, 9, 19).timetuple()))) as t:
            assert bankgiro.bg_transferdate(si1) == '170920'
        with Time(int(time.mktime(date(2017, 9, 20).timetuple()))) as t:
            assert bankgiro.bg_transferdate(si1) == 'GENAST'
        with Time(int(time.mktime(date(2017, 9, 21).timetuple()))) as t:
            assert bankgiro.bg_transferdate(si1) == 'GENAST'
        si1.transferDate = ['2017-09-24'] # a sunday
        with Time(int(time.mktime(date(2017, 9, 21).timetuple()))) as t:
            assert bankgiro.bg_transferdate(si1) == '170922'
        with Time(int(time.mktime(date(2017, 9, 22).timetuple()))) as t:
            assert bankgiro.bg_transferdate(si1) == 'GENAST'
        with Time(int(time.mktime(date(2017, 9, 23).timetuple()))) as t:
            assert bankgiro.bg_transferdate(si1) == 'GENAST'
        with Time(int(time.mktime(date(2017, 9, 24).timetuple()))) as t:
            assert bankgiro.bg_transferdate(si1) == 'GENAST'
        with Time(int(time.mktime(date(2017, 9, 25).timetuple()))) as t:
            assert bankgiro.bg_transferdate(si1) == 'GENAST'
    def test_gen_opening_record(self):
        """The LB opening record (TK11) is exactly 80 chars: bgnum,
        frozen-clock date and the fixed header text (with or without
        Swedish characters depending on encoding)."""
        with Time(1494257828) as t:
            line = bankgiro.gen_opening_record(self.bankgiroprovider)
        assert len(line) == 80
        assert line in [
            u'110001234566170508LEVERANTÖRSBETALNINGAR                                        ',
            u'110001234566170508LEVERANTORSBETALNINGAR                                        '
        ]
    def test_gen_payment_record(self):
        """The TK14 payment record carries bgnum, OCR, amount, either the
        explicit transfer date or 'GENAST', and the toid20 reference."""
        # Pre-create the invoice so the subsequent save reuses a known toid.
        si1 = blm.accounting.SupplierInvoice(
            id='591462b6907e1340e0ffbd5a',
            org=self.org,
            recipient='one',
            amount=1,
            transferMethod='bgnum',
            invoiceIdentifierType='message'
        )
        self.commit()
        result, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[self.invoice1], toid=['591462b6907e1340e0ffbd5a'])
        si1 = result['supplierInvoice']
        with Time(int(time.mktime(date(2017, 5, 1).timetuple()))) as t:
            line = bankgiro.gen_payment_record(si1)
            assert len(line) == 80
            assert line == u'14000888888556897456986         000000098000170505                   LEKGFNUQPYJUBYH7XVNA'
        with Time(int(time.mktime(date(2017, 6, 1).timetuple()))) as t:
            line = bankgiro.gen_payment_record(si1)
            assert len(line) == 80
            assert line == u'14000888888556897456986         000000098000GENAST                   LEKGFNUQPYJUBYH7XVNA'
    def test_gen_information_record(self):
        """A very long free-text message is split into at most 90
        TK25 information records of 80 chars, 50 payload chars each."""
        s1 = self.invoice1
        s1['invoiceIdentifierType'] = 'message'
        s1['message'] = ''.join(["{0!s} bottles of beer on the wall, ".format(i) * 2 + "Take one down, pass it around, " for i in range(99, 0, -1)]) + 'no more bottles of beer!'
        result, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[s1])
        si1 = result['supplierInvoice']
        with Time(int(time.mktime(date(2017, 5, 1).timetuple()))) as t:
            lines = bankgiro.gen_information_record(si1)
        assert 1 <= len(lines) <= 90
        for line in lines:
            #print repr(line)
            assert line[:12] == '250008888885'
            assert len(line[12:62]) == 50
            #print line[12:62]
            assert line[62:].strip() == ''
            assert len(line) == 80
    def test_gen_payment_record_plusgiro(self):
        """The TK54 plusgiro payment record mirrors TK14 but addresses a
        plusgiro number; dated vs 'GENAST' forms are both checked."""
        # Pre-create the invoice so the subsequent save reuses a known toid.
        si1 = blm.accounting.SupplierInvoice(
            id='591462b6907e1340e0ffbd5a',
            org=self.org,
            recipient='one',
            amount=1,
            transferMethod='bgnum',
            invoiceIdentifierType='message'
        )
        self.commit()
        s1 = copy.deepcopy(self.invoice1)
        s1['transferMethod'] = 'pgnum'
        s1['pgnum'] = '47651013'
        result, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[s1], toid=['591462b6907e1340e0ffbd5a'])
        si1 = result['supplierInvoice']
        with Time(int(time.mktime(date(2017, 5, 1).timetuple()))) as t:
            line = bankgiro.gen_payment_record_plusgiro(si1)
            assert len(line) == 80
            assert line == u'54004765101356897456986         000000098000170505                   LEKGFNUQPYJUBYH7XVNA'
        with Time(int(time.mktime(date(2017, 6, 1).timetuple()))) as t:
            line = bankgiro.gen_payment_record_plusgiro(si1)
            assert len(line) == 80
            assert line == u'54004765101356897456986         000000098000GENAST                   LEKGFNUQPYJUBYH7XVNA'
    def test_gen_information_record_plusgiro(self):
        """A long message to a plusgiro recipient yields at most 9 TK65
        information records of 80 chars, 35 payload chars each."""
        s1 = self.invoice1
        s1['transferMethod'] = 'pgnum'
        s1['pgnum'] = '47651013'
        s1['invoiceIdentifierType'] = 'message'
        s1['message'] = ''.join(["{0!s} bottles of beer on the wall, ".format(i) * 2 + "Take one down, pass it around, " for i in range(99, 0, -1)]) + 'no more bottles of beer!'
        result, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[s1])
        si1 = result['supplierInvoice']
        with Time(1494257828) as t:
            lines = bankgiro.gen_information_record_plusgiro(si1)
        assert 1 <= len(lines) <= 9
        for line in lines:
            #print repr(line)
            assert line[:12] == '650047651013'
            assert len(line[12:47]) == 35
            #print line[12:47]
            assert line[47:].strip() == ''
            assert len(line) == 80
    def test_gen_account_number_record(self):
        """The TK40 record encodes clearing + bank account for payments
        made directly to a bank account."""
        s1 = self.invoice1
        s1['transferMethod'] = 'bankaccount'
        s1['bankclearing'] = '4321'
        s1['bankaccount'] = '47651013'
        result, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[s1])
        si1 = result['supplierInvoice']
        with Time(1494257828) as t:
            line = bankgiro.gen_account_number_record(si1)
        assert len(line) == 80
        assert line[41:].strip() == ''
        assert line == '4000000000184321000047651013             56897456986                           '
    def test_gen_total_amount_record(self):
        """The TK29 total record carries bgnum, payment count and the
        summed amount in öre, padded to 80 chars."""
        bgprovider = blm.accounting.BankgiroProvider(org=self.org, bgnum=['1234566'])
        line = bankgiro.gen_total_amount_record(
            bankgiroProvider=bgprovider,
            len_supInvList=7,
            totamount=500029900,
            sign=' '
        )
        assert len(line) == 80
        assert line == u'29000123456600000007000500029900                                                '
    def test_gen_seal_opening_record(self):
        """The seal opening record is '00' + today's date (YYMMDD) +
        'HMAC', space-padded to 80 characters."""
        h = bankgiro.gen_seal_opening_record()
        assert len(h) == 80
        assert h == '00' + time.strftime('%y%m%d', time.localtime(time.time())) + 'HMAC' + ' ' * 68
    def test_transferOrderBankgiro(self):
        """Golden-file test: a full LB transfer order for two invoices
        must match the recorded fixture, both with future transfer dates
        (dated records) and on the date itself ('GENAST' records)."""
        invoice1 = copy.deepcopy(self.invoice1)
        invoice2 = copy.deepcopy(self.invoice2)
        # Pre-create invoices so the saves below reuse known toids,
        # keeping the toid20 references in the fixture files stable.
        si1 = blm.accounting.SupplierInvoice(
            id='591462b6907e1340e0ffbd5a',
            org=self.org,
            recipient='one',
            amount=1,
            transferMethod='bgnum',
            invoiceIdentifierType='message'
        )
        si2 = blm.accounting.SupplierInvoice(
            id='591462b6907e1340e0ffbd5e',
            org=self.org,
            recipient='two',
            amount=2,
            transferMethod='bgnum',
            invoiceIdentifierType='message'
        )
        self.commit()
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[invoice1], toid=['591462b6907e1340e0ffbd5a'])
        si1 = result1['supplierInvoice']
        result2, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[invoice2], toid=['591462b6907e1340e0ffbd5e'])
        si2 = result2['supplierInvoice']
        supInvList = [si1, si2]
        with Time(int(time.mktime(date(2017, 5, 1).timetuple()))) as t:
            result, = bankgiro.transferOrderBankgiro(bankgiroProvider=self.bankgiroprovider, supInvList=supInvList)
        fname = os.path.join(os.path.dirname(__file__), 'LB/LBin-test_generateBankgiroFile.txt')
        with open(fname) as f:
            filecontent = f.read()
        assert filecontent == result
        with Time(int(time.mktime(date(2017, 5, 8).timetuple()))) as t:
            result, = bankgiro.transferOrderBankgiro(bankgiroProvider=self.bankgiroprovider, supInvList=supInvList)
        fname = os.path.join(os.path.dirname(__file__), 'LB/LBin-test_generateBankgiroFile-genast.txt')
        with open(fname) as f:
            filecontent = f.read()
        assert filecontent == result
    def test_transferOrderBankgiroRecords(self):
        """Record-by-record variant of the golden-file test: each generated
        LB record is compared against the corresponding fixture line, with
        a latin-1 re-encode on py2 to match the fixture bytes."""
        invoice1 = copy.deepcopy(self.invoice1)
        invoice2 = copy.deepcopy(self.invoice2)
        # Create SIs with predictable toids.
        si1 = blm.accounting.SupplierInvoice(
            id='591462b6907e1340e0ffbd5a',
            org=self.org,
            recipient='one',
            amount=1,
            transferMethod='bgnum',
            invoiceIdentifierType='message'
        )
        si2 = blm.accounting.SupplierInvoice(
            id='591462b6907e1340e0ffbd5e',
            org=self.org,
            recipient='two',
            amount=2,
            transferMethod='bgnum',
            invoiceIdentifierType='message'
        )
        self.commit()
        # Update SIs with reasonable example data.
        result1, = blm.accounting.saveSupplierInvoice(
            org=[self.org],
            invoice=[invoice1],
            toid=['591462b6907e1340e0ffbd5a']
        )
        si1 = result1['supplierInvoice']
        result2, = blm.accounting.saveSupplierInvoice(
            org=[self.org],
            invoice=[invoice2],
            toid=['591462b6907e1340e0ffbd5e']
        )
        si2 = result2['supplierInvoice']
        supInvList = [si1, si2]
        # With payments in the future
        with Time(int(time.mktime(date(2017, 5, 1).timetuple()))) as t:
            result = bankgiro.transferOrderBankgiroRecords(bankgiroProvider=self.bankgiroprovider, supInvList=supInvList)
        fname = os.path.join(os.path.dirname(__file__), 'LB/LBin-test_generateBankgiroFile.txt')
        with open(fname) as f:
            filecontent = f.readlines()
        if PYT3:
            for generatedline, expectedline in zip(result, filecontent):
                assert generatedline == expectedline.strip('\r\n')
        else:
            for generatedline, expectedline in zip(result, filecontent):
                assert generatedline.encode('latin-1', 'replace') == expectedline.strip('\r\n')
        # With payments immediately
        with Time(int(time.mktime(date(2017, 5, 8).timetuple()))) as t:
            result = bankgiro.transferOrderBankgiroRecords(bankgiroProvider=self.bankgiroprovider, supInvList=supInvList)
        fname = os.path.join(os.path.dirname(__file__), 'LB/LBin-test_generateBankgiroFile-genast.txt')
        with open(fname) as f:
            filecontent = f.readlines()
        if PYT3:
            for generatedline, expectedline in zip(result, filecontent):
                assert generatedline == expectedline.strip('\r\n')
        else:
            for generatedline, expectedline in zip(result, filecontent):
                assert generatedline.encode('latin-1', 'replace') == expectedline.strip('\r\n')
def test_createBgcOrder(self):
invoice1 = copy.deepcopy(self.invoice1)
result1, = blm.accounting.saveSupplierInvoice(
org=[self.org],
invoice=[invoice1]
)
si1 = result1['supplierInvoice']
with Time(int(time.mktime(date(2017, 5, 8).timetuple()))) as t:
bgcOrder = blm.accounting.createBgcOrder(
org=self.org,
bankgiroProvider=self.bankgiroprovider,
supInvList=[si1]
)
assert bgcOrder.creation_date[0] == Time.time(t)
for line in bgcOrder.order_unsigned[0].splitlines():
assert len(line) == 80
if line[:2] == '11':
pass
if line[:2] == '14':
assert bankgiro.encode_toid20(si1) in line
assert str(int(si1.amount[0])) in line
assert 'GENAST' in line
if line[:2] == 29:
assert str(int(si1.amount[0])) in line
def test_signBgcOrder(self):
# Test python internal hmac algorithm
invoice1 = copy.deepcopy(self.invoice1)
result1, = blm.accounting.saveSupplierInvoice(
org=[self.org],
invoice=[invoice1]
)
si1 = result1['supplierInvoice']
bgcOrder = blm.accounting.createBgcOrder(
org=self.org,
bankgiroProvider=self.bankgiroprovider,
supInvList=[si1]
)
bgcOrder = bankgiro.signBgcOrder(bgcOrder=bgcOrder)
unsigned = bgcOrder.order_unsigned[0]
signed = bgcOrder.order_signed[0]
assert unsigned in signed
assert signed[:2] == '00'
assert '\n99' in signed
if PYT3:
pass # TODO: find a replacement test/ find out why py2 version fails
else:
assert 'ff365893d899291c3bf505fb3175e880'.upper() in signed
for line in signed.splitlines():
assert len(line) == 80
if line[:2] == 99:
assert 'ff365893d899291c3bf505fb3175e880'.upper() in line
    def test_hmac_sha256_128(self):
        """HMAC-SHA-256 truncated to 128 bits, checked against the test
        vectors of draft-ietf-ipsec-ciph-sha-256-01; bytes/hex handling
        differs between the py3 (codecs) and py2 (str.decode) branches."""
        # Tests taken from https://tools.ietf.org/html/draft-ietf-ipsec-ciph-sha-256-01
        if PYT3:
            key = b'<KEY>'
            key = codecs.decode(key, encoding='hex')
            data = b'abc'
            hmac = 'a21b1f5d4cf4f73a4dd939750f7a066a'
            assert bankgiro.hmac_sha256_128(key, data) == hmac
            data = b''.join((
                b"abcdbcdecdefdefgefghfghighijhijk",
                b"ijkljklmklmnlmnomnopnopqabcdbcde",
                b"cdefdefgefghfghighijhijkijkljklm",
                b"klmnlmnomnopnopq"))
            hmac = '470305fc7e40fe34d3eeb3e773d95aab'
            assert bankgiro.hmac_sha256_128(key, data) == hmac
            key = codecs.decode((b'aa' * 32), encoding='hex')
            data = codecs.decode((b'dd' * 50), encoding='hex')
            hmac = 'cdcb1220d1ecccea91e53aba3092f962'
            assert bankgiro.hmac_sha256_128(key, data) == hmac
            key = b''.join((
                b'<KEY>',
                b'1112131415161718191a1b1c1d1e1f20',
                b'2122232425'))
            key = codecs.decode(key, encoding='hex')
            data = codecs.decode((b'cd' * 50), encoding='hex')
            hmac = 'd4633c17f6fb8d744c66dee0f8f07455'
            assert bankgiro.hmac_sha256_128(key, data) == hmac
            key = codecs.decode((b'0c' * 32), encoding='hex')
            data = b"Test With Truncation"
            hmac = '7546af01841fc09b1ab9c3749a5f1c17'
            assert bankgiro.hmac_sha256_128(key, data) == hmac
        else:
            key ='<KEY>'.decode('hex')
            data = "abc"
            hmac = 'a21b1f5d4cf4f73a4dd939750f7a066a'
            assert bankgiro.hmac_sha256_128(key, data) == hmac
            data = ''.join((
                "abcdbcdecdefdefgefghfghighijhijk",
                "ijkljklmklmnlmnomnopnopqabcdbcde",
                "cdefdefgefghfghighijhijkijkljklm",
                "klmnlmnomnopnopq"))
            hmac = '470305fc7e40fe34d3eeb3e773d95aab'
            assert bankgiro.hmac_sha256_128(key, data) == hmac
            key = ('aa' * 32).decode('hex')
            data = ('dd' * 50).decode('hex')
            hmac = 'cdcb1220d1ecccea91e53aba3092f962'
            assert bankgiro.hmac_sha256_128(key, data) == hmac
            key = ''.join((
                '<KEY>',
                '<KEY>',
                '2122232425'
            )).decode('hex')
            data = ('cd' * 50).decode('hex')
            hmac = 'd4633c17f6fb8d744c66dee0f8f07455'
            assert bankgiro.hmac_sha256_128(key, data) == hmac
            key = ('0c' * 32).decode('hex')
            data = "Test With Truncation"
            hmac = '7546af01841fc09b1ab9c3749a5f1c17'
            assert bankgiro.hmac_sha256_128(key, data) == hmac
    @pytest.mark.skipif(not os.path.exists("/dev/bgsigner"), reason='No bgsigner connected.')
    def test_bgsigner(self):
        """The attached hardware bgsigner must produce the same truncated
        HMAC as the pure-python implementation for text, digits and
        whitespace payloads.  Requires a /dev/bgsigner device."""
        key = '<KEY>'
        lock = '0123456789ABCDEF01234567'
        message = 'fish slapping dance' * 51
        py_hmac = bankgiro.hmac_sha256_128(key.decode('hex'), message)
        bgsigner_hmac = bankgiro.hmac_sha256_128_bgsigner(lock, message)
        assert len(bgsigner_hmac) == 32
        assert py_hmac == bgsigner_hmac
        message = '00000000'
        py_hmac = bankgiro.hmac_sha256_128(key.decode('hex'), message)
        bgsigner_hmac = bankgiro.hmac_sha256_128_bgsigner(lock, message)
        assert len(py_hmac) == len(bgsigner_hmac) == 32
        assert py_hmac == bgsigner_hmac
        message = ' ' * 80 + '\r\n' * 30 # Whitespace
        py_hmac = bankgiro.hmac_sha256_128(key.decode('hex'), message)
        bgsigner_hmac = bankgiro.hmac_sha256_128_bgsigner(lock, message)
        assert len(bgsigner_hmac) == 32
        assert py_hmac == bgsigner_hmac
    @pytest.mark.skipif(not os.path.exists("/dev/bgsigner"), reason='No bgsigner connected.')
    def test_bgsigner_junk(self):
        """Hardware and software HMACs must also agree on random
        printable junk.  Requires a /dev/bgsigner device."""
        key = '<KEY>'
        lock = '0123456789ABCDEF01234567'
        message = '\n'.join((''.join(
            random.choice(string.digits + string.letters + string.punctuation) for _ in range(80))
            ) for _ in range(30)) # Random junk
        py_hmac = bankgiro.hmac_sha256_128(key.decode('hex'), message)
        bgsigner_hmac = bankgiro.hmac_sha256_128_bgsigner(lock, message)
        assert len(py_hmac) == len(bgsigner_hmac) == 32
        assert py_hmac == bgsigner_hmac
    @pytest.mark.skipif(not os.path.exists("/dev/bgsigner"), reason='No bgsigner connected.')
    def test_bgsigner_256truncate(self):
        """The bgsigner's full SHA-256 output truncated to 128 bits must
        match the software HMAC.  Requires a /dev/bgsigner device."""
        key = '<KEY>'
        lock = '0123456789ABCDEF01234567'
        message = 'fish slapping dance' * 51
        py_hmac = bankgiro.hmac_sha256_128(key, message)
        bgsigner_hmac = bankgiro.hmac_sha256_128_bgsigner_truncated_256(lock, message)
        assert len(bgsigner_hmac) == 32
        assert py_hmac == bgsigner_hmac
def test_create_hmac(self):
message = 'fish slapping dance' * 51
if PYT3:
message = codecs.encode(message, 'latin-1')
hmac_signer = bankgiro.create_hmac(message)
hmac_software = bankgiro.create_hmac(message, force_software_signer=True)
assert hmac_signer == hmac_software
    def test_normalize_text(self):
        """normalize_text maps the Swedish/latin-1 letters ÅÄÖåäöÉéÜü onto
        the 7-bit substitution characters ][\\}{|@`^~ used by the BGC file
        format and strips newlines; ASCII passes through untouched.
        Bytes in/out on py3, unicode in/out on py2."""
        if PYT3:
            assert bankgiro.normalize_text('Å'.encode('latin-1')) == b']'
            assert bankgiro.normalize_text('Ä'.encode('latin-1')) == b'['
            assert bankgiro.normalize_text('Ö'.encode('latin-1')) == b'\\'
            assert bankgiro.normalize_text('å'.encode('latin-1')) == b'}'
            assert bankgiro.normalize_text('ä'.encode('latin-1')) == b'{'
            assert bankgiro.normalize_text('ö'.encode('latin-1')) == b'|'
            assert bankgiro.normalize_text('É'.encode('latin-1')) == b'@'
            assert bankgiro.normalize_text('é'.encode('latin-1')) == b'`'
            assert bankgiro.normalize_text('Ü'.encode('latin-1')) == b'^'
            assert bankgiro.normalize_text('ü'.encode('latin-1')) == b'~'
            assert bankgiro.normalize_text('\n'.encode('latin-1')) == b''
            assert bankgiro.normalize_text('\n\n\n\r\r\n'.encode('latin-1')) == b''
            s1 = 'One\nAring Å\nAuml Ä\nOuml Ö\naring å\nauml ä\nouml ö\nEacc É\neacc é\nUuml Ü\nuuml ü\nTwo\nThree'
            s1 = s1.encode('latin-1')
            s2 = 'OneAring ]Auml [Ouml \\aring }auml {ouml |Eacc @eacc `Uuml ^uuml ~TwoThree'
            s2 = s2.encode('latin-1')
            assert bankgiro.normalize_text(s1) == s2
            n1 = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890_-?;"{}@#$%^&*()_'
            n1 = n1.encode('latin-1')
            assert bankgiro.normalize_text(n1) == n1
        else:
            assert bankgiro.normalize_text(u'Å') == ']'
            assert bankgiro.normalize_text(u'Ä') == '['
            assert bankgiro.normalize_text(u'Ö') == '\\'
            assert bankgiro.normalize_text(u'å') == '}'
            assert bankgiro.normalize_text(u'ä') == '{'
            assert bankgiro.normalize_text(u'ö') == '|'
            assert bankgiro.normalize_text(u'É') == '@'
            assert bankgiro.normalize_text(u'é') == '`'
            assert bankgiro.normalize_text(u'Ü') == '^'
            assert bankgiro.normalize_text(u'ü') == '~'
            assert bankgiro.normalize_text(u'\n') == ''
            assert bankgiro.normalize_text(u'\n\n\n\r\r\n') == ''
            s1 = u'One\nAring Å\nAuml Ä\nOuml Ö\naring å\nauml ä\nouml ö\nEacc É\neacc é\nUuml Ü\nuuml ü\nTwo\nThree'
            s2 = 'OneAring ]Auml [Ouml \\aring }auml {ouml |Eacc @eacc `Uuml ^uuml ~TwoThree'
            assert bankgiro.normalize_text(s1) == s2
            n1 = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890_-?;"{}@#$%^&*()_'
            assert bankgiro.normalize_text(n1) == n1
    def test_sealTransferOrder(self):
        """Golden-file test: sealing the recorded LB order at a frozen
        date must reproduce the recorded sealed file exactly."""
        fname = os.path.join(os.path.dirname(__file__), 'LB/LBin-test_generateBankgiroFile.txt')
        with open(fname, 'rb') as f:
            sourcecontent = f.read()
        with Time(int(time.mktime(date(2017, 5, 2).timetuple()))) as t:
            result = bankgiro.sealTransferOrder(message=py23txtuc(sourcecontent, 'latin-1'))
        fname = os.path.join(os.path.dirname(__file__), 'LB/LBin-test_sealBankgiroFile.txt')
        with open(fname, 'rb') as f:
            targetcontent = f.read()
        assert result == targetcontent
    def test_createSignedBgcOrder(self):
        """createSignedBgcOrder on automated invoices yields a sealed
        order whose body contains the toid20 of every invoice."""
        invoice1 = copy.deepcopy(self.invoice1)
        invoice2 = copy.deepcopy(self.invoice2)
        # Pre-create invoices with predictable toids.
        si1 = blm.accounting.SupplierInvoice(
            id='591462b6907e1340e0ffbd5a',
            org=self.org,
            recipient='one',
            amount=1,
            transferMethod='bgnum',
            invoiceIdentifierType='message'
        )
        si2 = blm.accounting.SupplierInvoice(
            id='591462b6907e1340e0ffbd5e',
            org=self.org,
            recipient='two',
            amount=2,
            transferMethod='bgnum',
            invoiceIdentifierType='message'
        )
        self.commit()
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[invoice1], toid=['591462b6907e1340e0ffbd5a'])
        si1 = result1['supplierInvoice']
        result2, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[invoice2], toid=['591462b6907e1340e0ffbd5e'])
        si2 = result2['supplierInvoice']
        supInvList = [si1, si2]
        blm.accounting.enableSIAutomation(supInvList=supInvList)
        assert si1.automated[0]
        assert si2.automated[0]
        signedBgcOrder, = blm.accounting.createSignedBgcOrder(org=[self.org], supInvList=supInvList)
        sealed_message = signedBgcOrder.order_signed[0]
        if not PYT3:
            sealed_message = sealed_message.encode('latin-1')
        for si in supInvList:
            assert bankgiro.encode_toid20(si) in sealed_message
    def test_cancelBgcOrder(self):
        """Cancelling a signed BGC order creates a cancellation order
        sent after the original.  Currently skipped unconditionally."""
        py.test.skip()
        invoice1 = copy.deepcopy(self.invoice1)
        invoice2 = copy.deepcopy(self.invoice2)
        si1 = blm.accounting.SupplierInvoice(
            id='591462b6907e1340e0ffbd5a',
            org=self.org,
            recipient='one',
            amount=1,
            transferMethod='bgnum',
            invoiceIdentifierType='message'
        )
        si2 = blm.accounting.SupplierInvoice(
            id='591462b6907e1340e0ffbd5e',
            org=self.org,
            recipient='two',
            amount=2,
            transferMethod='bgnum',
            invoiceIdentifierType='message'
        )
        self.commit()
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[invoice1], toid=['591462b6907e1340e0ffbd5a'])
        si1 = result1['supplierInvoice']
        result2, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[invoice2], toid=['591462b6907e1340e0ffbd5e'])
        si2 = result2['supplierInvoice']
        supInvList = [si1, si2]
        blm.accounting.enableSIAutomation(supInvList=supInvList)
        bgcOrder ,= blm.accounting.createSignedBgcOrder(org=[self.org], supInvList=supInvList)
        cancellationOrder = blm.accounting.cancelBgcOrder(bgcOrder=[bgcOrder])
        assert bgcOrder.sent[0] < cancellationOrder.sent[0]
    def test_parseBankgiroResponseSuccess(self):
        """Parsing a recorded successful LBut response marks the matching
        sent+automated invoices as paid/accounted and books a transfer
        verification with more than two transactions."""
        bankgiroprovider = blm.accounting.BankgiroProvider(org=self.org, bgnum=['1234566'])
        si1 = blm.accounting.SupplierInvoice(
            id='591462b6907e1340e0ffbd5a',
            org=self.org,
            recipient='one',
            amount=1,
            transferMethod='bgnum',
            invoiceIdentifierType='message'
        )
        si2 = blm.accounting.SupplierInvoice(
            id='591462b6907e1340e0ffbd5e',
            org=self.org,
            recipient='two',
            amount=2,
            transferMethod='bgnum',
            invoiceIdentifierType='message'
        )
        self.commit()
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[self.invoice1], toid=['591462b6907e1340e0ffbd5a'])
        si1 = result1['supplierInvoice']
        result2, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[self.invoice2], toid=['591462b6907e1340e0ffbd5e'])
        si2 = result2['supplierInvoice']
        si1.invoiceState = ['sent']
        si1.automated = [True]
        si2.invoiceState = ['sent']
        si2.automated = [True]
        self.commit()
        supInvList = [si1, si2]
        fname = os.path.join(os.path.dirname(__file__), 'LB/LBut-test_parseBankgiroResponse.txt')
        if PYT3:
            with open(fname, 'rb') as fh:
                responsefile = fh.read()
        else:
            with open(fname) as fh:
                responsefile = fh.read()
        result = blm.accounting.parseBankgiroResponseSuccess(responsefile=py23txtuc(responsefile, 'latin-1'))
        assert len(result) == 2
        for si in result:
            assert si.invoiceState[0] == 'paid'
            assert si.accounted[0]
            ver, = blm.TO._query(id=si.transferVerification[0]).run()
            assert len(ver.transactions) > 2
    def test_fakeBGC_cycle(self):
        """End-to-end round trip: generate a signed order, feed it through
        the fake BGC success responder, and parse the response — both
        invoices must come back as paid."""
        r1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[copy.deepcopy(self.invoice1)])
        si1 = r1['supplierInvoice']
        r2, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[copy.deepcopy(self.invoice2)])
        si2 = r2['supplierInvoice']
        blm.accounting.enableSIAutomation(supInvList=[si1, si2])
        bgcOrder, = blm.accounting.createSignedBgcOrder(org=[self.org], supInvList=[si1, si2])
        order_signed = bgcOrder.order_signed[0]
        order_signed = order_signed.encode('latin-1')
        lbin = order_signed
        si1.invoiceState = ['sent']
        si2.invoiceState = ['sent']
        self.commit()
        from accounting.test.fake_bg_response import fakeResponseSuccess
        lbout = fakeResponseSuccess(lbin.decode().splitlines(True))
        result = blm.accounting.parseBankgiroResponseSuccess(responsefile = ''.join(lbout))
        assert len(result) == 2
        for si in result:
            assert si.invoiceState[0] == 'paid'
    def test_fakeBGC_cycle_rejected(self):
        """End-to-end round trip through the fake BGC rejection responder:
        both invoices must come back rejected, with the rejection reason
        logged and automation still enabled."""
        r1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[copy.deepcopy(self.invoice1)])
        si1 = r1['supplierInvoice']
        r2, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[copy.deepcopy(self.invoice2)])
        si2 = r2['supplierInvoice']
        blm.accounting.enableSIAutomation(supInvList=[si1, si2])
        bgcOrder, = blm.accounting.createSignedBgcOrder(org=[self.org], supInvList=[si1, si2])
        order_signed = bgcOrder.order_signed[0]
        order_signed = order_signed.encode('latin-1')
        lbin = order_signed
        si1.invoiceState = ['sent']
        si2.invoiceState = ['sent']
        self.commit()
        from accounting.test.fake_bg_response import fakeResponseRejected
        lbout = fakeResponseRejected(lbin.decode().splitlines(True))
        result = blm.accounting.parseBankgiroResponseRejected(responsefile=''.join(lbout))
        for si in result:
            assert si.invoiceState[0] == 'rejected'
            assert si.rejected_log[0] == u'MTRV0082 Stopped after balance check inquiry. Contact your bank.'
            assert si.automated[0] is True
        assert len(result) == 2
    def test_setSIState(self):
        """State machine of setSIState, acting as the payer user:
        registered->paid is allowed, paid->scheduled is refused, and
        paid->registered is allowed for non-automated invoices."""
        i1 = self.invoice1
        i1['transferDate'] = py23txtu((date.today() + timedelta(days=1)).isoformat()) # Transfer tomorrow.
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i1])
        si1 = result1['supplierInvoice']
        result2, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[self.invoice2])
        si2 = result2['supplierInvoice']
        self.commit()
        assert si1.invoiceState[0] == 'registered'
        assert si2.invoiceState[0] == 'registered'
        self.pushnewctx()
        self.ctx.setUser(self.payer)
        si1, = blm.TO._query(id=si1).run()
        si2, = blm.TO._query(id=si2).run()
        # registered -> paid should work.
        blm.accounting.setSIState(org=[self.org], supInvList=[si2], newstate=['paid'])
        assert si2.invoiceState[0] == 'paid'
        # paid -> scheduled should NOT work.
        blm.accounting.setSIState(org=[self.org], supInvList=[si1, si2], newstate=['scheduled'])
        assert si2.invoiceState[0] == 'paid'
        # paid -> registered should work (unless automated).
        assert si2.automated[0] is False
        blm.accounting.setSIState(org=[self.org], supInvList=[si2], newstate=['registered'])
        assert si2.invoiceState[0] == 'registered'
def test_enableSIAutomation(self):
sr1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[copy.deepcopy(self.invoice1)])
si1 = sr1['supplierInvoice']
result1 = blm.accounting.enableSIAutomation(supInvList=[si1])
assert si1.automated[0]
assert si1.invoiceState[0] == 'registered'
def test_disableSIAutomation(self):
sr1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[copy.deepcopy(self.invoice1)])
si1 = sr1['supplierInvoice']
enable = blm.accounting.enableSIAutomation(supInvList=[si1])
assert si1.automated[0]
resutl1 = blm.accounting.disableSIAutomation(org=[self.org], supInvList=[si1])
    def test_automation(self):
        """Full enable/disable automation cycle as the payer user:
        enabling reports both invoices updated, leaves them registered
        but automated; disabling reports 2 selected / 2 updated and
        clears the automated flag."""
        # Try moving between states
        result1, = blm.accounting.saveSupplierInvoice(
            org=[self.org], invoice=[copy.deepcopy(self.invoice1)])
        si1 = result1['supplierInvoice']
        result2, = blm.accounting.saveSupplierInvoice(
            org=[self.org], invoice=[copy.deepcopy(self.invoice2)])
        si2 = result2['supplierInvoice']
        self.commit()
        assert si1.invoiceState[0] == 'registered'
        assert si2.invoiceState[0] == 'registered'
        self.pushnewctx()
        self.ctx.setUser(self.payer)
        si1, = blm.TO._query(id=si1).run()
        si2, = blm.TO._query(id=si2).run()
        # Try to schedule both
        result = blm.accounting.enableSIAutomation(supInvList=[si1, si2])
        assert result == {'updated': [si1, si2],
                          'complaints': []}
        self.commit()
        si1, = blm.TO._query(id=si1).run()
        si2, = blm.TO._query(id=si2).run()
        assert si1.invoiceState[0] == 'registered'
        assert si2.invoiceState[0] == 'registered'
        assert si1.automated[0]
        assert si2.automated[0]
        si1, = blm.TO._query(id=si1).run()
        si2, = blm.TO._query(id=si2).run()
        # Unschedule (back to registered)
        result = blm.accounting.disableSIAutomation(org=[self.org],
                                           supInvList=[si1, si2])
        assert result == {'selected': 2,
                          'updated': 2,
                          'complaints': []}
        self.commit()
        assert si1.invoiceState[0] == 'registered'
        assert si2.invoiceState[0] == 'registered'
        assert not si1.automated[0]
        assert not si2.automated[0]
    def test_gen_cfp_po3_mh00(self):
        """The CFP PO3 MH00 opening record is 80 chars, carrying the
        sending bank account and currency codes."""
        result = plusgiro.gen_cfp_po3_mh00(
            org=self.org,
            sending_bank_account=self.provider.plusgiro_sending_bank_account[0]
        )
        assert result == 'MH00 5164005810      44580231        SEK                    SEK             '
        assert len(result) == 80
    def test_gen_cfp_po3_pi00(self):
        """The CFP PO3 PI00 payment record varies its type digit and
        identifier by transfer method: plusgiro+OCR, bankgiro+OCR and
        bank account+message; a missing transferDate falls back to the
        frozen current date."""
        # To Plusgiro number
        i3 = copy.deepcopy(self.invoice3)
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3])
        si1 = result1['supplierInvoice']
        self.commit()
        result = plusgiro.gen_cfp_po3_pi00(supplierInvoice=[si1])
        assert result == 'PI0000    8377004      2011113000000001000001234567899                         '
        assert len(result) == 80
        # To Bankgiro number
        i3['transferMethod'] = 'bgnum'
        i3['ocr'] = '1111111116'
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3], toid=[str(si1.id[0])])
        si1 = result1['supplierInvoice']
        result = plusgiro.gen_cfp_po3_pi00(supplierInvoice=[si1])
        assert result == 'PI0005    2374825      2011113000000001000001111111116                         '
        assert len(result) == 80
        # To Bank account
        i3['transferMethod'] = 'bankaccount'
        i3['invoiceIdentifierType'] = 'message'
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3], toid=[str(si1.id[0])])
        si1 = result1['supplierInvoice']
        result = plusgiro.gen_cfp_po3_pi00(supplierInvoice=[si1])
        assert result == 'PI00093144 7805569     201111300000000100000Stipendium                         '
        assert len(result) == 80
        # To Plusgiro number without explicit transferDate
        i3 = copy.deepcopy(self.invoice3)
        i3['transferDate'] = ''
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3])
        si4 = result1['supplierInvoice']
        self.commit()
        with Time(int(time.mktime(date(2011, 11, 30).timetuple()))) as t:
            result = plusgiro.gen_cfp_po3_pi00(supplierInvoice=[si4])
        assert result == 'PI0000    8377004      2011113000000001000001234567899                         '
        assert len(result) == 80
    def test_gen_cfp_po3_ba00(self):
        """The CFP PO3 BA00 record carries the outgoing payment reference
        (here the invoice's toid) in an 80-char record."""
        # Out payment reference (here be TOID)
        i3 = copy.deepcopy(self.invoice3)
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3])
        si1 = result1['supplierInvoice']
        self.commit()
        result = plusgiro.gen_cfp_po3_ba00(supplierInvoice=[si1])
        assert result[:4] == 'BA00'
        assert result.find(str(si1.id[0]))
        assert len(result) == 80
    def test_gen_cfp_po3_bm99(self):
        """BM99 message records are emitted only for message-identified
        invoices (5 records for a long message, regardless of plusgiro
        or bankgiro destination); OCR-identified and bank-account
        invoices yield no records."""
        # To Plusgiro number with OCR
        i3 = copy.deepcopy(self.invoice3)
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3])
        si1 = result1['supplierInvoice']
        self.commit()
        result = plusgiro.gen_cfp_po3_bm99(supplierInvoice=[si1])
        assert result == []
        # To Plusgiro number with message
        i3 = copy.deepcopy(self.invoice3)
        i3['invoiceIdentifierType'] = 'message'
        i3['message'] = ''.join(["{0!s} bottles of beer on the wall, ".format(i) * 2 + "Take one down, pass it around, " for i in range(99, 0, -1)]) + 'no more bottles of beer!'
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3])
        si1 = result1['supplierInvoice']
        self.commit()
        result = plusgiro.gen_cfp_po3_bm99(supplierInvoice=[si1])
        assert len(result) == 5
        # To Bankgiro number with OCR
        i3 = copy.deepcopy(self.invoice3)
        i3['transferMethod'] = 'bgnum'
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3], toid=[str(si1.id[0])])
        si1 = result1['supplierInvoice']
        result = plusgiro.gen_cfp_po3_bm99(supplierInvoice=[si1])
        assert result == []
        # To Bankgiro number with message
        i3 = copy.deepcopy(self.invoice3)
        i3['invoiceIdentifierType'] = 'message'
        i3['message'] = ''.join(["{0!s} bottles of beer on the wall, ".format(i) * 2 + "Take one down, pass it around, " for i in range(99, 0, -1)]) + 'no more bottles of beer!'
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3])
        si1 = result1['supplierInvoice']
        self.commit()
        result = plusgiro.gen_cfp_po3_bm99(supplierInvoice=[si1])
        assert len(result) == 5
        # To Bank account
        i3['transferMethod'] = 'bankaccount'
        i3['invoiceIdentifierType'] = 'message'
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3], toid=[str(si1.id[0])])
        si1 = result1['supplierInvoice']
        result = plusgiro.gen_cfp_po3_bm99(supplierInvoice=[si1])
        assert result == []
    def test_gen_cfp_po3_mt00(self):
        """The CFP PO3 MT00 trailer sums the payments: 2 records,
        27+23 = 50 SEK expressed in öre, in an 80-char record."""
        # To Plusgiro number
        i3 = copy.deepcopy(self.invoice3)
        i3['amount'] = '2700'
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3])
        si1 = result1['supplierInvoice']
        # To Bankgiro number
        i3 = copy.deepcopy(self.invoice3)
        i3['transferMethod'] = 'bgnum'
        i3['amount'] = '2300'
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3])
        si2 = result1['supplierInvoice']
        result = plusgiro.gen_cfp_po3_mt00(supInvList=[si1, si2])
        assert result == 'MT00 0000002000000000005000                                                    '
        assert len(result) == 80
    def test_generatePlusgiroRecords(self):
        """Exercise plusgiro record generation across every supported
        transfer-method / identifier combination: plusgiro with OCR,
        plusgiro with message, bankgiro with OCR, bankgiro with message,
        and a plain bank account transfer.
        """
        # To Plusgiro number with OCR
        i3 = copy.deepcopy(self.invoice3)
        result1, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3])
        si1 = result1['supplierInvoice']
        # To Plusgiro number with message
        i3 = copy.deepcopy(self.invoice3)
        i3['invoiceIdentifierType'] = 'message'
        # A very long message — presumably forces continuation/message
        # records in the generated file (TODO confirm against the format spec).
        i3['message'] = ''.join(["{0!s} bottles of beer on the wall, ".format(i) * 2 + "Take one down, pass it around, " for i in range(99, 0, -1)]) + 'no more bottles of beer!'
        result2, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3])
        si2 = result2['supplierInvoice']
        # To Bankgiro number with OCR; toid points at the previously saved
        # invoice, so this save replaces/updates that record.
        i3 = copy.deepcopy(self.invoice3)
        i3['transferMethod'] = 'bgnum'
        result3, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3], toid=[str(si1.id[0])])
        si3 = result3['supplierInvoice']
        # To Bankgiro number with message
        i3 = copy.deepcopy(self.invoice3)
        i3['invoiceIdentifierType'] = 'message'
        i3['message'] = ''.join(["{0!s} bottles of beer on the wall, ".format(i) * 2 + "Take one down, pass it around, " for i in range(99, 0, -1)]) + 'no more bottles of beer!'
        result4, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3])
        si4 = result4['supplierInvoice']
        # To Bank account (note: reuses the same dict as si4, mutated in place)
        i3['transferMethod'] = 'bankaccount'
        i3['invoiceIdentifierType'] = 'message'
        result5, = blm.accounting.saveSupplierInvoice(org=[self.org], invoice=[i3], toid=[str(si1.id[0])])
        si5 = result5['supplierInvoice']
        self.commit()
        result = plusgiro.generatePlusgiroRecords(
            org=self.org,
            sending_bank_account=self.provider.plusgiro_sending_bank_account,
            supInvList=[si1, si2, si3, si4, si5]
        )
        # Reference file produced by a known-good run, kept for manual diffing.
        fname = os.path.join(os.path.dirname(__file__), 'cfp/PG_TESTFIL_PO3_CFPinr.txt')
        with open(fname, 'rb') as fh:
            targetfile = fh.readlines()
        # TODO: asserts!
        #import pprint
        #pprint.pprint(result)
        #for generated, target in zip(result, targetfile):
        #    assert generated == target.rstrip('\n')
        resultfile, = blm.accounting.generatePlusgiroFile(org=[self.org], supInvList=[si1, si2, si3, si4, si5])
        # Hard to test as we include toid in file.
        #fname = os.path.join(os.path.dirname(__file__), 'cfp/generated_manually_checked.txt')
        #with open(fname) as fh:
        #    targetfile = fh.readlines()
        #for generated, target in zip(resultfile.splitlines(True), targetfile):
        #    print generated
        #    print target
        #    assert generated == target
class TestBgcReport(BLMTests):
    """Tests for BgcReport's processing of bankgiro response data."""

    def setup_method(self, method):
        super(TestBgcReport, self).setup_method(method)
        self.org = blm.accounting.Org(subscriptionLevel='subscriber', orgnum='5164005810')
        self.accounting = blm.accounting.Accounting(org=self.org)
        # One account per class referenced by the provider configuration below.
        for number in ('1000', '2000', '3000', '4000'):
            setattr(self, 'account' + number,
                    blm.accounting.Account(accounting=self.accounting, number=number))
        self.series = blm.accounting.VerificationSeries(accounting=self.accounting, name='A')
        self.provider = blm.accounting.SupplierInvoiceProvider(
            org=self.org, series='A', account='3000', bank_account='4000',
            plusgiro_sending_bank_account='44580231')
        self.bankgiroprovider = blm.accounting.BankgiroProvider(org=self.org, bgnum=['1234566'])
        # Template supplier invoice payable to a bankgiro number.
        self.invoice1 = {
            u'amount': 664000,
            u'invoiceIdentifierType': u'message',
            u'transferMethod': u'bgnum',
            u'message': u'Leverans',
            u'recipient': u'Mottagare AB',
            u'bgnum': u'8888885',
            u'regVerificationLines': None,
            u'regVerificationVersion': None,
        }

    def test_process_data(self, monkeypatch):
        saved, = blm.accounting.saveSupplierInvoice(
            org=[self.org], invoice=[copy.deepcopy(self.invoice1)])
        invoice = saved['supplierInvoice']

        def fake_parse(text):
            # Pretend the bank response matched exactly this invoice.
            return [invoice]

        report = blm.accounting.BgcReport(multiline=['1' * 47])
        self.commit()
        monkeypatch.setattr(blm.accounting, 'parseBankgiroResponseSuccess', fake_parse)
        report.process_data()
        # process_data should record the invoices the parser reported.
        assert report.supplierInvoices == [invoice]
class TestBootstrap(BLMTests):
    """Verify that bootstrap() creates the expected baseline objects."""

    def check_bootstrap(self):
        """Run bootstrap and assert its post-conditions."""
        blm.accounting.bootstrap()
        user_groups = blm.accounting.UG._query(name='public').run()
        assert len(user_groups) == 1
        assert user_groups[0].name == ['public']
        orgs = blm.accounting.Org._query().run()
        assert len(orgs) == 1
        assert orgs[0].orgnum == [blm.accounting.Org._oeOrgNum]
        #assert blm.accounting.ChartOfAccounts._query().run()
        assert blm.accounting.VatCode._query().run()

    def test_bootstrap(self):
        # Bootstrapping twice must be safe and leave the same state (reentrant).
        self.check_bootstrap()
        self.check_bootstrap()
class TestUpgrade(BLMTests):
    """Verify that the schema upgrade is idempotent."""

    def test_reentrant(self):
        # Running upgrade twice must neither explode nor misbehave the
        # second time around.
        for _ in range(2):
            blm.accounting.upgrade()
| [
"blm.accounting.upgrade",
"blm.accounting.createSignedBgcOrder",
"blm.accounting.User",
"blm.accounting.Transaction",
"copy.deepcopy",
"os.fork",
"blm.accounting.Org._query",
"accounting.bankgiro.normalize_text",
"blm.accounting.VatCode",
"blm.accounting.next_verification_data",
"blm.accounting.... | [((12282, 12318), 'os.path.join', 'os.path.join', (['certs', '"""swish.crt.pem"""'], {}), "(certs, 'swish.crt.pem')\n", (12294, 12318), False, 'import bson, email, py, os, time, uuid\n'), ((12330, 12366), 'os.path.join', 'os.path.join', (['certs', '"""swish.key.pem"""'], {}), "(certs, 'swish.key.pem')\n", (12342, 12366), False, 'import bson, email, py, os, time, uuid\n'), ((1874, 1894), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (1892, 1894), False, 'import blm\n'), ((1910, 1931), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (1929, 1931), False, 'import blm\n'), ((2065, 2127), 'blm.accounting.currentUserHasRole', 'blm.accounting.currentUserHasRole', (['org', '*self.roles'], {'user': 'user'}), '(org, *self.roles, user=user)\n', (2098, 2127), False, 'import blm\n'), ((2172, 2192), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (2190, 2192), False, 'import blm\n'), ((2208, 2232), 'blm.accounting.APIUser', 'blm.accounting.APIUser', ([], {}), '()\n', (2230, 2232), False, 'import blm\n'), ((2677, 2736), 'blm.accounting.currentUserHasRole', 'blm.accounting.currentUserHasRole', (['org', '"""admins"""'], {'user': 'apiu'}), "(org, 'admins', user=apiu)\n", (2710, 2736), False, 'import blm\n'), ((2753, 2773), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (2771, 2773), False, 'import blm\n'), ((2937, 2972), 'blm.accounting.Client', 'blm.accounting.Client', ([], {'name': "['foo']"}), "(name=['foo'])\n", (2958, 2972), False, 'import blm\n'), ((3020, 3043), 'blm.accounting.Client', 'blm.accounting.Client', ([], {}), '()\n', (3041, 3043), False, 'import blm\n'), ((3057, 3090), 'blm.accounting.UG', 'blm.accounting.UG', ([], {'users': '[client]'}), '(users=[client])\n', (3074, 3090), False, 'import blm\n'), ((3155, 3178), 'blm.accounting.Client', 'blm.accounting.Client', ([], {}), '()\n', (3176, 3178), False, 'import blm\n'), ((3242, 3302), 'py.test.raises', 'py.test.raises', 
(['exceptions.ClientError', 'client'], {'super': '[True]'}), '(exceptions.ClientError, client, super=[True])\n', (3256, 3302), False, 'import bson, email, py, os, time, uuid\n'), ((3373, 3432), 'blm.accounting.User', 'blm.accounting.User', ([], {'name': "['foo']", 'emailAddress': "['<EMAIL>']"}), "(name=['foo'], emailAddress=['<EMAIL>'])\n", (3392, 3432), False, 'import blm\n'), ((3478, 3499), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (3497, 3499), False, 'import blm\n'), ((3513, 3544), 'blm.accounting.UG', 'blm.accounting.UG', ([], {'users': '[user]'}), '(users=[user])\n', (3530, 3544), False, 'import blm\n'), ((3607, 3628), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (3626, 3628), False, 'import blm\n'), ((3690, 3748), 'py.test.raises', 'py.test.raises', (['exceptions.ClientError', 'user'], {'super': '[True]'}), '(exceptions.ClientError, user, super=[True])\n', (3704, 3748), False, 'import bson, email, py, os, time, uuid\n'), ((3805, 3826), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (3824, 3826), False, 'import blm\n'), ((4029, 4050), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (4048, 4050), False, 'import blm\n'), ((4113, 4163), 'py.test.raises', 'py.test.raises', (['ClientError', 'user1'], {'openid': "['bar']"}), "(ClientError, user1, openid=['bar'])\n", (4127, 4163), False, 'import bson, email, py, os, time, uuid\n'), ((4235, 4259), 'blm.accounting.APIUser', 'blm.accounting.APIUser', ([], {}), '()\n', (4257, 4259), False, 'import blm\n'), ((4275, 4294), 'uuid.UUID', 'uuid.UUID', (['u.key[0]'], {}), '(u.key[0])\n', (4284, 4294), False, 'import bson, email, py, os, time, uuid\n'), ((4335, 4359), 'blm.accounting.APIUser', 'blm.accounting.APIUser', ([], {}), '()\n', (4357, 4359), False, 'import blm\n'), ((4483, 4503), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (4501, 4503), False, 'import blm\n'), ((4518, 4557), 'blm.accounting.createAPIUser', 
'blm.accounting.createAPIUser', ([], {'org': '[org]'}), '(org=[org])\n', (4546, 4557), False, 'import blm\n'), ((4902, 4963), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'name': "['foo inc.']", 'orgnum': "['123456-7890']"}), "(name=['foo inc.'], orgnum=['123456-7890'])\n", (4920, 4963), False, 'import blm\n'), ((4981, 5038), 'blm.accounting.Invitation', 'blm.accounting.Invitation', ([], {'org': '[org]', 'emailTo': "['<EMAIL>']"}), "(org=[org], emailTo=['<EMAIL>'])\n", (5006, 5038), False, 'import blm\n'), ((5283, 5346), 'blm.accounting.User', 'blm.accounting.User', ([], {'name': "[u'<NAME>']", 'emailAddress': "['<EMAIL>']"}), "(name=[u'<NAME>'], emailAddress=['<EMAIL>'])\n", (5302, 5346), False, 'import blm\n'), ((5355, 5409), 'accounting.config.config.set', 'config.config.set', (['"""accounting"""', '"""smtp_domain"""', '"""test"""'], {}), "('accounting', 'smtp_domain', 'test')\n", (5372, 5409), False, 'from accounting import config, mail, sie_import\n'), ((5860, 5929), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'name': "[u'Räksmörgåsar AB']", 'orgnum': "['123456-7890']"}), "(name=[u'Räksmörgåsar AB'], orgnum=['123456-7890'])\n", (5878, 5929), False, 'import blm\n'), ((7052, 7085), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'name': "['ACME']"}), "(name=['ACME'])\n", (7070, 7085), False, 'import blm\n'), ((7101, 7177), 'blm.accounting.Invitation', 'blm.accounting.Invitation', ([], {'org': '[org]', 'emailTo': "['<EMAIL>']", 'groups': "['admins']"}), "(org=[org], emailTo=['<EMAIL>'], groups=['admins'])\n", (7126, 7177), False, 'import blm\n'), ((7234, 7310), 'blm.accounting.Invitation', 'blm.accounting.Invitation', ([], {'org': '[org]', 'emailTo': "['<EMAIL>']", 'groups': "['admins']"}), "(org=[org], emailTo=['<EMAIL>'], groups=['admins'])\n", (7259, 7310), False, 'import blm\n'), ((7368, 7389), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (7387, 7389), False, 'import blm\n'), ((8086, 8107), 'blm.accounting.User', 
'blm.accounting.User', ([], {}), '()\n', (8105, 8107), False, 'import blm\n'), ((8116, 8175), 'py.test.raises', 'py.test.raises', (['exceptions.ClientError', 'inv.accept', '[user2]'], {}), '(exceptions.ClientError, inv.accept, [user2])\n', (8130, 8175), False, 'import bson, email, py, os, time, uuid\n'), ((8268, 8288), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (8286, 8288), False, 'import blm\n'), ((8303, 8355), 'blm.accounting.PaymentProvider', 'blm.accounting.PaymentProvider', ([], {'org': 'org', 'series': "['']"}), "(org=org, series=[''])\n", (8333, 8355), False, 'import blm\n'), ((8403, 8456), 'blm.accounting.PaymentProvider', 'blm.accounting.PaymentProvider', ([], {'org': 'org', 'series': "['P']"}), "(org=org, series=['P'])\n", (8433, 8456), False, 'import blm\n'), ((8643, 8663), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (8661, 8663), False, 'import blm\n'), ((8679, 8718), 'blm.accounting.PaymentProvider', 'blm.accounting.PaymentProvider', ([], {'org': 'org'}), '(org=org)\n', (8709, 8718), False, 'import blm\n'), ((8734, 8773), 'blm.accounting.PaymentProvider', 'blm.accounting.PaymentProvider', ([], {'org': 'org'}), '(org=org)\n', (8764, 8773), False, 'import blm\n'), ((8793, 8834), 'blm.members.Payment', 'blm.members.Payment', ([], {'paymentProvider': 'ppd1'}), '(paymentProvider=ppd1)\n', (8812, 8834), False, 'import blm\n'), ((8854, 8895), 'blm.members.Payment', 'blm.members.Payment', ([], {'paymentProvider': 'ppd2'}), '(paymentProvider=ppd2)\n', (8873, 8895), False, 'import blm\n'), ((9334, 9354), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (9352, 9354), False, 'import blm\n'), ((9363, 9432), 'py.test.raises', 'py.test.raises', (['ClientError', 'blm.accounting.PlusgiroProvider'], {'org': 'org'}), '(ClientError, blm.accounting.PlusgiroProvider, org=org)\n', (9377, 9432), False, 'import bson, email, py, os, time, uuid\n'), ((9484, 9536), 'blm.accounting.Org', 'blm.accounting.Org', ([], 
{'subscriptionLevel': "['subscriber']"}), "(subscriptionLevel=['subscriber'])\n", (9502, 9536), False, 'import blm\n'), ((9551, 9610), 'blm.accounting.PlusgiroProvider', 'blm.accounting.PlusgiroProvider', ([], {'org': 'org', 'pgnum': "['1234566']"}), "(org=org, pgnum=['1234566'])\n", (9582, 9610), False, 'import blm\n'), ((9737, 9797), 'blm.accounting.PlusgiroProvider', 'blm.accounting.PlusgiroProvider', ([], {'org': 'org', 'pgnum': "['123456-6']"}), "(org=org, pgnum=['123456-6'])\n", (9768, 9797), False, 'import blm\n'), ((9925, 9989), 'blm.accounting.PlusgiroProvider', 'blm.accounting.PlusgiroProvider', ([], {'org': 'org', 'pgnum': "['12 34 56 - 6']"}), "(org=org, pgnum=['12 34 56 - 6'])\n", (9956, 9989), False, 'import blm\n'), ((10123, 10143), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (10141, 10143), False, 'import blm\n'), ((10152, 10221), 'py.test.raises', 'py.test.raises', (['ClientError', 'blm.accounting.BankgiroProvider'], {'org': 'org'}), '(ClientError, blm.accounting.BankgiroProvider, org=org)\n', (10166, 10221), False, 'import bson, email, py, os, time, uuid\n'), ((10273, 10325), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'subscriptionLevel': "['subscriber']"}), "(subscriptionLevel=['subscriber'])\n", (10291, 10325), False, 'import blm\n'), ((10340, 10399), 'blm.accounting.BankgiroProvider', 'blm.accounting.BankgiroProvider', ([], {'org': 'org', 'bgnum': "['1234566']"}), "(org=org, bgnum=['1234566'])\n", (10371, 10399), False, 'import blm\n'), ((10526, 10586), 'blm.accounting.BankgiroProvider', 'blm.accounting.BankgiroProvider', ([], {'org': 'org', 'bgnum': "['123-4566']"}), "(org=org, bgnum=['123-4566'])\n", (10557, 10586), False, 'import blm\n'), ((10714, 10776), 'blm.accounting.BankgiroProvider', 'blm.accounting.BankgiroProvider', ([], {'org': 'org', 'bgnum': "['123 - 4566']"}), "(org=org, bgnum=['123 - 4566'])\n", (10745, 10776), False, 'import blm\n'), ((10897, 10949), 'blm.accounting.Org', 'blm.accounting.Org', ([], 
{'subscriptionLevel': "['subscriber']"}), "(subscriptionLevel=['subscriber'])\n", (10915, 10949), False, 'import blm\n'), ((10958, 11071), 'blm.accounting.PaysonProvider', 'blm.accounting.PaysonProvider', ([], {'org': 'org', 'apiUserId': "['foo']", 'apiPassword': "['<PASSWORD>']", 'receiverEmail': "['baz']"}), "(org=org, apiUserId=['foo'], apiPassword=[\n '<PASSWORD>'], receiverEmail=['baz'])\n", (10987, 11071), False, 'import blm\n'), ((11121, 11141), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (11139, 11141), False, 'import blm\n'), ((11150, 11291), 'py.test.raises', 'py.test.raises', (['ClientError', 'blm.accounting.PaysonProvider'], {'org': 'org', 'apiUserId': "['foo']", 'apiPassword': "['<PASSWORD>']", 'receiverEmail': "['baz']"}), "(ClientError, blm.accounting.PaysonProvider, org=org,\n apiUserId=['foo'], apiPassword=['<PASSWORD>'], receiverEmail=['baz'])\n", (11164, 11291), False, 'import bson, email, py, os, time, uuid\n'), ((11366, 11418), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'subscriptionLevel': "['subscriber']"}), "(subscriptionLevel=['subscriber'])\n", (11384, 11418), False, 'import blm\n'), ((11427, 11514), 'blm.accounting.SeqrProvider', 'blm.accounting.SeqrProvider', ([], {'org': 'org', 'principalId': "['foo']", 'password': "['<PASSWORD>']"}), "(org=org, principalId=['foo'], password=[\n '<PASSWORD>'])\n", (11454, 11514), False, 'import blm\n'), ((11564, 11584), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (11582, 11584), False, 'import blm\n'), ((11593, 11708), 'py.test.raises', 'py.test.raises', (['ClientError', 'blm.accounting.SeqrProvider'], {'org': 'org', 'principalId': "['foo']", 'password': "['<PASSWORD>']"}), "(ClientError, blm.accounting.SeqrProvider, org=org,\n principalId=['foo'], password=['<PASSWORD>'])\n", (11607, 11708), False, 'import bson, email, py, os, time, uuid\n'), ((11808, 11860), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'subscriptionLevel': "['subscriber']"}), 
"(subscriptionLevel=['subscriber'])\n", (11826, 11860), False, 'import blm\n'), ((11869, 11932), 'blm.accounting.StripeProvider', 'blm.accounting.StripeProvider', ([], {'org': 'org', 'access_token': "['stripe']"}), "(org=org, access_token=['stripe'])\n", (11898, 11932), False, 'import blm\n'), ((11987, 12007), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (12005, 12007), False, 'import blm\n'), ((12016, 12109), 'py.test.raises', 'py.test.raises', (['ClientError', 'blm.accounting.StripeProvider'], {'org': 'org', 'access_token': "['foo']"}), "(ClientError, blm.accounting.StripeProvider, org=org,\n access_token=['foo'])\n", (12030, 12109), False, 'import bson, email, py, os, time, uuid\n'), ((12483, 12600), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'subscriptionLevel': "['subscriber']", 'name': "[u'<NAME>']", 'email': "[u'<EMAIL>']", 'orgnum': "['1234567890']"}), "(subscriptionLevel=['subscriber'], name=[u'<NAME>'],\n email=[u'<EMAIL>'], orgnum=['1234567890'])\n", (12501, 12600), False, 'import blm\n'), ((12884, 12979), 'blm.accounting.SwishProvider', 'blm.accounting.SwishProvider', ([], {'org': 'self.org', 'swish_id': 'self.org.orgnum', 'cert': 'cert', 'pkey': 'pkey'}), '(org=self.org, swish_id=self.org.orgnum, cert=\n cert, pkey=pkey)\n', (12912, 12979), False, 'import blm\n'), ((13204, 13272), 'blm.accounting.SwishProvider', 'blm.accounting.SwishProvider', ([], {'org': 'self.org', 'swish_id': '"""123 339 93 26"""'}), "(org=self.org, swish_id='123 339 93 26')\n", (13232, 13272), False, 'import blm\n'), ((13451, 13519), 'blm.accounting.SwishProvider', 'blm.accounting.SwishProvider', ([], {'org': 'self.org', 'swish_id': 'self.org.orgnum'}), '(org=self.org, swish_id=self.org.orgnum)\n', (13479, 13519), False, 'import blm\n'), ((13775, 13840), 'blm.accounting.SwishProvider', 'blm.accounting.SwishProvider', ([], {'org': 'self.org', 'swish_id': '"""1231181189"""'}), "(org=self.org, swish_id='1231181189')\n", (13803, 13840), False, 'import blm\n'), 
((14501, 14566), 'blm.accounting.SwishProvider', 'blm.accounting.SwishProvider', ([], {'org': 'self.org', 'swish_id': '"""1231181189"""'}), "(org=self.org, swish_id='1231181189')\n", (14529, 14566), False, 'import blm\n'), ((14714, 14785), 'py.test.raises', 'py.test.raises', (['ClientError', 'provider'], {'cert': "['not a valid certificate']"}), "(ClientError, provider, cert=['not a valid certificate'])\n", (14728, 14785), False, 'import bson, email, py, os, time, uuid\n'), ((14830, 14898), 'blm.accounting.SwishProvider', 'blm.accounting.SwishProvider', ([], {'org': 'self.org', 'swish_id': 'self.org.orgnum'}), '(org=self.org, swish_id=self.org.orgnum)\n', (14858, 14898), False, 'import blm\n'), ((15614, 15634), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (15632, 15634), False, 'import blm\n'), ((15643, 15735), 'py.test.raises', 'py.test.raises', (['ClientError', 'blm.accounting.SwishProvider'], {'org': 'org', 'swish_id': 'org.orgnum'}), '(ClientError, blm.accounting.SwishProvider, org=org, swish_id\n =org.orgnum)\n', (15657, 15735), False, 'import bson, email, py, os, time, uuid\n'), ((16066, 16087), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (16085, 16087), False, 'import blm\n'), ((16134, 16194), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'name': "['foo inc.']", 'orgnum': "['1234567890']"}), "(name=['foo inc.'], orgnum=['1234567890'])\n", (16152, 16194), False, 'import blm\n'), ((16378, 16401), 'blm.accounting.Client', 'blm.accounting.Client', ([], {}), '()\n', (16399, 16401), False, 'import blm\n'), ((16420, 16444), 'blm.accounting.APIUser', 'blm.accounting.APIUser', ([], {}), '()\n', (16442, 16444), False, 'import blm\n'), ((16460, 16481), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (16479, 16481), False, 'import blm\n'), ((16497, 16557), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'name': "['foo inc.']", 'orgnum': "['1234567890']"}), "(name=['foo inc.'], orgnum=['1234567890'])\n", (16515, 
16557), False, 'import blm\n'), ((16693, 16725), 'blm.accounting.User', 'blm.accounting.User', ([], {'name': "['u1']"}), "(name=['u1'])\n", (16712, 16725), False, 'import blm\n'), ((16742, 16774), 'blm.accounting.User', 'blm.accounting.User', ([], {'name': "['u2']"}), "(name=['u2'])\n", (16761, 16774), False, 'import blm\n'), ((16789, 16809), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (16807, 16809), False, 'import blm\n'), ((17619, 17639), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (17637, 17639), False, 'import blm\n'), ((17656, 17703), 'blm.accounting.User', 'blm.accounting.User', ([], {'name': "['admin']", 'ugs': 'org.ug'}), "(name=['admin'], ugs=org.ug)\n", (17675, 17703), False, 'import blm\n'), ((17720, 17764), 'blm.accounting.User', 'blm.accounting.User', ([], {'name': "['u1']", 'ugs': 'org.ug'}), "(name=['u1'], ugs=org.ug)\n", (17739, 17764), False, 'import blm\n'), ((17781, 17825), 'blm.accounting.User', 'blm.accounting.User', ([], {'name': "['u2']", 'ugs': 'org.ug'}), "(name=['u2'], ugs=org.ug)\n", (17800, 17825), False, 'import blm\n'), ((19352, 19459), 'blm.accounting.updateMemberRoles', 'blm.accounting.updateMemberRoles', ([], {'org': '[org]', 'roleData': "[{'id': user1.id[0], 'roles': ['ticketchecker']}]"}), "(org=[org], roleData=[{'id': user1.id[0],\n 'roles': ['ticketchecker']}])\n", (19384, 19459), False, 'import blm\n'), ((19804, 19908), 'blm.accounting.updateMemberRoles', 'blm.accounting.updateMemberRoles', ([], {'org': '[org]', 'roleData': "[{'id': user1.id[0], 'roles': ['accountant']}]"}), "(org=[org], roleData=[{'id': user1.id[0],\n 'roles': ['accountant']}])\n", (19836, 19908), False, 'import blm\n'), ((20319, 20380), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'name': "['Open End']", 'orgnum': "['556609-2473']"}), "(name=['Open End'], orgnum=['556609-2473'])\n", (20337, 20380), False, 'import blm\n'), ((20389, 20493), 'py.test.raises', 'py.test.raises', (['exceptions.ClientError', 
'blm.accounting.Org'], {'name': "['<NAME>.']", 'orgnum': "['556609-2473']"}), "(exceptions.ClientError, blm.accounting.Org, name=['<NAME>.'],\n orgnum=['556609-2473'])\n", (20403, 20493), False, 'import bson, email, py, os, time, uuid\n'), ((20506, 20566), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'name': "['<NAME>.']", 'orgnum': "['111111-1111']"}), "(name=['<NAME>.'], orgnum=['111111-1111'])\n", (20524, 20566), False, 'import blm\n'), ((20575, 20643), 'py.test.raises', 'py.test.raises', (['exceptions.ClientError', 'org2'], {'orgnum': "['556609-2473']"}), "(exceptions.ClientError, org2, orgnum=['556609-2473'])\n", (20589, 20643), False, 'import bson, email, py, os, time, uuid\n'), ((20695, 20715), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (20713, 20715), False, 'import blm\n'), ((20735, 20793), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[org]', 'start': "['2010-01-01']"}), "(org=[org], start=['2010-01-01'])\n", (20760, 20793), False, 'import blm\n'), ((20813, 20871), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[org]', 'start': "['2011-01-01']"}), "(org=[org], start=['2011-01-01'])\n", (20838, 20871), False, 'import blm\n'), ((21330, 21350), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (21348, 21350), False, 'import blm\n'), ((21365, 21399), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'org'}), '(org=org)\n', (21390, 21399), False, 'import blm\n'), ((21408, 21455), 'blm.accounting.subscribe', 'blm.accounting.subscribe', (['[org]', "['subscriber']"], {}), "([org], ['subscriber'])\n", (21432, 21455), False, 'import blm\n'), ((21871, 21891), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (21889, 21891), False, 'import blm\n'), ((22333, 22353), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (22351, 22353), False, 'import blm\n'), ((23221, 23281), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'name': "['foo 
inc.']", 'orgnum': "['1234567890']"}), "(name=['foo inc.'], orgnum=['1234567890'])\n", (23239, 23281), False, 'import blm\n'), ((23299, 23346), 'blm.accounting.User', 'blm.accounting.User', ([], {'name': "['user1']", 'ugs': 'org.ug'}), "(name=['user1'], ugs=org.ug)\n", (23318, 23346), False, 'import blm\n'), ((23363, 23410), 'blm.accounting.User', 'blm.accounting.User', ([], {'name': "['user2']", 'ugs': 'org.ug'}), "(name=['user2'], ugs=org.ug)\n", (23382, 23410), False, 'import blm\n'), ((23487, 23523), 'blm.accounting.User', 'blm.accounting.User', ([], {'name': "['hacker']"}), "(name=['hacker'])\n", (23506, 23523), False, 'import blm\n'), ((23616, 23689), 'py.test.raises', 'py.test.raises', (['ClientError', 'blm.accounting.removeMembers', '[org]', '[user1]'], {}), '(ClientError, blm.accounting.removeMembers, [org], [user1])\n', (23630, 23689), False, 'import bson, email, py, os, time, uuid\n'), ((23931, 23975), 'blm.accounting.removeMembers', 'blm.accounting.removeMembers', (['[org]', '[user1]'], {}), '([org], [user1])\n', (23959, 23975), False, 'import blm\n'), ((24177, 24250), 'py.test.raises', 'py.test.raises', (['ClientError', 'blm.accounting.removeMembers', '[org]', '[user2]'], {}), '(ClientError, blm.accounting.removeMembers, [org], [user2])\n', (24191, 24250), False, 'import bson, email, py, os, time, uuid\n'), ((24268, 24315), 'blm.accounting.User', 'blm.accounting.User', ([], {'name': "['user3']", 'ugs': 'org.ug'}), "(name=['user3'], ugs=org.ug)\n", (24287, 24315), False, 'import blm\n'), ((24366, 24410), 'blm.accounting.removeMembers', 'blm.accounting.removeMembers', (['[org]', '[user2]'], {}), '([org], [user2])\n', (24394, 24410), False, 'import blm\n'), ((24566, 24586), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (24584, 24586), False, 'import blm\n'), ((25004, 25033), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'created': '(0)'}), '(created=0)\n', (25022, 25033), False, 'import blm\n'), ((26236, 26285), 'blm.accounting.User', 
'blm.accounting.User', ([], {'name': "['u1']", 'ugs': 'self.org.ug'}), "(name=['u1'], ugs=self.org.ug)\n", (26255, 26285), False, 'import blm\n'), ((26655, 26676), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (26674, 26676), False, 'import blm\n'), ((26722, 26742), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (26740, 26742), False, 'import blm\n'), ((26761, 26891), 'blm.accounting.PGOrder', 'blm.accounting.PGOrder', ([], {'org': '[org]', 'contact': "['Mr. Foo']", 'contactPhone': "['1234567']", 'contactEmail': "['<EMAIL>']", 'pgnum': "['12345-6']"}), "(org=[org], contact=['Mr. Foo'], contactPhone=[\n '1234567'], contactEmail=['<EMAIL>'], pgnum=['12345-6'])\n", (26783, 26891), False, 'import blm\n'), ((27036, 27057), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (27055, 27057), False, 'import blm\n'), ((27104, 27124), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (27122, 27124), False, 'import blm\n'), ((27205, 27337), 'py.test.raises', 'py.test.raises', (['ClientError', 'blm.accounting.orderPG', '[org]', "['Mr. Foo']", "['1234566']", "['<EMAIL>']", "['12345-5']", "['1000']", "['B']"], {}), "(ClientError, blm.accounting.orderPG, [org], ['Mr. Foo'], [\n '1234566'], ['<EMAIL>'], ['12345-5'], ['1000'], ['B'])\n", (27219, 27337), False, 'import bson, email, py, os, time, uuid\n'), ((27506, 27609), 'blm.accounting.orderPG', 'blm.accounting.orderPG', (['[org]', "['Mr. Foo']", "['1234566']", "['<EMAIL>']", "['12345-5']", "['1000']", "['B']"], {}), "([org], ['Mr. 
Foo'], ['1234566'], ['<EMAIL>'], [\n '12345-5'], ['1000'], ['B'])\n", (27528, 27609), False, 'import blm\n'), ((28257, 28324), 'accounting.config.config.set', 'config.config.set', (['"""plusgiro"""', '"""setup_email_from"""', '"""bounce@<EMAIL>"""'], {}), "('plusgiro', 'setup_email_from', 'bounce@<EMAIL>')\n", (28274, 28324), False, 'from accounting import config, mail, sie_import\n'), ((28333, 28406), 'accounting.config.config.set', 'config.config.set', (['"""plusgiro"""', '"""setup_email_to"""', '"""plusgiro@test, cc@test"""'], {}), "('plusgiro', 'setup_email_to', 'plusgiro@test, cc@test')\n", (28350, 28406), False, 'from accounting import config, mail, sie_import\n'), ((28557, 28599), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'orgnum': "['223344-6677']"}), "(orgnum=['223344-6677'])\n", (28575, 28599), False, 'import blm\n'), ((28644, 28774), 'blm.accounting.PGOrder', 'blm.accounting.PGOrder', ([], {'org': '[org]', 'contact': "['Mr. Foo']", 'contactPhone': "['1234567']", 'contactEmail': "['<EMAIL>']", 'pgnum': "['12345-6']"}), "(org=[org], contact=['Mr. Foo'], contactPhone=[\n '1234567'], contactEmail=['<EMAIL>'], pgnum=['12345-6'])\n", (28666, 28774), False, 'import blm\n'), ((29174, 29205), 'email.message_from_string', 'email.message_from_string', (['body'], {}), '(body)\n', (29199, 29205), False, 'import bson, email, py, os, time, uuid\n'), ((29454, 29474), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (29472, 29474), False, 'import blm\n'), ((29493, 29636), 'blm.accounting.PGOrder', 'blm.accounting.PGOrder', ([], {'org': '[org]', 'contact': "['Mr. Foo']", 'contactPhone': "['1234567']", 'contactEmail': "['<EMAIL>']", 'pgnum': "['12345-6']", 'sent': '[True]'}), "(org=[org], contact=['Mr. 
Foo'], contactPhone=[\n '1234567'], contactEmail=['<EMAIL>'], pgnum=['12345-6'], sent=[True])\n", (29515, 29636), False, 'import blm\n'), ((30053, 30073), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (30071, 30073), False, 'import blm\n'), ((30200, 30239), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'self.org'}), '(org=self.org)\n', (30225, 30239), False, 'import blm\n'), ((30364, 30423), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'self.org', 'start': '"""2010-01-01"""'}), "(org=self.org, start='2010-01-01')\n", (30389, 30423), False, 'import blm\n'), ((30564, 30623), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'self.org', 'start': '"""2011-01-01"""'}), "(org=self.org, start='2011-01-01')\n", (30589, 30623), False, 'import blm\n'), ((30764, 30823), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'self.org', 'start': '"""2012-01-01"""'}), "(org=self.org, start='2012-01-01')\n", (30789, 30823), False, 'import blm\n'), ((31004, 31063), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'self.org', 'start': '"""2012-01-01"""'}), "(org=self.org, start='2012-01-01')\n", (31029, 31063), False, 'import blm\n'), ((31204, 31263), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'self.org', 'start': '"""2011-01-01"""'}), "(org=self.org, start='2011-01-01')\n", (31229, 31263), False, 'import blm\n'), ((31404, 31463), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'self.org', 'start': '"""2010-01-01"""'}), "(org=self.org, start='2010-01-01')\n", (31429, 31463), False, 'import blm\n'), ((31631, 31690), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'self.org', 'start': '"""2010-01-01"""'}), "(org=self.org, start='2010-01-01')\n", (31656, 31690), False, 'import blm\n'), ((31831, 31890), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'self.org', 
'start': '"""2011-01-01"""'}), "(org=self.org, start='2011-01-01')\n", (31856, 31890), False, 'import blm\n'), ((32313, 32372), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'self.org', 'start': '"""2010-01-01"""'}), "(org=self.org, start='2010-01-01')\n", (32338, 32372), False, 'import blm\n'), ((32513, 32572), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'self.org', 'start': '"""2011-01-01"""'}), "(org=self.org, start='2011-01-01')\n", (32538, 32572), False, 'import blm\n'), ((32955, 32982), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {}), '()\n', (32980, 32982), False, 'import blm\n'), ((33381, 33408), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {}), '()\n', (33406, 33408), False, 'import blm\n'), ((33558, 33614), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'name': "['foo']", 'orgnum': "['123456-7890']"}), "(name=['foo'], orgnum=['123456-7890'])\n", (33576, 33614), False, 'import blm\n'), ((33629, 33665), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[org]'}), '(org=[org])\n', (33654, 33665), False, 'import blm\n'), ((33802, 33822), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (33820, 33822), False, 'import blm\n'), ((33984, 34039), 'blm.accounting.accountingImport', 'blm.accounting.accountingImport', ([], {'org': '[org]', 'data': '[data]'}), '(org=[org], data=[data])\n', (34015, 34039), False, 'import blm\n'), ((34130, 34178), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'name': "['foo']", 'orgnum': "['bar']"}), "(name=['foo'], orgnum=['bar'])\n", (34148, 34178), False, 'import blm\n'), ((34193, 34229), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[org]'}), '(org=[org])\n', (34218, 34229), False, 'import blm\n'), ((34417, 34453), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[org]'}), '(org=[org])\n', (34442, 34453), False, 'import blm\n'), ((34722, 34742), 
'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (34740, 34742), False, 'import blm\n'), ((34758, 34836), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[org]', 'start': "['2009-01-01']", 'end': "['2009-12-31']"}), "(org=[org], start=['2009-01-01'], end=['2009-12-31'])\n", (34783, 34836), False, 'import blm\n'), ((34865, 34943), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[org]', 'start': "['2010-01-01']", 'end': "['2010-12-31']"}), "(org=[org], start=['2010-01-01'], end=['2010-12-31'])\n", (34890, 34943), False, 'import blm\n'), ((34972, 35050), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[org]', 'start': "['2011-01-01']", 'end': "['2011-12-31']"}), "(org=[org], start=['2011-01-01'], end=['2011-12-31'])\n", (34997, 35050), False, 'import blm\n'), ((35298, 35368), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': '[number]', 'accounting': '[accounting]'}), '(number=[number], accounting=[accounting], **kw)\n', (35320, 35368), False, 'import blm\n'), ((35453, 35473), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (35471, 35473), False, 'import blm\n'), ((35489, 35567), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[org]', 'start': "['2009-01-01']", 'end': "['2009-12-31']"}), "(org=[org], start=['2009-01-01'], end=['2009-12-31'])\n", (35514, 35567), False, 'import blm\n'), ((36289, 36367), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[org]', 'start': "['2010-01-01']", 'end': "['2010-12-31']"}), "(org=[org], start=['2010-01-01'], end=['2010-12-31'])\n", (36314, 36367), False, 'import blm\n'), ((36476, 36538), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', ([], {'accounting': 'curr', 'name': "['A']"}), "(accounting=curr, name=['A'])\n", (36509, 36538), False, 'import blm\n'), ((36553, 36609), 'blm.accounting.Verification', 'blm.accounting.Verification', ([], 
{'accounting': 'curr', 'series': 'ser'}), '(accounting=curr, series=ser)\n', (36580, 36609), False, 'import blm\n'), ((36618, 36704), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'verification': 'ver', 'account': 'acc', 'version': '[0]', 'amount': '"""5"""'}), "(verification=ver, account=acc, version=[0],\n amount='5')\n", (36644, 36704), False, 'import blm\n'), ((37597, 37617), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (37615, 37617), False, 'import blm\n'), ((37632, 37666), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'org'}), '(org=org)\n', (37657, 37666), False, 'import blm\n'), ((37850, 37903), 'blm.accounting.PaymentProvider', 'blm.accounting.PaymentProvider', ([], {'org': 'org', 'series': "['X']"}), "(org=org, series=['X'])\n", (37880, 37903), False, 'import blm\n'), ((38044, 38097), 'blm.accounting.PaymentProvider', 'blm.accounting.PaymentProvider', ([], {'org': 'org', 'series': "['X']"}), "(org=org, series=['X'])\n", (38074, 38097), False, 'import blm\n'), ((38300, 38320), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (38318, 38320), False, 'import blm\n'), ((38335, 38369), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'org'}), '(org=org)\n', (38360, 38369), False, 'import blm\n'), ((38382, 38441), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', ([], {'accounting': 'acc', 'name': '"""B"""'}), "(accounting=acc, name='B')\n", (38415, 38441), False, 'import blm\n'), ((38684, 38711), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {}), '()\n', (38709, 38711), False, 'import blm\n'), ((38726, 38802), 'blm.accounting.Dimension', 'blm.accounting.Dimension', ([], {'number': "['27']", 'name': "['Customer']", 'accounting': '[acc]'}), "(number=['27'], name=['Customer'], accounting=[acc])\n", (38750, 38802), False, 'import blm\n'), ((38850, 38929), 'blm.accounting.AccountingObject', 'blm.accounting.AccountingObject', ([], 
{'number': "['27']", 'name': "['Nisse']", 'dimension': '[dim]'}), "(number=['27'], name=['Nisse'], dimension=[dim])\n", (38881, 38929), False, 'import blm\n'), ((39122, 39149), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {}), '()\n', (39147, 39149), False, 'import blm\n'), ((39186, 39279), 'blm.accounting.Dimension', 'blm.accounting.Dimension', ([], {'number': "['27']", 'name': "['Customer']", 'accounting': '[self.accounting]'}), "(number=['27'], name=['Customer'], accounting=[self\n .accounting])\n", (39210, 39279), False, 'import blm\n'), ((39356, 39449), 'blm.accounting.Dimension', 'blm.accounting.Dimension', ([], {'number': "['27']", 'name': "['Customer']", 'accounting': '[self.accounting]'}), "(number=['27'], name=['Customer'], accounting=[self\n .accounting])\n", (39380, 39449), False, 'import blm\n'), ((39503, 39616), 'blm.accounting.Dimension', 'blm.accounting.Dimension', ([], {'number': "['28']", 'name': "['Customer']", 'subdim_of': '[parent]', 'accounting': '[self.accounting]'}), "(number=['28'], name=['Customer'], subdim_of=[\n parent], accounting=[self.accounting])\n", (39527, 39616), False, 'import blm\n'), ((39750, 39858), 'blm.accounting.Dimension', 'blm.accounting.Dimension', ([], {'number': "['27']", 'name': "['Customer']", 'project': '[True]', 'accounting': '[self.accounting]'}), "(number=['27'], name=['Customer'], project=[True],\n accounting=[self.accounting])\n", (39774, 39858), False, 'import blm\n'), ((39955, 40068), 'blm.accounting.Dimension', 'blm.accounting.Dimension', ([], {'number': "['28']", 'name': "['Customer']", 'subdim_of': '[parent]', 'accounting': '[self.accounting]'}), "(number=['28'], name=['Customer'], subdim_of=[\n parent], accounting=[self.accounting])\n", (39979, 40068), False, 'import blm\n'), ((40924, 40967), 'blm.accounting.BaseAccount', 'blm.accounting.BaseAccount', ([], {'number': "['1234']"}), "(number=['1234'])\n", (40950, 40967), False, 'import blm\n'), ((41291, 41312), 'blm.accounting.User', 
'blm.accounting.User', ([], {}), '()\n', (41310, 41312), False, 'import blm\n'), ((41332, 41397), 'blm.accounting.AccountTemplate', 'blm.accounting.AccountTemplate', ([], {'number': "['1111']", 'allowRead': '[user]'}), "(number=['1111'], allowRead=[user])\n", (41362, 41397), False, 'import blm\n'), ((41882, 41927), 'blm.accounting.ChartOfAccounts', 'blm.accounting.ChartOfAccounts', ([], {'name': "['test']"}), "(name=['test'])\n", (41912, 41927), False, 'import blm\n'), ((42227, 42288), 'blm.accounting.ChartOfAccounts', 'blm.accounting.ChartOfAccounts', ([], {'name': "['test']", 'accounts': 'accts'}), "(name=['test'], accounts=accts)\n", (42257, 42288), False, 'import blm\n'), ((42474, 42501), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {}), '()\n', (42499, 42501), False, 'import blm\n'), ((42964, 42984), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (42982, 42984), False, 'import blm\n'), ((43008, 43059), 'blm.accounting.accountingFromTemplate', 'blm.accounting.accountingFromTemplate', (['[coa]', '[org]'], {}), '([coa], [org])\n', (43045, 43059), False, 'import blm\n'), ((43439, 43460), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (43458, 43460), False, 'import blm\n'), ((43475, 43544), 'blm.accounting.ChartOfAccounts', 'blm.accounting.ChartOfAccounts', ([], {'name': "['The chart!']", 'allowRead': '[user]'}), "(name=['The chart!'], allowRead=[user])\n", (43505, 43544), False, 'import blm\n'), ((44081, 44108), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {}), '()\n', (44106, 44108), False, 'import blm\n'), ((44131, 44206), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', ([], {'accounting': '[self.accounting]', 'name': "['A']"}), "(accounting=[self.accounting], name=['A'])\n", (44164, 44206), False, 'import blm\n'), ((44239, 44337), 'blm.accounting.Verification', 'blm.accounting.Verification', ([], {'series': '[self.series]', 'number': "['1']", 'accounting': 
'[self.accounting]'}), "(series=[self.series], number=['1'], accounting=\n [self.accounting])\n", (44266, 44337), False, 'import blm\n'), ((44388, 44457), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': "['1234']", 'accounting': '[self.accounting]'}), "(number=['1234'], accounting=[self.accounting])\n", (44410, 44457), False, 'import blm\n'), ((44704, 44753), 'blm.accounting.VatCode', 'blm.accounting.VatCode', ([], {'code': '"""10"""', 'xmlCode': '"""gorp"""'}), "(code='10', xmlCode='gorp')\n", (44726, 44753), False, 'import blm\n'), ((44769, 44818), 'blm.accounting.VatCode', 'blm.accounting.VatCode', ([], {'code': '"""11"""', 'xmlCode': '"""gorp"""'}), "(code='11', xmlCode='gorp')\n", (44791, 44818), False, 'import blm\n'), ((44840, 44929), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': "['1234']", 'accounting': '[self.accounting]', 'vatCode': "['10']"}), "(number=['1234'], accounting=[self.accounting],\n vatCode=['10'])\n", (44862, 44929), False, 'import blm\n'), ((45259, 45384), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': "['1234']", 'opening_balance': "['42.00']", 'opening_quantity': "['10']", 'accounting': '[self.accounting]'}), "(number=['1234'], opening_balance=['42.00'],\n opening_quantity=['10'], accounting=[self.accounting])\n", (45281, 45384), False, 'import blm\n'), ((46310, 46433), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': "['1234']", 'opening_balance': "['0.00']", 'opening_quantity': "['0']", 'accounting': '[self.accounting]'}), "(number=['1234'], opening_balance=['0.00'],\n opening_quantity=['0'], accounting=[self.accounting])\n", (46332, 46433), False, 'import blm\n'), ((47100, 47127), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {}), '()\n', (47125, 47127), False, 'import blm\n'), ((47142, 47185), 'blm.accounting.BaseAccount', 'blm.accounting.BaseAccount', ([], {'number': "['1234']"}), "(number=['1234'])\n", (47168, 47185), False, 'import 
blm\n'), ((47202, 47267), 'blm.accounting.Account.fromtemplate', 'blm.accounting.Account.fromtemplate', (['acc'], {'accounting': '[accounting]'}), '(acc, accounting=[accounting])\n', (47237, 47267), False, 'import blm\n'), ((47478, 47505), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {}), '()\n', (47503, 47505), False, 'import blm\n'), ((47521, 47548), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {}), '()\n', (47546, 47548), False, 'import blm\n'), ((47563, 47627), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', ([], {'name': "['A']", 'accounting': '[acc1]'}), "(name=['A'], accounting=[acc1])\n", (47596, 47627), False, 'import blm\n'), ((47642, 47706), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', ([], {'name': "['B']", 'accounting': '[acc1]'}), "(name=['B'], accounting=[acc1])\n", (47675, 47706), False, 'import blm\n'), ((47716, 47813), 'py.test.raises', 'py.test.raises', (['ClientError', 'blm.accounting.VerificationSeries'], {'name': "['A']", 'accounting': '[acc1]'}), "(ClientError, blm.accounting.VerificationSeries, name=['A'],\n accounting=[acc1])\n", (47730, 47813), False, 'import bson, email, py, os, time, uuid\n'), ((47848, 47912), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', ([], {'name': "['A']", 'accounting': '[acc2]'}), "(name=['A'], accounting=[acc2])\n", (47881, 47912), False, 'import blm\n'), ((47947, 47991), 'py.test.raises', 'py.test.raises', (['ClientError', 'vs2'], {'name': "['A']"}), "(ClientError, vs2, name=['A'])\n", (47961, 47991), False, 'import bson, email, py, os, time, uuid\n'), ((48048, 48068), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (48066, 48068), False, 'import blm\n'), ((48083, 48117), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'org'}), '(org=org)\n', (48108, 48117), False, 'import blm\n'), ((48132, 48195), 'blm.accounting.VerificationSeries', 
'blm.accounting.VerificationSeries', ([], {'name': "['A']", 'accounting': '[acc]'}), "(name=['A'], accounting=[acc])\n", (48165, 48195), False, 'import blm\n'), ((48209, 48262), 'blm.accounting.PaymentProvider', 'blm.accounting.PaymentProvider', ([], {'org': 'org', 'series': "['A']"}), "(org=org, series=['A'])\n", (48239, 48262), False, 'import blm\n'), ((48271, 48311), 'py.test.raises', 'py.test.raises', (['ClientError', 'ser._delete'], {}), '(ClientError, ser._delete)\n', (48285, 48311), False, 'import bson, email, py, os, time, uuid\n'), ((48404, 48467), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', ([], {'name': "['B']", 'accounting': '[acc]'}), "(name=['B'], accounting=[acc])\n", (48437, 48467), False, 'import blm\n'), ((48576, 48623), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'start': "['2010-01-01']"}), "(start=['2010-01-01'])\n", (48601, 48623), False, 'import blm\n'), ((48667, 48730), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', ([], {'name': "['A']", 'accounting': '[acc]'}), "(name=['A'], accounting=[acc])\n", (48700, 48730), False, 'import blm\n'), ((48749, 48796), 'blm.accounting.next_verification_data', 'blm.accounting.next_verification_data', (['[series]'], {}), '([series])\n', (48786, 48796), False, 'import blm\n'), ((48950, 49061), 'blm.accounting.Verification', 'blm.accounting.Verification', ([], {'accounting': '[acc]', 'series': '[series]', 'number': '[1]', 'transaction_date': "['2010-01-04']"}), "(accounting=[acc], series=[series], number=[1],\n transaction_date=['2010-01-04'])\n", (48977, 49061), False, 'import blm\n'), ((49112, 49159), 'blm.accounting.next_verification_data', 'blm.accounting.next_verification_data', (['[series]'], {}), '([series])\n', (49149, 49159), False, 'import blm\n'), ((49313, 49425), 'blm.accounting.Verification', 'blm.accounting.Verification', ([], {'accounting': '[acc]', 'series': '[series]', 'number': '[27]', 'transaction_date': 
"['2011-03-14']"}), "(accounting=[acc], series=[series], number=[27],\n transaction_date=['2011-03-14'])\n", (49340, 49425), False, 'import blm\n'), ((49475, 49522), 'blm.accounting.next_verification_data', 'blm.accounting.next_verification_data', (['[series]'], {}), '([series])\n', (49512, 49522), False, 'import blm\n'), ((49820, 49856), 'blm.accounting.User', 'blm.accounting.User', ([], {'name': "['<NAME>']"}), "(name=['<NAME>'])\n", (49839, 49856), False, 'import blm\n'), ((49913, 49956), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'accountants': '[self.user]'}), '(accountants=[self.user])\n', (49931, 49956), False, 'import blm\n'), ((49976, 50052), 'blm.accounting.PaymentProvider', 'blm.accounting.PaymentProvider', ([], {'org': 'self.org', 'account': "['1000']", 'series': "['A']"}), "(org=self.org, account=['1000'], series=['A'])\n", (50006, 50052), False, 'import blm\n'), ((50079, 50118), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'self.org'}), '(org=self.org)\n', (50104, 50118), False, 'import blm\n'), ((50214, 50281), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'accounting': 'self.accounting', 'number': "['1000']"}), "(accounting=self.accounting, number=['1000'])\n", (50236, 50281), False, 'import blm\n'), ((50445, 50478), 'blm.accounting.Verification', 'blm.accounting.Verification', ([], {}), '(**kw)\n', (50472, 50478), False, 'import blm\n'), ((51013, 51163), 'py.test.raises', 'py.test.raises', (['exceptions.ClientError', 'blm.accounting.Verification'], {'series': '[self.series]', 'accounting': '[self.accounting]', 'transaction_date': "['foo']"}), "(exceptions.ClientError, blm.accounting.Verification, series=\n [self.series], accounting=[self.accounting], transaction_date=['foo'])\n", (51027, 51163), False, 'import bson, email, py, os, time, uuid\n'), ((51258, 51409), 'py.test.raises', 'py.test.raises', (['exceptions.ClientError', 'blm.accounting.Verification'], {'series': '[self.series]', 'accounting': 
'[self.accounting]', 'registration_date': "['foo']"}), "(exceptions.ClientError, blm.accounting.Verification, series=\n [self.series], accounting=[self.accounting], registration_date=['foo'])\n", (51272, 51409), False, 'import bson, email, py, os, time, uuid\n'), ((51506, 51642), 'py.test.raises', 'py.test.raises', (['exceptions.ClientError', 'blm.accounting.Verification'], {'series': '[self.series]', 'accounting': '[self.accounting]', 'number': '[0]'}), '(exceptions.ClientError, blm.accounting.Verification, series=\n [self.series], accounting=[self.accounting], number=[0])\n', (51520, 51642), False, 'import bson, email, py, os, time, uuid\n'), ((51737, 51873), 'py.test.raises', 'py.test.raises', (['exceptions.ClientError', 'blm.accounting.Verification'], {'series': '[self.series]', 'accounting': '[self.accounting]', 'number': '[1]'}), '(exceptions.ClientError, blm.accounting.Verification, series=\n [self.series], accounting=[self.accounting], number=[1])\n', (51751, 51873), False, 'import bson, email, py, os, time, uuid\n'), ((51983, 52019), 'blm.accounting.User', 'blm.accounting.User', ([], {'name': "['<NAME>']"}), "(name=['<NAME>'])\n", (52002, 52019), False, 'import blm\n'), ((52889, 52960), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', ([], {'accounting': 'self.accounting', 'name': '"""B"""'}), "(accounting=self.accounting, name='B')\n", (52922, 52960), False, 'import blm\n'), ((52983, 53039), 'py.test.raises', 'py.test.raises', (['exceptions.ClientError', 'ver'], {'number': '[12]'}), '(exceptions.ClientError, ver, number=[12])\n', (52997, 53039), False, 'import bson, email, py, os, time, uuid\n'), ((53048, 53109), 'py.test.raises', 'py.test.raises', (['exceptions.ClientError', 'ver'], {'series': '[series2]'}), '(exceptions.ClientError, ver, series=[series2])\n', (53062, 53109), False, 'import bson, email, py, os, time, uuid\n'), ((53816, 53857), 'blm.accounting.createVerification', 'blm.accounting.createVerification', (['[data]'], 
{}), '([data])\n', (53849, 53857), False, 'import blm\n'), ((55377, 55416), 'blm.accounting.editVerification', 'blm.accounting.editVerification', (['[data]'], {}), '([data])\n', (55408, 55416), False, 'import blm\n'), ((58161, 58202), 'blm.accounting.createVerification', 'blm.accounting.createVerification', (['[data]'], {}), '([data])\n', (58194, 58202), False, 'import blm\n'), ((59296, 59335), 'blm.accounting.editVerification', 'blm.accounting.editVerification', (['[data]'], {}), '([data])\n', (59327, 59335), False, 'import blm\n'), ((60593, 60620), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {}), '()\n', (60618, 60620), False, 'import blm\n'), ((60644, 60706), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'accounting': '[self.acc]', 'number': "['1234']"}), "(accounting=[self.acc], number=['1234'])\n", (60666, 60706), False, 'import blm\n'), ((60726, 60802), 'blm.accounting.Dimension', 'blm.accounting.Dimension', ([], {'name': "['bar']", 'number': "['23']", 'accounting': '[self.acc]'}), "(name=['bar'], number=['23'], accounting=[self.acc])\n", (60750, 60802), False, 'import blm\n'), ((60823, 60910), 'blm.accounting.AccountingObject', 'blm.accounting.AccountingObject', ([], {'name': "['foo']", 'number': "['42']", 'dimension': '[self.dim]'}), "(name=['foo'], number=['42'], dimension=[\n self.dim])\n", (60854, 60910), False, 'import blm\n'), ((60947, 61068), 'blm.accounting.ObjectBalanceBudget', 'blm.accounting.ObjectBalanceBudget', ([], {'period': "['201003']", 'accounting_object': '[self.aobj]', 'account_balance': '[self.account]'}), "(period=['201003'], accounting_object=[\n self.aobj], account_balance=[self.account])\n", (60981, 61068), False, 'import blm\n'), ((61174, 61289), 'blm.accounting.ObjectBalanceBudget', 'blm.accounting.ObjectBalanceBudget', ([], {'period': "['']", 'accounting_object': '[self.aobj]', 'account_balance': '[self.account]'}), "(period=[''], accounting_object=[self.\n aobj], account_balance=[self.account])\n", 
(61208, 61289), False, 'import blm\n'), ((61440, 61590), 'py.test.raises', 'py.test.raises', (['Exception', 'blm.accounting.ObjectBalanceBudget'], {'account_balance': '[self.account]', 'period': "['2010']", 'accounting_object': '[self.aobj]'}), "(Exception, blm.accounting.ObjectBalanceBudget,\n account_balance=[self.account], period=['2010'], accounting_object=[\n self.aobj])\n", (61454, 61590), False, 'import bson, email, py, os, time, uuid\n'), ((61804, 61831), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {}), '()\n', (61829, 61831), False, 'import blm\n'), ((61855, 61917), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'accounting': '[self.acc]', 'number': "['1234']"}), "(accounting=[self.acc], number=['1234'])\n", (61877, 61917), False, 'import blm\n'), ((61954, 62033), 'blm.accounting.BalanceBudget', 'blm.accounting.BalanceBudget', ([], {'account_balance': '[self.account]', 'period': "['201003']"}), "(account_balance=[self.account], period=['201003'])\n", (61982, 62033), False, 'import blm\n'), ((62196, 62223), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {}), '()\n', (62221, 62223), False, 'import blm\n'), ((62247, 62316), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': "['1234']", 'accounting': '[self.accounting]'}), "(number=['1234'], accounting=[self.accounting])\n", (62269, 62316), False, 'import blm\n'), ((62405, 62469), 'blm.accounting.AccountBalance', 'blm.accounting.AccountBalance', ([], {'year': '[-3]', 'account': '[self.account]'}), '(year=[-3], account=[self.account])\n', (62434, 62469), False, 'import blm\n'), ((62677, 62697), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (62695, 62697), False, 'import blm\n'), ((62724, 62765), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[self.org]'}), '(org=[self.org])\n', (62749, 62765), False, 'import blm\n'), ((62788, 62861), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', 
([], {'accounting': 'self.accounting', 'name': "['A']"}), "(accounting=self.accounting, name=['A'])\n", (62821, 62861), False, 'import blm\n'), ((62894, 63024), 'blm.accounting.Verification', 'blm.accounting.Verification', ([], {'series': '[self.series]', 'number': "['1']", 'transaction_date': "['2012-01-02']", 'accounting': '[self.accounting]'}), "(series=[self.series], number=['1'],\n transaction_date=['2012-01-02'], accounting=[self.accounting])\n", (62921, 63024), False, 'import blm\n'), ((63158, 63179), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (63177, 63179), False, 'import blm\n'), ((63336, 63464), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': "['1000']", 'type': "['T']", 'opening_balance': "['10']", 'opening_quantity': "['2']", 'accounting': '[accounting]'}), "(number=['1000'], type=['T'], opening_balance=['10'],\n opening_quantity=['2'], accounting=[accounting])\n", (63358, 63464), False, 'import blm\n'), ((63608, 63736), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': "['3234']", 'type': "['I']", 'accounting': '[accounting]', 'opening_balance': "['20']", 'opening_quantity': "['5']"}), "(number=['3234'], type=['I'], accounting=[accounting],\n opening_balance=['20'], opening_quantity=['5'])\n", (63630, 63736), False, 'import blm\n'), ((63879, 63981), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'account': '[accIncome]', 'verification': '[self.ver]', 'version': 'self.ver.version'}), '(account=[accIncome], verification=[self.ver],\n version=self.ver.version)\n', (63905, 63981), False, 'import blm\n'), ((64402, 64572), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'account': '[accAsset]', 'verification': '[self.ver]', 'version': 'self.ver.version', 'transaction_date': "['2012-04-05']", 'amount': "['40']", 'quantity': "['4']"}), "(account=[accAsset], verification=[self.ver],\n version=self.ver.version, transaction_date=['2012-04-05'], amount=['40'\n ], 
quantity=['4'])\n", (64428, 64572), False, 'import blm\n'), ((64919, 65090), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'account': '[accIncome]', 'verification': '[self.ver]', 'version': 'self.ver.version', 'transaction_date': "['2012-04-05']", 'amount': "['30']", 'quantity': "['2']"}), "(account=[accIncome], verification=[self.ver],\n version=self.ver.version, transaction_date=['2012-04-05'], amount=['30'\n ], quantity=['2'])\n", (64945, 65090), False, 'import blm\n'), ((65504, 65593), 'blm.accounting.AccountingObject', 'blm.accounting.AccountingObject', ([], {'number': "['101']", 'name': "['foo']", 'dimension': '[dimension]'}), "(number=['101'], name=['foo'], dimension=[\n dimension])\n", (65535, 65593), False, 'import blm\n'), ((65838, 66047), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'account': '[accAsset]', 'verification': '[self.ver]', 'version': 'self.ver.version', 'transaction_date': "['2012-04-05']", 'accounting_objects': '[accounting_object]', 'amount': "['30']", 'quantity': "['2']"}), "(account=[accAsset], verification=[self.ver],\n version=self.ver.version, transaction_date=['2012-04-05'],\n accounting_objects=[accounting_object], amount=['30'], quantity=['2'])\n", (65864, 66047), False, 'import blm\n'), ((66473, 66682), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'account': '[accAsset]', 'verification': '[self.ver]', 'version': 'self.ver.version', 'transaction_date': "['2012-04-05']", 'accounting_objects': '[accounting_object]', 'amount': "['10']", 'quantity': "['3']"}), "(account=[accAsset], verification=[self.ver],\n version=self.ver.version, transaction_date=['2012-04-05'],\n accounting_objects=[accounting_object], amount=['10'], quantity=['3'])\n", (66499, 66682), False, 'import blm\n'), ((67008, 67218), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'account': '[accIncome]', 'verification': '[self.ver]', 'version': 'self.ver.version', 'transaction_date': 
"['2012-04-05']", 'accounting_objects': '[accounting_object]', 'amount': "['20']", 'quantity': "['7']"}), "(account=[accIncome], verification=[self.ver],\n version=self.ver.version, transaction_date=['2012-04-05'],\n accounting_objects=[accounting_object], amount=['20'], quantity=['7'])\n", (67034, 67218), False, 'import blm\n'), ((67633, 67702), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': "['9999']", 'accounting': '[self.accounting]'}), "(number=['9999'], accounting=[self.accounting])\n", (67655, 67702), False, 'import blm\n'), ((68115, 68162), 'blm.accounting.transactionIndex', 'blm.accounting.transactionIndex', (['[direct_query]'], {}), '([direct_query])\n', (68146, 68162), False, 'import blm\n'), ((68532, 68601), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': "['9999']", 'accounting': '[self.accounting]'}), "(number=['9999'], accounting=[self.accounting])\n", (68554, 68601), False, 'import blm\n'), ((68655, 68767), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'account': 'account', 'verification': '[self.ver]', 'version': 'self.ver.version', 'text': "['foo']"}), "(account=account, verification=[self.ver],\n version=self.ver.version, text=['foo'])\n", (68681, 68767), False, 'import blm\n'), ((68849, 68965), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'account': 'account', 'verification': '[self.ver]', 'version': 'self.ver.version', 'text': "[' foo ']"}), "(account=account, verification=[self.ver],\n version=self.ver.version, text=[' foo '])\n", (68875, 68965), False, 'import blm\n'), ((69176, 69245), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': "['9999']", 'accounting': '[self.accounting]'}), "(number=['9999'], accounting=[self.accounting])\n", (69198, 69245), False, 'import blm\n'), ((69306, 69375), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': "['5000']", 'accounting': '[self.accounting]'}), "(number=['5000'], 
accounting=[self.accounting])\n", (69328, 69375), False, 'import blm\n'), ((69430, 69547), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'account': 'account1', 'verification': '[self.ver]', 'version': 'self.ver.version', 'amount': "['10.00']"}), "(account=account1, verification=[self.ver],\n version=self.ver.version, amount=['10.00'])\n", (69456, 69547), False, 'import blm\n'), ((69596, 69713), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'account': 'account1', 'verification': '[self.ver]', 'version': 'self.ver.version', 'amount': "['20.00']"}), "(account=account1, verification=[self.ver],\n version=self.ver.version, amount=['20.00'])\n", (69622, 69713), False, 'import blm\n'), ((70223, 70292), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': "['9999']", 'accounting': '[self.accounting]'}), "(number=['9999'], accounting=[self.accounting])\n", (70245, 70292), False, 'import blm\n'), ((70347, 70464), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'account': 'account1', 'verification': '[self.ver]', 'version': 'self.ver.version', 'amount': "['10.00']"}), "(account=account1, verification=[self.ver],\n version=self.ver.version, amount=['10.00'])\n", (70373, 70464), False, 'import blm\n'), ((70513, 70630), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'account': 'account1', 'verification': '[self.ver]', 'version': 'self.ver.version', 'amount': "['20.00']"}), "(account=account1, verification=[self.ver],\n version=self.ver.version, amount=['20.00'])\n", (70539, 70630), False, 'import blm\n'), ((70679, 70796), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'account': 'account1', 'verification': '[self.ver]', 'version': 'self.ver.version', 'amount': "['15.00']"}), "(account=account1, verification=[self.ver],\n version=self.ver.version, amount=['15.00'])\n", (70705, 70796), False, 'import blm\n'), ((71374, 71394), 'blm.accounting.Org', 'blm.accounting.Org', 
([], {}), '()\n', (71392, 71394), False, 'import blm\n'), ((71416, 71452), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[org]'}), '(org=[org])\n', (71441, 71452), False, 'import blm\n'), ((71470, 71538), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', ([], {'accounting': 'accounting', 'name': "['A']"}), "(accounting=accounting, name=['A'])\n", (71503, 71538), False, 'import blm\n'), ((71568, 71632), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': "['1000']", 'accounting': '[accounting]'}), "(number=['1000'], accounting=[accounting])\n", (71590, 71632), False, 'import blm\n'), ((71648, 71712), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': "['2000']", 'accounting': '[accounting]'}), "(number=['2000'], accounting=[accounting])\n", (71670, 71712), False, 'import blm\n'), ((71728, 71850), 'blm.accounting.Verification', 'blm.accounting.Verification', ([], {'accounting': 'accounting', 'series': 'series', 'text': "['original']", 'transaction_date': "['2010-01-01']"}), "(accounting=accounting, series=series, text=[\n 'original'], transaction_date=['2010-01-01'])\n", (71755, 71850), False, 'import blm\n'), ((71980, 72086), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'verification': '[ver]', 'version': 'ver.version', 'account': '[acc1]', 'amount': "['10.00']"}), "(verification=[ver], version=ver.version, account\n =[acc1], amount=['10.00'])\n", (72006, 72086), False, 'import blm\n'), ((73410, 73516), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'verification': 'self.ver', 'version': '[1]', 'account': '[self.acc1]', 'amount': "['40.00']"}), "(verification=self.ver, version=[1], account=[\n self.acc1], amount=['40.00'])\n", (73436, 73516), False, 'import blm\n'), ((75833, 75892), 'py.test.raises', 'py.test.raises', (['exceptions.ClientError', 'self.ver'], {'version': '(5)'}), '(exceptions.ClientError, self.ver, version=5)\n', (75847, 75892), 
False, 'import bson, email, py, os, time, uuid\n'), ((76053, 76074), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (76072, 76074), False, 'import blm\n'), ((76097, 76118), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (76116, 76118), False, 'import blm\n'), ((76140, 76161), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (76159, 76161), False, 'import blm\n'), ((76218, 76270), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'subscriptionLevel': "['subscriber']"}), "(subscriptionLevel=['subscriber'])\n", (76236, 76270), False, 'import blm\n'), ((76385, 76426), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[self.org]'}), '(org=[self.org])\n', (76410, 76426), False, 'import blm\n'), ((77043, 77108), 'py.test.raises', 'py.test.raises', (['ClientError', 'setattr', 'self.org', '"""name"""', "['Hupp!']"], {}), "(ClientError, setattr, self.org, 'name', ['Hupp!'])\n", (77057, 77108), False, 'import bson, email, py, os, time, uuid\n'), ((77154, 77219), 'py.test.raises', 'py.test.raises', (['ClientError', 'setattr', 'self.org', '"""name"""', "['Hipp!']"], {}), "(ClientError, setattr, self.org, 'name', ['Hipp!'])\n", (77168, 77219), False, 'import bson, email, py, os, time, uuid\n'), ((77970, 78080), 'blm.accounting.PGOrder', 'blm.accounting.PGOrder', ([], {'org': '[self.org]', 'contact': "['a']", 'contactPhone': "['b']", 'contactEmail': "['c']", 'pgnum': "['d']"}), "(org=[self.org], contact=['a'], contactPhone=['b'],\n contactEmail=['c'], pgnum=['d'])\n", (77992, 78080), False, 'import blm\n'), ((78206, 78251), 'py.test.raises', 'py.test.raises', (['ClientError', 'pgo'], {'pgnum': "['x']"}), "(ClientError, pgo, pgnum=['x'])\n", (78220, 78251), False, 'import bson, email, py, os, time, uuid\n'), ((78299, 78439), 'py.test.raises', 'py.test.raises', (['ClientError', 'blm.accounting.PGOrder'], {'org': '[self.org]', 'contact': "['a']", 'contactPhone': "['b']", 'contactEmail': "['c']", 'pgnum': 
"['d']"}), "(ClientError, blm.accounting.PGOrder, org=[self.org], contact\n =['a'], contactPhone=['b'], contactEmail=['c'], pgnum=['d'])\n", (78313, 78439), False, 'import bson, email, py, os, time, uuid\n'), ((78512, 78558), 'py.test.raises', 'py.test.raises', (['ClientError', 'pgo'], {'sent': '[False]'}), '(ClientError, pgo, sent=[False])\n', (78526, 78558), False, 'import bson, email, py, os, time, uuid\n'), ((78567, 78612), 'py.test.raises', 'py.test.raises', (['ClientError', 'pgo'], {'pgnum': "['y']"}), "(ClientError, pgo, pgnum=['y'])\n", (78581, 78612), False, 'import bson, email, py, os, time, uuid\n'), ((79113, 79134), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (79132, 79134), False, 'import blm\n'), ((79157, 79178), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (79176, 79178), False, 'import blm\n'), ((79200, 79221), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (79219, 79221), False, 'import blm\n'), ((79241, 79261), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (79259, 79261), False, 'import blm\n'), ((80168, 80189), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (80187, 80189), False, 'import blm\n'), ((80212, 80233), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (80231, 80233), False, 'import blm\n'), ((80255, 80276), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (80274, 80276), False, 'import blm\n'), ((80296, 80316), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (80314, 80316), False, 'import blm\n'), ((81090, 81131), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[self.org]'}), '(org=[self.org])\n', (81115, 81131), False, 'import blm\n'), ((81179, 81249), 'py.test.raises', 'py.test.raises', (['ClientError', 'blm.accounting.Accounting'], {'org': '[self.org]'}), '(ClientError, blm.accounting.Accounting, org=[self.org])\n', (81193, 81249), False, 'import bson, email, py, os, time, 
uuid\n'), ((81308, 81349), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[self.org]'}), '(org=[self.org])\n', (81333, 81349), False, 'import blm\n'), ((81582, 81643), 'py.test.raises', 'py.test.raises', (['ClientError', 'accounting'], {'start': "['2011-01-01']"}), "(ClientError, accounting, start=['2011-01-01'])\n", (81596, 81643), False, 'import bson, email, py, os, time, uuid\n'), ((81713, 81754), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[self.org]'}), '(org=[self.org])\n', (81738, 81754), False, 'import blm\n'), ((82086, 82127), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[self.org]'}), '(org=[self.org])\n', (82111, 82127), False, 'import blm\n'), ((82142, 82231), 'blm.accounting.Dimension', 'blm.accounting.Dimension', ([], {'number': '"""42"""', 'name': '"""meaning of life"""', 'accounting': 'accounting'}), "(number='42', name='meaning of life', accounting=\n accounting)\n", (82166, 82231), False, 'import blm\n'), ((82555, 82596), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[self.org]'}), '(org=[self.org])\n', (82580, 82596), False, 'import blm\n'), ((82680, 82750), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', ([], {'accounting': '[accounting]', 'name': "['A']"}), "(accounting=[accounting], name=['A'])\n", (82713, 82750), False, 'import blm\n'), ((82987, 83091), 'py.test.raises', 'py.test.raises', (['ClientError', 'blm.accounting.VerificationSeries'], {'accounting': '[accounting]', 'name': "['B']"}), "(ClientError, blm.accounting.VerificationSeries, accounting=[\n accounting], name=['B'])\n", (83001, 83091), False, 'import bson, email, py, os, time, uuid\n'), ((83118, 83175), 'py.test.raises', 'py.test.raises', (['ClientError', 'seriesA'], {'description': "['bar']"}), "(ClientError, seriesA, description=['bar'])\n", (83132, 83175), False, 'import bson, email, py, os, time, uuid\n'), ((83262, 83303), 
'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[self.org]'}), '(org=[self.org])\n', (83287, 83303), False, 'import blm\n'), ((83391, 83455), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'accounting': '[accounting]', 'number': "['1000']"}), "(accounting=[accounting], number=['1000'])\n", (83413, 83455), False, 'import blm\n'), ((83686, 83783), 'py.test.raises', 'py.test.raises', (['ClientError', 'blm.accounting.Account'], {'accounting': '[accounting]', 'number': "['2000']"}), "(ClientError, blm.accounting.Account, accounting=[accounting],\n number=['2000'])\n", (83700, 83783), False, 'import bson, email, py, os, time, uuid\n'), ((83811, 83896), 'py.test.raises', 'py.test.raises', (['ClientError', 'account1000'], {'name': "['the account with the new name']"}), "(ClientError, account1000, name=['the account with the new name']\n )\n", (83825, 83896), False, 'import bson, email, py, os, time, uuid\n'), ((84088, 84149), 'blm.accounting.AccountBalance', 'blm.accounting.AccountBalance', ([], {'account': 'account1000', 'year': '[-1]'}), '(account=account1000, year=[-1])\n', (84117, 84149), False, 'import blm\n'), ((84447, 84488), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[self.org]'}), '(org=[self.org])\n', (84472, 84488), False, 'import blm\n'), ((84507, 84571), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'accounting': '[accounting]', 'number': "['1000']"}), "(accounting=[accounting], number=['1000'])\n", (84529, 84571), False, 'import blm\n'), ((84627, 84704), 'blm.accounting.Dimension', 'blm.accounting.Dimension', ([], {'accounting': '[accounting]', 'number': "['1']", 'name': "['dim']"}), "(accounting=[accounting], number=['1'], name=['dim'])\n", (84651, 84704), False, 'import blm\n'), ((84757, 84832), 'blm.accounting.AccountingObject', 'blm.accounting.AccountingObject', ([], {'dimension': '[dim]', 'number': "['1']", 'name': "['ao']"}), "(dimension=[dim], number=['1'], name=['ao'])\n", 
(84788, 84832), False, 'import blm\n'), ((85250, 85352), 'blm.accounting.ObjectBalanceBudget', 'blm.accounting.ObjectBalanceBudget', ([], {'account_balance': '[account]', 'accounting_object': '[ao]', 'period': "['']"}), "(account_balance=[account],\n accounting_object=[ao], period=[''])\n", (85284, 85352), False, 'import blm\n'), ((85969, 86043), 'blm.accounting.BalanceBudget', 'blm.accounting.BalanceBudget', ([], {'account_balance': '[account]', 'period': "['201401']"}), "(account_balance=[account], period=['201401'])\n", (85997, 86043), False, 'import blm\n'), ((86388, 86429), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[self.org]'}), '(org=[self.org])\n', (86413, 86429), False, 'import blm\n'), ((86447, 86517), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', ([], {'accounting': '[accounting]', 'name': "['A']"}), "(accounting=[accounting], name=['A'])\n", (86480, 86517), False, 'import blm\n'), ((86587, 86651), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'accounting': '[accounting]', 'number': "['1000']"}), "(accounting=[accounting], number=['1000'])\n", (86609, 86651), False, 'import blm\n'), ((86960, 87029), 'blm.accounting.Verification', 'blm.accounting.Verification', ([], {'accounting': '[accounting]', 'series': '[series]'}), '(accounting=[accounting], series=[series])\n', (86987, 87029), False, 'import blm\n'), ((87088, 87166), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'verification': '[ver]', 'account': '[account]', 'version': '[0]'}), '(verification=[ver], account=[account], version=[0])\n', (87114, 87166), False, 'import blm\n'), ((87399, 87502), 'py.test.raises', 'py.test.raises', (['ClientError', 'blm.accounting.Verification'], {'accounting': '[accounting]', 'series': '[series]'}), '(ClientError, blm.accounting.Verification, accounting=[\n accounting], series=[series])\n', (87413, 87502), False, 'import bson, email, py, os, time, uuid\n'), ((87529, 87640), 
'py.test.raises', 'py.test.raises', (['ClientError', 'blm.accounting.Transaction'], {'verification': '[ver]', 'account': '[account]', 'version': '[0]'}), '(ClientError, blm.accounting.Transaction, verification=[ver],\n account=[account], version=[0])\n', (87543, 87640), False, 'import bson, email, py, os, time, uuid\n'), ((87668, 87733), 'py.test.raises', 'py.test.raises', (['ClientError', 'ver'], {'transaction_date': "['2011-01-01']"}), "(ClientError, ver, transaction_date=['2011-01-01'])\n", (87682, 87733), False, 'import bson, email, py, os, time, uuid\n'), ((87742, 87794), 'py.test.raises', 'py.test.raises', (['ClientError', 'trans'], {'amount': "['20.00']"}), "(ClientError, trans, amount=['20.00'])\n", (87756, 87794), False, 'import bson, email, py, os, time, uuid\n'), ((87969, 87989), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (87987, 87989), False, 'import blm\n'), ((88019, 88065), 'blm.accounting.APIUser', 'blm.accounting.APIUser', ([], {'roles': '"""invoicesenders"""'}), "(roles='invoicesenders')\n", (88041, 88065), False, 'import blm\n'), ((88088, 88109), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (88107, 88109), False, 'import blm\n'), ((88131, 88152), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (88150, 88152), False, 'import blm\n'), ((88810, 88902), 'blm.accounting.VatCode', 'blm.accounting.VatCode', ([], {'code': "['10']", 'xmlCode': "['mngol']", 'description': "['meaning of life']"}), "(code=['10'], xmlCode=['mngol'], description=[\n 'meaning of life'])\n", (88832, 88902), False, 'import blm\n'), ((89168, 89219), 'blm.accounting.VatCode', 'blm.accounting.VatCode', ([], {'code': "['10']", 'xmlCode': "['10']"}), "(code=['10'], xmlCode=['10'])\n", (89190, 89219), False, 'import blm\n'), ((89235, 89286), 'blm.accounting.VatCode', 'blm.accounting.VatCode', ([], {'code': "['11']", 'xmlCode': "['10']"}), "(code=['11'], xmlCode=['10'])\n", (89257, 89286), False, 'import blm\n'), ((89302, 89353), 
'blm.accounting.VatCode', 'blm.accounting.VatCode', ([], {'code': "['12']", 'xmlCode': "['10']"}), "(code=['12'], xmlCode=['10'])\n", (89324, 89353), False, 'import blm\n'), ((89369, 89420), 'blm.accounting.VatCode', 'blm.accounting.VatCode', ([], {'code': "['13']", 'xmlCode': "['10']"}), "(code=['13'], xmlCode=['10'])\n", (89391, 89420), False, 'import blm\n'), ((89653, 89707), 'blm.accounting.VatCode', 'blm.accounting.VatCode', ([], {'code': "['10']", 'xmlCode': "['mngol']"}), "(code=['10'], xmlCode=['mngol'])\n", (89675, 89707), False, 'import blm\n'), ((89766, 89787), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (89785, 89787), False, 'import blm\n'), ((90227, 90261), 'blm.accounting.UG', 'blm.accounting.UG', ([], {'name': "['public']"}), "(name=['public'])\n", (90244, 90261), False, 'import blm\n'), ((90282, 90303), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (90301, 90303), False, 'import blm\n'), ((90505, 90524), 'blm.accounting.UG', 'blm.accounting.UG', ([], {}), '()\n', (90522, 90524), False, 'import blm\n'), ((90690, 90751), 'py.test.raises', 'py.test.raises', (['Exception', 'blm.accounting.UG'], {'name': "['public']"}), "(Exception, blm.accounting.UG, name=['public'])\n", (90704, 90751), False, 'import bson, email, py, os, time, uuid\n'), ((90791, 90811), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (90809, 90811), False, 'import blm\n'), ((91079, 91172), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'name': "['Acme Corporation']", 'email': "['<EMAIL>']", 'accountants': '[self.user]'}), "(name=['Acme Corporation'], email=['<EMAIL>'],\n accountants=[self.user])\n", (91097, 91172), False, 'import blm\n'), ((91321, 91360), 'blm.accounting.PaymentProvider', 'blm.accounting.PaymentProvider', ([], {'org': 'org'}), '(org=org)\n', (91351, 91360), False, 'import blm\n'), ((91476, 91606), 'blm.accounting.PGOrder', 'blm.accounting.PGOrder', ([], {'org': '[org]', 'contact': "['Mr. 
Foo']", 'contactPhone': "['1234567']", 'contactEmail': "['<EMAIL>']", 'pgnum': "['12345-6']"}), "(org=[org], contact=['Mr. Foo'], contactPhone=[\n '1234567'], contactEmail=['<EMAIL>'], pgnum=['12345-6'])\n", (91498, 91606), False, 'import blm\n'), ((92024, 92060), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[org]'}), '(org=[org])\n', (92049, 92060), False, 'import blm\n'), ((92197, 92233), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': '[org]'}), '(org=[org])\n', (92222, 92233), False, 'import blm\n'), ((92373, 92441), 'blm.accounting.Dimension', 'blm.accounting.Dimension', ([], {'number': "['1']", 'name': "['A']", 'accounting': '[acc]'}), "(number=['1'], name=['A'], accounting=[acc])\n", (92397, 92441), False, 'import blm\n'), ((92741, 92809), 'blm.accounting.Dimension', 'blm.accounting.Dimension', ([], {'number': "['1']", 'name': "['A']", 'accounting': '[acc]'}), "(number=['1'], name=['A'], accounting=[acc])\n", (92765, 92809), False, 'import blm\n'), ((92862, 92936), 'blm.accounting.AccountingObject', 'blm.accounting.AccountingObject', ([], {'number': "['1']", 'name': "['A']", 'dimension': '[dim]'}), "(number=['1'], name=['A'], dimension=[dim])\n", (92893, 92936), False, 'import blm\n'), ((93110, 93167), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': "['1234']", 'accounting': '[acc]'}), "(number=['1234'], accounting=[acc])\n", (93132, 93167), False, 'import blm\n'), ((93300, 93357), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': "['1234']", 'accounting': '[acc]'}), "(number=['1234'], accounting=[acc])\n", (93322, 93357), False, 'import blm\n'), ((93521, 93584), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', ([], {'name': "['A']", 'accounting': '[acc]'}), "(name=['A'], accounting=[acc])\n", (93554, 93584), False, 'import blm\n'), ((93717, 93778), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', ([], {'accounting': 
'acc', 'name': "['A']"}), "(accounting=acc, name=['A'])\n", (93750, 93778), False, 'import blm\n'), ((93793, 93867), 'blm.accounting.Verification', 'blm.accounting.Verification', ([], {'series': '[series]', 'number': '[1]', 'accounting': '[acc]'}), '(series=[series], number=[1], accounting=[acc])\n', (93820, 93867), False, 'import blm\n'), ((94042, 94103), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', ([], {'accounting': 'acc', 'name': "['A']"}), "(accounting=acc, name=['A'])\n", (94075, 94103), False, 'import blm\n'), ((94118, 94192), 'blm.accounting.Verification', 'blm.accounting.Verification', ([], {'series': '[series]', 'number': '[1]', 'accounting': '[acc]'}), '(series=[series], number=[1], accounting=[acc])\n', (94145, 94192), False, 'import blm\n'), ((94253, 94310), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'number': "['1234']", 'accounting': '[acc]'}), "(number=['1234'], accounting=[acc])\n", (94275, 94310), False, 'import blm\n'), ((94327, 94418), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'verification': '[ver]', 'version': 'ver.version', 'account': '[account]'}), '(verification=[ver], version=ver.version, account\n =[account])\n', (94353, 94418), False, 'import blm\n'), ((94656, 94715), 'blm.accounting.AccountBalance', 'blm.accounting.AccountBalance', ([], {'year': '[-1]', 'account': '[account]'}), '(year=[-1], account=[account])\n', (94685, 94715), False, 'import blm\n'), ((94859, 94948), 'blm.accounting.Dimension', 'blm.accounting.Dimension', ([], {'name': "['bar']", 'number': "['23']", 'accounting': 'account.accounting'}), "(name=['bar'], number=['23'], accounting=account.\n accounting)\n", (94883, 94948), False, 'import blm\n'), ((94959, 95036), 'blm.accounting.AccountingObject', 'blm.accounting.AccountingObject', ([], {'name': "['foo']", 'number': "['42']", 'dimension': '[dim]'}), "(name=['foo'], number=['42'], dimension=[dim])\n", (94990, 95036), False, 'import blm\n'), ((95051, 
95155), 'blm.accounting.ObjectBalanceBudget', 'blm.accounting.ObjectBalanceBudget', ([], {'period': "['']", 'accounting_object': '[aobj]', 'account_balance': '[account]'}), "(period=[''], accounting_object=[aobj],\n account_balance=[account])\n", (95085, 95155), False, 'import blm\n'), ((95386, 95460), 'blm.accounting.BalanceBudget', 'blm.accounting.BalanceBudget', ([], {'period': "['201003']", 'account_balance': '[account]'}), "(period=['201003'], account_balance=[account])\n", (95414, 95460), False, 'import blm\n'), ((95634, 95681), 'blm.accounting.AccountTemplate', 'blm.accounting.AccountTemplate', ([], {'number': "['1234']"}), "(number=['1234'])\n", (95664, 95681), False, 'import blm\n'), ((95813, 95857), 'blm.accounting.ChartOfAccounts', 'blm.accounting.ChartOfAccounts', ([], {'name': "['foo']"}), "(name=['foo'])\n", (95843, 95857), False, 'import blm\n'), ((95984, 96062), 'blm.accounting.VatCode', 'blm.accounting.VatCode', ([], {'code': "['66']", 'xmlCode': "['Awsm']", 'description': "['Awesome']"}), "(code=['66'], xmlCode=['Awsm'], description=['Awesome'])\n", (96006, 96062), False, 'import blm\n'), ((96320, 96344), 'accounting.sie_import.SIEImporter', 'sie_import.SIEImporter', ([], {}), '()\n', (96342, 96344), False, 'from accounting import config, mail, sie_import\n'), ((99507, 99553), 'itertools.zip_longest', 'izip_longest', (['acc1_dimensions', 'acc2_dimensions'], {}), '(acc1_dimensions, acc2_dimensions)\n', (99519, 99553), True, 'from itertools import zip_longest as izip_longest\n'), ((100766, 100818), 'itertools.zip_longest', 'izip_longest', (['acc1_verifications', 'acc2_verifications'], {}), '(acc1_verifications, acc2_verifications)\n', (100778, 100818), True, 'from itertools import zip_longest as izip_longest\n'), ((101263, 101305), 'itertools.zip_longest', 'izip_longest', (['acc1_accounts', 'acc2_accounts'], {}), '(acc1_accounts, acc2_accounts)\n', (101275, 101305), True, 'from itertools import zip_longest as izip_longest\n'), ((103788, 103826), 
'itertools.zip_longest', 'izip_longest', (['acc1_series', 'acc2_series'], {}), '(acc1_series, acc2_series)\n', (103800, 103826), True, 'from itertools import zip_longest as izip_longest\n'), ((104234, 104276), 'py.path.local.make_numbered_dir', 'py.path.local.make_numbered_dir', (['"""pytest-"""'], {}), "('pytest-')\n", (104265, 104276), False, 'import bson, email, py, os, time, uuid\n'), ((104338, 104358), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (104356, 104358), False, 'import blm\n'), ((104372, 104445), 'blm.accounting.PaymentProvider', 'blm.accounting.PaymentProvider', ([], {'org': '[org]', 'account': "['1000']", 'series': "['A']"}), "(org=[org], account=['1000'], series=['A'])\n", (104402, 104445), False, 'import blm\n'), ((104677, 104724), 'blm.accounting.newAccountingFromLastYear', 'blm.accounting.newAccountingFromLastYear', (['[org]'], {}), '([org])\n', (104717, 104724), False, 'import blm\n'), ((104995, 105128), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'name': "['org ' + original.orgname[0]]", 'orgnum': "['42' + original.orgnum[0]]", 'phone': "['42' + original.telephone[0]]"}), "(name=['org ' + original.orgname[0]], orgnum=['42' +\n original.orgnum[0]], phone=['42' + original.telephone[0]])\n", (105013, 105128), False, 'import blm\n'), ((105707, 105727), 'blm.accounting.Org', 'blm.accounting.Org', ([], {}), '()\n', (105725, 105727), False, 'import blm\n'), ((105792, 105898), 'blm.accounting.SupplierInvoiceProvider', 'blm.accounting.SupplierInvoiceProvider', ([], {'org': 'self.org', 'series': '"""A"""', 'account': '"""3000"""', 'bank_account': '"""4000"""'}), "(org=self.org, series='A', account=\n '3000', bank_account='4000')\n", (105830, 105898), False, 'import blm\n'), ((106829, 106900), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'subscriptionLevel': '"""subscriber"""', 'orgnum': '"""5164005810"""'}), "(subscriptionLevel='subscriber', orgnum='5164005810')\n", (106847, 106900), False, 'import blm\n'), ((106922, 
106943), 'blm.accounting.User', 'blm.accounting.User', ([], {}), '()\n', (106941, 106943), False, 'import blm\n'), ((107053, 107092), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'self.org'}), '(org=self.org)\n', (107078, 107092), False, 'import blm\n'), ((107120, 107185), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'accounting': 'self.accounting', 'number': '"""1000"""'}), "(accounting=self.accounting, number='1000')\n", (107142, 107185), False, 'import blm\n'), ((107213, 107278), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'accounting': 'self.accounting', 'number': '"""2000"""'}), "(accounting=self.accounting, number='2000')\n", (107235, 107278), False, 'import blm\n'), ((107306, 107371), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'accounting': 'self.accounting', 'number': '"""3000"""'}), "(accounting=self.accounting, number='3000')\n", (107328, 107371), False, 'import blm\n'), ((107399, 107464), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'accounting': 'self.accounting', 'number': '"""4000"""'}), "(accounting=self.accounting, number='4000')\n", (107421, 107464), False, 'import blm\n'), ((107487, 107558), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', ([], {'accounting': 'self.accounting', 'name': '"""A"""'}), "(accounting=self.accounting, name='A')\n", (107520, 107558), False, 'import blm\n'), ((107583, 107731), 'blm.accounting.SupplierInvoiceProvider', 'blm.accounting.SupplierInvoiceProvider', ([], {'org': 'self.org', 'series': '"""A"""', 'account': '"""3000"""', 'bank_account': '"""4000"""', 'plusgiro_sending_bank_account': '"""44580231"""'}), "(org=self.org, series='A', account=\n '3000', bank_account='4000', plusgiro_sending_bank_account='44580231')\n", (107621, 107731), False, 'import blm\n'), ((107759, 107823), 'blm.accounting.BankgiroProvider', 'blm.accounting.BankgiroProvider', ([], {'org': 'self.org', 'bgnum': "['1234566']"}), "(org=self.org, 
bgnum=['1234566'])\n", (107790, 107823), False, 'import blm\n'), ((111692, 111844), 'blm.accounting.SupplierInvoice', 'blm.accounting.SupplierInvoice', ([], {'org': 'self.org', 'recipient': '"""a"""', 'transferMethod': '"""bankaccount"""', 'invoiceIdentifierType': '"""message"""', 'dateInvoiceRegistered': '(1)'}), "(org=self.org, recipient='a', transferMethod=\n 'bankaccount', invoiceIdentifierType='message', dateInvoiceRegistered=1)\n", (111722, 111844), False, 'import blm\n'), ((111922, 112074), 'blm.accounting.SupplierInvoice', 'blm.accounting.SupplierInvoice', ([], {'org': 'self.org', 'recipient': '"""b"""', 'transferMethod': '"""bankaccount"""', 'invoiceIdentifierType': '"""message"""', 'dateInvoiceRegistered': '(2)'}), "(org=self.org, recipient='b', transferMethod=\n 'bankaccount', invoiceIdentifierType='message', dateInvoiceRegistered=2)\n", (111952, 112074), False, 'import blm\n'), ((113836, 113906), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[invoice1]'}), '(org=[self.org], invoice=[invoice1])\n', (113870, 113906), False, 'import blm\n'), ((115134, 115223), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[invoice1]', 'toid': '[stoid]'}), '(org=[self.org], invoice=[invoice1], toid\n =[stoid])\n', (115168, 115223), False, 'import blm\n'), ((117259, 117282), 'copy.deepcopy', 'copy.deepcopy', (['invoice2'], {}), '(invoice2)\n', (117272, 117282), False, 'import copy\n'), ((117301, 117390), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[invoice2]', 'toid': '[stoid]'}), '(org=[self.org], invoice=[invoice2], toid\n =[stoid])\n', (117335, 117390), False, 'import blm\n'), ((118169, 118258), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[invoice3]', 'toid': '[stoid]'}), '(org=[self.org], 
invoice=[invoice3], toid\n =[stoid])\n', (118203, 118258), False, 'import blm\n'), ((119163, 119268), 'blm.accounting.prepareVerification', 'blm.accounting.prepareVerification', (['self.org', 'regVerId', 'regVerLines', 'regVerVersion', 'siToid', 'siRegDate'], {}), '(self.org, regVerId, regVerLines,\n regVerVersion, siToid, siRegDate)\n', (119197, 119268), False, 'import blm\n'), ((119368, 119412), 'blm.accounting.createVerification', 'blm.accounting.createVerification', (['[result1]'], {}), '([result1])\n', (119401, 119412), False, 'import blm\n'), ((120470, 120539), 'blm.accounting.deleteSupplierInvoice', 'blm.accounting.deleteSupplierInvoice', ([], {'org': '[self.org]', 'supInvList': '[s0]'}), '(org=[self.org], supInvList=[s0])\n', (120506, 120539), False, 'import blm\n'), ((120827, 120855), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice1'], {}), '(self.invoice1)\n', (120840, 120855), False, 'import copy\n'), ((120874, 120938), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[i1]'}), '(org=[self.org], invoice=[i1])\n', (120908, 120938), False, 'import blm\n'), ((121115, 121184), 'blm.accounting.deleteSupplierInvoice', 'blm.accounting.deleteSupplierInvoice', ([], {'org': '[self.org]', 'supInvList': '[s1]'}), '(org=[self.org], supInvList=[s1])\n', (121151, 121184), False, 'import blm\n'), ((121921, 121990), 'blm.accounting.deleteSupplierInvoice', 'blm.accounting.deleteSupplierInvoice', ([], {'org': '[self.org]', 'supInvList': '[s1]'}), '(org=[self.org], supInvList=[s1])\n', (121957, 121990), False, 'import blm\n'), ((122229, 122257), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice0'], {}), '(self.invoice0)\n', (122242, 122257), False, 'import copy\n'), ((122405, 122469), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[i1]'}), '(org=[self.org], invoice=[i1])\n', (122439, 122469), False, 'import blm\n'), ((122523, 122551), 
'copy.deepcopy', 'copy.deepcopy', (['self.invoice0'], {}), '(self.invoice0)\n', (122536, 122551), False, 'import copy\n'), ((122699, 122763), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[i2]'}), '(org=[self.org], invoice=[i2])\n', (122733, 122763), False, 'import blm\n'), ((122817, 122845), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice0'], {}), '(self.invoice0)\n', (122830, 122845), False, 'import copy\n'), ((122993, 123057), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[i3]'}), '(org=[self.org], invoice=[i3])\n', (123027, 123057), False, 'import blm\n'), ((123111, 123139), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice0'], {}), '(self.invoice0)\n', (123124, 123139), False, 'import copy\n'), ((123456, 123520), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[i4]'}), '(org=[self.org], invoice=[i4])\n', (123490, 123520), False, 'import blm\n'), ((123583, 123681), 'blm.accounting.predictSupplierInvoice', 'blm.accounting.predictSupplierInvoice', ([], {'org': '[self.org]', 'recipient': "[self.invoice0['recipient']]"}), "(org=[self.org], recipient=[self.\n invoice0['recipient']])\n", (123620, 123681), False, 'import blm\n'), ((124515, 124587), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[prediction]'}), '(org=[self.org], invoice=[prediction])\n', (124549, 124587), False, 'import blm\n'), ((124642, 124740), 'blm.accounting.predictSupplierInvoice', 'blm.accounting.predictSupplierInvoice', ([], {'org': '[self.org]', 'recipient': "[self.invoice0['recipient']]"}), "(org=[self.org], recipient=[self.\n invoice0['recipient']])\n", (124679, 124740), False, 'import blm\n'), ((125198, 125268), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 
'invoice': '[invoice1]'}), '(org=[self.org], invoice=[invoice1])\n', (125232, 125268), False, 'import blm\n'), ((125329, 125399), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[invoice2]'}), '(org=[self.org], invoice=[invoice2])\n', (125363, 125399), False, 'import blm\n'), ((125604, 125692), 'blm.accounting.setSIState', 'blm.accounting.setSIState', ([], {'org': '[self.org]', 'supInvList': 'supInvList', 'newstate': "['paid']"}), "(org=[self.org], supInvList=supInvList, newstate=[\n 'paid'])\n", (125629, 125692), False, 'import blm\n'), ((125707, 125792), 'blm.accounting.createTransferVerification', 'blm.accounting.createTransferVerification', ([], {'org': '[self.org]', 'supInvList': 'supInvList'}), '(org=[self.org], supInvList=supInvList\n )\n', (125748, 125792), False, 'import blm\n'), ((127768, 127839), 'blm.accounting.SupplierInvoice', 'blm.accounting.SupplierInvoice', ([], {'org': 'self.org', 'recipient': '"""one"""', 'amount': '(1)'}), "(org=self.org, recipient='one', amount=1)\n", (127798, 127839), False, 'import blm\n'), ((129632, 129799), 'blm.accounting.SupplierInvoice', 'blm.accounting.SupplierInvoice', ([], {'id': '"""591462b6907e1340e0ffbd5a"""', 'org': 'self.org', 'recipient': '"""one"""', 'amount': '(1)', 'transferMethod': '"""bgnum"""', 'invoiceIdentifierType': '"""message"""'}), "(id='591462b6907e1340e0ffbd5a', org=self.org,\n recipient='one', amount=1, transferMethod='bgnum',\n invoiceIdentifierType='message')\n", (129662, 129799), False, 'import blm\n'), ((129914, 130028), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[self.invoice1]', 'toid': "['591462b6907e1340e0ffbd5a']"}), "(org=[self.org], invoice=[self.invoice1],\n toid=['591462b6907e1340e0ffbd5a'])\n", (129948, 130028), False, 'import blm\n'), ((130904, 130968), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': 
'[self.org]', 'invoice': '[s1]'}), '(org=[self.org], invoice=[s1])\n', (130938, 130968), False, 'import blm\n'), ((131494, 131661), 'blm.accounting.SupplierInvoice', 'blm.accounting.SupplierInvoice', ([], {'id': '"""591462b6907e1340e0ffbd5a"""', 'org': 'self.org', 'recipient': '"""one"""', 'amount': '(1)', 'transferMethod': '"""bgnum"""', 'invoiceIdentifierType': '"""message"""'}), "(id='591462b6907e1340e0ffbd5a', org=self.org,\n recipient='one', amount=1, transferMethod='bgnum',\n invoiceIdentifierType='message')\n", (131524, 131661), False, 'import blm\n'), ((131771, 131799), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice1'], {}), '(self.invoice1)\n', (131784, 131799), False, 'import copy\n'), ((131890, 131994), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[s1]', 'toid': "['591462b6907e1340e0ffbd5a']"}), "(org=[self.org], invoice=[s1], toid=[\n '591462b6907e1340e0ffbd5a'])\n", (131924, 131994), False, 'import blm\n'), ((132968, 133032), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[s1]'}), '(org=[self.org], invoice=[s1])\n', (133002, 133032), False, 'import blm\n'), ((133680, 133744), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[s1]'}), '(org=[self.org], invoice=[s1])\n', (133714, 133744), False, 'import blm\n'), ((134123, 134187), 'blm.accounting.BankgiroProvider', 'blm.accounting.BankgiroProvider', ([], {'org': 'self.org', 'bgnum': "['1234566']"}), "(org=self.org, bgnum=['1234566'])\n", (134154, 134187), False, 'import blm\n'), ((134203, 134317), 'accounting.bankgiro.gen_total_amount_record', 'bankgiro.gen_total_amount_record', ([], {'bankgiroProvider': 'bgprovider', 'len_supInvList': '(7)', 'totamount': '(500029900)', 'sign': '""" """'}), "(bankgiroProvider=bgprovider,\n len_supInvList=7, totamount=500029900, sign=' ')\n", (134235, 134317), False, 
'from accounting import bankgiro, plusgiro\n'), ((134567, 134601), 'accounting.bankgiro.gen_seal_opening_record', 'bankgiro.gen_seal_opening_record', ([], {}), '()\n', (134599, 134601), False, 'from accounting import bankgiro, plusgiro\n'), ((134793, 134821), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice1'], {}), '(self.invoice1)\n', (134806, 134821), False, 'import copy\n'), ((134841, 134869), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice2'], {}), '(self.invoice2)\n', (134854, 134869), False, 'import copy\n'), ((134884, 135051), 'blm.accounting.SupplierInvoice', 'blm.accounting.SupplierInvoice', ([], {'id': '"""591462b6907e1340e0ffbd5a"""', 'org': 'self.org', 'recipient': '"""one"""', 'amount': '(1)', 'transferMethod': '"""bgnum"""', 'invoiceIdentifierType': '"""message"""'}), "(id='591462b6907e1340e0ffbd5a', org=self.org,\n recipient='one', amount=1, transferMethod='bgnum',\n invoiceIdentifierType='message')\n", (134914, 135051), False, 'import blm\n'), ((135140, 135307), 'blm.accounting.SupplierInvoice', 'blm.accounting.SupplierInvoice', ([], {'id': '"""591462b6907e1340e0ffbd5e"""', 'org': 'self.org', 'recipient': '"""two"""', 'amount': '(2)', 'transferMethod': '"""bgnum"""', 'invoiceIdentifierType': '"""message"""'}), "(id='591462b6907e1340e0ffbd5e', org=self.org,\n recipient='two', amount=2, transferMethod='bgnum',\n invoiceIdentifierType='message')\n", (135170, 135307), False, 'import blm\n'), ((135423, 135533), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[invoice1]', 'toid': "['591462b6907e1340e0ffbd5a']"}), "(org=[self.org], invoice=[invoice1], toid\n =['591462b6907e1340e0ffbd5a'])\n", (135457, 135533), False, 'import blm\n'), ((135589, 135699), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[invoice2]', 'toid': "['591462b6907e1340e0ffbd5e']"}), "(org=[self.org], invoice=[invoice2], toid\n 
=['591462b6907e1340e0ffbd5e'])\n", (135623, 135699), False, 'import blm\n'), ((136623, 136651), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice1'], {}), '(self.invoice1)\n', (136636, 136651), False, 'import copy\n'), ((136671, 136699), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice2'], {}), '(self.invoice2)\n', (136684, 136699), False, 'import copy\n'), ((136759, 136926), 'blm.accounting.SupplierInvoice', 'blm.accounting.SupplierInvoice', ([], {'id': '"""591462b6907e1340e0ffbd5a"""', 'org': 'self.org', 'recipient': '"""one"""', 'amount': '(1)', 'transferMethod': '"""bgnum"""', 'invoiceIdentifierType': '"""message"""'}), "(id='591462b6907e1340e0ffbd5a', org=self.org,\n recipient='one', amount=1, transferMethod='bgnum',\n invoiceIdentifierType='message')\n", (136789, 136926), False, 'import blm\n'), ((137015, 137182), 'blm.accounting.SupplierInvoice', 'blm.accounting.SupplierInvoice', ([], {'id': '"""591462b6907e1340e0ffbd5e"""', 'org': 'self.org', 'recipient': '"""two"""', 'amount': '(2)', 'transferMethod': '"""bgnum"""', 'invoiceIdentifierType': '"""message"""'}), "(id='591462b6907e1340e0ffbd5e', org=self.org,\n recipient='two', amount=2, transferMethod='bgnum',\n invoiceIdentifierType='message')\n", (137045, 137182), False, 'import blm\n'), ((137349, 137459), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[invoice1]', 'toid': "['591462b6907e1340e0ffbd5a']"}), "(org=[self.org], invoice=[invoice1], toid\n =['591462b6907e1340e0ffbd5a'])\n", (137383, 137459), False, 'import blm\n'), ((137561, 137671), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[invoice2]', 'toid': "['591462b6907e1340e0ffbd5e']"}), "(org=[self.org], invoice=[invoice2], toid\n =['591462b6907e1340e0ffbd5e'])\n", (137595, 137671), False, 'import blm\n'), ((139331, 139359), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice1'], {}), '(self.invoice1)\n', (139344, 
139359), False, 'import copy\n'), ((139379, 139449), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[invoice1]'}), '(org=[self.org], invoice=[invoice1])\n', (139413, 139449), False, 'import blm\n'), ((140368, 140396), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice1'], {}), '(self.invoice1)\n', (140381, 140396), False, 'import copy\n'), ((140416, 140486), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[invoice1]'}), '(org=[self.org], invoice=[invoice1])\n', (140450, 140486), False, 'import blm\n'), ((140581, 140687), 'blm.accounting.createBgcOrder', 'blm.accounting.createBgcOrder', ([], {'org': 'self.org', 'bankgiroProvider': 'self.bankgiroprovider', 'supInvList': '[si1]'}), '(org=self.org, bankgiroProvider=self.\n bankgiroprovider, supInvList=[si1])\n', (140610, 140687), False, 'import blm\n'), ((140748, 140788), 'accounting.bankgiro.signBgcOrder', 'bankgiro.signBgcOrder', ([], {'bgcOrder': 'bgcOrder'}), '(bgcOrder=bgcOrder)\n', (140769, 140788), False, 'from accounting import bankgiro, plusgiro\n'), ((144469, 144517), 'accounting.bankgiro.hmac_sha256_128_bgsigner', 'bankgiro.hmac_sha256_128_bgsigner', (['lock', 'message'], {}), '(lock, message)\n', (144502, 144517), False, 'from accounting import bankgiro, plusgiro\n'), ((144723, 144771), 'accounting.bankgiro.hmac_sha256_128_bgsigner', 'bankgiro.hmac_sha256_128_bgsigner', (['lock', 'message'], {}), '(lock, message)\n', (144756, 144771), False, 'from accounting import bankgiro, plusgiro\n'), ((145019, 145067), 'accounting.bankgiro.hmac_sha256_128_bgsigner', 'bankgiro.hmac_sha256_128_bgsigner', (['lock', 'message'], {}), '(lock, message)\n', (145052, 145067), False, 'from accounting import bankgiro, plusgiro\n'), ((145618, 145666), 'accounting.bankgiro.hmac_sha256_128_bgsigner', 'bankgiro.hmac_sha256_128_bgsigner', (['lock', 'message'], {}), '(lock, message)\n', (145651, 145666), 
False, 'from accounting import bankgiro, plusgiro\n'), ((146027, 146065), 'accounting.bankgiro.hmac_sha256_128', 'bankgiro.hmac_sha256_128', (['key', 'message'], {}), '(key, message)\n', (146051, 146065), False, 'from accounting import bankgiro, plusgiro\n'), ((146090, 146152), 'accounting.bankgiro.hmac_sha256_128_bgsigner_truncated_256', 'bankgiro.hmac_sha256_128_bgsigner_truncated_256', (['lock', 'message'], {}), '(lock, message)\n', (146137, 146152), False, 'from accounting import bankgiro, plusgiro\n'), ((146407, 146436), 'accounting.bankgiro.create_hmac', 'bankgiro.create_hmac', (['message'], {}), '(message)\n', (146427, 146436), False, 'from accounting import bankgiro, plusgiro\n'), ((146461, 146518), 'accounting.bankgiro.create_hmac', 'bankgiro.create_hmac', (['message'], {'force_software_signer': '(True)'}), '(message, force_software_signer=True)\n', (146481, 146518), False, 'from accounting import bankgiro, plusgiro\n'), ((149803, 149831), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice1'], {}), '(self.invoice1)\n', (149816, 149831), False, 'import copy\n'), ((149851, 149879), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice2'], {}), '(self.invoice2)\n', (149864, 149879), False, 'import copy\n'), ((149894, 150061), 'blm.accounting.SupplierInvoice', 'blm.accounting.SupplierInvoice', ([], {'id': '"""591462b6907e1340e0ffbd5a"""', 'org': 'self.org', 'recipient': '"""one"""', 'amount': '(1)', 'transferMethod': '"""bgnum"""', 'invoiceIdentifierType': '"""message"""'}), "(id='591462b6907e1340e0ffbd5a', org=self.org,\n recipient='one', amount=1, transferMethod='bgnum',\n invoiceIdentifierType='message')\n", (149924, 150061), False, 'import blm\n'), ((150150, 150317), 'blm.accounting.SupplierInvoice', 'blm.accounting.SupplierInvoice', ([], {'id': '"""591462b6907e1340e0ffbd5e"""', 'org': 'self.org', 'recipient': '"""two"""', 'amount': '(2)', 'transferMethod': '"""bgnum"""', 'invoiceIdentifierType': '"""message"""'}), "(id='591462b6907e1340e0ffbd5e', 
org=self.org,\n recipient='two', amount=2, transferMethod='bgnum',\n invoiceIdentifierType='message')\n", (150180, 150317), False, 'import blm\n'), ((150433, 150543), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[invoice1]', 'toid': "['591462b6907e1340e0ffbd5a']"}), "(org=[self.org], invoice=[invoice1], toid\n =['591462b6907e1340e0ffbd5a'])\n", (150467, 150543), False, 'import blm\n'), ((150599, 150709), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[invoice2]', 'toid': "['591462b6907e1340e0ffbd5e']"}), "(org=[self.org], invoice=[invoice2], toid\n =['591462b6907e1340e0ffbd5e'])\n", (150633, 150709), False, 'import blm\n'), ((150786, 150842), 'blm.accounting.enableSIAutomation', 'blm.accounting.enableSIAutomation', ([], {'supInvList': 'supInvList'}), '(supInvList=supInvList)\n', (150819, 150842), False, 'import blm\n'), ((150933, 151007), 'blm.accounting.createSignedBgcOrder', 'blm.accounting.createSignedBgcOrder', ([], {'org': '[self.org]', 'supInvList': 'supInvList'}), '(org=[self.org], supInvList=supInvList)\n', (150968, 151007), False, 'import blm\n'), ((151285, 151299), 'py.test.skip', 'py.test.skip', ([], {}), '()\n', (151297, 151299), False, 'import bson, email, py, os, time, uuid\n'), ((151319, 151347), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice1'], {}), '(self.invoice1)\n', (151332, 151347), False, 'import copy\n'), ((151367, 151395), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice2'], {}), '(self.invoice2)\n', (151380, 151395), False, 'import copy\n'), ((151410, 151577), 'blm.accounting.SupplierInvoice', 'blm.accounting.SupplierInvoice', ([], {'id': '"""591462b6907e1340e0ffbd5a"""', 'org': 'self.org', 'recipient': '"""one"""', 'amount': '(1)', 'transferMethod': '"""bgnum"""', 'invoiceIdentifierType': '"""message"""'}), "(id='591462b6907e1340e0ffbd5a', org=self.org,\n recipient='one', amount=1, 
transferMethod='bgnum',\n invoiceIdentifierType='message')\n", (151440, 151577), False, 'import blm\n'), ((151666, 151833), 'blm.accounting.SupplierInvoice', 'blm.accounting.SupplierInvoice', ([], {'id': '"""591462b6907e1340e0ffbd5e"""', 'org': 'self.org', 'recipient': '"""two"""', 'amount': '(2)', 'transferMethod': '"""bgnum"""', 'invoiceIdentifierType': '"""message"""'}), "(id='591462b6907e1340e0ffbd5e', org=self.org,\n recipient='two', amount=2, transferMethod='bgnum',\n invoiceIdentifierType='message')\n", (151696, 151833), False, 'import blm\n'), ((151949, 152059), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[invoice1]', 'toid': "['591462b6907e1340e0ffbd5a']"}), "(org=[self.org], invoice=[invoice1], toid\n =['591462b6907e1340e0ffbd5a'])\n", (151983, 152059), False, 'import blm\n'), ((152115, 152225), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[invoice2]', 'toid': "['591462b6907e1340e0ffbd5e']"}), "(org=[self.org], invoice=[invoice2], toid\n =['591462b6907e1340e0ffbd5e'])\n", (152149, 152225), False, 'import blm\n'), ((152302, 152358), 'blm.accounting.enableSIAutomation', 'blm.accounting.enableSIAutomation', ([], {'supInvList': 'supInvList'}), '(supInvList=supInvList)\n', (152335, 152358), False, 'import blm\n'), ((152379, 152453), 'blm.accounting.createSignedBgcOrder', 'blm.accounting.createSignedBgcOrder', ([], {'org': '[self.org]', 'supInvList': 'supInvList'}), '(org=[self.org], supInvList=supInvList)\n', (152414, 152453), False, 'import blm\n'), ((152482, 152532), 'blm.accounting.cancelBgcOrder', 'blm.accounting.cancelBgcOrder', ([], {'bgcOrder': '[bgcOrder]'}), '(bgcOrder=[bgcOrder])\n', (152511, 152532), False, 'import blm\n'), ((152671, 152735), 'blm.accounting.BankgiroProvider', 'blm.accounting.BankgiroProvider', ([], {'org': 'self.org', 'bgnum': "['1234566']"}), "(org=self.org, bgnum=['1234566'])\n", (152702, 
152735), False, 'import blm\n'), ((152750, 152917), 'blm.accounting.SupplierInvoice', 'blm.accounting.SupplierInvoice', ([], {'id': '"""591462b6907e1340e0ffbd5a"""', 'org': 'self.org', 'recipient': '"""one"""', 'amount': '(1)', 'transferMethod': '"""bgnum"""', 'invoiceIdentifierType': '"""message"""'}), "(id='591462b6907e1340e0ffbd5a', org=self.org,\n recipient='one', amount=1, transferMethod='bgnum',\n invoiceIdentifierType='message')\n", (152780, 152917), False, 'import blm\n'), ((153006, 153173), 'blm.accounting.SupplierInvoice', 'blm.accounting.SupplierInvoice', ([], {'id': '"""591462b6907e1340e0ffbd5e"""', 'org': 'self.org', 'recipient': '"""two"""', 'amount': '(2)', 'transferMethod': '"""bgnum"""', 'invoiceIdentifierType': '"""message"""'}), "(id='591462b6907e1340e0ffbd5e', org=self.org,\n recipient='two', amount=2, transferMethod='bgnum',\n invoiceIdentifierType='message')\n", (153036, 153173), False, 'import blm\n'), ((153289, 153403), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[self.invoice1]', 'toid': "['591462b6907e1340e0ffbd5a']"}), "(org=[self.org], invoice=[self.invoice1],\n toid=['591462b6907e1340e0ffbd5a'])\n", (153323, 153403), False, 'import blm\n'), ((153460, 153574), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[self.invoice2]', 'toid': "['591462b6907e1340e0ffbd5e']"}), "(org=[self.org], invoice=[self.invoice2],\n toid=['591462b6907e1340e0ffbd5e'])\n", (153494, 153574), False, 'import blm\n'), ((154781, 154837), 'blm.accounting.enableSIAutomation', 'blm.accounting.enableSIAutomation', ([], {'supInvList': '[si1, si2]'}), '(supInvList=[si1, si2])\n', (154814, 154837), False, 'import blm\n'), ((154858, 154932), 'blm.accounting.createSignedBgcOrder', 'blm.accounting.createSignedBgcOrder', ([], {'org': '[self.org]', 'supInvList': '[si1, si2]'}), '(org=[self.org], supInvList=[si1, si2])\n', (154893, 154932), 
False, 'import blm\n'), ((155832, 155888), 'blm.accounting.enableSIAutomation', 'blm.accounting.enableSIAutomation', ([], {'supInvList': '[si1, si2]'}), '(supInvList=[si1, si2])\n', (155865, 155888), False, 'import blm\n'), ((155909, 155983), 'blm.accounting.createSignedBgcOrder', 'blm.accounting.createSignedBgcOrder', ([], {'org': '[self.org]', 'supInvList': '[si1, si2]'}), '(org=[self.org], supInvList=[si1, si2])\n', (155944, 155983), False, 'import blm\n'), ((156891, 156955), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[i1]'}), '(org=[self.org], invoice=[i1])\n', (156925, 156955), False, 'import blm\n'), ((157016, 157091), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[self.invoice2]'}), '(org=[self.org], invoice=[self.invoice2])\n', (157050, 157091), False, 'import blm\n'), ((157459, 157537), 'blm.accounting.setSIState', 'blm.accounting.setSIState', ([], {'org': '[self.org]', 'supInvList': '[si2]', 'newstate': "['paid']"}), "(org=[self.org], supInvList=[si2], newstate=['paid'])\n", (157484, 157537), False, 'import blm\n'), ((157637, 157730), 'blm.accounting.setSIState', 'blm.accounting.setSIState', ([], {'org': '[self.org]', 'supInvList': '[si1, si2]', 'newstate': "['scheduled']"}), "(org=[self.org], supInvList=[si1, si2], newstate=[\n 'scheduled'])\n", (157662, 157730), False, 'import blm\n'), ((157882, 157971), 'blm.accounting.setSIState', 'blm.accounting.setSIState', ([], {'org': '[self.org]', 'supInvList': '[si2]', 'newstate': "['registered']"}), "(org=[self.org], supInvList=[si2], newstate=[\n 'registered'])\n", (157907, 157971), False, 'import blm\n'), ((158219, 158270), 'blm.accounting.enableSIAutomation', 'blm.accounting.enableSIAutomation', ([], {'supInvList': '[si1]'}), '(supInvList=[si1])\n', (158252, 158270), False, 'import blm\n'), ((158555, 158606), 'blm.accounting.enableSIAutomation', 
'blm.accounting.enableSIAutomation', ([], {'supInvList': '[si1]'}), '(supInvList=[si1])\n', (158588, 158606), False, 'import blm\n'), ((158657, 158725), 'blm.accounting.disableSIAutomation', 'blm.accounting.disableSIAutomation', ([], {'org': '[self.org]', 'supInvList': '[si1]'}), '(org=[self.org], supInvList=[si1])\n', (158691, 158725), False, 'import blm\n'), ((159446, 159502), 'blm.accounting.enableSIAutomation', 'blm.accounting.enableSIAutomation', ([], {'supInvList': '[si1, si2]'}), '(supInvList=[si1, si2])\n', (159479, 159502), False, 'import blm\n'), ((160019, 160092), 'blm.accounting.disableSIAutomation', 'blm.accounting.disableSIAutomation', ([], {'org': '[self.org]', 'supInvList': '[si1, si2]'}), '(org=[self.org], supInvList=[si1, si2])\n', (160053, 160092), False, 'import blm\n'), ((160521, 160634), 'accounting.plusgiro.gen_cfp_po3_mh00', 'plusgiro.gen_cfp_po3_mh00', ([], {'org': 'self.org', 'sending_bank_account': 'self.provider.plusgiro_sending_bank_account[0]'}), '(org=self.org, sending_bank_account=self.provider.\n plusgiro_sending_bank_account[0])\n', (160546, 160634), False, 'from accounting import bankgiro, plusgiro\n'), ((160885, 160913), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice3'], {}), '(self.invoice3)\n', (160898, 160913), False, 'import copy\n'), ((160933, 160997), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[i3]'}), '(org=[self.org], invoice=[i3])\n', (160967, 160997), False, 'import blm\n'), ((161078, 161126), 'accounting.plusgiro.gen_cfp_po3_pi00', 'plusgiro.gen_cfp_po3_pi00', ([], {'supplierInvoice': '[si1]'}), '(supplierInvoice=[si1])\n', (161103, 161126), False, 'from accounting import bankgiro, plusgiro\n'), ((161535, 161583), 'accounting.plusgiro.gen_cfp_po3_pi00', 'plusgiro.gen_cfp_po3_pi00', ([], {'supplierInvoice': '[si1]'}), '(supplierInvoice=[si1])\n', (161560, 161583), False, 'from accounting import bankgiro, plusgiro\n'), ((162010, 162058), 
'accounting.plusgiro.gen_cfp_po3_pi00', 'plusgiro.gen_cfp_po3_pi00', ([], {'supplierInvoice': '[si1]'}), '(supplierInvoice=[si1])\n', (162035, 162058), False, 'from accounting import bankgiro, plusgiro\n'), ((162273, 162301), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice3'], {}), '(self.invoice3)\n', (162286, 162301), False, 'import copy\n'), ((162353, 162417), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[i3]'}), '(org=[self.org], invoice=[i3])\n', (162387, 162417), False, 'import blm\n'), ((162864, 162892), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice3'], {}), '(self.invoice3)\n', (162877, 162892), False, 'import copy\n'), ((162912, 162976), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[i3]'}), '(org=[self.org], invoice=[i3])\n', (162946, 162976), False, 'import blm\n'), ((163057, 163105), 'accounting.plusgiro.gen_cfp_po3_ba00', 'plusgiro.gen_cfp_po3_ba00', ([], {'supplierInvoice': '[si1]'}), '(supplierInvoice=[si1])\n', (163082, 163105), False, 'from accounting import bankgiro, plusgiro\n'), ((163307, 163335), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice3'], {}), '(self.invoice3)\n', (163320, 163335), False, 'import copy\n'), ((163355, 163419), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[i3]'}), '(org=[self.org], invoice=[i3])\n', (163389, 163419), False, 'import blm\n'), ((163500, 163548), 'accounting.plusgiro.gen_cfp_po3_bm99', 'plusgiro.gen_cfp_po3_bm99', ([], {'supplierInvoice': '[si1]'}), '(supplierInvoice=[si1])\n', (163525, 163548), False, 'from accounting import bankgiro, plusgiro\n'), ((163633, 163661), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice3'], {}), '(self.invoice3)\n', (163646, 163661), False, 'import copy\n'), ((163907, 163971), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': 
'[self.org]', 'invoice': '[i3]'}), '(org=[self.org], invoice=[i3])\n', (163941, 163971), False, 'import blm\n'), ((164052, 164100), 'accounting.plusgiro.gen_cfp_po3_bm99', 'plusgiro.gen_cfp_po3_bm99', ([], {'supplierInvoice': '[si1]'}), '(supplierInvoice=[si1])\n', (164077, 164100), False, 'from accounting import bankgiro, plusgiro\n'), ((164185, 164213), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice3'], {}), '(self.invoice3)\n', (164198, 164213), False, 'import copy\n'), ((164418, 164466), 'accounting.plusgiro.gen_cfp_po3_bm99', 'plusgiro.gen_cfp_po3_bm99', ([], {'supplierInvoice': '[si1]'}), '(supplierInvoice=[si1])\n', (164443, 164466), False, 'from accounting import bankgiro, plusgiro\n'), ((164551, 164579), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice3'], {}), '(self.invoice3)\n', (164564, 164579), False, 'import copy\n'), ((164825, 164889), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[i3]'}), '(org=[self.org], invoice=[i3])\n', (164859, 164889), False, 'import blm\n'), ((164970, 165018), 'accounting.plusgiro.gen_cfp_po3_bm99', 'plusgiro.gen_cfp_po3_bm99', ([], {'supplierInvoice': '[si1]'}), '(supplierInvoice=[si1])\n', (164995, 165018), False, 'from accounting import bankgiro, plusgiro\n'), ((165336, 165384), 'accounting.plusgiro.gen_cfp_po3_bm99', 'plusgiro.gen_cfp_po3_bm99', ([], {'supplierInvoice': '[si1]'}), '(supplierInvoice=[si1])\n', (165361, 165384), False, 'from accounting import bankgiro, plusgiro\n'), ((165493, 165521), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice3'], {}), '(self.invoice3)\n', (165506, 165521), False, 'import copy\n'), ((165571, 165635), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[i3]'}), '(org=[self.org], invoice=[i3])\n', (165605, 165635), False, 'import blm\n'), ((165719, 165747), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice3'], {}), '(self.invoice3)\n', (165732, 
165747), False, 'import copy\n'), ((165836, 165900), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[i3]'}), '(org=[self.org], invoice=[i3])\n', (165870, 165900), False, 'import blm\n'), ((165959, 166007), 'accounting.plusgiro.gen_cfp_po3_mt00', 'plusgiro.gen_cfp_po3_mt00', ([], {'supInvList': '[si1, si2]'}), '(supInvList=[si1, si2])\n', (165984, 166007), False, 'from accounting import bankgiro, plusgiro\n'), ((166245, 166273), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice3'], {}), '(self.invoice3)\n', (166258, 166273), False, 'import copy\n'), ((166293, 166357), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[i3]'}), '(org=[self.org], invoice=[i3])\n', (166327, 166357), False, 'import blm\n'), ((166455, 166483), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice3'], {}), '(self.invoice3)\n', (166468, 166483), False, 'import copy\n'), ((166729, 166793), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[i3]'}), '(org=[self.org], invoice=[i3])\n', (166763, 166793), False, 'import blm\n'), ((166888, 166916), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice3'], {}), '(self.invoice3)\n', (166901, 166916), False, 'import copy\n'), ((167160, 167188), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice3'], {}), '(self.invoice3)\n', (167173, 167188), False, 'import copy\n'), ((167434, 167498), 'blm.accounting.saveSupplierInvoice', 'blm.accounting.saveSupplierInvoice', ([], {'org': '[self.org]', 'invoice': '[i3]'}), '(org=[self.org], invoice=[i3])\n', (167468, 167498), False, 'import blm\n'), ((167849, 168008), 'accounting.plusgiro.generatePlusgiroRecords', 'plusgiro.generatePlusgiroRecords', ([], {'org': 'self.org', 'sending_bank_account': 'self.provider.plusgiro_sending_bank_account', 'supInvList': '[si1, si2, si3, si4, si5]'}), '(org=self.org, sending_bank_account=self.\n 
provider.plusgiro_sending_bank_account, supInvList=[si1, si2, si3, si4,\n si5])\n', (167881, 168008), False, 'from accounting import bankgiro, plusgiro\n'), ((168428, 168521), 'blm.accounting.generatePlusgiroFile', 'blm.accounting.generatePlusgiroFile', ([], {'org': '[self.org]', 'supInvList': '[si1, si2, si3, si4, si5]'}), '(org=[self.org], supInvList=[si1, si2,\n si3, si4, si5])\n', (168463, 168521), False, 'import blm\n'), ((169059, 169130), 'blm.accounting.Org', 'blm.accounting.Org', ([], {'subscriptionLevel': '"""subscriber"""', 'orgnum': '"""5164005810"""'}), "(subscriptionLevel='subscriber', orgnum='5164005810')\n", (169077, 169130), False, 'import blm\n'), ((169157, 169196), 'blm.accounting.Accounting', 'blm.accounting.Accounting', ([], {'org': 'self.org'}), '(org=self.org)\n', (169182, 169196), False, 'import blm\n'), ((169224, 169289), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'accounting': 'self.accounting', 'number': '"""1000"""'}), "(accounting=self.accounting, number='1000')\n", (169246, 169289), False, 'import blm\n'), ((169317, 169382), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'accounting': 'self.accounting', 'number': '"""2000"""'}), "(accounting=self.accounting, number='2000')\n", (169339, 169382), False, 'import blm\n'), ((169410, 169475), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'accounting': 'self.accounting', 'number': '"""3000"""'}), "(accounting=self.accounting, number='3000')\n", (169432, 169475), False, 'import blm\n'), ((169503, 169568), 'blm.accounting.Account', 'blm.accounting.Account', ([], {'accounting': 'self.accounting', 'number': '"""4000"""'}), "(accounting=self.accounting, number='4000')\n", (169525, 169568), False, 'import blm\n'), ((169591, 169662), 'blm.accounting.VerificationSeries', 'blm.accounting.VerificationSeries', ([], {'accounting': 'self.accounting', 'name': '"""A"""'}), "(accounting=self.accounting, name='A')\n", (169624, 169662), False, 'import blm\n'), ((169687, 
169835), 'blm.accounting.SupplierInvoiceProvider', 'blm.accounting.SupplierInvoiceProvider', ([], {'org': 'self.org', 'series': '"""A"""', 'account': '"""3000"""', 'bank_account': '"""4000"""', 'plusgiro_sending_bank_account': '"""44580231"""'}), "(org=self.org, series='A', account=\n '3000', bank_account='4000', plusgiro_sending_bank_account='44580231')\n", (169725, 169835), False, 'import blm\n'), ((169863, 169927), 'blm.accounting.BankgiroProvider', 'blm.accounting.BankgiroProvider', ([], {'org': 'self.org', 'bgnum': "['1234566']"}), "(org=self.org, bgnum=['1234566'])\n", (169894, 169927), False, 'import blm\n'), ((170571, 170618), 'blm.accounting.BgcReport', 'blm.accounting.BgcReport', ([], {'multiline': '[multiline]'}), '(multiline=[multiline])\n', (170595, 170618), False, 'import blm\n'), ((170879, 170905), 'blm.accounting.bootstrap', 'blm.accounting.bootstrap', ([], {}), '()\n', (170903, 170905), False, 'import blm\n'), ((171465, 171489), 'blm.accounting.upgrade', 'blm.accounting.upgrade', ([], {}), '()\n', (171487, 171489), False, 'import blm\n'), ((171515, 171539), 'blm.accounting.upgrade', 'blm.accounting.upgrade', ([], {}), '()\n', (171537, 171539), False, 'import blm\n'), ((1952, 2014), 'blm.accounting.currentUserHasRole', 'blm.accounting.currentUserHasRole', (['org', '*self.roles'], {'user': 'user'}), '(org, *self.roles, user=user)\n', (1985, 2014), False, 'import blm\n'), ((2327, 2384), 'blm.accounting.currentUserHasRole', 'blm.accounting.currentUserHasRole', (['org', '*roles'], {'user': 'apiu'}), '(org, *roles, user=apiu)\n', (2360, 2384), False, 'import blm\n'), ((2439, 2496), 'blm.accounting.currentUserHasRole', 'blm.accounting.currentUserHasRole', (['org', '*roles'], {'user': 'apiu'}), '(org, *roles, user=apiu)\n', (2472, 2496), False, 'import blm\n'), ((2573, 2628), 'blm.accounting.currentUserHasRole', 'blm.accounting.currentUserHasRole', (['org', 'role'], {'user': 'apiu'}), '(org, role, user=apiu)\n', (2606, 2628), False, 'import blm\n'), 
((2793, 2861), 'blm.accounting.currentUserHasRole', 'blm.accounting.currentUserHasRole', (['org2', '"""invoicesenders"""'], {'user': 'apiu'}), "(org2, 'invoicesenders', user=apiu)\n", (2826, 2861), False, 'import blm\n'), ((4605, 4643), 'py.test.raises', 'py.test.raises', (['exceptions.ClientError'], {}), '(exceptions.ClientError)\n', (4619, 4643), False, 'import bson, email, py, os, time, uuid\n'), ((4704, 4743), 'blm.accounting.createAPIUser', 'blm.accounting.createAPIUser', ([], {'org': '[org]'}), '(org=[org])\n', (4732, 4743), False, 'import blm\n'), ((16903, 16918), 'pytransact.runtime.setuid', 'ri.setuid', (['user'], {}), '(user)\n', (16912, 16918), True, 'import pytransact.runtime as ri\n'), ((17484, 17499), 'pytransact.runtime.setuid', 'ri.setuid', (['user'], {}), '(user)\n', (17493, 17499), True, 'import pytransact.runtime as ri\n'), ((18008, 18024), 'pytransact.runtime.setuid', 'ri.setuid', (['admin'], {}), '(admin)\n', (18017, 18024), True, 'import pytransact.runtime as ri\n'), ((18038, 18238), 'blm.accounting.updateMemberRoles', 'blm.accounting.updateMemberRoles', ([], {'org': '[org]', 'roleData': "[{'id': user1.id[0], 'roles': ['admin', 'accountant', 'payer',\n 'storekeeper', 'ticketchecker']}, {'id': user2.id[0], 'roles': ['member']}]"}), "(org=[org], roleData=[{'id': user1.id[0],\n 'roles': ['admin', 'accountant', 'payer', 'storekeeper',\n 'ticketchecker']}, {'id': user2.id[0], 'roles': ['member']}])\n", (18070, 18238), False, 'import blm\n'), ((18885, 18901), 'pytransact.runtime.setuid', 'ri.setuid', (['admin'], {}), '(admin)\n', (18894, 18901), True, 'import pytransact.runtime as ri\n'), ((18915, 19059), 'py.test.raises', 'py.test.raises', (['exceptions.ClientError', 'blm.accounting.updateMemberRoles'], {'org': '[org]', 'roleData': "[{'id': user1.id[0], 'roles': ['nosuchrole']}]"}), "(exceptions.ClientError, blm.accounting.updateMemberRoles,\n org=[org], roleData=[{'id': user1.id[0], 'roles': ['nosuchrole']}])\n", (18929, 19059), False, 'import 
bson, email, py, os, time, uuid\n'), ((19102, 19118), 'pytransact.runtime.setuid', 'ri.setuid', (['user2'], {}), '(user2)\n', (19111, 19118), True, 'import pytransact.runtime as ri\n'), ((19175, 19314), 'py.test.raises', 'py.test.raises', (['exceptions.ClientError', 'blm.accounting.updateMemberRoles'], {'org': '[org]', 'roleData': "[{'id': user2.id[0], 'roles': ['admin']}]"}), "(exceptions.ClientError, blm.accounting.updateMemberRoles,\n org=[org], roleData=[{'id': user2.id[0], 'roles': ['admin']}])\n", (19189, 19314), False, 'import bson, email, py, os, time, uuid\n'), ((19579, 19617), 'py.test.raises', 'py.test.raises', (['exceptions.ClientError'], {}), '(exceptions.ClientError)\n', (19593, 19617), False, 'import bson, email, py, os, time, uuid\n'), ((19673, 19765), 'blm.accounting.updateMemberRoles', 'blm.accounting.updateMemberRoles', ([], {'org': '[org]', 'roleData': "[{'id': user1.id[0], 'roles': []}]"}), "(org=[org], roleData=[{'id': user1.id[0],\n 'roles': []}])\n", (19705, 19765), False, 'import blm\n'), ((19980, 19996), 'pytransact.runtime.setuid', 'ri.setuid', (['admin'], {}), '(admin)\n', (19989, 19996), True, 'import pytransact.runtime as ri\n'), ((22471, 22480), 'os.fork', 'os.fork', ([], {}), '()\n', (22478, 22480), False, 'import bson, email, py, os, time, uuid\n'), ((23063, 23081), 'os.waitpid', 'os.waitpid', (['pid', '(0)'], {}), '(pid, 0)\n', (23073, 23081), False, 'import bson, email, py, os, time, uuid\n'), ((25084, 25091), 'pytransact.testsupport.Time', 'Time', (['(0)'], {}), '(0)\n', (25088, 25091), False, 'from pytransact.testsupport import BLMTests, Time\n'), ((25148, 25189), 'blm.accounting.expireTrialOrg', 'blm.accounting.expireTrialOrg', (['[self.org]'], {}), '([self.org])\n', (25177, 25189), False, 'import blm\n'), ((25204, 25211), 'pytransact.testsupport.Time', 'Time', (['(0)'], {}), '(0)\n', (25208, 25211), False, 'from pytransact.testsupport import BLMTests, Time\n'), ((25397, 25438), 'blm.accounting.expireTrialOrg', 
'blm.accounting.expireTrialOrg', (['[self.org]'], {}), '([self.org])\n', (25426, 25438), False, 'import blm\n'), ((25516, 25523), 'pytransact.testsupport.Time', 'Time', (['(0)'], {}), '(0)\n', (25520, 25523), False, 'from pytransact.testsupport import BLMTests, Time\n'), ((25611, 25652), 'blm.accounting.expireTrialOrg', 'blm.accounting.expireTrialOrg', (['[self.org]'], {}), '([self.org])\n', (25640, 25652), False, 'import blm\n'), ((25746, 25753), 'pytransact.testsupport.Time', 'Time', (['(0)'], {}), '(0)\n', (25750, 25753), False, 'from pytransact.testsupport import BLMTests, Time\n'), ((25832, 25873), 'blm.accounting.expireTrialOrg', 'blm.accounting.expireTrialOrg', (['[self.org]'], {}), '([self.org])\n', (25861, 25873), False, 'import blm\n'), ((25967, 25974), 'pytransact.testsupport.Time', 'Time', (['(0)'], {}), '(0)\n', (25971, 25974), False, 'from pytransact.testsupport import BLMTests, Time\n'), ((26057, 26098), 'blm.accounting.expireTrialOrg', 'blm.accounting.expireTrialOrg', (['[self.org]'], {}), '([self.org])\n', (26086, 26098), False, 'import blm\n'), ((41541, 41568), 'py.test.raises', 'py.test.raises', (['ClientError'], {}), '(ClientError)\n', (41555, 41568), False, 'import bson, email, py, os, time, uuid\n'), ((41582, 41629), 'blm.accounting.AccountTemplate', 'blm.accounting.AccountTemplate', ([], {'number': "['2222']"}), "(number=['2222'])\n", (41612, 41629), False, 'import blm\n'), ((41675, 41702), 'py.test.raises', 'py.test.raises', (['ClientError'], {}), '(ClientError)\n', (41689, 41702), False, 'import bson, email, py, os, time, uuid\n'), ((43678, 43705), 'py.test.raises', 'py.test.raises', (['ClientError'], {}), '(ClientError)\n', (43692, 43705), False, 'import bson, email, py, os, time, uuid\n'), ((43719, 43770), 'blm.accounting.ChartOfAccounts', 'blm.accounting.ChartOfAccounts', ([], {'name': "['Fake chart']"}), "(name=['Fake chart'])\n", (43749, 43770), False, 'import blm\n'), ((43816, 43843), 'py.test.raises', 'py.test.raises', 
(['ClientError'], {}), '(ClientError)\n', (43830, 43843), False, 'import bson, email, py, os, time, uuid\n'), ((45603, 45737), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'verification': '[self.ver]', 'account': '[account]', 'version': 'self.ver.version', 'amount': "['10.00']", 'quantity': "['5']"}), "(verification=[self.ver], account=[account],\n version=self.ver.version, amount=['10.00'], quantity=['5'])\n", (45629, 45737), False, 'import blm\n'), ((45864, 45999), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'verification': '[self.ver]', 'account': '[account]', 'version': 'self.ver.version', 'amount': "['-5.00']", 'quantity': "['-2']"}), "(verification=[self.ver], account=[account],\n version=self.ver.version, amount=['-5.00'], quantity=['-2'])\n", (45890, 45999), False, 'import blm\n'), ((46598, 46732), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'verification': '[self.ver]', 'account': '[account]', 'version': 'self.ver.version', 'amount': "['10.00']", 'quantity': "['5']"}), "(verification=[self.ver], account=[account],\n version=self.ver.version, amount=['10.00'], quantity=['5'])\n", (46624, 46732), False, 'import blm\n'), ((52339, 52345), 'pytransact.testsupport.Time', 'Time', ([], {}), '()\n', (52343, 52345), False, 'from pytransact.testsupport import BLMTests, Time\n'), ((57056, 57097), 'blm.accounting.createVerification', 'blm.accounting.createVerification', (['[data]'], {}), '([data])\n', (57089, 57097), False, 'import blm\n'), ((60349, 60379), 'py.test.raises', 'py.test.raises', (['AssertionError'], {}), '(AssertionError)\n', (60363, 60379), False, 'import bson, email, py, os, time, uuid\n'), ((60403, 60427), 'blm.accounting.Balance', 'blm.accounting.Balance', ([], {}), '()\n', (60425, 60427), False, 'import blm\n'), ((67804, 67915), 'blm.accounting.Transaction', 'blm.accounting.Transaction', ([], {'account': 'account', 'verification': '[self.ver]', 'version': 'self.ver.version', 'text': 
'[text]'}), '(account=account, verification=[self.ver],\n version=self.ver.version, text=[text])\n', (67830, 67915), False, 'import blm\n'), ((77726, 77774), 'py.test.raises', 'py.test.raises', (['ClientError', 'cls'], {'org': '[self.org]'}), '(ClientError, cls, org=[self.org])\n', (77740, 77774), False, 'import bson, email, py, os, time, uuid\n'), ((77787, 77836), 'py.test.raises', 'py.test.raises', (['ClientError', 'pp'], {'account': "['2000']"}), "(ClientError, pp, account=['2000'])\n", (77801, 77836), False, 'import bson, email, py, os, time, uuid\n'), ((77849, 77888), 'py.test.raises', 'py.test.raises', (['ClientError', 'pp._delete'], {}), '(ClientError, pp._delete)\n', (77863, 77888), False, 'import bson, email, py, os, time, uuid\n'), ((83929, 83956), 'py.test.raises', 'py.test.raises', (['ClientError'], {}), '(ClientError)\n', (83943, 83956), False, 'import bson, email, py, os, time, uuid\n'), ((83970, 84031), 'blm.accounting.AccountBalance', 'blm.accounting.AccountBalance', ([], {'account': 'account1000', 'year': '[-1]'}), '(account=account1000, year=[-1])\n', (83999, 84031), False, 'import blm\n'), ((84244, 84271), 'py.test.raises', 'py.test.raises', (['ClientError'], {}), '(ClientError)\n', (84258, 84271), False, 'import bson, email, py, os, time, uuid\n'), ((84959, 84986), 'py.test.raises', 'py.test.raises', (['ClientError'], {}), '(ClientError)\n', (84973, 84986), False, 'import bson, email, py, os, time, uuid\n'), ((85000, 85102), 'blm.accounting.ObjectBalanceBudget', 'blm.accounting.ObjectBalanceBudget', ([], {'account_balance': '[account]', 'accounting_object': '[ao]', 'period': "['']"}), "(account_balance=[account],\n accounting_object=[ao], period=[''])\n", (85034, 85102), False, 'import blm\n'), ((85542, 85569), 'py.test.raises', 'py.test.raises', (['ClientError'], {}), '(ClientError)\n', (85556, 85569), False, 'import bson, email, py, os, time, uuid\n'), ((85756, 85783), 'py.test.raises', 'py.test.raises', (['ClientError'], {}), 
'(ClientError)\n', (85770, 85783), False, 'import bson, email, py, os, time, uuid\n'), ((85797, 85871), 'blm.accounting.BalanceBudget', 'blm.accounting.BalanceBudget', ([], {'account_balance': '[account]', 'period': "['201401']"}), "(account_balance=[account], period=['201401'])\n", (85825, 85871), False, 'import blm\n'), ((86181, 86208), 'py.test.raises', 'py.test.raises', (['ClientError'], {}), '(ClientError)\n', (86195, 86208), False, 'import bson, email, py, os, time, uuid\n'), ((88390, 88417), 'py.test.raises', 'py.test.raises', (['ClientError'], {}), '(ClientError)\n', (88404, 88417), False, 'import bson, email, py, os, time, uuid\n'), ((89832, 89859), 'py.test.raises', 'py.test.raises', (['ClientError'], {}), '(ClientError)\n', (89846, 89859), False, 'import bson, email, py, os, time, uuid\n'), ((89873, 89965), 'blm.accounting.VatCode', 'blm.accounting.VatCode', ([], {'code': "['42']", 'xmlCode': "['mngol']", 'description': "['meaning of life']"}), "(code=['42'], xmlCode=['mngol'], description=[\n 'meaning of life'])\n", (89895, 89965), False, 'import blm\n'), ((90010, 90037), 'py.test.raises', 'py.test.raises', (['ClientError'], {}), '(ClientError)\n', (90024, 90037), False, 'import bson, email, py, os, time, uuid\n'), ((96254, 96279), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (96269, 96279), False, 'import bson, email, py, os, time, uuid\n'), ((97291, 97316), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (97306, 97316), False, 'import bson, email, py, os, time, uuid\n'), ((99814, 99858), 'itertools.zip_longest', 'izip_longest', (['dim1.subdim_of', 'dim2.subdim_of'], {}), '(dim1.subdim_of, dim2.subdim_of)\n', (99826, 99858), True, 'from itertools import zip_longest as izip_longest\n'), ((100485, 100517), 'itertools.zip_longest', 'izip_longest', (['dim1_aos', 'dim2_aos'], {}), '(dim1_aos, dim2_aos)\n', (100497, 100517), True, 'from itertools import zip_longest as izip_longest\n'), ((102071, 102103), 
'itertools.zip_longest', 'izip_longest', (['acc1_abs', 'acc2_abs'], {}), '(acc1_abs, acc2_abs)\n', (102083, 102103), True, 'from itertools import zip_longest as izip_longest\n'), ((106104, 106140), 'accounting.luhn.luhn_checksum', 'luhn.luhn_checksum', (['transferAddress1'], {}), '(transferAddress1)\n', (106122, 106140), False, 'from accounting import luhn\n'), ((106428, 106464), 'accounting.luhn.luhn_checksum', 'luhn.luhn_checksum', (['transferAddress2'], {}), '(transferAddress2)\n', (106446, 106464), False, 'from accounting import luhn\n'), ((113977, 113995), 'decimal.Decimal', 'Decimal', (['"""6640.00"""'], {}), "('6640.00')\n", (113984, 113995), False, 'from decimal import Decimal\n'), ((115435, 115453), 'decimal.Decimal', 'Decimal', (['"""6540.00"""'], {}), "('6540.00')\n", (115442, 115453), False, 'from decimal import Decimal\n'), ((115681, 115697), 'decimal.Decimal', 'Decimal', (['"""50.00"""'], {}), "('50.00')\n", (115688, 115697), False, 'from decimal import Decimal\n'), ((117555, 117571), 'decimal.Decimal', 'Decimal', (['"""45.00"""'], {}), "('45.00')\n", (117562, 117571), False, 'from decimal import Decimal\n'), ((117790, 117807), 'decimal.Decimal', 'Decimal', (['"""-42.00"""'], {}), "('-42.00')\n", (117797, 117807), False, 'from decimal import Decimal\n'), ((117839, 117855), 'decimal.Decimal', 'Decimal', (['"""-3.00"""'], {}), "('-3.00')\n", (117846, 117855), False, 'from decimal import Decimal\n'), ((126283, 126297), 'decimal.Decimal', 'Decimal', (['"""-90"""'], {}), "('-90')\n", (126290, 126297), False, 'from decimal import Decimal\n'), ((126508, 126535), 'accounting.bankgiro.encode_toid20', 'bankgiro.encode_toid20', (['toi'], {}), '(toi)\n', (126530, 126535), False, 'from accounting import bankgiro, plusgiro\n'), ((126688, 126718), 'accounting.bankgiro.decode_toid20', 'bankgiro.decode_toid20', (['toid20'], {}), '(toid20)\n', (126710, 126718), False, 'from accounting import bankgiro, plusgiro\n'), ((127019, 127044), 
'accounting.bankgiro.encode_toid20', 'bankgiro.encode_toid20', (['t'], {}), '(t)\n', (127041, 127044), False, 'from accounting import bankgiro, plusgiro\n'), ((127517, 127536), 'accounting.bankgiro.findToi', 'bankgiro.findToi', (['s'], {}), '(s)\n', (127533, 127536), False, 'from accounting import bankgiro, plusgiro\n'), ((127950, 127979), 'accounting.bankgiro.bg_transferdate', 'bankgiro.bg_transferdate', (['si1'], {}), '(si1)\n', (127974, 127979), False, 'from accounting import bankgiro, plusgiro\n'), ((129226, 129242), 'pytransact.testsupport.Time', 'Time', (['(1494257828)'], {}), '(1494257828)\n', (129230, 129242), False, 'from pytransact.testsupport import BLMTests, Time\n'), ((129268, 129318), 'accounting.bankgiro.gen_opening_record', 'bankgiro.gen_opening_record', (['self.bankgiroprovider'], {}), '(self.bankgiroprovider)\n', (129295, 129318), False, 'from accounting import bankgiro, plusgiro\n'), ((130156, 130188), 'accounting.bankgiro.gen_payment_record', 'bankgiro.gen_payment_record', (['si1'], {}), '(si1)\n', (130183, 130188), False, 'from accounting import bankgiro, plusgiro\n'), ((130418, 130450), 'accounting.bankgiro.gen_payment_record', 'bankgiro.gen_payment_record', (['si1'], {}), '(si1)\n', (130445, 130450), False, 'from accounting import bankgiro, plusgiro\n'), ((131102, 131138), 'accounting.bankgiro.gen_information_record', 'bankgiro.gen_information_record', (['si1'], {}), '(si1)\n', (131133, 131138), False, 'from accounting import bankgiro, plusgiro\n'), ((132121, 132162), 'accounting.bankgiro.gen_payment_record_plusgiro', 'bankgiro.gen_payment_record_plusgiro', (['si1'], {}), '(si1)\n', (132157, 132162), False, 'from accounting import bankgiro, plusgiro\n'), ((132392, 132433), 'accounting.bankgiro.gen_payment_record_plusgiro', 'bankgiro.gen_payment_record_plusgiro', (['si1'], {}), '(si1)\n', (132428, 132433), False, 'from accounting import bankgiro, plusgiro\n'), ((133086, 133102), 'pytransact.testsupport.Time', 'Time', (['(1494257828)'], {}), 
'(1494257828)\n', (133090, 133102), False, 'from pytransact.testsupport import BLMTests, Time\n'), ((133130, 133175), 'accounting.bankgiro.gen_information_record_plusgiro', 'bankgiro.gen_information_record_plusgiro', (['si1'], {}), '(si1)\n', (133170, 133175), False, 'from accounting import bankgiro, plusgiro\n'), ((133798, 133814), 'pytransact.testsupport.Time', 'Time', (['(1494257828)'], {}), '(1494257828)\n', (133802, 133814), False, 'from pytransact.testsupport import BLMTests, Time\n'), ((133841, 133880), 'accounting.bankgiro.gen_account_number_record', 'bankgiro.gen_account_number_record', (['si1'], {}), '(si1)\n', (133875, 133880), False, 'from accounting import bankgiro, plusgiro\n'), ((135862, 135959), 'accounting.bankgiro.transferOrderBankgiro', 'bankgiro.transferOrderBankgiro', ([], {'bankgiroProvider': 'self.bankgiroprovider', 'supInvList': 'supInvList'}), '(bankgiroProvider=self.bankgiroprovider,\n supInvList=supInvList)\n', (135892, 135959), False, 'from accounting import bankgiro, plusgiro\n'), ((135986, 136011), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (136001, 136011), False, 'import bson, email, py, os, time, uuid\n'), ((136251, 136348), 'accounting.bankgiro.transferOrderBankgiro', 'bankgiro.transferOrderBankgiro', ([], {'bankgiroProvider': 'self.bankgiroprovider', 'supInvList': 'supInvList'}), '(bankgiroProvider=self.bankgiroprovider,\n supInvList=supInvList)\n', (136281, 136348), False, 'from accounting import bankgiro, plusgiro\n'), ((136375, 136400), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (136390, 136400), False, 'import bson, email, py, os, time, uuid\n'), ((137918, 138023), 'accounting.bankgiro.transferOrderBankgiroRecords', 'bankgiro.transferOrderBankgiroRecords', ([], {'bankgiroProvider': 'self.bankgiroprovider', 'supInvList': 'supInvList'}), '(bankgiroProvider=self.\n bankgiroprovider, supInvList=supInvList)\n', (137955, 138023), False, 'from accounting import bankgiro, 
plusgiro\n'), ((138048, 138073), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (138063, 138073), False, 'import bson, email, py, os, time, uuid\n'), ((138658, 138763), 'accounting.bankgiro.transferOrderBankgiroRecords', 'bankgiro.transferOrderBankgiroRecords', ([], {'bankgiroProvider': 'self.bankgiroprovider', 'supInvList': 'supInvList'}), '(bankgiroProvider=self.\n bankgiroprovider, supInvList=supInvList)\n', (138695, 138763), False, 'from accounting import bankgiro, plusgiro\n'), ((138788, 138813), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (138803, 138813), False, 'import bson, email, py, os, time, uuid\n'), ((139620, 139726), 'blm.accounting.createBgcOrder', 'blm.accounting.createBgcOrder', ([], {'org': 'self.org', 'bankgiroProvider': 'self.bankgiroprovider', 'supInvList': '[si1]'}), '(org=self.org, bankgiroProvider=self.\n bankgiroprovider, supInvList=[si1])\n', (139649, 139726), False, 'import blm\n'), ((141530, 141564), 'codecs.decode', 'codecs.decode', (['key'], {'encoding': '"""hex"""'}), "(key, encoding='hex')\n", (141543, 141564), False, 'import codecs\n'), ((142072, 142113), 'codecs.decode', 'codecs.decode', (["(b'aa' * 32)"], {'encoding': '"""hex"""'}), "(b'aa' * 32, encoding='hex')\n", (142085, 142113), False, 'import codecs\n'), ((142135, 142176), 'codecs.decode', 'codecs.decode', (["(b'dd' * 50)"], {'encoding': '"""hex"""'}), "(b'dd' * 50, encoding='hex')\n", (142148, 142176), False, 'import codecs\n'), ((142455, 142489), 'codecs.decode', 'codecs.decode', (['key'], {'encoding': '"""hex"""'}), "(key, encoding='hex')\n", (142468, 142489), False, 'import codecs\n'), ((142510, 142551), 'codecs.decode', 'codecs.decode', (["(b'cd' * 50)"], {'encoding': '"""hex"""'}), "(b'cd' * 50, encoding='hex')\n", (142523, 142551), False, 'import codecs\n'), ((142691, 142732), 'codecs.decode', 'codecs.decode', (["(b'0c' * 32)"], {'encoding': '"""hex"""'}), "(b'0c' * 32, encoding='hex')\n", (142704, 142732), 
False, 'import codecs\n'), ((144170, 144201), 'os.path.exists', 'os.path.exists', (['"""/dev/bgsigner"""'], {}), "('/dev/bgsigner')\n", (144184, 144201), False, 'import bson, email, py, os, time, uuid\n'), ((145177, 145208), 'os.path.exists', 'os.path.exists', (['"""/dev/bgsigner"""'], {}), "('/dev/bgsigner')\n", (145191, 145208), False, 'import bson, email, py, os, time, uuid\n'), ((145793, 145824), 'os.path.exists', 'os.path.exists', (['"""/dev/bgsigner"""'], {}), "('/dev/bgsigner')\n", (145807, 145824), False, 'import bson, email, py, os, time, uuid\n'), ((146351, 146384), 'codecs.encode', 'codecs.encode', (['message', '"""latin-1"""'], {}), "(message, 'latin-1')\n", (146364, 146384), False, 'import codecs\n'), ((149229, 149254), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (149244, 149254), False, 'import bson, email, py, os, time, uuid\n'), ((149565, 149590), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (149580, 149590), False, 'import bson, email, py, os, time, uuid\n'), ((153829, 153854), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (153844, 153854), False, 'import bson, email, py, os, time, uuid\n'), ((162576, 162624), 'accounting.plusgiro.gen_cfp_po3_pi00', 'plusgiro.gen_cfp_po3_pi00', ([], {'supplierInvoice': '[si4]'}), '(supplierInvoice=[si4])\n', (162601, 162624), False, 'from accounting import bankgiro, plusgiro\n'), ((168075, 168100), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (168090, 168100), False, 'import bson, email, py, os, time, uuid\n'), ((6005, 6046), 'blm.accounting.Invitation._query', 'blm.accounting.Invitation._query', ([], {'org': 'org'}), '(org=org)\n', (6037, 6046), False, 'import blm\n'), ((8936, 8985), 'blm.accounting.PaymentProvider._query', 'blm.accounting.PaymentProvider._query', ([], {'id': 'ppd1.id'}), '(id=ppd1.id)\n', (8973, 8985), False, 'import blm\n'), ((9058, 9086), 'blm.members.Payment._query', 
'blm.members.Payment._query', ([], {}), '()\n', (9084, 9086), False, 'import blm\n'), ((12233, 12258), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (12248, 12258), False, 'import bson, email, py, os, time, uuid\n'), ((17893, 17920), 'blm.accounting.Org._query', 'blm.accounting.Org._query', ([], {}), '()\n', (17918, 17920), False, 'import blm\n'), ((20015, 20053), 'py.test.raises', 'py.test.raises', (['exceptions.ClientError'], {}), '(exceptions.ClientError)\n', (20029, 20053), False, 'import bson, email, py, os, time, uuid\n'), ((20130, 20230), 'blm.accounting.updateMemberRoles', 'blm.accounting.updateMemberRoles', ([], {'org': '[org]', 'roleData': "[{'id': admin.id[0], 'roles': ['member']}]"}), "(org=[org], roleData=[{'id': admin.id[0],\n 'roles': ['member']}])\n", (20162, 20230), False, 'import blm\n'), ((23738, 23777), 'blm.accounting.Org._query', 'blm.accounting.Org._query', ([], {'id': 'org.id[0]'}), '(id=org.id[0])\n', (23763, 23777), False, 'import blm\n'), ((23802, 23842), 'blm.accounting.User._query', 'blm.accounting.User._query', ([], {'name': '"""user1"""'}), "(name='user1')\n", (23828, 23842), False, 'import blm\n'), ((23866, 23906), 'blm.accounting.User._query', 'blm.accounting.User._query', ([], {'name': '"""user2"""'}), "(name='user2')\n", (23892, 23906), False, 'import blm\n'), ((26324, 26351), 'blm.accounting.Org._query', 'blm.accounting.Org._query', ([], {}), '()\n', (26349, 26351), False, 'import blm\n'), ((27163, 27190), 'blm.accounting.Org._query', 'blm.accounting.Org._query', ([], {}), '()\n', (27188, 27190), False, 'import blm\n'), ((27417, 27444), 'blm.accounting.Org._query', 'blm.accounting.Org._query', ([], {}), '()\n', (27442, 27444), False, 'import blm\n'), ((27674, 27701), 'blm.accounting.Org._query', 'blm.accounting.Org._query', ([], {}), '()\n', (27699, 27701), False, 'import blm\n'), ((27728, 27759), 'blm.accounting.PGOrder._query', 'blm.accounting.PGOrder._query', ([], {}), '()\n', (27757, 27759), False, 
'import blm\n'), ((28010, 28057), 'blm.accounting.PlusgiroProvider._query', 'blm.accounting.PlusgiroProvider._query', ([], {'org': 'org'}), '(org=org)\n', (28048, 28057), False, 'import blm\n'), ((32048, 32077), 'blm.TO._query', 'blm.TO._query', ([], {'id': 'self.org.id'}), '(id=self.org.id)\n', (32061, 32077), False, 'import blm\n'), ((32110, 32145), 'blm.TO._query', 'blm.TO._query', ([], {'id': 'accounting2011.id'}), '(id=accounting2011.id)\n', (32123, 32145), False, 'import blm\n'), ((32736, 32771), 'blm.TO._query', 'blm.TO._query', ([], {'id': 'accounting2010.id'}), '(id=accounting2010.id)\n', (32749, 32771), False, 'import blm\n'), ((33005, 33052), 'blm.accounting.Dimension._query', 'blm.accounting.Dimension._query', ([], {'accounting': 'acc'}), '(accounting=acc)\n', (33036, 33052), False, 'import blm\n'), ((33112, 33161), 'blm.accounting.Dimension._query', 'blm.accounting.Dimension._query', ([], {'name': "['Projekt']"}), "(name=['Projekt'])\n", (33143, 33161), False, 'import blm\n'), ((34260, 34285), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d"""'], {}), "('%Y-%m-%d')\n", (34273, 34285), False, 'import bson, email, py, os, time, uuid\n'), ((35703, 35719), 'decimal.Decimal', 'Decimal', (['"""10.00"""'], {}), "('10.00')\n", (35710, 35719), False, 'from decimal import Decimal\n'), ((35943, 35959), 'decimal.Decimal', 'Decimal', (['"""27.00"""'], {}), "('27.00')\n", (35950, 35959), False, 'from decimal import Decimal\n'), ((36094, 36110), 'decimal.Decimal', 'Decimal', (['"""10.00"""'], {}), "('10.00')\n", (36101, 36110), False, 'from decimal import Decimal\n'), ((36246, 36262), 'decimal.Decimal', 'Decimal', (['"""10.00"""'], {}), "('10.00')\n", (36253, 36262), False, 'from decimal import Decimal\n'), ((36748, 36794), 'blm.accounting.Account._query', 'blm.accounting.Account._query', ([], {'accounting': 'curr'}), '(accounting=curr)\n', (36777, 36794), False, 'import blm\n'), ((37006, 37019), 'decimal.Decimal', 'Decimal', (['"""10"""'], {}), "('10')\n", (37013, 
37019), False, 'from decimal import Decimal\n'), ((37065, 37078), 'decimal.Decimal', 'Decimal', (['"""15"""'], {}), "('15')\n", (37072, 37078), False, 'from decimal import Decimal\n'), ((37184, 37197), 'decimal.Decimal', 'Decimal', (['"""11"""'], {}), "('11')\n", (37191, 37197), False, 'from decimal import Decimal\n'), ((37243, 37256), 'decimal.Decimal', 'Decimal', (['"""11"""'], {}), "('11')\n", (37250, 37256), False, 'from decimal import Decimal\n'), ((37363, 37376), 'decimal.Decimal', 'Decimal', (['"""27"""'], {}), "('27')\n", (37370, 37376), False, 'from decimal import Decimal\n'), ((37422, 37435), 'decimal.Decimal', 'Decimal', (['"""27"""'], {}), "('27')\n", (37429, 37435), False, 'from decimal import Decimal\n'), ((37481, 37491), 'decimal.Decimal', 'Decimal', (['(0)'], {}), '(0)\n', (37488, 37491), False, 'from decimal import Decimal\n'), ((37537, 37547), 'decimal.Decimal', 'Decimal', (['(0)'], {}), '(0)\n', (37544, 37547), False, 'from decimal import Decimal\n'), ((37939, 37989), 'blm.accounting.VerificationSeries._query', 'blm.accounting.VerificationSeries._query', ([], {'name': '"""X"""'}), "(name='X')\n", (37979, 37989), False, 'import blm\n'), ((38154, 38196), 'blm.accounting.VerificationSeries._query', 'blm.accounting.VerificationSeries._query', ([], {}), '()\n', (38194, 38196), False, 'import blm\n'), ((38497, 38553), 'blm.accounting.VerificationSeries._query', 'blm.accounting.VerificationSeries._query', ([], {'accounting': 'acc'}), '(accounting=acc)\n', (38537, 38553), False, 'import blm\n'), ((43119, 43171), 'blm.accounting.Account._query', 'blm.accounting.Account._query', ([], {'accounting': 'accounting'}), '(accounting=accounting)\n', (43148, 43171), False, 'import blm\n'), ((43239, 43302), 'blm.accounting.VerificationSeries._query', 'blm.accounting.VerificationSeries._query', ([], {'accounting': 'accounting'}), '(accounting=accounting)\n', (43279, 43302), False, 'import blm\n'), ((45037, 45068), 'blm.accounting.Account._query', 
'blm.accounting.Account._query', ([], {}), '()\n', (45066, 45068), False, 'import blm\n'), ((45112, 45128), 'decimal.Decimal', 'Decimal', (['"""25.00"""'], {}), "('25.00')\n", (45119, 45128), False, 'from decimal import Decimal\n'), ((45194, 45210), 'decimal.Decimal', 'Decimal', (['"""12.00"""'], {}), "('12.00')\n", (45201, 45210), False, 'from decimal import Decimal\n'), ((45539, 45555), 'decimal.Decimal', 'Decimal', (['"""42.00"""'], {}), "('42.00')\n", (45546, 45555), False, 'from decimal import Decimal\n'), ((46123, 46139), 'decimal.Decimal', 'Decimal', (['"""47.00"""'], {}), "('47.00')\n", (46130, 46139), False, 'from decimal import Decimal\n'), ((46199, 46212), 'decimal.Decimal', 'Decimal', (['"""13"""'], {}), "('13')\n", (46206, 46212), False, 'from decimal import Decimal\n'), ((46899, 46930), 'blm.accounting.Account._query', 'blm.accounting.Account._query', ([], {}), '()\n', (46928, 46930), False, 'import blm\n'), ((47014, 47030), 'decimal.Decimal', 'Decimal', (['"""15.00"""'], {}), "('15.00')\n", (47021, 47030), False, 'from decimal import Decimal\n'), ((50142, 50184), 'blm.accounting.VerificationSeries._query', 'blm.accounting.VerificationSeries._query', ([], {}), '()\n', (50182, 50184), False, 'import blm\n'), ((50631, 50656), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d"""'], {}), "('%Y-%m-%d')\n", (50644, 50656), False, 'import bson, email, py, os, time, uuid\n'), ((50699, 50724), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d"""'], {}), "('%Y-%m-%d')\n", (50712, 50724), False, 'import bson, email, py, os, time, uuid\n'), ((52203, 52239), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {}), '()\n', (52237, 52239), False, 'import blm\n'), ((52827, 52863), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {}), '()\n', (52861, 52863), False, 'import blm\n'), ((53873, 53909), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {}), '()\n', (53907, 53909), 
False, 'import blm\n'), ((54084, 54142), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {'text': '"""Transaction text"""'}), "(text='Transaction text')\n", (54117, 54142), False, 'import blm\n'), ((54314, 54331), 'decimal.Decimal', 'Decimal', (['"""100.00"""'], {}), "('100.00')\n", (54321, 54331), False, 'from decimal import Decimal\n'), ((54352, 54412), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {'text': '"""Transaction text 2"""'}), "(text='Transaction text 2')\n", (54385, 54412), False, 'import blm\n'), ((54586, 54604), 'decimal.Decimal', 'Decimal', (['"""-100.00"""'], {}), "('-100.00')\n", (54593, 54604), False, 'from decimal import Decimal\n'), ((55432, 55468), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {}), '()\n', (55466, 55468), False, 'import blm\n'), ((55677, 55727), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {'id': 'trans1.id[0]'}), '(id=trans1.id[0])\n', (55710, 55727), False, 'import blm\n'), ((55907, 55924), 'decimal.Decimal', 'Decimal', (['"""200.00"""'], {}), "('200.00')\n", (55914, 55924), False, 'from decimal import Decimal\n'), ((56022, 56082), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {'text': '"""Transaction text 3"""'}), "(text='Transaction text 3')\n", (56055, 56082), False, 'import blm\n'), ((56256, 56274), 'decimal.Decimal', 'Decimal', (['"""-200.00"""'], {}), "('-200.00')\n", (56263, 56274), False, 'from decimal import Decimal\n'), ((57267, 57303), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {}), '()\n', (57301, 57303), False, 'import blm\n'), ((57356, 57391), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {}), '()\n', (57389, 57391), False, 'import blm\n'), ((58240, 58276), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {}), '()\n', (58274, 58276), False, 
'import blm\n'), ((58301, 58359), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {'text': '"""Transaction text"""'}), "(text='Transaction text')\n", (58334, 58359), False, 'import blm\n'), ((58384, 58444), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {'text': '"""Transaction text 2"""'}), "(text='Transaction text 2')\n", (58417, 58444), False, 'import blm\n'), ((59351, 59387), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {}), '()\n', (59385, 59387), False, 'import blm\n'), ((59596, 59646), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {'id': 'trans1.id[0]'}), '(id=trans1.id[0])\n', (59629, 59646), False, 'import blm\n'), ((59826, 59843), 'decimal.Decimal', 'Decimal', (['"""200.00"""'], {}), "('200.00')\n", (59833, 59843), False, 'from decimal import Decimal\n'), ((59941, 60001), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {'text': '"""Transaction text 3"""'}), "(text='Transaction text 3')\n", (59974, 60001), False, 'import blm\n'), ((60175, 60193), 'decimal.Decimal', 'Decimal', (['"""-200.00"""'], {}), "('-200.00')\n", (60182, 60193), False, 'from decimal import Decimal\n'), ((64141, 64153), 'decimal.Decimal', 'Decimal', (['"""0"""'], {}), "('0')\n", (64148, 64153), False, 'from decimal import Decimal\n'), ((64232, 64244), 'decimal.Decimal', 'Decimal', (['"""0"""'], {}), "('0')\n", (64239, 64244), False, 'from decimal import Decimal\n'), ((64828, 64841), 'decimal.Decimal', 'Decimal', (['"""50"""'], {}), "('50')\n", (64835, 64841), False, 'from decimal import Decimal\n'), ((64888, 64900), 'decimal.Decimal', 'Decimal', (['"""6"""'], {}), "('6')\n", (64895, 64900), False, 'from decimal import Decimal\n'), ((65291, 65304), 'decimal.Decimal', 'Decimal', (['"""50"""'], {}), "('50')\n", (65298, 65304), False, 'from decimal import Decimal\n'), ((65352, 65364), 'decimal.Decimal', 'Decimal', (['"""7"""'], 
{}), "('7')\n", (65359, 65364), False, 'from decimal import Decimal\n'), ((65424, 65469), 'blm.accounting.Dimension._query', 'blm.accounting.Dimension._query', ([], {'number': "['1']"}), "(number=['1'])\n", (65455, 65469), False, 'import blm\n'), ((66336, 66349), 'decimal.Decimal', 'Decimal', (['"""30"""'], {}), "('30')\n", (66343, 66349), False, 'from decimal import Decimal\n'), ((66391, 66403), 'decimal.Decimal', 'Decimal', (['"""2"""'], {}), "('2')\n", (66398, 66403), False, 'from decimal import Decimal\n'), ((66922, 66935), 'decimal.Decimal', 'Decimal', (['"""40"""'], {}), "('40')\n", (66929, 66935), False, 'from decimal import Decimal\n'), ((66977, 66989), 'decimal.Decimal', 'Decimal', (['"""5"""'], {}), "('5')\n", (66984, 66989), False, 'from decimal import Decimal\n'), ((67508, 67521), 'decimal.Decimal', 'Decimal', (['"""20"""'], {}), "('20')\n", (67515, 67521), False, 'from decimal import Decimal\n'), ((67563, 67575), 'decimal.Decimal', 'Decimal', (['"""7"""'], {}), "('7')\n", (67570, 67575), False, 'from decimal import Decimal\n'), ((69792, 69838), 'blm.accounting.Account._query', 'blm.accounting.Account._query', ([], {'number': "['9999']"}), "(number=['9999'])\n", (69821, 69838), False, 'import blm\n'), ((69881, 69897), 'decimal.Decimal', 'Decimal', (['"""30.00"""'], {}), "('30.00')\n", (69888, 69897), False, 'from decimal import Decimal\n'), ((69923, 69969), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {'id': 't1.id[0]'}), '(id=t1.id[0])\n', (69956, 69969), False, 'import blm\n'), ((70041, 70057), 'decimal.Decimal', 'Decimal', (['"""50.00"""'], {}), "('50.00')\n", (70048, 70057), False, 'from decimal import Decimal\n'), ((70125, 70141), 'decimal.Decimal', 'Decimal', (['"""20.00"""'], {}), "('20.00')\n", (70132, 70141), False, 'from decimal import Decimal\n'), ((70875, 70921), 'blm.accounting.Account._query', 'blm.accounting.Account._query', ([], {'number': "['9999']"}), "(number=['9999'])\n", (70904, 70921), False, 
'import blm\n'), ((70964, 70980), 'decimal.Decimal', 'Decimal', (['"""45.00"""'], {}), "('45.00')\n", (70971, 70980), False, 'from decimal import Decimal\n'), ((71006, 71052), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {'id': 't1.id[0]'}), '(id=t1.id[0])\n', (71039, 71052), False, 'import blm\n'), ((71094, 71140), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {'id': 't2.id[0]'}), '(id=t2.id[0])\n', (71127, 71140), False, 'import blm\n'), ((71204, 71220), 'decimal.Decimal', 'Decimal', (['"""15.00"""'], {}), "('15.00')\n", (71211, 71220), False, 'from decimal import Decimal\n'), ((72229, 72265), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {}), '()\n', (72263, 72265), False, 'import blm\n'), ((72294, 72329), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {}), '()\n', (72327, 72329), False, 'import blm\n'), ((72822, 72858), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {}), '()\n', (72856, 72858), False, 'import blm\n'), ((72882, 72917), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {}), '()\n', (72915, 72917), False, 'import blm\n'), ((73125, 73142), 'bson.BSON', 'bson.BSON', (['log[0]'], {}), '(log[0])\n', (73134, 73142), False, 'import bson, email, py, os, time, uuid\n'), ((73351, 73387), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {}), '()\n', (73385, 73387), False, 'import blm\n'), ((73678, 73714), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {}), '()\n', (73712, 73714), False, 'import blm\n'), ((74155, 74190), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {}), '()\n', (74188, 74190), False, 'import blm\n'), ((74212, 74256), 'blm.accounting.Account._query', 'blm.accounting.Account._query', ([], {'number': '"""2000"""'}), "(number='2000')\n", (74241, 74256), 
False, 'import blm\n'), ((74360, 74396), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {}), '()\n', (74394, 74396), False, 'import blm\n'), ((74484, 74501), 'bson.BSON', 'bson.BSON', (['log[1]'], {}), '(log[1])\n', (74493, 74501), False, 'import bson, email, py, os, time, uuid\n'), ((75228, 75264), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {}), '()\n', (75262, 75264), False, 'import blm\n'), ((75347, 75364), 'bson.BSON', 'bson.BSON', (['log[1]'], {}), '(log[1])\n', (75356, 75364), False, 'import bson, email, py, os, time, uuid\n'), ((79402, 79443), 'blm.accounting.User._query', 'blm.accounting.User._query', ([], {'id': 'self.payer'}), '(id=self.payer)\n', (79428, 79443), False, 'import blm\n'), ((79473, 79515), 'blm.accounting.User._query', 'blm.accounting.User._query', ([], {'id': 'self.member'}), '(id=self.member)\n', (79499, 79515), False, 'import blm\n'), ((79544, 79585), 'blm.accounting.User._query', 'blm.accounting.User._query', ([], {'id': 'self.other'}), '(id=self.other)\n', (79570, 79585), False, 'import blm\n'), ((79612, 79650), 'blm.accounting.Org._query', 'blm.accounting.Org._query', ([], {'id': 'self.org'}), '(id=self.org)\n', (79637, 79650), False, 'import blm\n'), ((81394, 81444), 'blm.accounting.Accounting._query', 'blm.accounting.Accounting._query', ([], {'id': 'accounting.id'}), '(id=accounting.id)\n', (81426, 81444), False, 'import blm\n'), ((82844, 82896), 'blm.accounting.VerificationSeries._query', 'blm.accounting.VerificationSeries._query', ([], {'id': 'seriesA'}), '(id=seriesA)\n', (82884, 82896), False, 'import blm\n'), ((83546, 83590), 'blm.accounting.Account._query', 'blm.accounting.Account._query', ([], {'number': '"""1000"""'}), "(number='1000')\n", (83575, 83590), False, 'import blm\n'), ((86738, 86772), 'blm.accounting.Accounting._query', 'blm.accounting.Accounting._query', ([], {}), '()\n', (86770, 86772), False, 'import blm\n'), ((86797, 86839), 
'blm.accounting.VerificationSeries._query', 'blm.accounting.VerificationSeries._query', ([], {}), '()\n', (86837, 86839), False, 'import blm\n'), ((86865, 86896), 'blm.accounting.Account._query', 'blm.accounting.Account._query', ([], {}), '()\n', (86894, 86896), False, 'import blm\n'), ((89457, 89470), 'decimal.Decimal', 'Decimal', (['"""10"""'], {}), "('10')\n", (89464, 89470), False, 'from decimal import Decimal\n'), ((89507, 89520), 'decimal.Decimal', 'Decimal', (['"""10"""'], {}), "('10')\n", (89514, 89520), False, 'from decimal import Decimal\n'), ((89557, 89570), 'decimal.Decimal', 'Decimal', (['"""20"""'], {}), "('20')\n", (89564, 89570), False, 'from decimal import Decimal\n'), ((92547, 92594), 'blm.accounting.Dimension._query', 'blm.accounting.Dimension._query', ([], {'accounting': 'acc'}), '(accounting=acc)\n', (92578, 92594), False, 'import blm\n'), ((96465, 96506), 'blm.accounting.Accounting._query', 'blm.accounting.Accounting._query', ([], {'id': 'toid'}), '(id=toid)\n', (96497, 96506), False, 'import blm\n'), ((98482, 98528), 'blm.accounting.Account._query', 'blm.accounting.Account._query', ([], {'accounting': 'acc1'}), '(accounting=acc1)\n', (98511, 98528), False, 'import blm\n'), ((98559, 98605), 'blm.accounting.Account._query', 'blm.accounting.Account._query', ([], {'accounting': 'acc2'}), '(accounting=acc2)\n', (98588, 98605), False, 'import blm\n'), ((98711, 98759), 'blm.accounting.Dimension._query', 'blm.accounting.Dimension._query', ([], {'accounting': 'acc1'}), '(accounting=acc1)\n', (98742, 98759), False, 'import blm\n'), ((98792, 98840), 'blm.accounting.Dimension._query', 'blm.accounting.Dimension._query', ([], {'accounting': 'acc2'}), '(accounting=acc2)\n', (98823, 98840), False, 'import blm\n'), ((98953, 99004), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {'accounting': 'acc1'}), '(accounting=acc1)\n', (98987, 99004), False, 'import blm\n'), ((99040, 99091), 'blm.accounting.Verification._query', 
'blm.accounting.Verification._query', ([], {'accounting': 'acc2'}), '(accounting=acc2)\n', (99074, 99091), False, 'import blm\n'), ((99203, 99260), 'blm.accounting.VerificationSeries._query', 'blm.accounting.VerificationSeries._query', ([], {'accounting': 'acc1'}), '(accounting=acc1)\n', (99243, 99260), False, 'import blm\n'), ((99302, 99359), 'blm.accounting.VerificationSeries._query', 'blm.accounting.VerificationSeries._query', ([], {'accounting': 'acc2'}), '(accounting=acc2)\n', (99342, 99359), False, 'import blm\n'), ((102914, 102946), 'itertools.zip_longest', 'izip_longest', (['ab1_obbs', 'ab2_obbs'], {}), '(ab1_obbs, ab2_obbs)\n', (102926, 102946), True, 'from itertools import zip_longest as izip_longest\n'), ((115518, 115560), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {'id': 'vId'}), '(id=vId)\n', (115552, 115560), False, 'import blm\n'), ((117622, 117665), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {'id': 'vId2'}), '(id=vId2)\n', (117656, 117665), False, 'import blm\n'), ((119429, 119478), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {'id': "sVr1['id']"}), "(id=sVr1['id'])\n", (119463, 119478), False, 'import blm\n'), ((119606, 119664), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {'text': '"""Transaction text"""'}), "(text='Transaction text')\n", (119639, 119664), False, 'import blm\n'), ((119837, 119853), 'decimal.Decimal', 'Decimal', (['"""50.00"""'], {}), "('50.00')\n", (119844, 119853), False, 'from decimal import Decimal\n'), ((119874, 119934), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {'text': '"""Transaction text 2"""'}), "(text='Transaction text 2')\n", (119907, 119934), False, 'import blm\n'), ((120109, 120126), 'decimal.Decimal', 'Decimal', (['"""-50.00"""'], {}), "('-50.00')\n", (120116, 120126), False, 'from decimal import Decimal\n'), ((121309, 121348), 
'blm.accounting.SupplierInvoice._query', 'blm.accounting.SupplierInvoice._query', ([], {}), '()\n', (121346, 121348), False, 'import blm\n'), ((126081, 126125), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {'id': 'verId'}), '(id=verId)\n', (126115, 126125), False, 'import blm\n'), ((126153, 126243), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {'verification': 'verId', 'account': 'self.account4000.id[0]'}), '(verification=verId, account=self.\n account4000.id[0])\n', (126186, 126243), False, 'import blm\n'), ((128140, 128169), 'accounting.bankgiro.bg_transferdate', 'bankgiro.bg_transferdate', (['si1'], {}), '(si1)\n', (128164, 128169), False, 'from accounting import bankgiro, plusgiro\n'), ((128274, 128303), 'accounting.bankgiro.bg_transferdate', 'bankgiro.bg_transferdate', (['si1'], {}), '(si1)\n', (128298, 128303), False, 'from accounting import bankgiro, plusgiro\n'), ((128408, 128437), 'accounting.bankgiro.bg_transferdate', 'bankgiro.bg_transferdate', (['si1'], {}), '(si1)\n', (128432, 128437), False, 'from accounting import bankgiro, plusgiro\n'), ((128595, 128624), 'accounting.bankgiro.bg_transferdate', 'bankgiro.bg_transferdate', (['si1'], {}), '(si1)\n', (128619, 128624), False, 'from accounting import bankgiro, plusgiro\n'), ((128729, 128758), 'accounting.bankgiro.bg_transferdate', 'bankgiro.bg_transferdate', (['si1'], {}), '(si1)\n', (128753, 128758), False, 'from accounting import bankgiro, plusgiro\n'), ((128863, 128892), 'accounting.bankgiro.bg_transferdate', 'bankgiro.bg_transferdate', (['si1'], {}), '(si1)\n', (128887, 128892), False, 'from accounting import bankgiro, plusgiro\n'), ((128997, 129026), 'accounting.bankgiro.bg_transferdate', 'bankgiro.bg_transferdate', (['si1'], {}), '(si1)\n', (129021, 129026), False, 'from accounting import bankgiro, plusgiro\n'), ((129131, 129160), 'accounting.bankgiro.bg_transferdate', 'bankgiro.bg_transferdate', (['si1'], {}), '(si1)\n', (129155, 
129160), False, 'from accounting import bankgiro, plusgiro\n'), ((139832, 139844), 'pytransact.testsupport.Time.time', 'Time.time', (['t'], {}), '(t)\n', (139841, 139844), False, 'from pytransact.testsupport import BLMTests, Time\n'), ((141664, 141699), 'accounting.bankgiro.hmac_sha256_128', 'bankgiro.hmac_sha256_128', (['key', 'data'], {}), '(key, data)\n', (141688, 141699), False, 'from accounting import bankgiro, plusgiro\n'), ((142009, 142044), 'accounting.bankgiro.hmac_sha256_128', 'bankgiro.hmac_sha256_128', (['key', 'data'], {}), '(key, data)\n', (142033, 142044), False, 'from accounting import bankgiro, plusgiro\n'), ((142252, 142287), 'accounting.bankgiro.hmac_sha256_128', 'bankgiro.hmac_sha256_128', (['key', 'data'], {}), '(key, data)\n', (142276, 142287), False, 'from accounting import bankgiro, plusgiro\n'), ((142628, 142663), 'accounting.bankgiro.hmac_sha256_128', 'bankgiro.hmac_sha256_128', (['key', 'data'], {}), '(key, data)\n', (142652, 142663), False, 'from accounting import bankgiro, plusgiro\n'), ((142851, 142886), 'accounting.bankgiro.hmac_sha256_128', 'bankgiro.hmac_sha256_128', (['key', 'data'], {}), '(key, data)\n', (142875, 142886), False, 'from accounting import bankgiro, plusgiro\n'), ((143046, 143081), 'accounting.bankgiro.hmac_sha256_128', 'bankgiro.hmac_sha256_128', (['key', 'data'], {}), '(key, data)\n', (143070, 143081), False, 'from accounting import bankgiro, plusgiro\n'), ((143386, 143421), 'accounting.bankgiro.hmac_sha256_128', 'bankgiro.hmac_sha256_128', (['key', 'data'], {}), '(key, data)\n', (143410, 143421), False, 'from accounting import bankgiro, plusgiro\n'), ((143593, 143628), 'accounting.bankgiro.hmac_sha256_128', 'bankgiro.hmac_sha256_128', (['key', 'data'], {}), '(key, data)\n', (143617, 143628), False, 'from accounting import bankgiro, plusgiro\n'), ((143893, 143928), 'accounting.bankgiro.hmac_sha256_128', 'bankgiro.hmac_sha256_128', (['key', 'data'], {}), '(key, data)\n', (143917, 143928), False, 'from accounting 
import bankgiro, plusgiro\n'), ((144097, 144132), 'accounting.bankgiro.hmac_sha256_128', 'bankgiro.hmac_sha256_128', (['key', 'data'], {}), '(key, data)\n', (144121, 144132), False, 'from accounting import bankgiro, plusgiro\n'), ((147822, 147849), 'accounting.bankgiro.normalize_text', 'bankgiro.normalize_text', (['s1'], {}), '(s1)\n', (147845, 147849), False, 'from accounting import bankgiro, plusgiro\n'), ((148012, 148039), 'accounting.bankgiro.normalize_text', 'bankgiro.normalize_text', (['n1'], {}), '(n1)\n', (148035, 148039), False, 'from accounting import bankgiro, plusgiro\n'), ((148079, 148108), 'accounting.bankgiro.normalize_text', 'bankgiro.normalize_text', (['u"""Å"""'], {}), "(u'Å')\n", (148102, 148108), False, 'from accounting import bankgiro, plusgiro\n'), ((148135, 148164), 'accounting.bankgiro.normalize_text', 'bankgiro.normalize_text', (['u"""Ä"""'], {}), "(u'Ä')\n", (148158, 148164), False, 'from accounting import bankgiro, plusgiro\n'), ((148191, 148220), 'accounting.bankgiro.normalize_text', 'bankgiro.normalize_text', (['u"""Ö"""'], {}), "(u'Ö')\n", (148214, 148220), False, 'from accounting import bankgiro, plusgiro\n'), ((148248, 148277), 'accounting.bankgiro.normalize_text', 'bankgiro.normalize_text', (['u"""å"""'], {}), "(u'å')\n", (148271, 148277), False, 'from accounting import bankgiro, plusgiro\n'), ((148304, 148333), 'accounting.bankgiro.normalize_text', 'bankgiro.normalize_text', (['u"""ä"""'], {}), "(u'ä')\n", (148327, 148333), False, 'from accounting import bankgiro, plusgiro\n'), ((148360, 148389), 'accounting.bankgiro.normalize_text', 'bankgiro.normalize_text', (['u"""ö"""'], {}), "(u'ö')\n", (148383, 148389), False, 'from accounting import bankgiro, plusgiro\n'), ((148416, 148445), 'accounting.bankgiro.normalize_text', 'bankgiro.normalize_text', (['u"""É"""'], {}), "(u'É')\n", (148439, 148445), False, 'from accounting import bankgiro, plusgiro\n'), ((148472, 148501), 'accounting.bankgiro.normalize_text', 'bankgiro.normalize_text', 
(['u"""é"""'], {}), "(u'é')\n", (148495, 148501), False, 'from accounting import bankgiro, plusgiro\n'), ((148528, 148557), 'accounting.bankgiro.normalize_text', 'bankgiro.normalize_text', (['u"""Ü"""'], {}), "(u'Ü')\n", (148551, 148557), False, 'from accounting import bankgiro, plusgiro\n'), ((148584, 148613), 'accounting.bankgiro.normalize_text', 'bankgiro.normalize_text', (['u"""ü"""'], {}), "(u'ü')\n", (148607, 148613), False, 'from accounting import bankgiro, plusgiro\n'), ((148640, 148670), 'accounting.bankgiro.normalize_text', 'bankgiro.normalize_text', (['u"""\n"""'], {}), "(u'\\n')\n", (148663, 148670), False, 'from accounting import bankgiro, plusgiro\n'), ((148696, 148736), 'accounting.bankgiro.normalize_text', 'bankgiro.normalize_text', (["u'\\n\\n\\n\\r\\r\\n'"], {}), "(u'\\n\\n\\n\\r\\r\\n')\n", (148719, 148736), False, 'from accounting import bankgiro, plusgiro\n'), ((148974, 149001), 'accounting.bankgiro.normalize_text', 'bankgiro.normalize_text', (['s1'], {}), '(s1)\n', (148997, 149001), False, 'from accounting import bankgiro, plusgiro\n'), ((149126, 149153), 'accounting.bankgiro.normalize_text', 'bankgiro.normalize_text', (['n1'], {}), '(n1)\n', (149149, 149153), False, 'from accounting import bankgiro, plusgiro\n'), ((151196, 151222), 'accounting.bankgiro.encode_toid20', 'bankgiro.encode_toid20', (['si'], {}), '(si)\n', (151218, 151222), False, 'from accounting import bankgiro, plusgiro\n'), ((157337, 157358), 'blm.TO._query', 'blm.TO._query', ([], {'id': 'si1'}), '(id=si1)\n', (157350, 157358), False, 'import blm\n'), ((157380, 157401), 'blm.TO._query', 'blm.TO._query', ([], {'id': 'si2'}), '(id=si2)\n', (157393, 157401), False, 'import blm\n'), ((159326, 159347), 'blm.TO._query', 'blm.TO._query', ([], {'id': 'si1'}), '(id=si1)\n', (159339, 159347), False, 'import blm\n'), ((159369, 159390), 'blm.TO._query', 'blm.TO._query', ([], {'id': 'si2'}), '(id=si2)\n', (159382, 159390), False, 'import blm\n'), ((159634, 159655), 'blm.TO._query', 
'blm.TO._query', ([], {'id': 'si1'}), '(id=si1)\n', (159647, 159655), False, 'import blm\n'), ((159677, 159698), 'blm.TO._query', 'blm.TO._query', ([], {'id': 'si2'}), '(id=si2)\n', (159690, 159698), False, 'import blm\n'), ((159888, 159909), 'blm.TO._query', 'blm.TO._query', ([], {'id': 'si1'}), '(id=si1)\n', (159901, 159909), False, 'import blm\n'), ((159931, 159952), 'blm.TO._query', 'blm.TO._query', ([], {'id': 'si2'}), '(id=si2)\n', (159944, 159952), False, 'import blm\n'), ((170920, 170959), 'blm.accounting.UG._query', 'blm.accounting.UG._query', ([], {'name': '"""public"""'}), "(name='public')\n", (170944, 170959), False, 'import blm\n'), ((171050, 171077), 'blm.accounting.Org._query', 'blm.accounting.Org._query', ([], {}), '()\n', (171075, 171077), False, 'import blm\n'), ((171252, 171283), 'blm.accounting.VatCode._query', 'blm.accounting.VatCode._query', ([], {}), '()\n', (171281, 171283), False, 'import blm\n'), ((3865, 3901), 'blm.accounting.User._query', 'blm.accounting.User._query', ([], {'id': 'user1'}), '(id=user1)\n', (3891, 3901), False, 'import blm\n'), ((22589, 22629), 'pytransact.commit.CallToi', 'commit.CallToi', (['org.id[0]', '"""get_ocr"""', '[]'], {}), "(org.id[0], 'get_ocr', [])\n", (22603, 22629), False, 'from pytransact import commit, contextbroker, exceptions, mongo, queryops\n'), ((22774, 22823), 'pytransact.commit.wait_for_commit', 'commit.wait_for_commit', (['self.database', 'interested'], {}), '(self.database, interested)\n', (22796, 22823), False, 'from pytransact import commit, contextbroker, exceptions, mongo, queryops\n'), ((22962, 22973), 'os._exit', 'os._exit', (['(0)'], {}), '(0)\n', (22970, 22973), False, 'import bson, email, py, os, time, uuid\n'), ((33870, 33895), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (33885, 33895), False, 'import bson, email, py, os, time, uuid\n'), ((37785, 37827), 'blm.accounting.VerificationSeries._query', 'blm.accounting.VerificationSeries._query', ([], {}), 
'()\n', (37825, 37827), False, 'import blm\n'), ((52697, 52722), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d"""'], {}), "('%Y-%m-%d')\n", (52710, 52722), False, 'import bson, email, py, os, time, uuid\n'), ((55946, 55996), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {'id': 'trans2.id[0]'}), '(id=trans2.id[0])\n', (55979, 55996), False, 'import blm\n'), ((59865, 59915), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {'id': 'trans2.id[0]'}), '(id=trans2.id[0])\n', (59898, 59915), False, 'import blm\n'), ((72374, 72405), 'blm.accounting.Account._query', 'blm.accounting.Account._query', ([], {}), '()\n', (72403, 72405), False, 'import blm\n'), ((73814, 73831), 'bson.BSON', 'bson.BSON', (['log[1]'], {}), '(log[1])\n', (73823, 73831), False, 'import bson, email, py, os, time, uuid\n'), ((74649, 74665), 'decimal.Decimal', 'Decimal', (['"""10.00"""'], {}), "('10.00')\n", (74656, 74665), False, 'from decimal import Decimal\n'), ((74709, 74721), 'decimal.Decimal', 'Decimal', (['"""0"""'], {}), "('0')\n", (74716, 74721), False, 'from decimal import Decimal\n'), ((75512, 75528), 'decimal.Decimal', 'Decimal', (['"""10.00"""'], {}), "('10.00')\n", (75519, 75528), False, 'from decimal import Decimal\n'), ((75572, 75584), 'decimal.Decimal', 'Decimal', (['"""0"""'], {}), "('0')\n", (75579, 75584), False, 'from decimal import Decimal\n'), ((76470, 76511), 'blm.accounting.User._query', 'blm.accounting.User._query', ([], {'id': 'self.admin'}), '(id=self.admin)\n', (76496, 76511), False, 'import blm\n'), ((76543, 76585), 'blm.accounting.User._query', 'blm.accounting.User._query', ([], {'id': 'self.member'}), '(id=self.member)\n', (76569, 76585), False, 'import blm\n'), ((76616, 76657), 'blm.accounting.User._query', 'blm.accounting.User._query', ([], {'id': 'self.other'}), '(id=self.other)\n', (76642, 76657), False, 'import blm\n'), ((76686, 76724), 'blm.accounting.Org._query', 'blm.accounting.Org._query', ([], 
{'id': 'self.org'}), '(id=self.org)\n', (76711, 76724), False, 'import blm\n'), ((76760, 76812), 'blm.accounting.Accounting._query', 'blm.accounting.Accounting._query', ([], {'id': 'self.accounting'}), '(id=self.accounting)\n', (76792, 76812), False, 'import blm\n'), ((80476, 80522), 'blm.accounting.User._query', 'blm.accounting.User._query', ([], {'id': 'self.accountant'}), '(id=self.accountant)\n', (80502, 80522), False, 'import blm\n'), ((80554, 80596), 'blm.accounting.User._query', 'blm.accounting.User._query', ([], {'id': 'self.member'}), '(id=self.member)\n', (80580, 80596), False, 'import blm\n'), ((80627, 80668), 'blm.accounting.User._query', 'blm.accounting.User._query', ([], {'id': 'self.other'}), '(id=self.other)\n', (80653, 80668), False, 'import blm\n'), ((80697, 80735), 'blm.accounting.Org._query', 'blm.accounting.Org._query', ([], {'id': 'self.org'}), '(id=self.org)\n', (80722, 80735), False, 'import blm\n'), ((88259, 88297), 'blm.accounting.Org._query', 'blm.accounting.Org._query', ([], {'id': 'self.org'}), '(id=self.org)\n', (88284, 88297), False, 'import blm\n'), ((96584, 96615), 'blm.accounting.Account._query', 'blm.accounting.Account._query', ([], {}), '()\n', (96613, 96615), False, 'import blm\n'), ((96641, 96679), 'blm.accounting.AccountBalance._query', 'blm.accounting.AccountBalance._query', ([], {}), '()\n', (96677, 96679), False, 'import blm\n'), ((96705, 96739), 'blm.accounting.Accounting._query', 'blm.accounting.Accounting._query', ([], {}), '()\n', (96737, 96739), False, 'import blm\n'), ((96765, 96805), 'blm.accounting.AccountingObject._query', 'blm.accounting.AccountingObject._query', ([], {}), '()\n', (96803, 96805), False, 'import blm\n'), ((96831, 96868), 'blm.accounting.BalanceBudget._query', 'blm.accounting.BalanceBudget._query', ([], {}), '()\n', (96866, 96868), False, 'import blm\n'), ((96894, 96927), 'blm.accounting.Dimension._query', 'blm.accounting.Dimension._query', ([], {}), '()\n', (96925, 96927), False, 'import blm\n'), 
((96953, 96996), 'blm.accounting.ObjectBalanceBudget._query', 'blm.accounting.ObjectBalanceBudget._query', ([], {}), '()\n', (96994, 96996), False, 'import blm\n'), ((97022, 97057), 'blm.accounting.Transaction._query', 'blm.accounting.Transaction._query', ([], {}), '()\n', (97055, 97057), False, 'import blm\n'), ((97083, 97119), 'blm.accounting.Verification._query', 'blm.accounting.Verification._query', ([], {}), '()\n', (97117, 97119), False, 'import blm\n'), ((97145, 97187), 'blm.accounting.VerificationSeries._query', 'blm.accounting.VerificationSeries._query', ([], {}), '()\n', (97185, 97187), False, 'import blm\n'), ((97498, 97554), 'blm.accounting.Accounting._query', 'blm.accounting.Accounting._query', ([], {'id': 'importer.accounting'}), '(id=importer.accounting)\n', (97530, 97554), False, 'import blm\n'), ((100207, 100261), 'blm.accounting.AccountingObject._query', 'blm.accounting.AccountingObject._query', ([], {'dimension': 'dim1'}), '(dimension=dim1)\n', (100245, 100261), False, 'import blm\n'), ((100308, 100362), 'blm.accounting.AccountingObject._query', 'blm.accounting.AccountingObject._query', ([], {'dimension': 'dim2'}), '(dimension=dim2)\n', (100346, 100362), False, 'import blm\n'), ((105274, 105319), 'blm.accounting.Accounting._query', 'blm.accounting.Accounting._query', ([], {'id': 'original'}), '(id=original)\n', (105306, 105319), False, 'import blm\n'), ((105346, 105389), 'blm.accounting.Accounting._query', 'blm.accounting.Accounting._query', ([], {'id': 'expect'}), '(id=expect)\n', (105378, 105389), False, 'import blm\n'), ((105414, 105455), 'blm.accounting.Accounting._query', 'blm.accounting.Accounting._query', ([], {'id': 'copy'}), '(id=copy)\n', (105446, 105455), False, 'import blm\n'), ((118501, 118529), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice0'], {}), '(self.invoice0)\n', (118514, 118529), False, 'import copy\n'), ((120249, 120277), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice0'], {}), '(self.invoice0)\n', (120262, 120277), 
False, 'import copy\n'), ((126853, 126880), 'accounting.bankgiro.encode_toid20', 'bankgiro.encode_toid20', (['toi'], {}), '(toi)\n', (126875, 126880), False, 'from accounting import bankgiro, plusgiro\n'), ((140051, 140078), 'accounting.bankgiro.encode_toid20', 'bankgiro.encode_toid20', (['si1'], {}), '(si1)\n', (140073, 140078), False, 'from accounting import bankgiro, plusgiro\n'), ((154359, 154403), 'blm.TO._query', 'blm.TO._query', ([], {'id': 'si.transferVerification[0]'}), '(id=si.transferVerification[0])\n', (154372, 154403), False, 'import blm\n'), ((154565, 154593), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice1'], {}), '(self.invoice1)\n', (154578, 154593), False, 'import copy\n'), ((154706, 154734), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice2'], {}), '(self.invoice2)\n', (154719, 154734), False, 'import copy\n'), ((155616, 155644), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice1'], {}), '(self.invoice1)\n', (155629, 155644), False, 'import copy\n'), ((155757, 155785), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice2'], {}), '(self.invoice2)\n', (155770, 155785), False, 'import copy\n'), ((158133, 158161), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice1'], {}), '(self.invoice1)\n', (158146, 158161), False, 'import copy\n'), ((158470, 158498), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice1'], {}), '(self.invoice1)\n', (158483, 158498), False, 'import copy\n'), ((158886, 158914), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice1'], {}), '(self.invoice1)\n', (158899, 158914), False, 'import copy\n'), ((159050, 159078), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice2'], {}), '(self.invoice2)\n', (159063, 159078), False, 'import copy\n'), ((170409, 170437), 'copy.deepcopy', 'copy.deepcopy', (['self.invoice1'], {}), '(self.invoice1)\n', (170422, 170437), False, 'import copy\n'), ((102641, 102703), 'blm.accounting.ObjectBalanceBudget._query', 'blm.accounting.ObjectBalanceBudget._query', ([], {'account_balance': 'ab1'}), 
'(account_balance=ab1)\n', (102682, 102703), False, 'import blm\n'), ((102758, 102820), 'blm.accounting.ObjectBalanceBudget._query', 'blm.accounting.ObjectBalanceBudget._query', ([], {'account_balance': 'ab2'}), '(account_balance=ab2)\n', (102799, 102820), False, 'import blm\n'), ((119073, 119125), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['si1.dateInvoiceRegistered[0]'], {}), '(si1.dateInvoiceRegistered[0])\n', (119095, 119125), False, 'from datetime import datetime, date, timedelta\n'), ((119561, 119573), 'datetime.date.today', 'date.today', ([], {}), '()\n', (119571, 119573), False, 'from datetime import datetime, date, timedelta\n'), ((120669, 120708), 'blm.accounting.SupplierInvoice._query', 'blm.accounting.SupplierInvoice._query', ([], {}), '()\n', (120706, 120708), False, 'import blm\n'), ((122120, 122159), 'blm.accounting.SupplierInvoice._query', 'blm.accounting.SupplierInvoice._query', ([], {}), '()\n', (122157, 122159), False, 'import blm\n'), ((145391, 145457), 'random.choice', 'random.choice', (['(string.digits + string.letters + string.punctuation)'], {}), '(string.digits + string.letters + string.punctuation)\n', (145404, 145457), False, 'import random\n'), ((156804, 156816), 'datetime.date.today', 'date.today', ([], {}), '()\n', (156814, 156816), False, 'from datetime import datetime, date, timedelta\n'), ((156819, 156836), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (156828, 156836), False, 'from datetime import datetime, date, timedelta\n'), ((34315, 34329), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (34327, 34329), False, 'from datetime import datetime, date, timedelta\n'), ((34332, 34364), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'years': '(+1)', 'days': '(-1)'}), '(years=+1, days=-1)\n', (34345, 34364), False, 'from dateutil.relativedelta import relativedelta\n'), ((34486, 34500), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (34498, 34500), 
False, 'from datetime import datetime, date, timedelta\n'), ((34503, 34526), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'years': '(+1)'}), '(years=+1)\n', (34516, 34526), False, 'from dateutil.relativedelta import relativedelta\n'), ((34592, 34606), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (34604, 34606), False, 'from datetime import datetime, date, timedelta\n'), ((34609, 34641), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'years': '(+2)', 'days': '(-1)'}), '(years=+2, days=-1)\n', (34622, 34641), False, 'from dateutil.relativedelta import relativedelta\n'), ((128082, 128099), 'datetime.date', 'date', (['(2017)', '(9)', '(19)'], {}), '(2017, 9, 19)\n', (128086, 128099), False, 'from datetime import datetime, date, timedelta\n'), ((128216, 128233), 'datetime.date', 'date', (['(2017)', '(9)', '(20)'], {}), '(2017, 9, 20)\n', (128220, 128233), False, 'from datetime import datetime, date, timedelta\n'), ((128350, 128367), 'datetime.date', 'date', (['(2017)', '(9)', '(21)'], {}), '(2017, 9, 21)\n', (128354, 128367), False, 'from datetime import datetime, date, timedelta\n'), ((128537, 128554), 'datetime.date', 'date', (['(2017)', '(9)', '(21)'], {}), '(2017, 9, 21)\n', (128541, 128554), False, 'from datetime import datetime, date, timedelta\n'), ((128671, 128688), 'datetime.date', 'date', (['(2017)', '(9)', '(22)'], {}), '(2017, 9, 22)\n', (128675, 128688), False, 'from datetime import datetime, date, timedelta\n'), ((128805, 128822), 'datetime.date', 'date', (['(2017)', '(9)', '(23)'], {}), '(2017, 9, 23)\n', (128809, 128822), False, 'from datetime import datetime, date, timedelta\n'), ((128939, 128956), 'datetime.date', 'date', (['(2017)', '(9)', '(24)'], {}), '(2017, 9, 24)\n', (128943, 128956), False, 'from datetime import datetime, date, timedelta\n'), ((129073, 129090), 'datetime.date', 'date', (['(2017)', '(9)', '(25)'], {}), '(2017, 9, 25)\n', (129077, 129090), False, 'from datetime import datetime, date, 
timedelta\n'), ((130099, 130115), 'datetime.date', 'date', (['(2017)', '(5)', '(1)'], {}), '(2017, 5, 1)\n', (130103, 130115), False, 'from datetime import datetime, date, timedelta\n'), ((130361, 130377), 'datetime.date', 'date', (['(2017)', '(6)', '(1)'], {}), '(2017, 6, 1)\n', (130365, 130377), False, 'from datetime import datetime, date, timedelta\n'), ((131043, 131059), 'datetime.date', 'date', (['(2017)', '(5)', '(1)'], {}), '(2017, 5, 1)\n', (131047, 131059), False, 'from datetime import datetime, date, timedelta\n'), ((132064, 132080), 'datetime.date', 'date', (['(2017)', '(5)', '(1)'], {}), '(2017, 5, 1)\n', (132068, 132080), False, 'from datetime import datetime, date, timedelta\n'), ((132335, 132351), 'datetime.date', 'date', (['(2017)', '(6)', '(1)'], {}), '(2017, 6, 1)\n', (132339, 132351), False, 'from datetime import datetime, date, timedelta\n'), ((134696, 134707), 'time.time', 'time.time', ([], {}), '()\n', (134705, 134707), False, 'import bson, email, py, os, time, uuid\n'), ((135802, 135818), 'datetime.date', 'date', (['(2017)', '(5)', '(1)'], {}), '(2017, 5, 1)\n', (135806, 135818), False, 'from datetime import datetime, date, timedelta\n'), ((136191, 136207), 'datetime.date', 'date', (['(2017)', '(5)', '(8)'], {}), '(2017, 5, 8)\n', (136195, 136207), False, 'from datetime import datetime, date, timedelta\n'), ((137859, 137875), 'datetime.date', 'date', (['(2017)', '(5)', '(1)'], {}), '(2017, 5, 1)\n', (137863, 137875), False, 'from datetime import datetime, date, timedelta\n'), ((138599, 138615), 'datetime.date', 'date', (['(2017)', '(5)', '(8)'], {}), '(2017, 5, 8)\n', (138603, 138615), False, 'from datetime import datetime, date, timedelta\n'), ((139559, 139575), 'datetime.date', 'date', (['(2017)', '(5)', '(8)'], {}), '(2017, 5, 8)\n', (139563, 139575), False, 'from datetime import datetime, date, timedelta\n'), ((149405, 149421), 'datetime.date', 'date', (['(2017)', '(5)', '(2)'], {}), '(2017, 5, 2)\n', (149409, 149421), False, 'from 
datetime import datetime, date, timedelta\n'), ((162515, 162533), 'datetime.date', 'date', (['(2011)', '(11)', '(30)'], {}), '(2011, 11, 30)\n', (162519, 162533), False, 'from datetime import datetime, date, timedelta\n')] |
"""
Create a representation of the NK SpeedCoach data file.
Classes:
NKSession
NKDevice
NKSessionFile
Functions:
None
Misc variables:
None
"""
import pandas
from io import StringIO
class NKSession(object):
    """
    Session data parsed out of a single NK SpeedCoach export file.

    Attributes:
        SessionName : str
            The name of the session.
        SessionStartTime : str
            The time at which the session was started.
        SessionType : str
            The type of session (Just Row; Intervals).
        SessionSystemOfUnits : str
            The system of units the session was captured in
            (M M/S /500m; KM KMH /500m; MI MPH /MI).
        SessionSpeedInput : str
            The source of the speed data (GPS; Impeller).
        SessionSummary : DataFrame
            A single summary row describing the whole session.
        SessionIntervalSummaries : DataFrame
            One summary row for each interval done during the session.
        SessionStrokeData : DataFrame
            One row for every stroke taken during the session.
    """

    def __init__(self, NKSessionFile):
        # NOTE(review): the parameter name shadows the NKSessionFile class;
        # it is kept unchanged for backwards compatibility with callers.
        pseudofile = NKSessionFile.pseudofile
        pseudofile.seek(0)
        self._getnksessionheader(pseudofile)
        pseudofile.seek(0)
        self._getsessionsummary(pseudofile,
                                NKSessionFile.sessionsummarystart,
                                NKSessionFile.sessionintervalsummarystart)
        pseudofile.seek(0)
        self._getsessionintervalsummaries(pseudofile,
                                          NKSessionFile.sessionintervalsummarystart,
                                          NKSessionFile.perstrokedatastart)
        pseudofile.seek(0)
        self._getsessionstrokedata(pseudofile, NKSessionFile.perstrokedatastart)

    def _getnksessionheader(self, pseudofile):
        """Parse the five session header fields into string attributes."""
        header = pandas.read_csv(pseudofile, header=None, skiprows=2, nrows=5,
                                 usecols=[0, 1], names=["Field", "Value"])
        # Header cells may carry padding in the export; strip text columns.
        header = header.apply(
            lambda col: col.str.strip() if col.dtype == "object" else col)
        (self.SessionName,
         self.SessionStartTime,
         self.SessionType,
         self.SessionSystemOfUnits,
         self.SessionSpeedInput) = header["Value"].tolist()

    def _getsessionsummary(self, pseudofile, sessionsummarystart,
                           sessionintervalsummarystart):
        """Load the whole-session summary section into ``SessionSummary``."""
        datarows = sessionintervalsummarystart - sessionsummarystart - 5
        summary = pandas.read_csv(pseudofile, header=0,
                                  skiprows=sessionsummarystart,
                                  nrows=datarows, sep=",")
        self.SessionSummary = self._cleandataframe(summary)

    def _getsessionintervalsummaries(self, pseudofile,
                                     sessionintervalsummarystart,
                                     perstrokedatastart):
        """Load the per-interval rows into ``SessionIntervalSummaries``."""
        datarows = perstrokedatastart - sessionintervalsummarystart - 6
        intervals = pandas.read_csv(pseudofile, header=0,
                                    skiprows=sessionintervalsummarystart,
                                    nrows=datarows, sep=",")
        self.SessionIntervalSummaries = self._cleandataframe(intervals)

    def _getsessionstrokedata(self, pseudofile, perstrokedatastart):
        """Load the per-stroke detail rows into ``SessionStrokeData``."""
        strokes = pandas.read_csv(pseudofile, header=0,
                                  skiprows=perstrokedatastart, sep=",")
        self.SessionStrokeData = self._cleandataframe(strokes)

    @staticmethod
    def _cleandataframe(frame):
        """Drop the units row and normalise sentinel values.

        The first data row of every section holds units rather than data,
        so it is discarded.  '---' marks a missing value, and ':60.0'
        timestamps are rewritten to ':59.9'.
        """
        frame = frame.iloc[1:]
        frame = frame.replace({"---": None})
        return frame.replace(":60.0", ":59.9", regex=True)
class NKDevice(object):
    """
    Device information parsed out of a single NK SpeedCoach export file.

    Attributes:
        DeviceName : str
            The name of the SpeedCoach device.
        DeviceModel : str
            The model of the SpeedCoach device.
        DeviceSerial : str
            The serial number of the SpeedCoach device.  Serial numbers
            are unique across devices and can be used as a key.
        DeviceFirmwareVersion : str
            The firmware version of the SpeedCoach device.
        DeviceProfileVersion : str
            The profile version of the SpeedCoach device.
        DeviceHardwareVersion : str
            The hardware version of the SpeedCoach device.
        DeviceOarlockFirmwareVersion : str
            The firmware version of the oarlock device.
        DeviceOarlockBoatId : str
            The name assigned to the oarlock device; identifies the boat
            the device is installed in.
        DeviceOarlockSeatNumber : str
            The seat number of the rigger the oarlock is installed on.
        DeviceOarlockSide : str
            The side of the rigger the oarlock is installed on.
        DeviceOarlockOarlLength : str
            The overall length of the oar used on the oarlock device.
        DeviceOarlockInboardLength : str
            The inboard length of the oar used on the oarlock device.
    """

    def __init__(self, NKSessionFile):
        # NOTE(review): the parameter name shadows the NKSessionFile class;
        # it is kept unchanged for backwards compatibility with callers.
        pseudofile = NKSessionFile.pseudofile
        pseudofile.seek(0)
        self._getnkspeedcoachinfo(pseudofile)
        pseudofile.seek(0)
        self._getnkoarlockinfo(pseudofile)
        pseudofile.seek(0)
        self._getnkoarlocksettings(pseudofile)

    @staticmethod
    def _read_field_block(pseudofile, columns, rowcount):
        """Read a two-column Field/Value block from the raw CSV text."""
        block = pandas.read_csv(pseudofile, header=None, skiprows=2,
                                nrows=rowcount, usecols=columns,
                                names=["Field", "Value"])
        # Cells may carry padding in the export; strip text columns.
        return block.apply(
            lambda col: col.str.strip() if col.dtype == "object" else col)

    def _getnkspeedcoachinfo(self, pseudofile):
        """Parse the six SpeedCoach device fields (columns 4 and 5)."""
        info = self._read_field_block(pseudofile, [4, 5], 6)
        (self.DeviceName,
         self.DeviceModel,
         self.DeviceSerial,
         self.DeviceFirmwareVersion,
         self.DeviceProfileVersion,
         self.DeviceHardwareVersion) = info["Value"].tolist()

    def _getnkoarlockinfo(self, pseudofile):
        """Parse the oarlock firmware version (columns 8 and 9)."""
        info = self._read_field_block(pseudofile, [8, 9], 1)
        self.DeviceOarlockFirmwareVersion = info.iloc[0, 1]

    def _getnkoarlocksettings(self, pseudofile):
        """Parse the five oarlock settings fields (columns 12 and 13)."""
        info = self._read_field_block(pseudofile, [12, 13], 5)
        (self.DeviceOarlockBoatId,
         self.DeviceOarlockSeatNumber,
         self.DeviceOarlockSide,
         self.DeviceOarlockOarlLength,
         self.DeviceOarlockInboardLength) = info["Value"].tolist()
class NKSessionFile(object):
    """
    A class to represent the NK file.

    When loaded, the file is loaded into an in-memory object and parsed to
    obtain the starting points of the various data sections.  The starting
    points are stored in the attributes listed below.

    Attributes
    ----------
    sessionsummarystart : int
        The starting line (1-based) of the session summary data;
        0 if the marker was not found.
    sessionintervalsummarystart : int
        The starting line (1-based) of the interval summary data;
        0 if the marker was not found.
    perstrokedatastart : int
        The starting line (1-based) of the session stroke data;
        0 if the marker was not found.
    pseudofile : StringIO
        An in-memory copy of the NK file contents (only set when the file
        looks like a valid NK export).
    init : bool
        True once a valid file has been loaded.
    """

    # NOTE(review): class-level flag; instances shadow it with ``self.init``
    # on successful load.  Kept as-is for backwards compatibility.
    init = False

    def __init__(self, filepath):
        if self.init is False:
            self._createpseudofile(filepath)

    def _createpseudofile(self, filepath):
        """Scan *filepath* for the section markers and load it into memory.

        Parameters
        ----------
        filepath : str
            Path to the NK SpeedCoach CSV export.
        """
        # Bug fix: default the markers to 0 so the validity check below
        # cannot raise AttributeError when a section heading is missing.
        self.sessionsummarystart = 0
        self.sessionintervalsummarystart = 0
        self.perstrokedatastart = 0
        # Bug fix: use a context manager so the file handle is always
        # closed (the previous implementation leaked it).
        with open(filepath, "r") as nkfile:
            contents = nkfile.read()
        # Record the 1-based line number of each section heading.
        for i, line in enumerate(contents.split("\n"), start=1):
            if "Session Summary:" in line:
                self.sessionsummarystart = i
            elif "Interval Summaries:" in line:
                self.sessionintervalsummarystart = i
            elif "Per-Stroke Data:" in line:
                self.perstrokedatastart = i
        # Very basic check to see if its a valid file
        if self.sessionsummarystart == 0 and self.sessionintervalsummarystart == 0 and self.perstrokedatastart == 0:
            print(f'File {filepath} does not appear to be a valid NK Speedcoach file!')
        else:
            self.pseudofile = StringIO(contents)
            self.init = True
| [
"pandas.read_csv"
] | [((1951, 2059), 'pandas.read_csv', 'pandas.read_csv', (['pseudofile'], {'header': 'None', 'skiprows': '(2)', 'nrows': '(5)', 'usecols': '[0, 1]', 'names': "['Field', 'Value']"}), "(pseudofile, header=None, skiprows=2, nrows=5, usecols=[0, 1\n ], names=['Field', 'Value'])\n", (1966, 2059), False, 'import pandas\n'), ((2719, 2816), 'pandas.read_csv', 'pandas.read_csv', (['pseudofile'], {'header': '(0)', 'skiprows': 'sessionsummarystart', 'nrows': 'rowrange', 'sep': '""","""'}), "(pseudofile, header=0, skiprows=sessionsummarystart, nrows=\n rowrange, sep=',')\n", (2734, 2816), False, 'import pandas\n'), ((3388, 3492), 'pandas.read_csv', 'pandas.read_csv', (['pseudofile'], {'header': '(0)', 'skiprows': 'sessionintervalsummarystart', 'nrows': 'rowrange', 'sep': '""","""'}), "(pseudofile, header=0, skiprows=sessionintervalsummarystart,\n nrows=rowrange, sep=',')\n", (3403, 3492), False, 'import pandas\n'), ((3999, 4074), 'pandas.read_csv', 'pandas.read_csv', (['pseudofile'], {'header': '(0)', 'skiprows': 'perstrokedatastart', 'sep': '""","""'}), "(pseudofile, header=0, skiprows=perstrokedatastart, sep=',')\n", (4014, 4074), False, 'import pandas\n'), ((6307, 6415), 'pandas.read_csv', 'pandas.read_csv', (['pseudofile'], {'header': 'None', 'skiprows': '(2)', 'nrows': '(6)', 'usecols': '[4, 5]', 'names': "['Field', 'Value']"}), "(pseudofile, header=None, skiprows=2, nrows=6, usecols=[4, 5\n ], names=['Field', 'Value'])\n", (6322, 6415), False, 'import pandas\n'), ((6974, 7082), 'pandas.read_csv', 'pandas.read_csv', (['pseudofile'], {'header': 'None', 'skiprows': '(2)', 'nrows': '(1)', 'usecols': '[8, 9]', 'names': "['Field', 'Value']"}), "(pseudofile, header=None, skiprows=2, nrows=1, usecols=[8, 9\n ], names=['Field', 'Value'])\n", (6989, 7082), False, 'import pandas\n'), ((7420, 7530), 'pandas.read_csv', 'pandas.read_csv', (['pseudofile'], {'header': 'None', 'skiprows': '(2)', 'nrows': '(5)', 'usecols': '[12, 13]', 'names': "['Field', 'Value']"}), "(pseudofile, 
header=None, skiprows=2, nrows=5, usecols=[12, \n 13], names=['Field', 'Value'])\n", (7435, 7530), False, 'import pandas\n')] |
# Copyright (c) 2016 <NAME> (<EMAIL>)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from ElementTree import ElementTreeVisitor
from utils.annotations import virtual, overrides
from Elements.element import element
class ElementProcessor(ElementTreeVisitor):
    """
    Base class for element processors that walk the element tree as
    visitors: ``visit`` simply forwards each visited node to ``process``.
    """
    @overrides(ElementTreeVisitor)
    def visit(self, element):
        # Delegate the actual work to the processor-specific hook.
        return self.process(element)

    @virtual
    def process(self, element):
        """
        Hook for subclasses: handle a single visited element.

        @param element: element to visit and process
        """
        pass
class PrintElementName(ElementProcessor):
    """Diagnostic processor that prints the element hierarchy indented by
    tree depth, verifying that the visitor infrastructure works correctly.
    """
    @overrides(ElementProcessor)
    def initialize(self):
        # Current traversal depth, used as the indentation level.
        self.tabbing = 0
    @overrides(ElementProcessor)
    def going_deeper(self):
        self.tabbing += 1
    @overrides(ElementProcessor)
    def coming_back_up(self):
        # Clamp at zero in case the traversal is unbalanced.
        self.tabbing = max(self.tabbing - 1, 0)
    @overrides(ElementProcessor)
    def process(self, element):
        indent = "\t" * self.tabbing
        print(indent + type(element).__name__)
class VerifyParentageAndConfigure(ElementProcessor):
    """Verifies that each element resides under a permitted parent chain,
    then applies any parent-specific configuration to the element.
    """
    @overrides(ElementProcessor)
    def initialize(self):
        # Chain of parent element types, seeded with the generic `element`
        # root as a placeholder so the list is never empty.
        self.parentage = [element]
        self.last_processed_element = None
    @overrides(ElementProcessor)
    def going_deeper(self):
        if self.last_processed_element is not None:
            self.parentage.append(type(self.last_processed_element))
    @overrides(ElementProcessor)
    def coming_back_up(self):
        self.parentage.pop()
    @overrides(ElementProcessor)
    def process(self, elem):
        self.last_processed_element = elem
        if issubclass(type(elem), element):
            # Slice off the placeholder root before validating the chain.
            elem.validate_parentage(self.parentage[1:])
            elem.configure_for_parent_element(self.parentage[-1])
| [
"utils.annotations.overrides"
] | [((1369, 1398), 'utils.annotations.overrides', 'overrides', (['ElementTreeVisitor'], {}), '(ElementTreeVisitor)\n', (1378, 1398), False, 'from utils.annotations import virtual, overrides\n'), ((1782, 1809), 'utils.annotations.overrides', 'overrides', (['ElementProcessor'], {}), '(ElementProcessor)\n', (1791, 1809), False, 'from utils.annotations import virtual, overrides\n'), ((1867, 1894), 'utils.annotations.overrides', 'overrides', (['ElementProcessor'], {}), '(ElementProcessor)\n', (1876, 1894), False, 'from utils.annotations import virtual, overrides\n'), ((1955, 1982), 'utils.annotations.overrides', 'overrides', (['ElementProcessor'], {}), '(ElementProcessor)\n', (1964, 1982), False, 'from utils.annotations import virtual, overrides\n'), ((2072, 2099), 'utils.annotations.overrides', 'overrides', (['ElementProcessor'], {}), '(ElementProcessor)\n', (2081, 2099), False, 'from utils.annotations import virtual, overrides\n'), ((2417, 2444), 'utils.annotations.overrides', 'overrides', (['ElementProcessor'], {}), '(ElementProcessor)\n', (2426, 2444), False, 'from utils.annotations import virtual, overrides\n'), ((2637, 2664), 'utils.annotations.overrides', 'overrides', (['ElementProcessor'], {}), '(ElementProcessor)\n', (2646, 2664), False, 'from utils.annotations import virtual, overrides\n'), ((2820, 2847), 'utils.annotations.overrides', 'overrides', (['ElementProcessor'], {}), '(ElementProcessor)\n', (2829, 2847), False, 'from utils.annotations import virtual, overrides\n'), ((2913, 2940), 'utils.annotations.overrides', 'overrides', (['ElementProcessor'], {}), '(ElementProcessor)\n', (2922, 2940), False, 'from utils.annotations import virtual, overrides\n')] |
# This program has been developed by students from the bachelor Computer Science at Utrecht University within the
# Software and Game project course
# ©Copyright Utrecht University Department of Information and Computing Sciences.
"""Perform sync_agent presence check on starting the app."""
from django.apps import AppConfig
from django.conf import settings
import os
import requests
class AssistantsConfig(AppConfig):
    """Perform sync_agent presence check on starting the app."""
    name = 'assistants'

    def ready(self):
        """Perform sync agent checks, updates and creations when the server is started.

        Raises:
            ValidationError: if any of the configured service URLs is invalid.
        """
        # RUN_MAIN is set by Django's autoreloader only in the child process,
        # so this guard keeps the checks from running twice in development.
        if os.environ.get('RUN_MAIN'):
            from assistants import sync_agent
            import django.core.validators as validators
            from django.core.exceptions import ValidationError
            validator = validators.URLValidator(schemes=("http", "https"))
            # URLValidator raises ValidationError (not ValueError) on bad
            # input, so catch both and re-raise with the cause preserved
            # instead of an empty, detail-less ValidationError.
            try:
                validator(os.getenv("MOODLE_BASE_URL"))
                validator(os.getenv("MOODLE_BASE_IP"))
                validator(os.getenv("MOODLE_WEBSERVICE_URL"))
                validator(os.getenv("LL_URL"))
            except (ValueError, ValidationError) as err:
                raise ValidationError('One of the configured service URLs is invalid.') from err
            try:
                sync_agent.build_sync_agents()
            except requests.ConnectionError:
                print(f'Connection to Learning Locker was refused! Is it running on {settings.LL_URL} and reachable?')
                quit()
| [
"django.core.validators.URLValidator",
"assistants.sync_agent.build_sync_agents",
"os.getenv",
"os.environ.get",
"django.core.exceptions.ValidationError"
] | [((636, 662), 'os.environ.get', 'os.environ.get', (['"""RUN_MAIN"""'], {}), "('RUN_MAIN')\n", (650, 662), False, 'import os\n'), ((854, 904), 'django.core.validators.URLValidator', 'validators.URLValidator', ([], {'schemes': "('http', 'https')"}), "(schemes=('http', 'https'))\n", (877, 904), True, 'import django.core.validators as validators\n'), ((1247, 1277), 'assistants.sync_agent.build_sync_agents', 'sync_agent.build_sync_agents', ([], {}), '()\n', (1275, 1277), False, 'from assistants import sync_agent\n'), ((948, 976), 'os.getenv', 'os.getenv', (['"""MOODLE_BASE_URL"""'], {}), "('MOODLE_BASE_URL')\n", (957, 976), False, 'import os\n'), ((1004, 1031), 'os.getenv', 'os.getenv', (['"""MOODLE_BASE_IP"""'], {}), "('MOODLE_BASE_IP')\n", (1013, 1031), False, 'import os\n'), ((1059, 1093), 'os.getenv', 'os.getenv', (['"""MOODLE_WEBSERVICE_URL"""'], {}), "('MOODLE_WEBSERVICE_URL')\n", (1068, 1093), False, 'import os\n'), ((1121, 1140), 'os.getenv', 'os.getenv', (['"""LL_URL"""'], {}), "('LL_URL')\n", (1130, 1140), False, 'import os\n'), ((1195, 1212), 'django.core.exceptions.ValidationError', 'ValidationError', ([], {}), '()\n', (1210, 1212), False, 'from django.core.exceptions import ValidationError\n')] |
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""NMS implementation"""
import numpy as np
def oks_iou(g, d, a_g, a_d, sigmas=None, vis_thr=None):
"""Calculate oks ious.
Args:
g: Ground truth keypoints.
d: Detected keypoints.
a_g: Area of the ground truth object.
a_d: Area of the detected object.
sigmas: standard deviation of keypoint labelling.
vis_thr: threshold of the keypoint visibility.
Returns:
list: The oks ious.
"""
if sigmas is None:
sigmas = np.array([
.26, .25, .25, .35, .35, .79, .79, .72, .72, .62, .62, 1.07, 1.07,
.87, .87, .89, .89
]) / 10.0
key_vars = (sigmas * 2)**2
xg = g[0::3]
yg = g[1::3]
vg = g[2::3]
ious = np.zeros(len(d), dtype=np.float32)
for n_d in range(0, len(d)):
xd = d[n_d, 0::3]
yd = d[n_d, 1::3]
vd = d[n_d, 2::3]
dx = xd - xg
dy = yd - yg
e = (dx**2 + dy**2) / key_vars / ((a_g + a_d[n_d]) / 2 + np.spacing(1)) / 2
if vis_thr is not None:
ind = list(vg > vis_thr) and list(vd > vis_thr)
e = e[ind]
ious[n_d] = np.sum(np.exp(-e)) / len(e) if e.size != 0 else 0.0
return ious
def oks_nms(kpts_db, thr, sigmas=None, vis_thr=None):
    """Greedy OKS-based non-maximum suppression.

    Args:
        kpts_db: keypoints.
        thr: Retain overlap < thr.
        sigmas: standard deviation of keypoint labelling.
        vis_thr: threshold of the keypoint visibility.
    Returns:
        np.ndarray: indexes to keep.
    """
    if not kpts_db:
        return []
    scores = np.array([entry['score'] for entry in kpts_db])
    kpts = np.array([entry['keypoints'].flatten() for entry in kpts_db])
    areas = np.array([entry['area'] for entry in kpts_db])
    order = scores.argsort()[::-1]
    keep = []
    while order.size > 0:
        best = order[0]
        keep.append(best)
        overlaps = oks_iou(kpts[best], kpts[order[1:]], areas[best],
                           areas[order[1:]], sigmas, vis_thr)
        survivors = np.where(overlaps <= thr)[0]
        # +1 because overlaps were computed against order[1:].
        order = order[survivors + 1]
    return np.array(keep)
def _rescore(overlap, scores, thr, key_type='gaussian'):
"""Rescoring mechanism gaussian or linear.
Args:
overlap: calculated ious
scores: target scores.
thr: retain oks overlap < thr.
key_type: 'gaussian' or 'linear'
Returns:
np.ndarray: indexes to keep
"""
assert len(overlap) == len(scores)
assert key_type in ['gaussian', 'linear']
if key_type == 'linear':
inds = np.where(overlap >= thr)[0]
scores[inds] = scores[inds] * (1 - overlap[inds])
else:
scores = scores * np.exp(-overlap**2 / thr)
return scores
def soft_oks_nms(kpts_db, thr, max_dets=20, sigmas=None, vis_thr=None):
    """Soft-NMS variant: decay the scores of overlapping poses instead of
    discarding them outright.

    Args:
        kpts_db
        thr: retain oks overlap < thr.
        max_dets: max number of detections to keep.
        sigmas: Keypoint labelling uncertainty.
    Returns:
        np.ndarray: indexes to keep.
    """
    if not kpts_db:
        return []
    scores = np.array([entry['score'] for entry in kpts_db])
    kpts = np.array([entry['keypoints'].flatten() for entry in kpts_db])
    areas = np.array([entry['area'] for entry in kpts_db])
    order = scores.argsort()[::-1]
    scores = scores[order]
    keep = np.zeros(max_dets, dtype=np.intp)
    keep_cnt = 0
    while order.size > 0 and keep_cnt < max_dets:
        best = order[0]
        overlaps = oks_iou(kpts[best], kpts[order[1:]], areas[best],
                           areas[order[1:]], sigmas, vis_thr)
        order = order[1:]
        # Decay the remaining scores by their overlap with the kept pose,
        # then re-sort so the next-best candidate bubbles to the front.
        scores = _rescore(overlaps, scores[1:], thr)
        resorted = scores.argsort()[::-1]
        order = order[resorted]
        scores = scores[resorted]
        keep[keep_cnt] = best
        keep_cnt += 1
    return keep[:keep_cnt]
| [
"numpy.where",
"numpy.exp",
"numpy.array",
"numpy.zeros",
"numpy.spacing"
] | [((2253, 2292), 'numpy.array', 'np.array', (["[k['score'] for k in kpts_db]"], {}), "([k['score'] for k in kpts_db])\n", (2261, 2292), True, 'import numpy as np\n'), ((2370, 2408), 'numpy.array', 'np.array', (["[k['area'] for k in kpts_db]"], {}), "([k['area'] for k in kpts_db])\n", (2378, 2408), True, 'import numpy as np\n'), ((2742, 2756), 'numpy.array', 'np.array', (['keep'], {}), '(keep)\n', (2750, 2756), True, 'import numpy as np\n'), ((3774, 3813), 'numpy.array', 'np.array', (["[k['score'] for k in kpts_db]"], {}), "([k['score'] for k in kpts_db])\n", (3782, 3813), True, 'import numpy as np\n'), ((3891, 3929), 'numpy.array', 'np.array', (["[k['area'] for k in kpts_db]"], {}), "([k['area'] for k in kpts_db])\n", (3899, 3929), True, 'import numpy as np\n'), ((4005, 4038), 'numpy.zeros', 'np.zeros', (['max_dets'], {'dtype': 'np.intp'}), '(max_dets, dtype=np.intp)\n', (4013, 4038), True, 'import numpy as np\n'), ((1162, 1278), 'numpy.array', 'np.array', (['[0.26, 0.25, 0.25, 0.35, 0.35, 0.79, 0.79, 0.72, 0.72, 0.62, 0.62, 1.07, \n 1.07, 0.87, 0.87, 0.89, 0.89]'], {}), '([0.26, 0.25, 0.25, 0.35, 0.35, 0.79, 0.79, 0.72, 0.72, 0.62, 0.62,\n 1.07, 1.07, 0.87, 0.87, 0.89, 0.89])\n', (1170, 1278), True, 'import numpy as np\n'), ((2670, 2694), 'numpy.where', 'np.where', (['(oks_ovr <= thr)'], {}), '(oks_ovr <= thr)\n', (2678, 2694), True, 'import numpy as np\n'), ((3221, 3245), 'numpy.where', 'np.where', (['(overlap >= thr)'], {}), '(overlap >= thr)\n', (3229, 3245), True, 'import numpy as np\n'), ((3343, 3370), 'numpy.exp', 'np.exp', (['(-overlap ** 2 / thr)'], {}), '(-overlap ** 2 / thr)\n', (3349, 3370), True, 'import numpy as np\n'), ((1647, 1660), 'numpy.spacing', 'np.spacing', (['(1)'], {}), '(1)\n', (1657, 1660), True, 'import numpy as np\n'), ((1808, 1818), 'numpy.exp', 'np.exp', (['(-e)'], {}), '(-e)\n', (1814, 1818), True, 'import numpy as np\n')] |
# import the necessary packages
import numpy as np
import cv2
# Open capture device index 1 (a second/external camera).
cap=cv2.VideoCapture(1)
def order_points(pts):
    """Order four (x, y) points as top-left, top-right, bottom-right,
    bottom-left.

    Uses the classic trick: the top-left corner has the smallest x+y sum
    and the bottom-right the largest, while the top-right/bottom-left have
    the smallest/largest y-x difference.
    """
    ordered = np.zeros((4, 2), dtype="float32")
    sums = pts.sum(axis=1)
    ordered[0] = pts[np.argmin(sums)]
    ordered[2] = pts[np.argmax(sums)]
    diffs = np.diff(pts, axis=1)
    ordered[1] = pts[np.argmin(diffs)]
    ordered[3] = pts[np.argmax(diffs)]
    return ordered
def four_point_transform(image, pts):
    """Warp `image` so the quadrilateral `pts` fills an axis-aligned
    rectangle, yielding a top-down ("bird's eye") view."""
    # Get corners in a consistent tl, tr, br, bl order.
    rect = order_points(pts)
    (tl, tr, br, bl) = rect
    # Output width: the longer of the two horizontal edges.
    width_bottom = np.sqrt((br[0] - bl[0]) ** 2 + (br[1] - bl[1]) ** 2)
    width_top = np.sqrt((tr[0] - tl[0]) ** 2 + (tr[1] - tl[1]) ** 2)
    max_width = max(int(width_bottom), int(width_top))
    # Output height: the longer of the two vertical edges.
    height_right = np.sqrt((tr[0] - br[0]) ** 2 + (tr[1] - br[1]) ** 2)
    height_left = np.sqrt((tl[0] - bl[0]) ** 2 + (tl[1] - bl[1]) ** 2)
    max_height = max(int(height_right), int(height_left))
    # Destination corners, in the same tl, tr, br, bl order.
    dst = np.array([
        [0, 0],
        [max_width - 1, 0],
        [max_width - 1, max_height - 1],
        [0, max_height - 1]], dtype="float32")
    # Compute the perspective matrix and apply it.
    M = cv2.getPerspectiveTransform(rect, dst)
    return cv2.warpPerspective(image, M, (max_width, max_height))
#=============================================================================================================================================
# mouse callback function
def draw_circle(event, x, y, flags, param):
    """Mouse callback: on a double left-click, draw a marker on the live
    frame and record the click as the next warp corner in `pts`."""
    global pts
    global num
    global frame
    if event == cv2.EVENT_LBUTTONDBLCLK:
        cv2.circle(frame, (x, y), 10, (255, 0, 0), -1)
        pts[num][0] = x
        pts[num][1] = y
        num = num + 1
        # Echo progress so the user knows how many corners remain.
        print(num)
        print(pts)
#=============================================================================================================================================
# Module-level state shared with the mouse callback: the four clicked
# corner points and a count of how many have been collected so far.
pts=np.array([(0,0),(0,1),(1,1),(1,0)])
num=0
def points_inverse():
    """Interactively collect the four warp corners.

    Shows the live camera feed until 'a' is pressed, then lets the user
    double-click the four corners (recorded by `draw_circle`) before
    closing all preview windows.
    """
    global pts
    global num
    global frame
    # Preview loop: freeze on the current frame once 'a' is pressed.
    while True:
        ret, frame = cap.read()
        cv2.imshow("image_demo", frame)
        if (cv2.waitKey(20) & 0xFF) == ord('a'):
            break
    cv2.destroyAllWindows()
    print("out")
    # Collection window: the callback records each double-clicked corner.
    cv2.namedWindow("image")
    cv2.setMouseCallback("image", draw_circle)
    print("out**")
    while num < 4:
        cv2.imshow("image", frame)
        cv2.waitKey(30)
    cv2.destroyAllWindows()
# Collect the four warp corners interactively, then stream the warped view.
points_inverse()
print("point define")
while(1):
    ret,frame=cap.read()
    # NOTE(review): `global` at module level is a no-op; kept as-is.
    global pts
    # apply the four point transform to obtain a "birds eye view" of
    # the image
    warped = four_point_transform(frame, pts)
    # show the original and warped images
    cv2.imshow("Original", frame)
    cv2.imshow("Warped", warped)
    # press 'a' to quit
    k = cv2.waitKey(20) & 0xFF
    if k == ord('a'):
        break
cv2.destroyAllWindows()
| [
"cv2.setMouseCallback",
"numpy.sqrt",
"cv2.getPerspectiveTransform",
"numpy.diff",
"numpy.argmax",
"cv2.imshow",
"numpy.array",
"numpy.zeros",
"cv2.warpPerspective",
"cv2.destroyAllWindows",
"cv2.VideoCapture",
"cv2.circle",
"numpy.argmin",
"cv2.waitKey",
"cv2.namedWindow"
] | [((66, 85), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(1)'], {}), '(1)\n', (82, 85), False, 'import cv2\n'), ((2964, 3006), 'numpy.array', 'np.array', (['[(0, 0), (0, 1), (1, 1), (1, 0)]'], {}), '([(0, 0), (0, 1), (1, 1), (1, 0)])\n', (2972, 3006), True, 'import numpy as np\n'), ((3787, 3810), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (3808, 3810), False, 'import cv2\n'), ((339, 372), 'numpy.zeros', 'np.zeros', (['(4, 2)'], {'dtype': '"""float32"""'}), "((4, 2), dtype='float32')\n", (347, 372), True, 'import numpy as np\n'), ((747, 767), 'numpy.diff', 'np.diff', (['pts'], {'axis': '(1)'}), '(pts, axis=1)\n', (754, 767), True, 'import numpy as np\n'), ((1236, 1288), 'numpy.sqrt', 'np.sqrt', (['((br[0] - bl[0]) ** 2 + (br[1] - bl[1]) ** 2)'], {}), '((br[0] - bl[0]) ** 2 + (br[1] - bl[1]) ** 2)\n', (1243, 1288), True, 'import numpy as np\n'), ((1303, 1355), 'numpy.sqrt', 'np.sqrt', (['((tr[0] - tl[0]) ** 2 + (tr[1] - tl[1]) ** 2)'], {}), '((tr[0] - tl[0]) ** 2 + (tr[1] - tl[1]) ** 2)\n', (1310, 1355), True, 'import numpy as np\n'), ((1595, 1647), 'numpy.sqrt', 'np.sqrt', (['((tr[0] - br[0]) ** 2 + (tr[1] - br[1]) ** 2)'], {}), '((tr[0] - br[0]) ** 2 + (tr[1] - br[1]) ** 2)\n', (1602, 1647), True, 'import numpy as np\n'), ((1663, 1715), 'numpy.sqrt', 'np.sqrt', (['((tl[0] - bl[0]) ** 2 + (tl[1] - bl[1]) ** 2)'], {}), '((tl[0] - bl[0]) ** 2 + (tl[1] - bl[1]) ** 2)\n', (1670, 1715), True, 'import numpy as np\n'), ((2032, 2142), 'numpy.array', 'np.array', (['[[0, 0], [maxWidth - 1, 0], [maxWidth - 1, maxHeight - 1], [0, maxHeight - 1]]'], {'dtype': '"""float32"""'}), "([[0, 0], [maxWidth - 1, 0], [maxWidth - 1, maxHeight - 1], [0, \n maxHeight - 1]], dtype='float32')\n", (2040, 2142), True, 'import numpy as np\n'), ((2218, 2256), 'cv2.getPerspectiveTransform', 'cv2.getPerspectiveTransform', (['rect', 'dst'], {}), '(rect, dst)\n', (2245, 2256), False, 'import cv2\n'), ((2267, 2319), 'cv2.warpPerspective', 'cv2.warpPerspective', (['image', 
'M', '(maxWidth, maxHeight)'], {}), '(image, M, (maxWidth, maxHeight))\n', (2286, 2319), False, 'import cv2\n'), ((3207, 3230), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (3228, 3230), False, 'import cv2\n'), ((3246, 3270), 'cv2.namedWindow', 'cv2.namedWindow', (['"""image"""'], {}), "('image')\n", (3261, 3270), False, 'import cv2\n'), ((3272, 3314), 'cv2.setMouseCallback', 'cv2.setMouseCallback', (['"""image"""', 'draw_circle'], {}), "('image', draw_circle)\n", (3292, 3314), False, 'import cv2\n'), ((3394, 3417), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (3415, 3417), False, 'import cv2\n'), ((3670, 3699), 'cv2.imshow', 'cv2.imshow', (['"""Original"""', 'frame'], {}), "('Original', frame)\n", (3680, 3699), False, 'import cv2\n'), ((3701, 3729), 'cv2.imshow', 'cv2.imshow', (['"""Warped"""', 'warped'], {}), "('Warped', warped)\n", (3711, 3729), False, 'import cv2\n'), ((525, 537), 'numpy.argmin', 'np.argmin', (['s'], {}), '(s)\n', (534, 537), True, 'import numpy as np\n'), ((554, 566), 'numpy.argmax', 'np.argmax', (['s'], {}), '(s)\n', (563, 566), True, 'import numpy as np\n'), ((785, 800), 'numpy.argmin', 'np.argmin', (['diff'], {}), '(diff)\n', (794, 800), True, 'import numpy as np\n'), ((817, 832), 'numpy.argmax', 'np.argmax', (['diff'], {}), '(diff)\n', (826, 832), True, 'import numpy as np\n'), ((2669, 2715), 'cv2.circle', 'cv2.circle', (['frame', '(x, y)', '(10)', '(255, 0, 0)', '(-1)'], {}), '(frame, (x, y), 10, (255, 0, 0), -1)\n', (2679, 2715), False, 'import cv2\n'), ((3108, 3139), 'cv2.imshow', 'cv2.imshow', (['"""image_demo"""', 'frame'], {}), "('image_demo', frame)\n", (3118, 3139), False, 'import cv2\n'), ((3349, 3375), 'cv2.imshow', 'cv2.imshow', (['"""image"""', 'frame'], {}), "('image', frame)\n", (3359, 3375), False, 'import cv2\n'), ((3377, 3392), 'cv2.waitKey', 'cv2.waitKey', (['(30)'], {}), '(30)\n', (3388, 3392), False, 'import cv2\n'), ((3735, 3750), 'cv2.waitKey', 'cv2.waitKey', (['(20)'], 
{}), '(20)\n', (3746, 3750), False, 'import cv2\n'), ((3147, 3162), 'cv2.waitKey', 'cv2.waitKey', (['(20)'], {}), '(20)\n', (3158, 3162), False, 'import cv2\n')] |
#!/usr/bin/python
from snap.pyglog import *
from snap.deluge import provenance
from snap.deluge import core
def test_PrintResourceProvenanceList():
    """Smoke-test the provenance timing lookup for a known PERT resource."""
    # Alternative resources kept for manual experimentation:
    #   maprfs://data/itergraph/tide_v12//sift//cbir/3cf541188f15713d6ebb2b9ff6badb8e//itergraph/0142f574ff8a1d0f2deaabb1622e84f8//phase002//merged_matches.pert
    #   maprfs://data/itergraph/tide_v12/sift/feature_counts.pert
    #   maprfs://data/itergraph/tide_v12/sift/features.pert
    #   maprfs://data/itergraph/tide_v12/cropped_scaled_photoid_to_image.pert
    uri = 'maprfs://data/itergraph/tide_v12/sift/cbir/3cf541188f15713d6ebb2b9ff6badb8e/query_results/shard_00000.pert'
    fingerprint = core.GetResourceFingerprintFromUri(uri)
    provenance.GetResourceTiming(fingerprint)


if __name__ == "__main__":
    test_PrintResourceProvenanceList()
| [
"snap.deluge.core.GetResourceFingerprintFromUri",
"snap.deluge.provenance.GetResourceTiming"
] | [((664, 703), 'snap.deluge.core.GetResourceFingerprintFromUri', 'core.GetResourceFingerprintFromUri', (['uri'], {}), '(uri)\n', (698, 703), False, 'from snap.deluge import core\n'), ((706, 747), 'snap.deluge.provenance.GetResourceTiming', 'provenance.GetResourceTiming', (['fingerprint'], {}), '(fingerprint)\n', (734, 747), False, 'from snap.deluge import provenance\n')] |
from datetime import datetime, date
from marqeta.response_models import datetime_object
import json
import re
class Pos(object):
    """Read-only wrapper over the raw point-of-sale JSON returned by the API.

    Each property reads a single field from the underlying response dict,
    yielding None when the field is absent.
    """

    def __init__(self, json_response):
        # Keep the raw response; accessors read from it lazily.
        self.json_response = json_response

    def __str__(self):
        return json.dumps(self.json_response, default=self.json_serial)

    @staticmethod
    def json_serial(o):
        # Render date/datetime values as strings so json.dumps can emit them.
        if isinstance(o, (datetime, date)):
            return o.__str__()

    @property
    def pan_entry_mode(self):
        return self.json_response.get('pan_entry_mode')

    @property
    def pin_entry_mode(self):
        return self.json_response.get('pin_entry_mode')

    @property
    def terminal_id(self):
        return self.json_response.get('terminal_id')

    @property
    def terminal_attendance(self):
        return self.json_response.get('terminal_attendance')

    @property
    def terminal_location(self):
        return self.json_response.get('terminal_location')

    @property
    def card_holder_presence(self):
        return self.json_response.get('card_holder_presence')

    @property
    def cardholder_authentication_method(self):
        return self.json_response.get('cardholder_authentication_method')

    @property
    def card_presence(self):
        return self.json_response.get('card_presence')

    @property
    def terminal_type(self):
        return self.json_response.get('terminal_type')

    @property
    def card_data_input_capability(self):
        return self.json_response.get('card_data_input_capability')

    @property
    def country_code(self):
        return self.json_response.get('country_code')

    @property
    def zip(self):
        return self.json_response.get('zip')

    @property
    def partial_approval_capable(self):
        return self.json_response.get('partial_approval_capable')

    @property
    def purchase_amount_only(self):
        return self.json_response.get('purchase_amount_only')

    @property
    def is_recurring(self):
        return self.json_response.get('is_recurring')

    def __repr__(self):
        return '<Marqeta.response_models.pos.Pos>' + self.__str__()
| [
"json.dumps"
] | [((252, 308), 'json.dumps', 'json.dumps', (['self.json_response'], {'default': 'self.json_serial'}), '(self.json_response, default=self.json_serial)\n', (262, 308), False, 'import json\n')] |
import time
from selenium import webdriver
def main():
    """Open the w3schools HTML tutorial and follow a partial-text link."""
    driver = webdriver.Chrome(executable_path='chromedriver.exe')
    driver.get('https://www.w3schools.com/html/default.asp')
    time.sleep(3)  # give the page time to render before locating the link
    # The text is a partial match of an anchor linking elsewhere on the site.
    driver.find_element_by_partial_link_text('HTML Global Attributes').click()
    input()  # keep the browser open until the user presses Enter


if __name__ == '__main__':
    main()
"selenium.webdriver.Chrome",
"time.sleep"
] | [((69, 121), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'executable_path': '"""chromedriver.exe"""'}), "(executable_path='chromedriver.exe')\n", (85, 121), False, 'from selenium import webdriver\n'), ((187, 200), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (197, 200), False, 'import time\n')] |
import numpy as np
from deap import benchmarks
from BayesOpt import BO
from BayesOpt.Surrogate import RandomForest
from BayesOpt.SearchSpace import ContinuousSpace, OrdinalSpace, NominalSpace
from BayesOpt.base import Solution
# Fix the RNG seed so the demo run is reproducible.
np.random.seed(42)
def obj_func(x):
    """Mixed-variable objective: quadratic in the two reals (shifted by -2),
    a x10 penalty on the integer's distance from 10, and +1 unless the
    nominal level is 'OK'."""
    x_r, x_i, x_d = np.array(x[:2]), x[2], x[3]
    penalty = 0 if x_d == 'OK' else 1
    return np.sum((x_r + np.array([2, 2])) ** 2) + abs(x_i - 10) * 10 + penalty
def eq_func(x):
    """Equality constraint: ||x_r||^2 - 2 == 0 on the continuous part."""
    return np.sum(np.array(x[:2]) ** 2) - 2
def ineq_func(x):
    """Inequality constraint value: sum of the continuous part plus one."""
    return np.array(x[:2]).sum() + 1
# Mixed search space: two continuous dims in [-10, 10], one ordinal
# (integer) dim in [5, 15] and one nominal dim with eight levels.
space = ((ContinuousSpace([-10, 10]) * 2) + OrdinalSpace([5, 15])
         + NominalSpace(['OK', 'A', 'B', 'C', 'D', 'E', 'F', 'G']))

# Previously evaluated points used to warm-start the optimizer.
warm_data = Solution([4.6827082694127835, 9.87885354178838, 5, 'A'], var_name=["r_0", "r_1", "i", "d"], n_eval=1, fitness=236.76575128)
warm_data += Solution([-8.99187067168115, 8.317469942991558, 5, 'D'], var_name=["r_0", "r_1", "i", "d"], n_eval=1, fitness=206.33644151)
warm_data += Solution([-2.50919762305275, 9.014286128198322, 12, 'G'], var_name=["r_0", "r_1", "i", "d"], n_eval=1, fitness=142.57378113)
warm_data += Solution([4.639878836228101, 1.973169683940732, 9, 'G'], var_name=["r_0", "r_1", "i", "d"], n_eval=1, fitness=70.8740683)

# `11 < 2` is always False, so only the else branch runs (warm data loaded
# from file); the first branch is kept for in-memory warm starts.
if 11 < 2:
    model = RandomForest(levels=space.levels)
    opt = BO(space, obj_func, model, minimize=True,
             n_init_sample=3, max_eval=50, verbose=True, optimizer='MIES',
             warm_data=warm_data)
    xopt, fopt, stop_dict = opt.run()
else:
    model = RandomForest(levels=space.levels)
    opt = BO(space, obj_func, model, minimize=True,
             n_init_sample=3, max_eval=50, verbose=True, optimizer='MIES',
             warm_data="test_warmdata.data")
    xopt, fopt, stop_dict = opt.run()
| [
"BayesOpt.SearchSpace.ContinuousSpace",
"BayesOpt.Surrogate.RandomForest",
"BayesOpt.BO",
"BayesOpt.SearchSpace.OrdinalSpace",
"numpy.array",
"numpy.sum",
"BayesOpt.base.Solution",
"numpy.random.seed",
"BayesOpt.SearchSpace.NominalSpace"
] | [((230, 248), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (244, 248), True, 'import numpy as np\n'), ((745, 872), 'BayesOpt.base.Solution', 'Solution', (["[4.6827082694127835, 9.87885354178838, 5, 'A']"], {'var_name': "['r_0', 'r_1', 'i', 'd']", 'n_eval': '(1)', 'fitness': '(236.76575128)'}), "([4.6827082694127835, 9.87885354178838, 5, 'A'], var_name=['r_0',\n 'r_1', 'i', 'd'], n_eval=1, fitness=236.76575128)\n", (753, 872), False, 'from BayesOpt.base import Solution\n'), ((882, 1009), 'BayesOpt.base.Solution', 'Solution', (["[-8.99187067168115, 8.317469942991558, 5, 'D']"], {'var_name': "['r_0', 'r_1', 'i', 'd']", 'n_eval': '(1)', 'fitness': '(206.33644151)'}), "([-8.99187067168115, 8.317469942991558, 5, 'D'], var_name=['r_0',\n 'r_1', 'i', 'd'], n_eval=1, fitness=206.33644151)\n", (890, 1009), False, 'from BayesOpt.base import Solution\n'), ((1019, 1147), 'BayesOpt.base.Solution', 'Solution', (["[-2.50919762305275, 9.014286128198322, 12, 'G']"], {'var_name': "['r_0', 'r_1', 'i', 'd']", 'n_eval': '(1)', 'fitness': '(142.57378113)'}), "([-2.50919762305275, 9.014286128198322, 12, 'G'], var_name=['r_0',\n 'r_1', 'i', 'd'], n_eval=1, fitness=142.57378113)\n", (1027, 1147), False, 'from BayesOpt.base import Solution\n'), ((1157, 1282), 'BayesOpt.base.Solution', 'Solution', (["[4.639878836228101, 1.973169683940732, 9, 'G']"], {'var_name': "['r_0', 'r_1', 'i', 'd']", 'n_eval': '(1)', 'fitness': '(70.8740683)'}), "([4.639878836228101, 1.973169683940732, 9, 'G'], var_name=['r_0',\n 'r_1', 'i', 'd'], n_eval=1, fitness=70.8740683)\n", (1165, 1282), False, 'from BayesOpt.base import Solution\n'), ((481, 496), 'numpy.array', 'np.array', (['x[:2]'], {}), '(x[:2])\n', (489, 496), True, 'import numpy as np\n'), ((558, 573), 'numpy.array', 'np.array', (['x[:2]'], {}), '(x[:2])\n', (566, 573), True, 'import numpy as np\n'), ((674, 729), 'BayesOpt.SearchSpace.NominalSpace', 'NominalSpace', (["['OK', 'A', 'B', 'C', 'D', 'E', 'F', 'G']"], {}), "(['OK', 'A', 
'B', 'C', 'D', 'E', 'F', 'G'])\n", (686, 729), False, 'from BayesOpt.SearchSpace import ContinuousSpace, OrdinalSpace, NominalSpace\n'), ((1304, 1337), 'BayesOpt.Surrogate.RandomForest', 'RandomForest', ([], {'levels': 'space.levels'}), '(levels=space.levels)\n', (1316, 1337), False, 'from BayesOpt.Surrogate import RandomForest\n'), ((1348, 1476), 'BayesOpt.BO', 'BO', (['space', 'obj_func', 'model'], {'minimize': '(True)', 'n_init_sample': '(3)', 'max_eval': '(50)', 'verbose': '(True)', 'optimizer': '"""MIES"""', 'warm_data': 'warm_data'}), "(space, obj_func, model, minimize=True, n_init_sample=3, max_eval=50,\n verbose=True, optimizer='MIES', warm_data=warm_data)\n", (1350, 1476), False, 'from BayesOpt import BO\n'), ((1555, 1588), 'BayesOpt.Surrogate.RandomForest', 'RandomForest', ([], {'levels': 'space.levels'}), '(levels=space.levels)\n', (1567, 1588), False, 'from BayesOpt.Surrogate import RandomForest\n'), ((1599, 1738), 'BayesOpt.BO', 'BO', (['space', 'obj_func', 'model'], {'minimize': '(True)', 'n_init_sample': '(3)', 'max_eval': '(50)', 'verbose': '(True)', 'optimizer': '"""MIES"""', 'warm_data': '"""test_warmdata.data"""'}), "(space, obj_func, model, minimize=True, n_init_sample=3, max_eval=50,\n verbose=True, optimizer='MIES', warm_data='test_warmdata.data')\n", (1601, 1738), False, 'from BayesOpt import BO\n'), ((287, 302), 'numpy.array', 'np.array', (['x[:2]'], {}), '(x[:2])\n', (295, 302), True, 'import numpy as np\n'), ((508, 524), 'numpy.sum', 'np.sum', (['(x_r ** 2)'], {}), '(x_r ** 2)\n', (514, 524), True, 'import numpy as np\n'), ((585, 596), 'numpy.sum', 'np.sum', (['x_r'], {}), '(x_r)\n', (591, 596), True, 'import numpy as np\n'), ((646, 667), 'BayesOpt.SearchSpace.OrdinalSpace', 'OrdinalSpace', (['[5, 15]'], {}), '([5, 15])\n', (658, 667), False, 'from BayesOpt.SearchSpace import ContinuousSpace, OrdinalSpace, NominalSpace\n'), ((612, 638), 'BayesOpt.SearchSpace.ContinuousSpace', 'ContinuousSpace', (['[-10, 10]'], {}), '([-10, 10])\n', (627, 
638), False, 'from BayesOpt.SearchSpace import ContinuousSpace, OrdinalSpace, NominalSpace\n'), ((402, 418), 'numpy.array', 'np.array', (['[2, 2]'], {}), '([2, 2])\n', (410, 418), True, 'import numpy as np\n')] |
# Standard library imports
import datetime
import time
import json
import copy
# Third party imports
import traceback
import requests
from loguru import logger
import pymysql
# import requests
import paramiko
# Local application imports
from func.save_data import save_509_data
from utils import send_to_axxnr
from func.judge.judge import judge_data
# Prefix used when building the metric names handed to judge_data.
system_flag = '5x9'
def get_loading_rate_data(host_name):
    """Fetch the newest loading-rate sample from the remote host over SSH,
    persist it and run a threshold check on each record.

    @params:
        host_name : host name (required) str
    """
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(hostname=host_name, port=22, username='root', password='<PASSWORD>')
    try:
        # Pick the latest non-"wrong" loading_rate_* file and dump its content.
        cmd = "find /opt/recv/nfs/ -name 'loading_rate_*' | grep -v wrong|sort|awk 'END{print}'|xargs cat"
        stdin, stdout, stderr = client.exec_command(cmd)
        result = stdout.read().decode('utf-8')
    finally:
        # Always release the SSH connection (previously leaked).
        client.close()
    if result:
        result = json.loads(result)
        ret_list = []
        # Each entry: (device, instance, sample time, value).
        temp_time1 = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(result[0]['values'][0][0]))
        temp_time2 = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(result[1]['values'][0][0]))
        ret_list.append((result[0]['metric']['device'], result[0]['metric']['instance'], temp_time1,
                         result[0]['values'][0][1]))
        ret_list.append((result[1]['metric']['device'], result[1]['metric']['instance'], temp_time2,
                         result[1]['values'][0][1]))
        save_509_data.save_loading_rate_data(ret_list)
        for data_list in ret_list:
            judge_data(system_flag + '加载率-' + data_list[1], data_list[3])
def del_loading_rate_data(host_name):
    """Remove all collected loading-rate files on the remote host.

    @params:
        host_name : host name (required) str
    """
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(hostname=host_name, port=22, username='root', password='<PASSWORD>')
    try:
        cmd = "find /opt/recv/nfs/ -name 'loading_rate_*' | xargs rm -rf"
        stdin, stdout, stderr = client.exec_command(cmd)
        # Wait for the remote command to finish before disconnecting, so
        # closing the connection cannot abort the deletion.
        stdout.channel.recv_exit_status()
    finally:
        # Always release the SSH connection (previously leaked).
        client.close()
def get_hive_db_data(host_name):
    """Fetch the newest Hive table/database storage snapshot over SSH and
    persist it.

    @params:
        host_name : host name (required) str
    """
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(hostname=host_name, port=22, username='root', password='<PASSWORD>')
    try:
        # Pick the latest non-"wrong" 509hive_db_* file and dump its content.
        cmd = "find /opt/recv/nfs/ -name '509hive_db_*' | grep -v wrong|sort|awk 'END{print}'|xargs cat"
        stdin, stdout, stderr = client.exec_command(cmd)
        result = stdout.read().decode('utf-8')
    finally:
        # Always release the SSH connection (previously leaked).
        client.close()
    if result:
        result = json.loads(result)
        # Merge per-table and per-database storage rows into one batch.
        data1 = [tuple(i) for i in result["table_storage"]]
        data2 = [tuple(i) for i in result["db_storage"]]
        ret_list = data1 + data2
        logger.debug(len(ret_list))
        logger.debug(ret_list)
        save_509_data.save_hive_db_data(ret_list)
def del_hive_db_data(host_name):
    """
    Delete all 509hive_db_* dump files on the remote host.

    @params:
        host_name : hostname to connect to (required) str
    """
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(hostname=host_name, port=22, username='root', password='<PASSWORD>')
    try:
        cmd = "find /opt/recv/nfs/ -name '509hive_db_*' | xargs rm -rf"
        stdin, stdout, stderr = client.exec_command(cmd)
        # wait for the remote rm to finish before tearing down the connection
        stdout.channel.recv_exit_status()
    finally:
        # the original implementation leaked the SSH connection; always close it
        client.close()
def get_hive_db_increment():
    """
    Compute per-db storage increments over the last two hours.

    Reads rows from t_509_hive_db (newest first), pairs consecutive
    records with the same db_id and stores (db_id, delta, time) tuples.
    """
    # current time
    NOW_DATE_TIME = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    # current time minus 2 hours
    HOUR_DATE_TIME = (datetime.datetime.now()+datetime.timedelta(minutes=-120)).strftime("%Y-%m-%d %H:%M:%S")
    db = pymysql.connect(host='10.238.249.33', port=3306, user='root', password='<PASSWORD>',
                         database='tianjin', charset='utf8')
    # db = pymysql.connect(host='172.27.1.12', port=3306, user='root', password='<PASSWORD>', database='tianjin',
    #                      charset='utf8mb4')
    cursor = db.cursor()
    sql = """
    SELECT
        db_id,
        data,
        d_time
    FROM
        t_509_hive_db
    WHERE
        d_time > "{}" AND d_time <= "{}"
    ORDER BY
        d_time DESC;""".format(HOUR_DATE_TIME,NOW_DATE_TIME)
    cursor.execute(sql)
    rows = cursor.fetchall()
    data_list = [list(row) for row in rows]
    increment = []
    new_list = copy.copy(data_list)
    # NOTE(review): new_list is mutated (remove) while being iterated below,
    # which can make Python skip elements — verify the pairing logic is intended.
    for item in new_list:
        data_list.remove(item)
        for item2 in data_list:
            if item2[0] == item[0]:
                temp = []
                temp.append(item[0])
                # "detla" is a typo for delta: newer value minus older value
                detla = item[1]-item2[1]
                temp.append(detla)
                temp.append(item[2])
                new_list.remove(item2)
                increment.append(temp)
    result = [tuple(i) for i in increment]
    cursor.close()
    db.close()
    logger.debug(len(result))
    if result:
        save_509_data.save_hive_db_increment(result)
def get_loading_rate_increment():
    """Compute loading-rate increments over the last 10 minutes.

    Pairs the two most recent samples per (device, ip_port) and stores
    (device, ip_port, time, delta) tuples.
    """
    NOW_DATE_TIME = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    # current time minus 10 minutes
    MIN_DATE_TIME = (datetime.datetime.now()+datetime.timedelta(minutes=-10)).strftime("%Y-%m-%d %H:%M:%S")
    db = pymysql.connect(host='10.238.249.33', port=3306, user='root', password='<PASSWORD>',
                         database='tianjin', charset='utf8mb4')
    # db = pymysql.connect(host='172.27.1.12', port=3306, user='root', password='<PASSWORD>', database='tianjin',
    #                      charset='utf8mb4')
    cursor = db.cursor()
    sql = """
    SELECT
        device,
        ip_port,
        d_time,
        data
    FROM
        t_509_loading_rate
    WHERE
        d_time > '{}'
        AND d_time <= '{}'
    ORDER BY
        d_time DESC;""".format(MIN_DATE_TIME, NOW_DATE_TIME)
    cursor.execute(sql)
    rows = cursor.fetchall()
    data_list = [list(row) for row in rows]
    increment = []
    # first half of the DESC-ordered rows are the newer samples; for each,
    # find the older sample with the same device/ip_port and take the delta
    for i in range(int(len(data_list)/2)):
        temp = data_list[i]
        for j in data_list:
            if j[0] == data_list[i][0] and j[1] == data_list[i][1] and j[2] != data_list[i][2]:
                delta = data_list[i][3] - j[3]
                temp[3] = delta
        increment.append(temp)
    result = [tuple(k) for k in increment]
    cursor.close()
    db.close()
    logger.debug(len(result))
    if result:
        save_509_data.save_loading_rate_increment(result)
def _classify_operator(desc):
    """Map a port description string to a carrier name.

    Returns "联通"/"移动"/"电信" when a carrier tag is embedded in the
    description, otherwise prints the unrecognised description and
    returns "其他".
    """
    if "-LT-" in desc:
        return "联通"
    if "-YD-" in desc:
        return "移动"
    if "-DX-" in desc:
        return "电信"
    print("========= desc ==========")
    print(desc)
    return "其他"


def get_row_flow():
    """Fetch raw port-flow samples from the northstar API and persist them.

    Sample API payload:
    [{"obps": 429941888.0, "discards": 0.0, "iMpps": 0.0, "ibps": 0.0, "ip": "10.148.255.7", "iUpps": 0.0, "epps": 0.0, "state": 1,
    "oMpps": 0.0, "timestamp": "2020-12-04 09:23:44", "speed": 10000, "port": "xgei-0/1/0/3", "oUpps": 253642.7,
    "desc": "To:TJ-V-QDJC-SK-SEV2-TGE1"}, ]
    """
    url = "http://northstar.pub:8080/northstar/port/?net=v&ip=10.148.255.7"
    try:
        res = requests.get(url, timeout=30)
        if res.status_code == 200:
            d = res.json()
        else:
            print("=========== get api code err ===========")
            print(res)
            return
    except Exception as e:
        print("============ 请求接口异常 ============")
        traceback.print_exc()
        # explicit return: nothing to save when the request failed
        return
    if not d:
        print("=========== api data is empty ===========")
        return
    insert_data = []  # rows to be persisted
    for data in d:
        desc = data.get("desc", "")
        if desc == "":  # carrier info is derived from the description
            continue
        operators = _classify_operator(desc)
        ip = data.get("ip", "")
        obps = data.get("obps", "")
        ibps = data.get("ibps", "")
        timestamp = data.get("timestamp", "")
        if timestamp == "":
            # fall back to the local wall-clock time
            timestamp = time.strftime("%Y-%m-%d %H:%M:%S")
        port = data.get("port", "")
        insert_data.append((ip, obps, ibps, operators, desc, port, timestamp))
    save_509_data.save_row_flow(insert_data)
| [
"time.localtime",
"json.loads",
"func.save_data.save_509_data.save_loading_rate_increment",
"loguru.logger.debug",
"paramiko.AutoAddPolicy",
"func.save_data.save_509_data.save_hive_db_increment",
"func.save_data.save_509_data.save_row_flow",
"time.strftime",
"pymysql.connect",
"requests.get",
"d... | [((520, 540), 'paramiko.SSHClient', 'paramiko.SSHClient', ([], {}), '()\n', (538, 540), False, 'import paramiko\n'), ((1771, 1791), 'paramiko.SSHClient', 'paramiko.SSHClient', ([], {}), '()\n', (1789, 1791), False, 'import paramiko\n'), ((2212, 2232), 'paramiko.SSHClient', 'paramiko.SSHClient', ([], {}), '()\n', (2230, 2232), False, 'import paramiko\n'), ((3054, 3074), 'paramiko.SSHClient', 'paramiko.SSHClient', ([], {}), '()\n', (3072, 3074), False, 'import paramiko\n'), ((3635, 3760), 'pymysql.connect', 'pymysql.connect', ([], {'host': '"""10.238.249.33"""', 'port': '(3306)', 'user': '"""root"""', 'password': '"""<PASSWORD>"""', 'database': '"""tianjin"""', 'charset': '"""utf8"""'}), "(host='10.238.249.33', port=3306, user='root', password=\n '<PASSWORD>', database='tianjin', charset='utf8')\n", (3650, 3760), False, 'import pymysql\n'), ((4362, 4382), 'copy.copy', 'copy.copy', (['data_list'], {}), '(data_list)\n', (4371, 4382), False, 'import copy\n'), ((5208, 5336), 'pymysql.connect', 'pymysql.connect', ([], {'host': '"""10.238.249.33"""', 'port': '(3306)', 'user': '"""root"""', 'password': '"""<PASSWORD>"""', 'database': '"""tianjin"""', 'charset': '"""utf8mb4"""'}), "(host='10.238.249.33', port=3306, user='root', password=\n '<PASSWORD>', database='tianjin', charset='utf8mb4')\n", (5223, 5336), False, 'import pymysql\n'), ((580, 604), 'paramiko.AutoAddPolicy', 'paramiko.AutoAddPolicy', ([], {}), '()\n', (602, 604), False, 'import paramiko\n'), ((925, 943), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (935, 943), False, 'import json\n'), ((1470, 1516), 'func.save_data.save_509_data.save_loading_rate_data', 'save_509_data.save_loading_rate_data', (['ret_list'], {}), '(ret_list)\n', (1506, 1516), False, 'from func.save_data import save_509_data\n'), ((1831, 1855), 'paramiko.AutoAddPolicy', 'paramiko.AutoAddPolicy', ([], {}), '()\n', (1853, 1855), False, 'import paramiko\n'), ((2272, 2296), 'paramiko.AutoAddPolicy', 'paramiko.AutoAddPolicy', 
([], {}), '()\n', (2294, 2296), False, 'import paramiko\n'), ((2615, 2633), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (2625, 2633), False, 'import json\n'), ((2828, 2850), 'loguru.logger.debug', 'logger.debug', (['ret_list'], {}), '(ret_list)\n', (2840, 2850), False, 'from loguru import logger\n'), ((2859, 2900), 'func.save_data.save_509_data.save_hive_db_data', 'save_509_data.save_hive_db_data', (['ret_list'], {}), '(ret_list)\n', (2890, 2900), False, 'from func.save_data import save_509_data\n'), ((3114, 3138), 'paramiko.AutoAddPolicy', 'paramiko.AutoAddPolicy', ([], {}), '()\n', (3136, 3138), False, 'import paramiko\n'), ((4892, 4936), 'func.save_data.save_509_data.save_hive_db_increment', 'save_509_data.save_hive_db_increment', (['result'], {}), '(result)\n', (4928, 4936), False, 'from func.save_data import save_509_data\n'), ((6399, 6448), 'func.save_data.save_509_data.save_loading_rate_increment', 'save_509_data.save_loading_rate_increment', (['result'], {}), '(result)\n', (6440, 6448), False, 'from func.save_data import save_509_data\n'), ((6905, 6934), 'requests.get', 'requests.get', (['url'], {'timeout': '(30)'}), '(url, timeout=30)\n', (6917, 6934), False, 'import requests\n'), ((8358, 8398), 'func.save_data.save_509_data.save_row_flow', 'save_509_data.save_row_flow', (['insert_data'], {}), '(insert_data)\n', (8385, 8398), False, 'from func.save_data import save_509_data\n'), ((1022, 1063), 'time.localtime', 'time.localtime', (["result[0]['values'][0][0]"], {}), "(result[0]['values'][0][0])\n", (1036, 1063), False, 'import time\n'), ((1121, 1162), 'time.localtime', 'time.localtime', (["result[1]['values'][0][0]"], {}), "(result[1]['values'][0][0])\n", (1135, 1162), False, 'import time\n'), ((1564, 1625), 'func.judge.judge.judge_data', 'judge_data', (["(system_flag + '加载率-' + data_list[1])", 'data_list[3]'], {}), "(system_flag + '加载率-' + data_list[1], data_list[3])\n", (1574, 1625), False, 'from func.judge.judge import judge_data\n'), 
((3446, 3469), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3467, 3469), False, 'import datetime\n'), ((5021, 5044), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5042, 5044), False, 'import datetime\n'), ((7201, 7222), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (7220, 7222), False, 'import traceback\n'), ((3538, 3561), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3559, 3561), False, 'import datetime\n'), ((3562, 3594), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(-120)'}), '(minutes=-120)\n', (3580, 3594), False, 'import datetime\n'), ((5112, 5135), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5133, 5135), False, 'import datetime\n'), ((5136, 5167), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(-10)'}), '(minutes=-10)\n', (5154, 5167), False, 'import datetime\n'), ((8148, 8182), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d %H:%M:%S"""'], {}), "('%Y-%m-%d %H:%M:%S')\n", (8161, 8182), False, 'import time\n')] |
'''
Author :: <NAME> -- https://github.com/ravisankar712
<NAME>' webpage :: http://www.red3d.com/cwr/boids/
'''
from manimlib.imports import *
import QuadTree as qt
###########################
## setup for simulations ##
###########################
# Hex color palettes used to tint individual boids per scene.
BLUE_SHADES = ["#023E8A", "#0077B6", "#0096C7", "#00B4D8", "#48CAE4", "#90E0EF", "#ADE8F4"]
GRASSHOPPER_SHADES = ["#007F5F", "#2B9348", "#55A630", "#80B918", "#AACC00", "#BFD200", "#D4D700", "#DDDF00", "#EEEF20", "#FFFF3F"]
RED_SHADES = ["#e62c29", "#ff312e", "#cc2725", "#ff4643", "#ff5a58", "#ff6f6d", "#ff8382", "#ff9897", "#ffadab"]
GREEN_SHADES = ["#95c89a", "#7fbd85", "#6ab271", "#55a75d", "#3f9c48", "#2a9134", "#26832f", "#22742a"]
ORANGE_SHADES = ["#de7200", "#f77f00", "#f88c1a", "#f99933", "#f9a54d", "#fab266", "#fbbf80"]
PINK_SHADES = ["#f7a3b7", "#f591a9", "#f47e9a", "#f26c8c", "#f1597d", "#ef476f", "#d74064", "#bf3959"]
YELLOW_SHADES = ["#cc9202", "#e6a503", "#ffb703", "#ffbe1c", "#ffc535", "#ffcd4f", "#ffd468"]
#for introducing a bit of noise to the boids
def rotation_matrix(angle):
    """Return the 3x3 matrix for a rotation by `angle` about the z axis.

    Used to add small random deviations to boid headings.
    """
    c, s = np.cos(angle), np.sin(angle)
    return np.array([
        [c, -s, 0.0],
        [s, c, 0.0],
        [0.0, 0.0, 1.0],
    ])
class Boid(VGroup):
    """A single flocking agent implementing Craig Reynolds' boid rules.

    The boid is a VGroup holding its avatar; its position is the group
    center and it steers itself via an updater every frame.
    """
    CONFIG = {
        "size" : 0.1,
        "boid_colors" : None,
        "maxSpeed" : 2.0,
        "maxForce" : 3.0,
        #this is the basic perception radius
        "friendzone" : 1.0,
        #the rules of the game!
        "alignment" : True,
        "cohesion" : True,
        "separation" : True,
        "noise" : True,
        #wrap around in the X or Y direcion.
        "wrapX" : False,
        "wrapY" : False
    }
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        #the boid don't need a position since VGroup already has a center. To get the position, just use get_center
        self.velocity = (np.random.random(3) - 0.5)
        self.velocity[2] = 0.0 #manim has vectors in 3D. If we let the third component nonzero, the norm will do crazy things!
        self.noise_probability = 0.5
        #setting to maxspeed if only noise is present. This will make the random walk look prettier!!
        if not self.alignment and not self.cohesion and not self.separation:
            self.set_maxSpeed()
        self.alignment_radius = self.friendzone
        self.cohesion_radius = self.friendzone
        self.separation_radius = self.friendzone/2.0
        #a list which keeps track of other boids withing the perception
        self.friends = []
        #a list keeping track of all the obstacles
        self.obstacles = []
        self.add_body()
        #the updator for each boid.
        #since boid ingerits VGroup, and boid animations are applied to the VGroup, the body automatically follows the group center.
        self.add_updater(lambda m, dt : m.update_position(dt))

    def add_body(self):
        """Create the avatar, color it, and orient it along the velocity."""
        body = self.get_body()
        body.set_height(self.size)
        body.move_to(self.get_center())
        if self.boid_colors is None:
            #setting a random color
            col = random_bright_color()
        else:
            col = random.choice(self.boid_colors)
        body.set_color(color=col)
        body.rotate(np.arctan2(self.velocity[1], self.velocity[0]))
        self.add(body)
        self.body = body

    #body is a manim polygon Mobject.
    #This is written as a separate function so that it is easy to inherit the boid class and rewrite just the get_body function to get a new avatar!
    def get_body(self):
        c = self.get_center()
        v1 = c + RIGHT
        v2 = c + (UL + LEFT)
        v3 = c + (DL + LEFT)
        return Polygon(v1, v2, v3, fill_opacity=1.0)

    #getting other boids within the perception. Optimised using a QuadTree
    def get_friends(self, qtree):
        points = qtree.query_radius(self.get_center()[:-1], self.friendzone, [])
        for p in points:
            if p.payload != self:
                self.friends.append(p.payload)

    #for clearing the list of friends after each updation step
    def unfriend(self):
        self.friends = []

    #add obstacles to a boid's vision.
    def add_obstacles(self, obstacles):
        for obs in obstacles:
            self.obstacles.append(obs)

    #function to set to max speed
    def set_maxSpeed(self):
        speed = np.linalg.norm(self.velocity)
        if speed > 0.0:
            self.velocity *= self.maxSpeed / speed

    #incase repulsion from top and bottom boundaries is required(wrapy is false)
    def get_vertical_fear(self):
        d_top = FRAME_Y_RADIUS - self.get_center()[1]
        d_bot = +FRAME_Y_RADIUS + self.get_center()[1]
        #using MED_LARGE_BUFF as the fear radius; force grows as the boundary nears
        if 0.0 < d_top < MED_LARGE_BUFF:
            return np.array([0., - 2. * self.maxForce/d_top, 0.])
        elif 0.0 < d_bot < MED_LARGE_BUFF:
            return np.array([0., 2. * self.maxForce/d_bot, 0.])
        else: return 0.0

    #incase repulsion from left and right boundaries is required(wrapy is false)
    def get_horizontal_fear(self):
        d_right = FRAME_X_RADIUS - self.get_center()[0]
        d_left = +FRAME_X_RADIUS + self.get_center()[0]
        #using MED_LARGE_BUFF as the fear radius; force grows as the boundary nears
        if 0.0 < d_right < MED_LARGE_BUFF:
            return np.array([- 2. * self.maxForce/d_right, 0., 0.])
        elif 0.0 < d_left < MED_LARGE_BUFF:
            return np.array([2. * self.maxForce/d_left, 0., 0.])
        else: return 0.0

    #the end is the beginning!! #anything goes to the right comes from the left(and vice versa)!
    def wrapx(self):
        x, y = self.get_center()[:-1]
        if x > FRAME_X_RADIUS:
            self.move_to(np.array([-FRAME_X_RADIUS, y, 0.0]))
        elif x < - FRAME_X_RADIUS:
            self.move_to(np.array([FRAME_X_RADIUS, y, 0.0]))

    #the end is the beginning!! anything goes to the top comes from the bottom(and vice versa)!
    def wrapy(self):
        x, y = self.get_center()[:-1]
        if y > FRAME_Y_RADIUS:
            self.move_to(np.array([x, -FRAME_Y_RADIUS, 0.0]))
        elif y < - FRAME_Y_RADIUS:
            self.move_to(np.array([x, FRAME_Y_RADIUS, 0.0]))

    #Craig Reynolds' algorithm step-1 :: Alignment. Link to the webpage on top
    def get_alignment(self):
        """Steer towards the (distance-weighted) average heading of neighbours."""
        steering = np.zeros(3)
        desired_vel = np.zeros(3)
        total = 0.
        for friend in self.friends:
            d = np.linalg.norm(self.get_center() - friend.get_center())
            if d > 0.0 and d < self.alignment_radius:
                desired_vel += friend.velocity/d
                total += 1
        if total > 0:
            desired_vel /= total
            desired_speed = np.linalg.norm(desired_vel)
            if desired_speed > 0.0:
                desired_vel = desired_vel/desired_speed * self.maxSpeed
            steering = desired_vel - self.velocity
            if np.linalg.norm(steering) > 0.0:
                steering = steering / np.linalg.norm(steering) * self.maxForce
        return steering

    #Craig Reynolds' algorithm step-2 :: Cohesion. Link to the webpage on top
    def get_cohesion(self):
        """Steer towards the center of mass of neighbours (half-strength)."""
        steering = np.zeros(3)
        center_of_mass = np.zeros(3)
        total = 0.
        for friend in self.friends:
            d = np.linalg.norm(self.get_center() - friend.get_center())
            if d > 0.0 and d < self.cohesion_radius:
                center_of_mass += friend.get_center()
                total += 1
        if total > 0:
            center_of_mass /= total
            steering = center_of_mass - self.get_center()
            if np.linalg.norm(steering) > 0.0:
                steering = steering / np.linalg.norm(steering) * self.maxForce
        return steering*0.5

    #Craig Reynolds' algorithm step-3 :: Separation. Link to the webpage on top
    def get_separation(self):
        """Steer away from neighbours that are too close (within half friendzone)."""
        steering = np.zeros(3)
        total = 0.
        for friend in self.friends:
            force = self.get_center() - friend.get_center()
            d = np.linalg.norm(force)
            if d > 0.0 and d < self.separation_radius:
                steering += force/d
                total += 1
        if total > 0:
            steering /= total
            if np.linalg.norm(steering) > 0.0:
                steering = steering / np.linalg.norm(steering) * self.maxForce
        return steering

    #repulsion towards obstacles
    def get_obstacle_fear(self):
        """Steer away from the four extreme points of each nearby obstacle."""
        mypos = self.get_center()
        steering = np.zeros(3)
        for obstacle in self.obstacles:
            d_top = np.linalg.norm(mypos - obstacle.get_top())
            d_bottom = np.linalg.norm(mypos - obstacle.get_bottom())
            d_right = np.linalg.norm(mypos - obstacle.get_right())
            d_left = np.linalg.norm(mypos - obstacle.get_left())
            fear_radius = self.friendzone
            #repels to all four sides
            if 0.0 < d_top < fear_radius:
                steering += (mypos - obstacle.get_top())/d_top
            if 0.0 < d_bottom < fear_radius:
                steering += (mypos - obstacle.get_bottom())/d_bottom
            if 0.0 < d_right < fear_radius:
                steering += (mypos - obstacle.get_right())/d_right
            if 0.0 < d_left < fear_radius:
                steering += (mypos - obstacle.get_left())/d_left
        if np.linalg.norm(steering) > 0.0:
            steering = steering / np.linalg.norm(steering) * self.maxForce
        return 2.0 * steering

    def update_position(self, dt):
        """Per-frame updater: sum all steering forces and integrate motion."""
        #adding up forces.
        force = np.zeros(3)
        if self.alignment:
            force += self.get_alignment()
        if self.cohesion:
            force += self.get_cohesion()
        if self.separation:
            force += self.get_separation()
        if not self.wrapX:
            force += self.get_horizontal_fear()
        if not self.wrapY:
            force += self.get_vertical_fear()
        force += self.get_obstacle_fear()
        #starting heading of the boid
        angle = np.arctan2(self.velocity[1], self.velocity[0])
        #updating the velocity
        self.velocity += force * dt
        #limiting the speed to maxSpeed
        speed = np.linalg.norm(self.velocity)
        if speed > self.maxSpeed:
            self.velocity *= self.maxSpeed / speed
        #noise is random deviations in the direction of the velocity.
        if self.noise and np.random.random() < self.noise_probability:
            ang = (np.random.random() - 0.5) * 2 * PI/12.
            rot = rotation_matrix(ang)
            self.velocity = np.dot(rot, self.velocity)
        #shifting the position.
        self.shift(self.velocity * dt)
        #getting the angle to which the heading must be rotated
        angle = np.arctan2(self.velocity[1], self.velocity[0]) - angle
        self.rotate(angle)
        #wrapping around if required
        if self.wrapX:
            self.wrapx()
        if self.wrapY:
            self.wrapy()
        #getting rid of the friends
        self.unfriend()
class DotBoid(Boid):
    """A Boid whose avatar is a simple dot instead of a triangle."""
    def get_body(self):
        return Dot(fill_opacity=1.0)
class Obstacle(VGroup):
    """A static circular or square obstacle that boids steer away from."""
    CONFIG = {
        "type" : "circ",
        "size" : 0.2,
        "color" : RED
    }
    def __init__(self, x=None, y=None, **kwargs):
        super().__init__(**kwargs)
        # place at the given (x, y) if both supplied, otherwise at the group center
        if x is not None and y is not None:
            self.pos = np.array([x, y, 0.])
        else:
            self.pos = self.get_center()
        self.add_body()

    def add_body(self):
        """Build the circle or rectangle avatar at self.pos."""
        if self.type == "circ":
            body = Circle(radius=self.size, color=self.color, fill_opacity=1.0)
        elif self.type == "rect":
            body = Rectangle(width=self.size, height=self.size, color=self.color, fill_opacity=1.0)
        body.move_to(self.pos)
        self.add(body)
        self.body = body

    def get_size(self):
        return self.size
class Flock(VGroup):
    """A collection of boids plus obstacles; rebuilds a quadtree each frame
    so every boid can cheaply find its neighbours."""
    CONFIG = {
        "num_boids" : 10,
        "boid_type" : Boid,
        "boid_config" : {
            "size" : 0.1,
            "boid_colors" : None,
            "maxSpeed" : 3.0,
            "maxForce" : 5.0,
            "friendzone" : 1.0,
            "alignment" : True,
            "cohesion" : True,
            "separation" : True,
            "noise" : True,
            "wrapX" : True,
            "wrapY" : False
        },
    }
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.add_boids()
        self.obstacles = VGroup()
        self.add_updater(lambda m, dt: m.update_boids(dt))

    def add_boids(self):
        """Create num_boids boids at random positions inside the frame."""
        boids = VGroup()
        #randomising boid's positions
        for _ in range(self.num_boids):
            x = random.uniform(-FRAME_X_RADIUS + MED_SMALL_BUFF, FRAME_X_RADIUS - MED_SMALL_BUFF)
            y = random.uniform(-FRAME_Y_RADIUS + MED_SMALL_BUFF, FRAME_Y_RADIUS - MED_SMALL_BUFF)
            boid = self.boid_type(**self.boid_config).move_to(np.array([x, y, 0]))
            boids.add(boid)
        #the flock object has a VGroup containing all boids, and each boid is a VGroup!!
        self.boids = boids
        self.add(self.boids)

    def add_obstacles(self, obstacles):
        """Register obstacles with the flock and with every individual boid."""
        self.obstacles = obstacles
        self.add_to_back(obstacles)
        for boid in self.boids:
            boid.add_obstacles(self.obstacles)

    def update_boids(self, dt):
        #creates a new quadtree each step
        self.create_quadtree(self.boids)
        #each boid just need a friendlist, and then it will do its own thing!
        for boid in self.boids:
            boid.get_friends(self.qtree)

    #making the quadtree
    def create_quadtree(self, flock):
        boundary = qt.Rect(0., 0., FRAME_WIDTH, FRAME_HEIGHT)
        qtree = qt.QuadTree(boundary)
        for boid in flock:
            x, y = boid.get_center()[:-1]
            p = qt.Point(x, y, payload=boid)
            qtree.insert(p)
        self.qtree = qtree
#general setup for every flocking scene. Inherit this scene and give new CONFIG to make required scenes
#used zoomed scene to show a zoomed in shot of flocking. Scene is enough otherwise
class GeneralFlockScene(ZoomedScene):
    """Base scene for all flocking simulations; subclasses override CONFIG."""
    CONFIG = {
        #the default value of random seed is zero. So if we do not change this, we will
        #get the same scene over and over again for every run! We need this for the explanation scenes, but not here!
        "random_seed" : None,
        "runtime" : 10,
        "obstacle_list" : [],
        "num_boids" : 10,
        "boid_colors" : BLUE_SHADES,
        "boid_type" : Boid,
        "boid_size" : 0.07,
        "maxSpeed" : 3.0,
        "maxForce" : 5.0,
        "friendzone" : 0.9,
        "alignment" : True,
        "cohesion" : True,
        "separation" : True,
        "noise" : True,
        "wrapX" : True,
        "wrapY" : False
    }
    def setup(self):
        # forward this scene's CONFIG into the Flock / Boid configuration
        self.flock_config = {
            "num_boids" : self.num_boids,
            "boid_type" : self.boid_type,
            "boid_config" : {
                "size" : self.boid_size,
                "boid_colors" : self.boid_colors,
                "maxSpeed" : self.maxSpeed,
                "maxForce" : self.maxForce,
                "friendzone" : self.friendzone,
                "alignment" : self.alignment,
                "cohesion" : self.cohesion,
                "separation" : self.separation,
                "noise" : self.noise,
                "wrapX" : self.wrapX,
                "wrapY" : self.wrapY
            }
        }
        self.create_flock()
        self.add_obstacles()
        self.add(self.flock)
        super().setup() #the zoomedscene has a setup which must be run!

    def create_flock(self):
        self.flock = Flock(**self.flock_config)

    def add_obstacles(self):
        self.flock.add_obstacles(VGroup(*self.obstacle_list))

    def construct(self):
        # updaters do all the work; just let the simulation run
        self.wait(self.runtime)
#################
# simulations #
#################
class Random_Walking_Boids(GeneralFlockScene):
    # All steering rules off: boids perform a pure noise-driven random walk.
    CONFIG = {
        "num_boids" : 100,
        "boid_colors" : BLUE_SHADES,
        "runtime" : 20,
        "alignment" : False,
        "cohesion" : False,
        "separation" : False,
        "noise" : True,
        "wrapX" : False
    }
class Only_Cohesion(GeneralFlockScene):
    # Demonstrates the cohesion rule in isolation (boids clump together).
    CONFIG = {
        "num_boids" : 150,
        "boid_colors" : YELLOW_SHADES,
        "runtime" : 20,
        "alignment" : False,
        "noise" : False,
        "separation" : False,
        "cohesion" : True,
        "wrapX" : False,
        "friendzone" : 2.0
    }
class Only_Separation(GeneralFlockScene):
    # Demonstrates the separation rule in isolation (boids spread apart).
    CONFIG = {
        "num_boids" : 150,
        "boid_colors" : RED_SHADES,
        "runtime" : 20,
        "alignment" : False,
        "noise" : False,
        "separation" : True,
        "cohesion" : False,
        "wrapX" : False,
        "friendzone" : 2.0
    }
class Only_Alignment(GeneralFlockScene):
    # Demonstrates the alignment rule in isolation (headings synchronise).
    CONFIG = {
        "num_boids" : 150,
        "boid_colors" : GREEN_SHADES,
        "runtime" : 20,
        "alignment" : True,
        "noise" : False,
        "separation" : False,
        "cohesion" : False,
        "wrapX" : False,
        "friendzone" : 1.0
    }
class Cohesion_and_Separation(GeneralFlockScene):
    # Cohesion plus separation: loose clusters at comfortable spacing.
    CONFIG = {
        "num_boids" : 150,
        "boid_colors" : PINK_SHADES,
        "runtime" : 20,
        "alignment" : False,
        "noise" : False,
        "separation" : True,
        "cohesion" : True,
        "wrapX" : False,
        "friendzone" : 1.0
    }
class Cohesion_and_Alignment(GeneralFlockScene):
    # Cohesion plus alignment: a small tightly moving group.
    CONFIG = {
        "num_boids" : 10,
        "boid_colors" : ORANGE_SHADES,
        "runtime" : 20,
        "alignment" : True,
        "noise" : False,
        "separation" : False,
        "cohesion" : True,
        "wrapX" : False,
        "friendzone" : 1.0
    }
class Separation_and_Alignment(GeneralFlockScene):
    # Separation plus alignment: parallel motion without clustering.
    CONFIG = {
        "num_boids" : 10,
        "boid_colors" : BLUE_SHADES,
        "runtime" : 20,
        "alignment" : True,
        "noise" : False,
        "separation" : True,
        "cohesion" : False,
        "wrapX" : False,
        "friendzone" : 1.0
    }
# A horizontal row of square obstacles across the middle of the frame,
# shared by the end-credits scenes below.
obs_size = 0.7
xs = [obs_size * i for i in range(-3, 4, 1)]
obstacle_list = [Obstacle(x, 0, type='rect', size=obs_size, color=BLACK) for x in xs]
class For_End_Credits_1(GeneralFlockScene):
    # Full rule set with obstacles; X-wrapping enabled.
    CONFIG = {
        "num_boids" : 300,
        "obstacle_list" : obstacle_list,
        "runtime" : 30,
        "alignment" : True,
        "noise" : True,
        "separation" : True,
        "cohesion" : True,
        "wrapX" : True,
        "boid_colors" : GRASSHOPPER_SHADES
    }
class For_End_Credits_2(GeneralFlockScene):
    # Same as For_End_Credits_1 but bounded on the left/right (no X-wrap).
    CONFIG = {
        "num_boids" : 300,
        "obstacle_list" : obstacle_list,
        "runtime" : 30,
        "alignment" : True,
        "noise" : True,
        "separation" : True,
        "cohesion" : True,
        "wrapX" : False,
        "boid_colors" : GRASSHOPPER_SHADES
    }
class Murmuration_1(GeneralFlockScene):
    # 150 boids, bounded frame (no X-wrap).
    CONFIG = {
        "num_boids" : 150,
        "runtime" : 30,
        "wrapX" : False,
        "boid_colors" : GRASSHOPPER_SHADES,
    }
class Murmuration_2(GeneralFlockScene):
    # 150 boids, wrapping horizontally.
    CONFIG = {
        "num_boids" : 150,
        "runtime" : 30,
        "wrapX" : True,
        "boid_colors" : GRASSHOPPER_SHADES,
    }
class Murmuration_3(GeneralFlockScene):
    # 200 boids, bounded frame (no X-wrap).
    CONFIG = {
        "num_boids" : 200,
        "runtime" : 30,
        "wrapX" : False,
        "boid_colors" : GRASSHOPPER_SHADES,
    }
class Murmuration_4(GeneralFlockScene):
    # 200 boids, wrapping horizontally.
    CONFIG = {
        "num_boids" : 200,
        "runtime" : 30,
        "wrapX" : True,
        "boid_colors" : GRASSHOPPER_SHADES,
    }
# class Murmuration_With_Zoom(GeneralFlockScene):
# CONFIG = {
# "runtime" : 20,
# "num_boids" : 100,
# "wrapX" : False
# }
# def construct(self):
# self.camera_frame.save_state()
# random_boid = self.flock.boids[0]
# # self.camera_frame.move_to(random_boid)
# self.camera_frame.add_updater(lambda m: m.move_to(random_boid))
# self.add(self.camera_frame)
# self.camera_frame.set_width(self.friendzone * 5)
# self.wait(8)
# self.camera_frame.clear_updaters()
# self.play(
# self.camera_frame.restore
# )
# self.wait(self.runtime)
############################################
# Setup for Non-simulation animations #
############################################
#this is a dummy boid object for non simualation animation.
# The methods are gimmicks for making the animating process easier!!
class DummyBoid(VGroup):
    """A non-simulating boid used only in the explanation scenes.

    It has a velocity and a perception radius, but no updater — the
    animations manipulate it explicitly.
    """
    CONFIG = {
        "color" : "#2095f2",
        "size" : 0.2,
        "perception" : 1.4
    }
    def __init__(self, x, y, **kwargs):
        super().__init__(**kwargs)
        # random planar velocity in [-1, 1) per component
        self.velocity = (np.random.random(3) - 0.5) * 2.0
        self.velocity[2] = 0.0
        self.add_body(x, y)

    def add_body(self, x, y):
        """Place the avatar at (x, y) and orient it along the velocity."""
        body = self.get_body()
        body.move_to(np.array([x, y, 0.0]))
        body.set_height(self.size)
        angle = np.arctan2(self.velocity[1], self.velocity[0])
        body.rotate(angle)
        self.add(body)
        self.body = body
        self.friends = []

    def get_body(self):
        c = self.get_center()
        v1 = c + RIGHT
        v2 = c + (UL + LEFT)
        v3 = c + (DL + LEFT)
        return Polygon(v1, v2, v3, color=self.color, fill_opacity=1.0)

    def change_velocity(self, vel):
        """Set a new velocity and rotate the avatar to the new heading."""
        angle = np.arctan2(self.velocity[1], self.velocity[0])
        self.velocity = np.array(vel)
        angle = np.arctan2(self.velocity[1], self.velocity[0]) - angle
        self.rotate(angle)

    def get_friends(self, flock):
        """Collect every boid in `flock` within this boid's perception radius."""
        for b in flock:
            d = np.linalg.norm(self.get_center() - b.get_center())
            if d < self.perception:
                self.friends.append(b)
class Boid_Rules_Template(ZoomedScene):
    """Shared choreography for the rule-explanation scenes (alignment,
    cohesion, separation walk-throughs)."""
    #here, we are not changing the random seed! so we'll get the same configuration of boids for all teaching scenes!
    #create a flock of dummy boids
    def create_dummy_flock(self):
        flock = VGroup()
        for _ in range(40):
            x = np.random.random() * (FRAME_WIDTH) - FRAME_X_RADIUS - MED_SMALL_BUFF
            y = np.random.random() * (FRAME_HEIGHT) - FRAME_Y_RADIUS - MED_SMALL_BUFF
            b = DummyBoid(x, y, color="#2095f2")
            flock.add(b)
        #maybe inefficient. Basically making sure we have a region with 4 boids
        b = DummyBoid(5.0, -0.2, color="#2095f2")
        flock.add(b)
        #chosen_one is the boid on which the explanations are performed
        self.chosen_one = b
        b2 = DummyBoid(5.6, 0.4, color="#2095f2")
        b2.change_velocity([0.4, 0.8, 0.0])
        flock.add(b2)
        b3 = DummyBoid(4.2, -0.8, color="#2095f2")
        b3.change_velocity([-0.4, 1.2, 0.0])
        flock.add(b3)
        self.flock = flock

    #general introduction scene
    def introduce_scene(self, animate=True):
        """Show the flock, highlight the chosen boid, draw its perception
        circle and zoom in, fading every boid outside the circle."""
        self.create_dummy_flock()
        r = self.chosen_one.perception
        self.perception_circ = Circle(radius=r, fill_opacity=0.1).move_to(self.chosen_one).set_color(GREY)
        self.chosen_one.get_friends(self.flock)
        not_friends = []
        for bd in self.flock:
            if bd not in self.chosen_one.friends and bd != self.chosen_one:
                not_friends.append(bd)
        if animate:
            self.play(AnimationGroup(*[GrowFromCenter(b) for b in self.flock], lag_ratio=0.0))
            self.wait(0.5)
            self.play(ApplyMethod(self.chosen_one.set_color, "#e81d62"))
            self.play(GrowFromCenter(self.perception_circ))
            self.wait()
            self.play(
                self.camera_frame.set_width, self.perception_circ.get_width()*2.0,
                self.camera_frame.move_to, self.chosen_one,
                AnimationGroup(*[FadeOut(boid) for boid in not_friends])
            )
            self.flock.remove(*not_friends)
            self.wait()
        else:
            # same end state, but set up instantly (no animations)
            self.add(self.flock)
            self.chosen_one.set_color("#e81d62")
            self.camera_frame.set_width(self.perception_circ.get_width()*2.0)
            self.camera_frame.move_to(self.chosen_one)
            self.flock.remove(*not_friends)
            self.add(self.perception_circ)

    #another common method used in all teaching scenes
    def fade_friends(self, animate=True):
        """Fade out the neighbours; returns the AnimationGroup (or None)."""
        if animate:
            animations = []
            for bd in self.chosen_one.friends:
                if bd != self.chosen_one:
                    animations.append(FadeOut(bd))
            return (AnimationGroup(*animations, lag_ratio=0.0))
        else:
            for bd in self.chosen_one.friends:
                if bd != self.chosen_one:
                    self.remove(bd)
            return None

    #Just in case if we need the neighbors back!
    def bring_back_friends(self, animate=True):
        """Fade the neighbours back in; returns the AnimationGroup (or None)."""
        if animate:
            animations = []
            for bd in self.chosen_one.friends:
                if bd != self.chosen_one:
                    animations.append(FadeIn(bd))
            return (AnimationGroup(*animations, lag_ratio=0.0))
        else:
            for bd in self.chosen_one.friends:
                if bd != self.chosen_one:
                    self.add(bd)
            return None

    #general method to show the vector addition animation. returns the sum vector
    #zoom is just a fudge that we manually have to put in so that the summed arrow won't be outside the canvas
    def vector_addition(self, vectors, arrows, offset, zoom=2.0):
        animations = []
        s = vectors[0] + offset
        for i in range(1, len(arrows)):
            # move each arrow tip-to-tail onto the running sum
            animations.append(ApplyMethod(arrows[i].move_to, s + vectors[i]/2.0))
            s += vectors[i]
        self.camera_frame.save_state()
        self.play(
            AnimationGroup(*animations, lag_ratio=0.2),
            self.camera_frame.set_width,
            self.perception_circ.get_width()*zoom
        )
        self.wait()
        return s

    #the scaling of force and all that is shown here. returns the final force vector
    def teach_force(self, total_force, force_arrow, arrows, offset):
        self.play(
            AnimationGroup(
                ##Fade Friends
                self.fade_friends(),
                FadeOut(self.perception_circ),
                *[ApplyMethod(arr.set_opacity, 0.2) for arr in arrows]),
            GrowArrow(force_arrow)
        )
        force_arrow.generate_target()
        start = offset
        # average the summed force over the number of neighbours
        end = offset + (total_force-offset)/(len(self.chosen_one.friends)-1)
        force_arrow.target.put_start_and_end_on(start=start, end=end).add_tip(tip_length=0.1)
        copy = force_arrow.copy().set_opacity(0.2)
        self.add(copy)
        self.wait(2)
        self.play(
            MoveToTarget(force_arrow),
            self.camera_frame.restore
        )
        self.wait(0.5)
        self.play(FadeOut(arrows), FadeOut(copy))
        return (total_force-offset)/(len(self.chosen_one.friends)-1)

    #shows the boid aligning to the desired velocity
    def align_to_desired(self, force, tot_force_arrow, offset, col1=RED, col2=TEAL):
        b = self.chosen_one
        b_arrow = Line(offset, offset+b.velocity).add_tip(tip_length=0.1).set_color(col1)
        self.play(
            GrowArrow(b_arrow)
        )
        # desired velocity = current velocity + steering force
        desired_v = b.velocity + force
        desired_arrow = Line(offset, offset + desired_v).add_tip(tip_length=0.1).set_color(col2)
        self.play(
            ApplyMethod(tot_force_arrow.move_to, offset + b.velocity + force/2.0),
        )
        self.play(
            GrowArrow(desired_arrow)
        )
        self.wait()
        self.play(
            self.bring_back_friends(),
            FadeIn(self.perception_circ)
        )
        self.wait()
        # rotate the boid from its current heading to the desired heading
        angle = np.arctan2(desired_v[1], desired_v[0]) - np.arctan2(b.velocity[1], b.velocity[0])
        self.play(
            ApplyMethod(b.rotate, angle),
            ReplacementTransform(b_arrow, desired_arrow),
            FadeOut(tot_force_arrow),
            run_time=1.5
        )
##################################
# Non-Simulation Animations #
##################################
class WhatIsPosition(VectorScene):
CONFIG = {
"number_plane_config" :
{
"background_line_style": {
"stroke_color": BLUE_D,
"stroke_width": 2,
"stroke_opacity": 0.3,
}
}}
def construct(self):
boid = DummyBoid(2, 1, size=0.3).set_color("#e81d62")
self.play(GrowFromCenter(boid))
self.wait()
self.add_plane(animate=True, run_time=3.0)
self.bring_to_front(boid)
o = Circle(color=BLUE, fill_opacity=1.0, radius=0.1)
self.play(GrowFromCenter(o))
x, y = boid.get_center()[:-1]
xpos = Line(ORIGIN, x * RIGHT, color=GREEN, stroke_width=8.0)
# xbrace = Brace(xpos, DOWN)
xloc_text = TexMobject("x").next_to(xpos, DOWN).set_color(GREEN)
ypos = Line(x * RIGHT, x * RIGHT + y * UP, color=ORANGE, stroke_width=8.0)
# ybrace = Brace(ypos, RIGHT)
yloc_text = TexMobject("y").next_to(ypos, RIGHT).set_color(ORANGE)
Mxy = Matrix([["x"], ["y"]])
Mxy[0][0].set_color(GREEN)
Mxy[0][1].set_color(ORANGE)
self.wait()
self.play(ShowCreation(xpos), ShowCreation(ypos))
self.play(Write(xloc_text), Write(yloc_text))
self.bring_to_front(boid)
self.wait()
#this is another way to flash the origin point, using a fading circle
# o_circ = Circle(color=YELLOW, radius=o.get_width()/2.0, fill_opacity=0.0)
# def surround_effect(mob, dt):
# op = mob.get_stroke_opacity()
# if op > 0.0:
# mob.set_stroke(opacity=op - 0.09)
# mob.set_width(mob.get_width() + 0.15)
# else:
# self.remove(mob)
# o_circ.add_updater(surround_effect)
# self.add(o_circ)
self.play(Flash(ORIGIN))
self.wait(2)
pos_vector = Line(ORIGIN, boid.get_center()).add_tip(tip_length=0.2).set_color(YELLOW)
Mxy.next_to(boid, UR)
getting_rid = VGroup(xloc_text.copy(), yloc_text.copy())
self.play(
ReplacementTransform(getting_rid, Mxy)
)
x, y = boid.get_center()[:-1]
x_val = DecimalNumber(x, num_decimal_places=1, include_sign=True).next_to(xpos, DOWN).set_color(GREEN)
y_val = DecimalNumber(y, num_decimal_places=1, include_sign=True).next_to(ypos, RIGHT).set_color(ORANGE)
self.play(
ReplacementTransform(xloc_text, x_val),
ReplacementTransform(yloc_text, y_val)
)
M = DecimalMatrix([[x], [y]],element_to_mobject_config= {"include_sign":True})
M[0][0].set_color(GREEN)
M[0][1].set_color(ORANGE)
M.move_to(Mxy)
# axes = self.add_axes()
# self.play(
# ReplacementTransform(self.plane, axes)
# )
self.wait(0.5)
self.play(ReplacementTransform(Mxy, M))
def M_updater(mat):
x, y = boid.get_center()[:-1]
mat[0][0].set_value(x)
mat[0][1].set_value(y)
M.add_updater(M_updater)
self.add(M)
def x_val_updater(mob):
x, y = boid.get_center()[:-1]
mob.set_value(x)
mob.next_to(xpos, -np.sign(y)*UP)
def y_val_updater(mob):
x, y = boid.get_center()[:-1]
mob.set_value(y)
mob.next_to(ypos, -np.sign(x)*LEFT)
def xpos_updater(mob):
x = boid.get_center()[0]
mob.put_start_and_end_on(ORIGIN, np.array([x, 0, 0]))
def ypos_updater(mob):
x, y = boid.get_center()[:-1]
start = ORIGIN + np.array([x, 0, 0])
end = start + np.array([0, y, 0])
mob.put_start_and_end_on(start, end)
x_val.add_updater(x_val_updater)
y_val.add_updater(y_val_updater)
xpos.add_updater(xpos_updater)
ypos.add_updater(ypos_updater)
self.add(x_val, y_val, xpos, ypos)
self.bring_to_front(boid)
vec_text = TexMobject("\\va{r}", "=")
self.play(
ApplyMethod(M.shift, RIGHT*2)
)
vec_text.next_to(M, LEFT)
self.play(
Write(vec_text)
)
self.wait(1.5)
# self.add_vector(pos_vector, run_time=1.5)
self.play(
GrowArrow(pos_vector)
)
self.bring_to_front(boid)
self.wait(2)
pos_vector.add_updater(lambda m: m.put_start_and_end_on(ORIGIN + 0.00001, boid.get_center()))
self.play(ApplyMethod(boid.shift, RIGHT))
self.wait(1.5)
self.play(ApplyMethod(boid.shift, DOWN * 2.5))
self.wait(1.5)
self.play(ApplyMethod(boid.shift, LEFT * 5.5))
self.wait(1.5)
self.play(ApplyMethod(boid.shift, UP * 2.5))
self.wait()
class VectorAddition(VectorScene):
CONFIG = {
"number_plane_config" :
{
"background_line_style": {
"stroke_color": BLUE_D,
"stroke_width": 2,
"stroke_opacity": 0.3,
}
}}
def construct(self):
self.add_plane(animate=True, run_time=3.5)
vector_a = np.array([1.5, 1.0, 0.0])
vector_b = np.array([2.0, - 0.5, 0.0])
arrow_a = self.get_vector(vector_a, color=RED)
arrow_b = self.get_vector(vector_b, color=GREEN)
label_a = TexMobject("\\va{a}").set_color(RED)
label_b = TexMobject("\\va{b}").set_color(GREEN)
# self.add(arrow_a, arrow_b)
self.play(GrowArrow(arrow_a))
self.play(GrowArrow(arrow_b))
ball = Circle(radius=0.4, fill_opacity=1.0)
ball.set_color([YELLOW, WHITE])
ball.set_sheen_direction(UL)
self.wait()
self.play(FadeIn(ball))
#wiggly effect
# self.play(ApplyMethod(arrow_a.rotate, PI/12, rate_func=wiggle))
# self.wait(0.5)
# self.play(ApplyMethod(arrow_b.rotate, -PI/12, rate_func=wiggle))
# self.wait(0.5)
self.play(WiggleOutThenIn(arrow_a))
self.play(WiggleOutThenIn(arrow_b))
self.wait(0.5)
vector_ab = vector_a + vector_b
arrow_ab = self.get_vector(vector_ab)
arrow_ab[0].set_color([RED, GREEN])
arrow_ab[1].set_color(GREEN)
arrow_ab.set_sheen_direction(RIGHT)
label_ab = TexMobject("\\va{a}"," +", " \\va{b}")
label_ab[0].set_color(RED)
label_ab[2].set_color(GREEN)
label_ab.set_sheen_direction(RIGHT)
self.bring_to_back(arrow_ab)
# self.add(arrow_ab)
self.play(FadeInFrom(arrow_ab, -vector_ab))
ball.add_updater(lambda m, dt : m.move_to(m.get_center() + 2.4 * vector_ab * dt))
self.add(ball)
self.wait()
self.play(FadeOut(arrow_ab))
ball.clear_updaters()
self.remove(ball)
self.wait()
arrow_a.save_state()
self.play(ApplyMethod(arrow_a.move_to, vector_b + vector_a/2.0))
self.play(GrowArrow(arrow_ab))
label_a.move_to(arrow_a.get_center() + DOWN*0.5).scale(0.8)
label_b.move_to(arrow_b.get_center() + DOWN*0.5).scale(0.8)
label_ab.move_to(arrow_ab.get_center() + UP*0.5).scale(0.8)
self.play(Write(label_a), Write(label_b), Write(label_ab))
self.wait()
self.play(FadeOut(label_a), FadeOut(label_b), FadeOut(label_ab))
self.wait(0.5)
self.play(ApplyMethod(arrow_ab.set_opacity, 0.2))
self.play(ApplyMethod(arrow_a.restore))
self.play(ApplyMethod(arrow_b.move_to, vector_a + vector_b/2.0))
self.play(ApplyMethod(arrow_ab.set_opacity, 1.0))
label_a.move_to(arrow_a.get_center() + 0.25*UL)
label_b.move_to(arrow_b.get_center() + 0.5*UP)
label_ab.move_to(arrow_ab.get_center() + 0.5*DOWN)
self.play(Write(label_a), Write(label_b), Write(label_ab))
self.wait()
class Adding_N_Vectors(VectorScene):
CONFIG = {
"number_plane_config" :
{
"background_line_style": {
"stroke_color": BLUE_D,
"stroke_width": 2,
"stroke_opacity": 0.3,
}
}}
def construct(self):
self.add_plane()
vectors = [np.array([1.5, -1.1, 0]),
np.array([0.8, 1.7, 0]),
np.array([-0.9, 1.9, 0]),
np.array([-0.3, -1.2, 0]),
np.array([-2.1, 0.2, 0.0])]
arrows = VGroup()
for vec in vectors:
arr = self.get_vector(vec)
# hue = random.randint(0, 255)
col = interpolate_color(GREEN, RED, arr.get_length()/2.1)
arr.set_color(col)
arrows.add(arr)
self.play(AnimationGroup(*[GrowArrow(arr) for arr in arrows], lag_ratio=0.1))
self.wait()
animations = []
s = vectors[0]
for i in range(1, len(arrows)):
animations.append(ApplyMethod(arrows[i].move_to, s + vectors[i]/2.0))
s += vectors[i]
self.play(AnimationGroup(*animations, lag_ratio=0.2))
self.wait(0.5)
s_arrow = self.get_vector(s)
s_arrow.set_color(YELLOW)
self.play(AnimationGroup(*[ApplyMethod(arr.set_opacity, 0.5) for arr in arrows]))
self.add_vector(s_arrow)
self.wait()
class Scaling_Vector(VectorScene):
CONFIG = {
"number_plane_config" :
{
"background_line_style": {
"stroke_color": BLUE_D,
"stroke_width": 2,
"stroke_opacity": 0.3,
}
}}
def construct(self):
self.add_plane()
vector = np.array([2.0, 1.5, 0.0])
length = np.linalg.norm(vector)
arrow = self.add_vector(vector, color=RED)
label = TexMobject("1.0 \\times \\va{a}").move_to(arrow.get_center() + DR*0.5)
self.play(Write(label))
self.wait()
#I admit! Should make it more general!
def label_updater(mob):
mob.generate_target()
c = arrow.get_center()
if c[0] < 0.0 and c[1] < 0.0:
mult = "-" + str(round(arrow.get_length() / length * 100) / 100)
else:
mult = str(round(arrow.get_length() / length * 100) / 100)
mob.target = TexMobject( mult + "\\times \\va{a}").move_to(arrow.get_center() + DR*0.5)
mob.become(mob.target)
label.add_updater(label_updater)
self.add(label)
arrow_copy = arrow.copy()
self.play(
ApplyMethod(arrow.put_start_and_end_on, ORIGIN + 0.00001, 2*vector),
run_time=2.0
)
self.wait()
self.play(FadeIn(arrow_copy))
self.play(
ApplyMethod(arrow_copy.shift, UL/2)
)
self.wait()
self.play(
ApplyMethod(arrow.put_start_and_end_on, ORIGIN + 0.00001, 0.5*vector),
run_time=2.0
)
self.wait()
self.play(
ApplyMethod(arrow.put_start_and_end_on, ORIGIN + 0.00001, -1.0*vector),
run_time=2.0
)
self.wait()
class WhatIsCohesion(Boid_Rules_Template):
def construct(self):
#intro
self.introduce_scene()
#aliasing
b = self.chosen_one
c = b.get_center()
#getting relative positions of other boids and corresponding arrows for animation.
vectors = []
arrows = VGroup()
for bd in b.friends:
if bd != b:
vec = bd.get_center() - c
vectors.append(vec)
arrows.add(Line(c, bd.get_center()).add_tip(tip_length=0.1).set_color("#ef6191"))
#animate those arrows
self.play(AnimationGroup(*[GrowArrow(arr) for arr in arrows], lag_ratio=0.1))
# self.bring_to_front(b)
self.wait(2)
s = self.vector_addition(vectors, arrows, c)
s_arrow = Line(c, s).add_tip(tip_length=0.1).set_color("#90be6d")
force = self.teach_force(s, s_arrow, arrows, c)
self.wait()
s_arrow_copy = s_arrow.copy()
self.add(s_arrow_copy)
text = TextMobject(" =", " Cohesion Force").scale(0.5)
text[1].set_color("#90be6d")
self.play(ApplyMethod(s_arrow.shift, UL))
text.next_to(s_arrow, RIGHT)
self.play(Write(text))
self.wait()
self.play(
Uncreate(text),
Uncreate(s_arrow),
)
self.wait()
self.align_to_desired(force, s_arrow_copy, c, col1="#f8b5cb" , col2="#048ba8")
self.wait()
class WhatIsSeparation(Boid_Rules_Template):
def construct(self):
#intro
self.introduce_scene()
self.wait()
#aliasing
b = self.chosen_one
c = b.get_center()
#getting realtive postions and corresponding arrows
vectors = []
arrows = VGroup()
for bd in b.friends:
if bd != b:
vec = bd.get_center() - c
vectors.append(vec)
arrows.add(Line(c, bd.get_center()).add_tip(tip_length=0.1).set_color("#ef6191"))
#steer_vec is to store all repulsive steering forces
steer_vec = []
#explaining the repulsion force for the first arrow
arr = arrows[0].copy()
self.play(GrowArrow(arr))
self.wait(2)
d = arr.get_length()
steering = vectors[0]/d**2
steer_arr = Line(c, c + steering).add_tip(tip_length=0.1).set_color("#ef6191")
self.play(Transform(arr, steer_arr))
self.wait(2)
steering *= -1
steer_arr = Line(c, c + steering).add_tip(tip_length=0.1).set_color("#ba174e")
self.play(Transform(arr, steer_arr))
self.wait(2)
self.play(FadeOut(arr))
b.save_state()
self.play(
AnimationGroup(*[FadeOut(bd) for bd in b.friends[1:] if bd != b], lag_ratio=0.0)
)
self.wait()
self.why_divide_by_d()
self.why_divide_by_d(UL + LEFT*0.2)
self.play(
b.restore
)
self.play(
AnimationGroup(*[FadeIn(bd) for bd in b.friends[1:] if bd != b ], lag_ratio=0.0)
)
#repeating the above procedure for all arrows, but quickly
for i in range(0, len(vectors)):
arr = arrows[i]
self.play(GrowArrow(arr), run_time=0.4)
d = arr.get_length()
steering = -vectors[i]/d**2
steer_arr = Line(c, c + steering).add_tip(tip_length=0.1).set_color("#ba174e")
self.play(Transform(arr, steer_arr), run_time=0.4)
steer_vec.append(steering)
arrows[i].become(steer_arr)
self.wait()
s = self.vector_addition(steer_vec, arrows, c)
s_arrow = Line(c, s).add_tip(tip_length=0.1).set_color("#f8961e")
force = self.teach_force(s, s_arrow, arrows, c)
self.wait()
s_arrow_copy = s_arrow.copy()
self.add(s_arrow_copy)
text = TextMobject(" =", " Separation Force").scale(0.5)
text[1].set_color("#f8961e")
self.play(ApplyMethod(s_arrow.shift, UL))
text.next_to(s_arrow, RIGHT)
self.play(Write(text))
self.wait()
self.play(
Uncreate(text),
Uncreate(s_arrow),
)
self.wait()
self.align_to_desired(force, s_arrow_copy, c, col1="#f8b5cb" , col2="#048ba8")
self.wait()
#this is used twice, to show the effect of dividing by d
def why_divide_by_d(self, new_pos=DR*0.5):
b = self.chosen_one
b1 = b.friends[0]
self.play(ApplyMethod(b.shift, new_pos))
displacement = b1.get_center() - b.get_center()
temp_arr = Line(b.get_center(), b.get_center() + displacement).add_tip(tip_length=0.1).set_color("#ef6191")
self.wait()
self.play(GrowArrow(temp_arr))
self.wait()
direction_for_brace = np.dot(rotation_matrix(PI/2), displacement)
brace = Brace(temp_arr, direction_for_brace)
d = np.linalg.norm(displacement)
dist_text = TexMobject("d = ", str(round(d*100)/100.)).move_to(brace.get_center() + 0.5*direction_for_brace).scale(0.4)
angle = np.arctan2(displacement[1], displacement[0]) + PI
dist_text.rotate(angle)
self.play(
GrowFromCenter(brace),
Write(dist_text)
)
self.wait(3)
self.play(
Uncreate(dist_text),
ShrinkToCenter(brace)
)
temp_arr.generate_target()
temp_arr.target = Line(b.get_center(), b.get_center() + displacement/d**2).add_tip(tip_length=0.1).set_color("#ef6191")
self.wait()
self.play(MoveToTarget(temp_arr))
temp_arr.generate_target()
temp_arr.target = Line(b.get_center(), b.get_center() - displacement/d**2).add_tip(tip_length=0.1).set_color("#ba174e")
self.wait()
self.play(MoveToTarget(temp_arr))
self.wait(2)
self.play(FadeOut(temp_arr))
class WhatIsAlignment(Boid_Rules_Template):
def construct(self):
#intro
self.introduce_scene(animate=False)
#aliasing
b = self.chosen_one
c = b.get_center()
self.wait()
#getting velocities of neighbors
vel_vecs = []
vel_arrows = VGroup()
#velocity of the chosen boid
b_arrow = Line(c, c+b.velocity).add_tip(tip_length=0.1).set_color("#f8b5cb")
for bd in b.friends:
if bd != b:
vel = bd.velocity/2
vel_vecs.append(vel)
cen = bd.get_center()
arr = Line(cen, cen+vel).add_tip(tip_length=0.1).set_color("#b2dafb")
vel_arrows.add(arr)
self.play(GrowArrow(b_arrow))
self.wait()
self.play(
FadeOut(b_arrow),
AnimationGroup(*[GrowArrow(arr) for arr in vel_arrows])
)
#move the first velocity arrow to the chosen one's center
self.wait()
self.play(ApplyMethod(vel_arrows[0].move_to, c + vel_vecs[0]/2))
s = self.vector_addition(vel_vecs, vel_arrows, c, zoom=2.5)
s_arrow = Line(c, s).add_tip(tip_length=0.1).set_color(GOLD)
force = self.teach_force(s, s_arrow, vel_arrows, c)
self.wait(2)
self.play(FadeIn(b_arrow))
self.wait()
#explain what is the alignment force
steering = force - b.velocity
steer_arrow = Line(c + b.velocity, c + b.velocity + steering).add_tip(tip_length=0.1).set_color("#8338ec")
self.play(GrowArrow(steer_arrow))
self.wait()
self.play(
FadeOut(b_arrow),
FadeOut(s_arrow),
ApplyMethod(steer_arrow.move_to, c+steering/2.0)
)
self.wait()
steer_arrow_copy = steer_arrow.copy()
self.add(steer_arrow_copy)
text = TextMobject(" =", " Alignment Force").scale(0.5)
text[1].set_color("#8338ec")
self.play(ApplyMethod(steer_arrow.shift, UL))
text.next_to(steer_arrow, RIGHT)
self.play(Write(text))
self.wait()
self.play(
Uncreate(text),
Uncreate(steer_arrow),
)
self.wait()
self.align_to_desired(steering, steer_arrow_copy, c, col1="#f8b5cb" , col2="#048ba8")
self.wait()
class TotalForces(Boid_Rules_Template):
def construct(self):
self.introduce_scene(False)
b = self.chosen_one
self.play(
ApplyMethod(self.flock.shift, RIGHT*1.2),
ApplyMethod(self.perception_circ.shift, RIGHT*1.2)
)
c = b.get_center()
c_force = 0.0
s_force = 0.0
a_force = 0.0
l = len(b.friends) - 1
for bd in b.friends:
if bd != b:
a_force += bd.velocity/2
a_force /= l
a_force -= b.velocity
for bd in b.friends:
if bd != b:
c_force += bd.get_center() - c
c_force /= l
for bd in b.friends:
if bd != b:
f = bd.get_center() - c
s_force += -f/np.linalg.norm(f)**2
s_force /= l
left = self.camera_frame.get_left() + np.array([MED_LARGE_BUFF, 0.0, 0.0])
top = UP/2.0
#confusing!! TODO: refactor, if i care at all!!
c_arrow = Line(c, c + c_force).add_tip(tip_length=0.1).set_color("#90be6d")
s_arrow = Line(c, c + s_force).add_tip(tip_length=0.1).set_color("#f8961e")
a_arrow = Line(c, c + a_force).add_tip(tip_length=0.1).set_color("#8338ec")
c_text = TextMobject("Cohesion").set_color("#90be6d").scale(0.4)
s_text = TextMobject("Separation").set_color("#f8961e").scale(0.4)
a_text = TextMobject("Alignment").set_color("#8338ec").scale(0.4)
tot_text = TextMobject("Total Force").set_color("#048ba8").scale(0.4)
plus1 = TexMobject("+").scale(0.4)
plus2 = plus1.copy()
equal = TexMobject("=").scale(0.4)
equalc = equal.copy()
equals = equal.copy()
equala = equal.copy()
equalb = equal.copy()
b_vel_text = TextMobject("Current Velocity").set_color(RED).scale(0.4)
plus3 = plus1.copy()
desired_text = TextMobject("Desired Velocity").set_color(GOLD).scale(0.4)
# self.add(c_arrow, s_arrow, a_arrow, c_text, s_text, a_text)
self.play(
GrowArrow(c_arrow)
)
c_text.move_to(left + c_text.get_width()/2.0 + top)
c_arrow_copy = c_arrow.copy()
self.play(
Write(c_text),
Write(equalc.next_to(c_text, RIGHT)),
ApplyMethod(c_arrow_copy.next_to, equalc, RIGHT)
)
self.wait()
equals.next_to(equalc, DOWN)
s_text.next_to(equals, LEFT)
s_arrow_copy = s_arrow.copy()
self.play(
GrowArrow(s_arrow)
)
self.play(
Write(s_text),
Write(equals),
ApplyMethod(s_arrow_copy.next_to, equals, RIGHT)
)
self.wait()
equala.next_to(equals, DOWN)
a_text.next_to(equala, LEFT)
a_arrow_copy = a_arrow.copy()
self.play(
GrowArrow(a_arrow)
)
self.play(
Write(a_text),
Write(equala),
ApplyMethod(a_arrow_copy.next_to, equala, RIGHT)
)
self.wait()
self.play(
FadeOut(a_text),
FadeOut(s_text),
FadeOut(c_text),
FadeOut(equalc),
FadeOut(equals),
FadeOut(equala),
ApplyMethod(c_arrow_copy.move_to, left),
Write(plus1.move_to(left + 0.25*RIGHT)),
ApplyMethod(s_arrow_copy.move_to, left+0.5*RIGHT),
Write(plus2.move_to(left + 0.75*RIGHT)),
ApplyMethod(a_arrow_copy.move_to, left+RIGHT)
)
self.wait()
# self.play(FadeOut(c_text), FadeOut(s_text), FadeOut(a_text))
s = a_force
self.play(ApplyMethod(s_arrow.move_to, c + s + s_force/2.0), run_time=0.2)
s += s_force
self.play(ApplyMethod(c_arrow.move_to, c + s + c_force/2.0), run_time=0.2)
s += c_force
total_arrow = Line(c, c + s).add_tip(tip_length=0.1).set_color("#048ba8")
self.wait(0.3)
self.play(
GrowArrow(total_arrow),
ApplyMethod(c_arrow.set_opacity, 0.2),
ApplyMethod(s_arrow.set_opacity, 0.2),
ApplyMethod(a_arrow.set_opacity, 0.2),
run_time=0.5
)
total_arrow_copy = total_arrow.copy()
self.add(total_arrow_copy)
self.play(
Write(equal.move_to(a_arrow_copy.get_center() + 0.3*RIGHT)),
ApplyMethod(total_arrow_copy.move_to, equal.get_center() + 0.4*RIGHT)
)
self.wait()
getting_rid = VGroup(*[
s_arrow_copy,
a_arrow_copy,
c_arrow_copy,
plus1,
plus2
])
tot_text.next_to(equal, LEFT)
self.play(
FadeOut(c_arrow),
FadeOut(s_arrow),
FadeOut(a_arrow),
ReplacementTransform(getting_rid, tot_text)
)
self.wait()
b_arrow = Line(c, c + b.velocity).add_tip(tip_length=0.1).set_color(RED)
b_arrow_copy = b_arrow.copy()
self.play(GrowArrow(b_arrow))
self.add(b_arrow_copy)
equalb.next_to(equal, UP)
b_vel_text.next_to(equalb, LEFT)
self.play(
Write(b_vel_text),
Write(equalb),
ApplyMethod(b_arrow_copy.next_to, total_arrow_copy, UP)
)
self.wait()
desired_v = b.velocity + s
self.play(ApplyMethod(total_arrow.move_to, c + b.velocity + s/2.0))
self.play(
FadeOut(b_vel_text),
FadeOut(tot_text),
FadeOut(equalb),
FadeOut(equal),
ApplyMethod(b_arrow_copy.move_to, equal.get_center() + LEFT*1.5),
Write(plus3.move_to(equal.get_center() + LEFT)),
ApplyMethod(total_arrow_copy.move_to, equal.get_center() + LEFT*0.5)
)
desired_arrow = Line(c, c + desired_v).add_tip(tip_length=0.1).set_color(GOLD)
desired_arrow_copy = desired_arrow.copy()
self.play(GrowArrow(desired_arrow))
self.add(desired_arrow_copy)
self.play(
Write(equal),
ApplyMethod(desired_arrow_copy.next_to, equal, RIGHT)
)
getting_rid = VGroup(*[
plus3,
b_arrow_copy,
total_arrow_copy
])
desired_text.next_to(equal, LEFT)
self.play(
ReplacementTransform(getting_rid, desired_text)
)
self.wait(2)
angle = np.arctan2(desired_v[1], desired_v[0]) - np.arctan2(b.velocity[1], b.velocity[0])
self.play(
ApplyMethod(b.rotate, angle),
ReplacementTransform(b_arrow, desired_arrow),
FadeOut(total_arrow),
run_time=1.5
)
self.wait()
SCENES_IN_ORDER = [
Random_Walking_Boids,
WhatIsPosition,
VectorAddition,
Adding_N_Vectors,
Scaling_Vector,
WhatIsCohesion,
Only_Cohesion,
WhatIsSeparation,
Only_Separation,
Cohesion_and_Separation,
WhatIsAlignment,
TotalForces,
Murmuration_1,
Murmuration_2,
Murmuration_3,
Murmuration_4,
For_End_Credits_1,
For_End_Credits_2]
| [
"QuadTree.QuadTree",
"QuadTree.Point",
"QuadTree.Rect"
] | [((14068, 14112), 'QuadTree.Rect', 'qt.Rect', (['(0.0)', '(0.0)', 'FRAME_WIDTH', 'FRAME_HEIGHT'], {}), '(0.0, 0.0, FRAME_WIDTH, FRAME_HEIGHT)\n', (14075, 14112), True, 'import QuadTree as qt\n'), ((14128, 14149), 'QuadTree.QuadTree', 'qt.QuadTree', (['boundary'], {}), '(boundary)\n', (14139, 14149), True, 'import QuadTree as qt\n'), ((14238, 14266), 'QuadTree.Point', 'qt.Point', (['x', 'y'], {'payload': 'boid'}), '(x, y, payload=boid)\n', (14246, 14266), True, 'import QuadTree as qt\n')] |
from flasgger import Swagger
from flask import Flask
from flask_cors import CORS
from flask_login import LoginManager
from flask_migrate import Migrate
from flask_pymongo import PyMongo
from sitepipes.config import init_config, init_logger
config = init_config()
log = init_logger('log')
cors = CORS()
db = PyMongo()
login_manager = LoginManager()
migrate = Migrate()
swagger = Swagger()
def create_app():
""" Initialize the core application """
app = Flask(__name__, instance_relative_config=False, static_url_path='')
app.config.from_object('sitepipes.config.AppConfig')
cors.init_app(app, supports_credentials=True)
db.init_app(app)
login_manager.init_app(app)
migrate.init_app(app)
swagger.init_app(app)
with app.app_context():
# MUST import libraries here to avoid circular dependencies
from sitepipes.routes import models
app.register_blueprint(models.models)
return app
def create_site():
""" Initialize the core site for an autodetected OS """
# MUST import libraries here to avoid circular dependencies
from sitepipes.network.abstract import Site
site = Site()
return site | [
"flask_login.LoginManager",
"sitepipes.config.init_config",
"flask_cors.CORS",
"sitepipes.config.init_logger",
"flasgger.Swagger",
"flask.Flask",
"flask_pymongo.PyMongo",
"flask_migrate.Migrate",
"sitepipes.network.abstract.Site"
] | [((251, 264), 'sitepipes.config.init_config', 'init_config', ([], {}), '()\n', (262, 264), False, 'from sitepipes.config import init_config, init_logger\n'), ((271, 289), 'sitepipes.config.init_logger', 'init_logger', (['"""log"""'], {}), "('log')\n", (282, 289), False, 'from sitepipes.config import init_config, init_logger\n'), ((298, 304), 'flask_cors.CORS', 'CORS', ([], {}), '()\n', (302, 304), False, 'from flask_cors import CORS\n'), ((310, 319), 'flask_pymongo.PyMongo', 'PyMongo', ([], {}), '()\n', (317, 319), False, 'from flask_pymongo import PyMongo\n'), ((336, 350), 'flask_login.LoginManager', 'LoginManager', ([], {}), '()\n', (348, 350), False, 'from flask_login import LoginManager\n'), ((361, 370), 'flask_migrate.Migrate', 'Migrate', ([], {}), '()\n', (368, 370), False, 'from flask_migrate import Migrate\n'), ((381, 390), 'flasgger.Swagger', 'Swagger', ([], {}), '()\n', (388, 390), False, 'from flasgger import Swagger\n'), ((465, 532), 'flask.Flask', 'Flask', (['__name__'], {'instance_relative_config': '(False)', 'static_url_path': '""""""'}), "(__name__, instance_relative_config=False, static_url_path='')\n", (470, 532), False, 'from flask import Flask\n'), ((1156, 1162), 'sitepipes.network.abstract.Site', 'Site', ([], {}), '()\n', (1160, 1162), False, 'from sitepipes.network.abstract import Site\n')] |
import json
import os
import shutil
import subprocess
import sys
import tempfile
import unittest
sys.path.append("../main")
from algorithms import *
port = 2222
def sshd(**kwargs):
dirname = tempfile.mkdtemp()
confname = dirname + "/sshd_config"
logname = dirname + "/sshd.log"
with open(confname, "w") as conf:
print("LogLevel", "DEBUG3", file=conf)
print("ListenAddress", "127.0.0.1:2222", file=conf)
for key, value in kwargs.items():
if type(value) == str:
print(key, value.replace("${confdir}", dirname), file=conf)
else:
for val in value:
print(key, val.replace("${confdir}", dirname), file=conf)
for f in os.listdir("./config"):
shutil.copy("./config/" + f, dirname)
if f.startswith("ssh_host_") and f.endswith("_key") and "HostKey" not in kwargs:
print("HostKey", dirname + "/" + f, file=conf)
return subprocess.Popen([
"/usr/sbin/sshd",
"-f", confname,
"-E", logname,
"-D",
])
def scan(*args):
scanner = subprocess.Popen(
[ "../main/scanner.py", "--json" ] + [ "--" + arg for arg in args ] + [ "127.0.0.1:2222" ],
stdout=subprocess.PIPE
)
( stdout, stderr ) = scanner.communicate()
if scanner.returncode == 0:
return json.loads(stdout.decode())
else:
return None
def what(result):
return [ issue["what"] for issue in result["issues"] ]
class TestScanner(unittest.TestCase):
def tearDown(self):
self.sshd.terminate()
def test_djb(self):
self.sshd = sshd(
KexAlgorithms=KEX_ECDH_CURVE25519_SHA256,
HostKey="${confdir}/ssh_host_ed25519_key",
Ciphers="<EMAIL>"
)
results = scan("algorithms")
self.assertEqual(len(results), 1)
for r in results:
self.assertEqual(r["host"], "127.0.0.1")
self.assertEqual(r["port"], 2222)
self.assertEqual(r["kex_init"]["kex_algorithms"], [ KEX_ECDH_CURVE25519_SHA256 ])
self.assertEqual(r["kex_init"]["server_host_key_algorithms"], [ "ssh-ed25519" ])
self.assertEqual(r["kex_init"]["encryption_algorithms_c2s"], [ "<EMAIL>" ])
self.assertEqual(r["kex_init"]["encryption_algorithms_s2c"], [ "<EMAIL>" ])
def test_nsa(self):
self.sshd = sshd(
KexAlgorithms=",".join([
KEX_ECDH_NISTP521_SHA512,
KEX_ECDH_NISTP384_SHA384,
KEX_ECDH_NISTP256_SHA256,
]),
HostKey=[
"${confdir}/ssh_host_ecdsa521_key",
"${confdir}/ssh_host_ecdsa384_key",
"${confdir}/ssh_host_ecdsa256_key",
],
Ciphers="<EMAIL>,<EMAIL>"
)
results = scan("algorithms", "details")
self.assertEqual(len(results), 1)
for r in results:
self.assertEqual(r["host"], "127.0.0.1")
self.assertEqual(r["port"], 2222)
self.assertEqual(
r["kex_init"]["kex_algorithms"],
[ KEX_ECDH_NISTP521_SHA512, KEX_ECDH_NISTP384_SHA384, KEX_ECDH_NISTP256_SHA256 ]
)
self.assertEqual(
r["kex_init"]["server_host_key_algorithms"],
[SIGN_ECDSA_NISTP521_SHA512,SIGN_ECDSA_NISTP384_SHA384,SIGN_ECDSA_NISTP256_SHA256]
)
self.assertEqual(
r["kex_init"]["encryption_algorithms_c2s"],
[ "<EMAIL>", "<EMAIL>" ]
)
self.assertEqual(
r["kex_init"]["encryption_algorithms_s2c"],
[ "<EMAIL>", "<EMAIL>" ]
)
self.assertTrue(any([ x == "Key exchange: unsafe elliptic curve" for x in what(r) ]))
self.assertTrue(any([ x == "Signature: requires per-signature entropy" for x in what(r) ]))
self.assertTrue(any([ x == "Signature: unsafe elliptic curve" for x in what(r) ]))
def test_old(self):
self.sshd = sshd(
KexAlgorithms=",".join([ KEX_DH_GROUP1_SHA1, KEX_DH_GROUP14_SHA1 ]),
HostKey=[ "${confdir}/ssh_host_rsa1024_key", "${confdir}/ssh_host_dsa_key" ],
HostKeyAlgorithms="ssh-rsa,ssh-dss",
Ciphers="3des-cbc,arcfour",
MACs="hmac-md5"
)
results = scan("algorithms", "instructions")
self.assertEqual(len(results), 1)
for r in results:
self.assertEqual(r["host"], "127.0.0.1")
self.assertEqual(r["port"], 2222)
self.assertEqual(
r["kex_init"]["kex_algorithms"],
[ KEX_DH_GROUP1_SHA1, KEX_DH_GROUP14_SHA1 ]
)
self.assertEqual(
r["kex_init"]["server_host_key_algorithms"],
[ SIGN_RSA_SHA1, SIGN_RSA_SHA512, SIGN_RSA_SHA256, SIGN_DSA ]
)
self.assertEqual(r["kex_init"]["encryption_algorithms_c2s"], [ "3des-cbc","arcfour" ])
self.assertEqual(r["kex_init"]["encryption_algorithms_s2c"], [ "3des-cbc","arcfour" ])
self.assertEqual(r["kex_init"]["mac_algorithms_c2s"], [ "hmac-md5" ])
self.assertEqual(r["kex_init"]["mac_algorithms_s2c"], [ "hmac-md5" ])
self.assertTrue(any([ x == "Key exchange: weak hash" for x in what(r) ]))
self.assertTrue(any([ x == "Key exchange: small DH group" for x in what(r) ]))
self.assertTrue(any([ x == "Signature: small key size" for x in what(r) ]))
self.assertTrue(any([ x == "Signature: requires per-signature entropy" for x in what(r) ]))
self.assertTrue(any([ x == "Cipher: small block size" for x in what(r) ]))
self.assertTrue(any([ x == "Authenticated encryption: CBC-and-MAC" for x in what(r) ]))
def test_classic(self):
self.sshd = sshd(
KexAlgorithms=",".join([ KEX_DH_GEX_SHA256 ]),
HostKey=[ "${confdir}/ssh_host_rsa2048_key" ],
Ciphers="aes256-ctr,aes192-ctr,aes128-ctr",
MACs="hmac-sha2-512-etm@openssh.com,hmac-sha2-256-etm@openssh.com,hmac-ripemd160-etm@openssh.com,umac-128-etm@openssh.com"
)
results = scan("algorithms", "details", "fast", "instructions")
self.assertEqual(len(results), 1)
for r in results:
self.assertEqual(r["host"], "127.0.0.1")
self.assertEqual(r["port"], 2222)
self.assertEqual(r["kex_init"]["kex_algorithms"], [ KEX_DH_GEX_SHA256 ])
self.assertEqual(
r["kex_init"]["server_host_key_algorithms"],
[ SIGN_RSA_SHA1, SIGN_RSA_SHA512, SIGN_RSA_SHA256 ]
)
self.assertEqual(
r["kex_init"]["encryption_algorithms_c2s"],
[ "aes256-ctr", "aes192-ctr", "aes128-ctr" ]
)
self.assertEqual(
r["kex_init"]["encryption_algorithms_s2c"],
[ "aes256-ctr", "aes192-ctr", "aes128-ctr" ]
)
self.assertEqual(
r["kex_init"]["mac_algorithms_c2s"],
[
"hmac-sha2-512-etm@openssh.com",
"hmac-sha2-256-etm@openssh.com",
"hmac-ripemd160-etm@openssh.com",
"umac-128-et<EMAIL>",
]
)
self.assertEqual(
r["kex_init"]["mac_algorithms_s2c"],
[
"hmac-sha2-512-etm@openssh.com",
"hmac-sha2-256-etm@openssh.com",
"hmac-ripemd160-etm@<EMAIL>",
"umac-<EMAIL>-<EMAIL>",
]
)
| [
"os.listdir",
"subprocess.Popen",
"tempfile.mkdtemp",
"shutil.copy",
"sys.path.append"
] | [((98, 124), 'sys.path.append', 'sys.path.append', (['"""../main"""'], {}), "('../main')\n", (113, 124), False, 'import sys\n'), ((200, 218), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (216, 218), False, 'import tempfile\n'), ((987, 1060), 'subprocess.Popen', 'subprocess.Popen', (["['/usr/sbin/sshd', '-f', confname, '-E', logname, '-D']"], {}), "(['/usr/sbin/sshd', '-f', confname, '-E', logname, '-D'])\n", (1003, 1060), False, 'import subprocess\n'), ((1132, 1264), 'subprocess.Popen', 'subprocess.Popen', (["(['../main/scanner.py', '--json'] + [('--' + arg) for arg in args] + [\n '127.0.0.1:2222'])"], {'stdout': 'subprocess.PIPE'}), "(['../main/scanner.py', '--json'] + [('--' + arg) for arg in\n args] + ['127.0.0.1:2222'], stdout=subprocess.PIPE)\n", (1148, 1264), False, 'import subprocess\n'), ((744, 766), 'os.listdir', 'os.listdir', (['"""./config"""'], {}), "('./config')\n", (754, 766), False, 'import os\n'), ((780, 817), 'shutil.copy', 'shutil.copy', (["('./config/' + f)", 'dirname'], {}), "('./config/' + f, dirname)\n", (791, 817), False, 'import shutil\n')] |
from datetime import datetime, timedelta
from dateutil import parser as dateutil_parser
from .messages import LEMBRETE
def prettify_date(dt_object):
"""
Pretty-format a datetime object
Args:
dt_object(datetime.datetime): A datetime object.
Returns:
str: A pretty-formatted date.
"""
return dt_object.strftime("%A %Hh%M, %d %b %Y")
def parse_date(date_args):
"""
Parse argument into a datetime object
Args:
date_args: A list or datetime object to be parsed.
Returns:
tuple: A tuple of a string-date and a datetime object.
"""
if isinstance(date_args, (list, tuple)):
date_args = " ".join(date_args)
elif isinstance(date_args, datetime):
date_args = str(date_args)
datetime_obj = dateutil_parser.parse(date_args)
parsed_date = prettify_date(datetime_obj)
return parsed_date, datetime_obj
def get_meeting_range(date_args):
"""
Parse argument into datetimes and datetime range.
Args:
date_args: A list or datetime object to be parsed.
Returns:
tuple: A tuple of a string-date, a datetime object and
a list of tuple representing the next meetings.
"""
# Get message meeting and following ones
parsed_date, datetime_obj = parse_date(date_args)
next_meetings, interval = [
(parsed_date, datetime_obj),
], 7
for _ in range(3):
next_meetings.append(parse_date(datetime_obj + timedelta(days=interval)))
interval += interval
return parsed_date, datetime_obj, next_meetings
def add_timedeltas(dt_object):
"""
Localize and buffer meeting datetime object
Adds a timedelta of +3 to localize to GMT-3 and
a timedelta of -30min for the reminder.
Args:
dt_object(datetime.datetime): A datetime object.
Returns:
datetime.datetime: A datetime object localized and buffered.
"""
return dt_object + timedelta(hours=3) - timedelta(minutes=30)
def generate_reminders(next_meetings):
"""
Generate list of datetimes for the alarms
Args:
next_meetings(list): The list made by get_meeting_range.
Returns:
list: A list of datetime objects corresponding to alarm times.
"""
alarm_times = [add_timedeltas(meeting[1]) for meeting in next_meetings]
return alarm_times
def alarm(context):
"""
Handle sending the alarm message
"""
job = context.job
context.bot.send_message(job.context, text=LEMBRETE) | [
"dateutil.parser.parse",
"datetime.timedelta"
] | [((794, 826), 'dateutil.parser.parse', 'dateutil_parser.parse', (['date_args'], {}), '(date_args)\n', (815, 826), True, 'from dateutil import parser as dateutil_parser\n'), ((1974, 1995), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(30)'}), '(minutes=30)\n', (1983, 1995), False, 'from datetime import datetime, timedelta\n'), ((1953, 1971), 'datetime.timedelta', 'timedelta', ([], {'hours': '(3)'}), '(hours=3)\n', (1962, 1971), False, 'from datetime import datetime, timedelta\n'), ((1475, 1499), 'datetime.timedelta', 'timedelta', ([], {'days': 'interval'}), '(days=interval)\n', (1484, 1499), False, 'from datetime import datetime, timedelta\n')] |
from .action import Action
from .exceptions import ShowMenuException
from game_state import GamePhase
from loader_functions.data_loaders import save_game
class ShowMenuAction(Action):
def _execute(self):
# Go back to main menu
# TODO When going back to main game it's always player's turn, maybe
# consider removing it from the required arguments of `save_game`?
save_game(self.player, self.game_map, self.message_log,
GamePhase.PLAYERS_TURN)
# Raise an exception which will cause the game to exit the main loop
raise ShowMenuException()
class ShowCharacterScreenAction(Action):
def _execute(self):
# Return outcome
outcome = {
'next_state': GamePhase.CHARACTER_SCREEN,
}
return outcome
class SelectInventoryItemAction(Action):
def __init__(self, item_letter):
self.item_letter = item_letter
def _execute(self):
try:
item_index = self.player.inventory.item_letters.index(
self.item_letter)
item = self.player.inventory.items[item_index]
# print("Selected {}!".format(item))
next_phase = GamePhase.INVENTORY_ITEM_MENU
except ValueError as e:
# print("Value error!")
item = None
next_phase = GamePhase.INVENTORY_MENU
except IndexError as e:
# print("Index error!")
item = None
next_phase = GamePhase.INVENTORY_MENU
except Exception as e:
print("Uncaught Exception!")
raise e
# Return outcome
outcome = {
'selected_inventory_item': item,
'next_state': next_phase
}
return outcome
class ShowInventoryAction(Action):
def _execute(self):
# Return outcome
outcome = {
'selected_inventory_item': None,
'next_state': GamePhase.INVENTORY_MENU,
}
return outcome
class BackToInventoryMenuAction(Action):
def _execute(self):
"""
Simply reset the state to show inventory menu
"""
# Return outcome
outcome = {
'next_state': GamePhase.INVENTORY_MENU,
'redraw_terrain': True,
}
return outcome
class BackToGameAction(Action):
def _execute(self):
"""
Simply reset the state to player's turn
"""
# Return outcome
outcome = {
'next_state': GamePhase.PLAYERS_TURN,
'redraw_terrain': True,
}
return outcome
| [
"loader_functions.data_loaders.save_game"
] | [((405, 484), 'loader_functions.data_loaders.save_game', 'save_game', (['self.player', 'self.game_map', 'self.message_log', 'GamePhase.PLAYERS_TURN'], {}), '(self.player, self.game_map, self.message_log, GamePhase.PLAYERS_TURN)\n', (414, 484), False, 'from loader_functions.data_loaders import save_game\n')] |
#1.Extract postcodes from csv file
#2.For every postcode
import urllib.request
import re
import csv
array = """DG1 1DF
DG1 1DG
DG1 1DJ
DG1 1DL
DG1 1DR
DG1 1DT
DG1 1DU
DG1 1DX
DG1 1EA
DG1 1EB
DG1 1ED
DG1 1EF
DG1 1EG
DG1 1EH
DG1 1EJ
DG1 1EL
DG1 1ET
DG1 1EW
DG1 1EX
DG1 1FA
DG1 1GL
DG1 1GN
DG1 1GP
DG1 1GQ
DG1 1GZ
DG1 1AA
DG1 1AG
DG1 1BA
DG1 1BD
DG1 1BF
DG1 1BG
DG1 1BH
DG1 1BJ
DG1 1BL
DG1 1BS
DG1 1BT
DG1 1BU
DG1 1BW
DG1 1BX
DG1 1BY
DG1 1BZ
DG1 1DA
DG1 1DB
DG1 1DD
DG1 1DE
DG1 1HA
DG1 1HB
DG1 1HD
DG1 1HE
DG1 1HF
DG1 1HH
DG1 1HJ
DG1 1HL
DG1 1HN
DG1 1HP
DG1 1HR
DG1 1HS
DG1 1HU
DG1 1HW
DG1 1HX
DG1 1HY
DG1 1HZ
DG1 1JA
DG1 1JB
DG1 1JD
DG1 1JE
DG1 1JF
DG1 1JG
DG1 1JL
DG1 1JN
DG1 1JP
DG1 1JQ
DG1 1JR
DG1 1JU
DG1 1JW
DG1 1JX
DG1 1JY
DG1 1JZ
DG1 1LG
DG1 1LN
DG1 1LP
DG1 1LR
DG1 1LS
DG1 1LT
DG1 1LU
DG1 1LW
DG1 1LX
DG1 1LZ
DG1 1NA
DG1 1NE
DG1 1NF
DG1 1NH
DG1 1NJ
DG1 1NL
DG1 1NN
DG1 1NP
DG10 9NT
DG10 9NU
DG10 9NW
DG10 9NX
DG10 9PA
DG10 9PB
DG10 9PD
DG10 9PE
DG10 9PF
DG10 9PG
DG10 9PH
DG10 9PJ
DG10 9PL
DG10 9PN
DG10 9PP
DG10 9PQ
DG10 9PR
DG10 9PS
DG10 9PT
DG10 9PU
DG10 9PW
DG10 9PX
DG10 9PY
DG10 9PZ
DG10 9QA
DG10 9QB
DG10 9QF
DG10 9QG
DG10 9QH
DG10 9QJ
DG1 1NQ
DG1 1NR
DG1 1NS
DG1 1NT
DG1 1NW
DG1 1NY
DG1 1NZ
DG1 1PA
DG1 1PB
DG1 1PD
DG1 1PE
DG1 1PF
DG1 1PG
DG1 1PJ
DG1 1PL
DG1 1PP
DG1 1PW
DG1 1PX
DG1 1PZ
DG1 1QA
DG1 1QB
DG1 1QD
DG1 1QE
DG1 1QF
DG1 1QG
DG1 1QH
DG1 1QJ
DG1 1QL
DG1 1QN
DG1 1QP
DG1 1QQ
DG1 1QR
DG1 1QS
DG1 1QU
DG1 1QW
DG1 1QX
DG1 1QY
DG1 1QZ
DG1 1RA
DG1 1RB
DG1 1RD
DG1 1RE
DG1 1RF
DG1 1RG
DG1 1RH
DG1 1RJ
DG1 1RL
DG1 1RN
DG1 1RP
DG1 1RQ
DG1 1RR
DG1 1RS
DG1 1RT
DG1 1RU
DG1 1RX
DG1 1RY
DG1 1RZ
DG1 1SA
DG1 1SD
DG1 1SE
DG1 1SF
DG1 1SG
DG1 1SH
DG1 1SJ
DG1 1SL
DG1 1SP
DG1 1SQ
DG1 1SR
DG1 1SS
DG1 1ST
DG1 1SU
DG1 1SW
DG1 1SX
DG1 1SY
DG1 1SZ
DG1 1TA
DG1 1TB
DG1 1TD
DG1 1TE
DG1 1TF
DG1 1TG
DG1 1TH
DG1 1TN
DG1 1TP
DG1 1TQ
DG1 1TR
DG1 1TS
DG1 1TT
DG1 1TU
DG1 1TW
DG1 1TX
DG1 1TY
DG1 1TZ
DG1 1UA
DG1 1UB
DG1 1UD
DG1 1UF
DG1 1UG
DG1 1UH
DG1 1UJ
DG1 1UL
DG1 1UN
DG1 1UP
DG1 1UQ
DG1 1UR
DG1 1US
DG1 1UT
DG1 1UU
DG1 1UW
DG1 1UX
DG1 1UY
DG1 1UZ
DG1 1WD
DG1 1XA
DG1 1XB
DG1 1XD
DG1 1XE
DG1 1XF
DG1 1XG
DG1 1XH
DG1 1XJ
DG1 1XL
DG1 1XN
DG1 1XP
DG1 1XQ
DG1 1XR
DG1 1XS
DG1 1XW
DG1 2AA
DG1 2AB
DG1 2AD
DG1 2AG
DG1 2AH
DG1 2AL
DG1 2AN
DG1 2AP
DG1 2AT
DG1 2AW
DG1 2AX
DG1 2BA
DG1 2BD
DG1 2BG
DG1 2BH
DG1 2BJ
DG1 2BN
DG1 2BQ
DG1 2BS
DG1 2BT
DG1 2BU
DG1 2BY
DG1 2DA
DG1 2DB
DG1 2DD
DG1 2DE
DG1 2DF
DG1 2DL
DG1 2DN
DG1 2DP
DG1 2DR
DG1 2DS
DG1 2DT
DG1 2DU
DG1 2DX
DG1 2DY
DG1 2DZ
DG1 2EB
DG1 2ED
DG1 2EE
DG1 2EF
DG1 2EJ
DG1 2EL
DG1 2EN
DG1 2EP
DG1 2EQ
DG1 2ER
DG1 2ET
DG1 2EU
DG1 2EW
DG1 2EX
DG1 2EY
DG1 2EZ
DG1 2GB
DG1 2HA
DG1 2HB
DG1 2HD
DG1 2HE
DG1 2HF
DG1 2HG
DG1 2HH
DG1 2HJ
DG1 2HL
DG1 2HN
DG1 2HP
DG1 2HQ
DG1 2HR
DG1 2HS
DG1 2HT
DG1 2HU
DG1 2JA
DG1 2JB
DG1 2JD
DG1 2JE
DG1 2JF
DG1 2JG
DG1 2JH
DG1 2JJ
DG1 2JL
DG1 2JN
DG1 2JP
DG1 2JQ
DG1 2JR
DG1 2JS
DG1 2JT
DG1 2JU
DG1 2JW
DG1 2JX
DG1 2JY
DG1 2JZ
DG1 2LA
DG1 2LB
DG1 2LE
DG1 2LF
DG1 2LH
DG1 2LJ
DG1 2LL
DG1 2LP
DG1 2LQ
DG1 2LR
DG1 2LS
DG1 2LT
DG1 2LU
DG1 2LX
DG1 2LZ
DG1 2NJ
DG1 2NN
DG1 2NP
DG1 2NS
DG1 2NT
DG1 2NU
DG1 2NX
DG1 2NZ
DG1 2PA
DG1 2PB
DG1 2PE
DG1 2PF
DG1 2PG
DG1 2PH
DG1 2PJ
DG1 2PL
DG1 2PN
DG1 2PP
DG1 2PQ
DG1 2PR
DG1 2PS
DG1 2PW
DG1 2PX
DG1 2PY
DG1 2PZ
DG1 2QB
DG1 2QD
DG1 2QE
DG1 2QF
DG1 2QG
DG1 2QH
DG1 2QL
DG1 2QN
DG1 2QP
DG1 2QQ
DG1 2QR
DG1 2QS
DG1 2QT
DG1 2RA
DG1 2RB
DG1 2RE
DG1 2RF
DG1 2RL
DG1 2RN
DG1 2RP
DG1 2RQ
DG1 2RR
DG1 2RS
DG1 2RT
DG1 2RU
DG1 2RW
DG1 2RX
DG1 2RY
DG1 2RZ
DG1 2SA
DG1 2SD
DG1 2SG
DG1 2SH
DG1 2YA
DG1 3AB
DG1 3AD
DG1 3AE
DG1 3AG
DG1 3AH
DG1 3AJ
DG1 3AL
DG1 3AN
DG1 3AP
DG1 3AQ
DG1 3AR
DG1 3AS
DG1 3AT
DG1 3AU
DG1 3AW
DG1 3AX
DG1 3AY
DG1 3AZ
DG1 3BA
DG1 3BB
DG1 3BD
DG1 3BE
DG1 3BH
DG1 3BJ
DG1 3BL
DG1 3BN
DG1 3BP
DG1 3BQ
DG1 3BW
DG1 3BX
DG1 3BY
DG1 3BZ
DG1 3DA
DG1 3DB
DG1 3DD
DG1 3DF
DG1 3DG
DG1 3DJ
DG1 3DL
DG1 3DN
DG1 3DP
DG1 3DQ
DG1 3DR
DG1 3DS
DG1 3DT
DG1 3DU
DG1 3DW
DG1 3DY
DG1 3EB
DG1 3ED
DG1 3EE
DG1 3EF
DG1 3EG
DG1 3EJ
DG1 3EN
DG1 3EP
DG1 3EQ
DG1 3ES
DG1 3ET
DG1 3EU
DG1 3EW
DG1 3EX
DG1 3EY
DG1 3EZ
DG1 3FB
DG1 3FD
DG1 3FE
DG1 3FF
DG1 3FG
DG1 3FL
DG1 3FN
DG1 3FP
DG1 3FQ
DG1 3FS
DG1 3FT
DG1 3FW
DG1 3FX
DG1 3FY
DG1 3FZ
DG1 3GA
DG1 3GB
DG1 3GD
DG1 3GE
DG1 3GG
DG1 3GR
DG1 3GS
DG1 3GT
DG1 3GU
DG1 3HA
DG1 3HB
DG1 3HE
DG1 3HF
DG1 3HG
DG1 3HH
DG1 3HJ
DG1 3HN
DG1 3HP
DG1 3HQ
DG1 3HR
DG1 3HS
DG1 3HT
DG1 3HU
DG1 3HW
DG1 3HX
DG1 3HZ
DG1 3JA
DG1 3JB
DG1 3JD
DG1 3JE
DG1 3JF
DG1 3JG
DG1 3JH
DG1 3JJ
DG1 3JN
DG1 3JP
DG1 3JQ
DG1 3JR
DG1 3JS
DG1 3JU
DG1 3JX
DG1 3JY
DG1 3JZ
DG1 3LB
DG1 3LD
DG1 3LG
DG1 3LH
DG1 3LJ
DG1 3LL
DG1 3LN
DG1 3LP
DG1 3LQ
DG1 3LR
DG1 3LS
DG1 3LT
DG1 3LU
DG1 3LW
DG1 3LX
DG1 3LY
DG1 3LZ
DG1 3NA
DG1 3NB
DG1 3ND
DG1 3NE
DG1 3NF
DG1 3NG
DG1 3NH
DG1 3NJ
DG1 3NL
DG1 3NN
DG1 3NP
DG1 3NQ
DG1 3NR
DG1 3NS
DG1 3NT
DG1 3NU
DG1 3NW
DG1 3NX
DG1 3NY
DG1 3NZ
DG1 3PA
DG1 3PB
DG1 3PD
DG1 3PE
DG1 3PF
DG1 3PG
DG1 3PH
DG1 3PJ
DG1 3PL
DG1 3PN
DG1 3PP
DG1 3PQ
DG1 3PR
DG1 3PS
DG1 3PT
DG1 3PU
DG1 3PW
DG1 3PX
DG1 3PY
DG1 3QA
DG1 3QB
DG1 3QD
DG1 3QE
DG1 3QF
DG1 3QG
DG1 3QH
DG1 3QJ
DG1 3QL
DG1 3QN
DG1 3QP
DG1 3QR
DG1 3QS
DG1 3QT
DG1 3QU
DG1 3QW
DG1 3QX
DG1 3QY
DG1 3RA
DG1 3RB
DG1 3RD
DG1 3RE
DG1 3RF
DG1 3RG
DG1 3RH
DG1 3RJ
DG1 3RL
DG1 3RN
DG1 3RP
DG1 3RQ
DG1 3RR
DG1 3RS
DG1 3RT
DG1 3RU
DG1 3RW
DG1 3RX
DG1 3RY
DG1 3RZ
DG1 3SA
DG1 3SB
DG1 3SD
DG1 3SE
DG1 3SF
DG1 3SG
DG1 3SJ
DG1 3SL
DG1 3SN
DG1 3SP
DG1 3SQ
DG1 3SR
DG1 3SS
DG1 3ST
DG1 3SU
DG1 3SW
DG1 3SY
DG1 3SZ
DG1 3TA
DG1 3TE
DG1 3TF
DG1 3TG
DG1 3TH
DG1 3TJ
DG1 3TL
DG1 3UQ
DG1 3UT
DG1 3UU
DG1 3UX
DG1 3UY
DG1 3YH
DG1 4AA
DG1 4AB
DG1 4AD
DG1 4AE
DG1 4AF
DG1 4AG
DG1 4AH
DG1 4AJ
DG1 4AL
DG1 4AN
DG1 4AP
DG1 4AQ
DG1 4AR
DG1 4AS
DG1 4AT
DG1 4AW
DG1 4AX
DG1 4AY
DG1 4AZ
DG1 4BA
DG1 4BB
DG1 4BD
DG1 4BE
DG1 4BG
DG1 4BH
DG1 4BJ
DG1 4BL
DG1 4BN
DG1 4BP
DG1 4BQ
DG1 4BS
DG1 4BT
DG1 4BU
DG1 4BW
DG1 4BX
DG1 4BY
DG1 4BZ
DG1 4DA
DG1 4DB
DG1 4DD
DG1 4DE
DG1 4DF
DG1 4DG
DG1 4DH
DG1 4DJ
DG1 4DL
DG1 4DN
DG1 4DP
DG1 4DQ
DG1 4DR
DG1 4DS
DG1 4DT
DG1 4DU
DG1 4DW
DG1 4DX
DG1 4DY
DG1 4DZ
DG1 4EA
DG1 4EB
DG1 4ED
DG1 4EE
DG1 4EF
DG1 4EG
DG1 4EH
DG1 4EJ
DG1 4EN
DG1 4EP
DG1 4EQ
DG1 4ER
DG1 4ES
DG1 4ET
DG1 4EU
DG1 4EW
DG1 4EX
DG1 4EY
DG1 4EZ
DG1 4FD
DG1 4FF
DG1 4GA
DG1 4GB
DG1 4GD
DG1 4GE
DG1 4GG
DG1 4GW
DG1 4GX
DG1 4GY
DG1 4GZ
DG1 4HA
DG1 4HB
DG1 4HD
DG1 4HE
DG1 4HF
DG1 4HG
DG1 4HH
DG1 4HJ
DG1 4HL
DG1 4HN
DG1 4HP
DG1 4HQ
DG1 4HR
DG1 4HS
DG1 4HT
DG1 4HU
DG1 4HW
DG1 4HX
DG1 4HY
DG1 4HZ
DG1 4JA
DG1 4JB
DG1 4JE
DG1 4JF
DG1 4JG
DG1 4JH
DG1 4JJ
DG1 4JL
DG1 4JN
DG1 4JP
DG1 4JQ
DG1 4JR
DG1 4JS
DG1 4JT
DG1 4JU
DG1 4JW
DG1 4JX
DG1 4XN
DG1 4XP
DG1 4XQ
DG1 4XR
DG1 4XS
DG1 4XT
DG1 4XU
DG1 4XW
DG1 4XX
DG1 4XY
DG1 4XZ
DG1 4YA
DG1 4YB
DG1 4YD
DG1 4YE
DG1 4YG
DG1 4YH
DG1 4YJ
DG1 4JY
DG1 4JZ
DG1 4LA
DG1 4LB
DG1 4LD
DG1 4LE
DG1 4LF
DG1 4LG
DG1 4LH
DG1 4LJ
DG1 4LL
DG1 4LN
DG1 4LP
DG1 4LQ
DG1 4LR
DG1 4LS
DG1 4LT
DG1 4LU
DG1 4LW
DG1 4LX
DG1 4LY
DG1 4LZ
DG1 4NA
DG1 4NB
DG1 4ND
DG1 4NE
DG1 4NF
DG1 4NG
DG1 4NH
DG1 4NJ
DG1 4NL
DG1 4NN
DG1 4NP
DG1 4NQ
DG1 4NR
DG1 4NS
DG1 4NT
DG1 4NU
DG1 4NW
DG1 4NX
DG1 4NY
DG1 4NZ
DG1 4PA
DG1 4PB
DG1 4PD
DG1 4PE
DG1 4PF
DG1 4PG
DG1 4PH
DG1 4PJ
DG1 4PL
DG1 4PN
DG1 4PP
DG1 4PQ
DG1 4PR
DG1 4PS
DG1 4PT
DG1 4PU
DG1 4PW
DG1 4PX
DG1 4PY
DG1 4PZ
DG1 4QA
DG1 4QB
DG1 4QD
DG1 4QE
DG1 4QF
DG1 4QG
DG1 4QH
DG1 4QJ
DG1 4QL
DG1 4QN
DG1 4QP
DG1 4QQ
DG1 4QR
DG1 4QS
DG1 4QT
DG1 4QU
DG1 4QW
DG1 4QX
DG1 4QY
DG1 4QZ
DG1 4RA
DG1 4RB
DG1 4RD
DG1 4RE
DG1 4RF
DG1 4RG
DG1 4RH
DG1 4RJ
DG1 4RL
DG1 4RN
DG1 4RP
DG1 4RQ
DG1 4RR
DG1 4RS
DG1 4RT
DG1 4RU
DG1 4RW
DG1 4RX
DG1 4RY
DG1 4RZ
DG1 4SA
DG1 4SB
DG1 4SD
DG1 4SE
DG1 4SF
DG1 4SG
DG1 4SH
DG1 4SJ
DG1 4SL
DG1 4SN
DG1 4SP
DG1 4SQ
DG1 4SR
DG1 4ST
DG1 4SU
DG1 4SW
DG1 4SX
DG1 4SY
DG1 4TA
DG1 4TB
DG1 4TD
DG1 4TE
DG1 4TF
DG1 4TG
DG1 4TH
DG1 4TJ
DG1 4TL
DG1 4TN
DG1 4TP
DG1 4TQ
DG1 4TR
DG1 4TS
DG1 4TT
DG1 4TU
DG1 4TW
DG1 4TX
DG1 4TY
DG1 4TZ
DG1 4UA
DG1 4UB
DG1 4UD
DG1 4UE
DG1 4UF
DG1 4UG
DG1 4UH
DG1 4UJ
DG1 4UL
DG1 4UN
DG1 4UP
DG1 4UQ
DG1 4UR
DG1 4UT
DG1 4UU
DG1 4UW
DG1 4UX
DG1 4UY
DG1 4UZ
DG1 4XA
DG1 4XB
DG1 4XD
DG1 4XE
DG1 4XF
DG1 4XG
DG1 4XH
DG1 4XJ
DG1 4XL
DG1 4YL
DG1 4YS
DG1 4YT
DG1 4YU
DG1 4YX
DG1 4YY
DG10 9ER
DG10 9ES
DG10 9ET
DG10 9EU
DG10 9EX
DG10 9EY
DG11 1DH
DG11 1DJ
DG11 1DL
DG11 1DN
DG11 1DP
DG11 1DQ
DG11 1DR
DG11 1DS
DG11 1DT
DG11 1DU
DG11 1DW
DG11 1DX
DG11 1DY
DG11 1DZ
DG11 1EA
DG11 1EG
DG11 1EH
DG11 1EJ
DG11 1EL
DG11 1EN
DG11 1EP
DG11 1EQ
DG11 1ER
DG11 1ES
DG11 1ET
DG11 1EU
DG11 1EW
DG11 1EX
DG11 1EY
DG11 1GA
DG11 1GB
DG11 1GD
DG11 1GE
DG11 1GF
DG11 1HA
DG11 1HB
DG1 4YZ
DG1 4ZD
DG1 4ZE
DG1 4ZF
DG1 4ZJ
DG1 4ZL
DG1 4ZN
DG1 4ZS
DG1 4ZW
DG1 4ZZ
DG1 9AD
DG1 9AR
DG1 9DF
DG1 9DG
DG1 9DL
DG1 9DP
DG1 9DQ
DG1 9DS
DG1 9DT
DG1 9SA
DG1 9SB
DG1 9SD
DG1 9SE
DG1 9SF
DG1 9SG
DG1 9SH
DG1 9SJ
DG1 9SL
DG1 9SN
DG1 9SP
DG1 9SQ
DG1 9SR
DG1 9SS
DG1 9ST
DG1 9SU
DG1 9SW
DG1 9SX
DG1 9SY
DG1 9SZ
DG1 9TA
DG1 9TB
DG1 9TD
DG1 9TE
DG1 9TF
DG1 9TG
DG1 9TH
DG1 9TJ
DG1 9TL
DG1 9TN
DG1 9TP
DG1 9TQ
DG1 9TR
DG1 9TS
DG1 9TT
DG1 9TU
DG1 9TW
DG1 9TX
DG1 9TY
DG1 9TZ
DG1 9UA
DG1 9UB
DG1 9UD
DG1 9UE
DG1 9UF
DG1 9UG
DG1 9UH
DG1 9UJ
DG1 9UL
DG1 9UN
DG1 9UP
DG1 9UQ
DG1 9UR
DG1 9US
DG1 9UT
DG1 9UU
DG1 9UW
DG1 9UX
DG1 9UY
DG1 9UZ
DG10 9AA
DG10 9AB
DG10 9AD
DG10 9AE
DG10 9AG
DG10 9AH
DG10 9AJ
DG10 9AL
DG10 9AN
DG10 9AP
DG10 9AQ
DG10 9AR
DG10 9AS
DG10 9AT
DG10 9AW
DG10 9AX
DG10 9AY
DG10 9AZ
DG10 9BA
DG10 9BB
DG10 9BD
DG10 9BE
DG10 9BF
DG10 9BG
DG10 9BH
DG10 9BJ
DG10 9BL
DG10 9BN
DG10 9BP
DG10 9BQ
DG10 9BS
DG10 9BT
DG10 9BU
DG10 9BW
DG10 9BX
DG10 9BY
DG10 9BZ
DG10 9DA
DG10 9DB
DG10 9DD
DG10 9DE
DG10 9DF
DG10 9DG
DG10 9DH
DG10 9DJ
DG10 9DL
DG10 9DN
DG10 9DP
DG10 9DR
DG10 9DS
DG10 9DT
DG10 9DU
DG10 9DW
DG10 9DX
DG10 9DY
DG10 9EA
DG10 9EB
DG10 9ED
DG10 9EE
DG10 9EF
DG10 9EG
DG10 9EH
DG10 9EJ
DG10 9EL
DG10 9EP
DG10 9EZ
DG10 9HA
DG10 9HB
DG10 9HD
DG10 9HE
DG10 9HF
DG10 9HG
DG10 9HH
DG10 9HJ
DG10 9HL
DG10 9HN
DG10 9HP
DG10 9HQ
DG10 9HR
DG10 9HS
DG10 9HT
DG10 9HU
DG10 9HW
DG10 9HX
DG10 9HY
DG10 9HZ
DG10 9JA
DG10 9JB
DG10 9JD
DG10 9JE
DG10 9JF
DG10 9JG
DG10 9JH
DG10 9JJ
DG10 9JL
DG10 9JN
DG10 9JP
DG10 9JQ
DG10 9JT
DG10 9JU
DG10 9JW
DG10 9JX
DG10 9JY
DG10 9JZ
DG10 9LA
DG10 9LB
DG10 9LD
DG10 9LE
DG10 9LF
DG10 9LG
DG10 9LH
DG10 9LJ
DG10 9LL
DG10 9LN
DG10 9LP
DG10 9LQ
DG10 9LR
DG10 9LS
DG10 9LT
DG10 9LU
DG10 9LX
DG10 9LY
DG10 9LZ
DG10 9NA
DG10 9NB
DG10 9ND
DG10 9NE
DG10 9NF
DG10 9NG
DG10 9NH
DG10 9NJ
DG10 9NL
DG10 9NN
DG10 9NP
DG10 9NR
DG10 9QL
DG10 9QN
DG10 9QP
DG10 9QQ
DG10 9QR
DG10 9QS
DG10 9QT
DG10 9QU
DG10 9QW
DG10 9QX
DG10 9QZ
DG10 9RA
DG10 9RB
DG10 9RD
DG10 9RE
DG10 9RF
DG10 9RG
DG10 9RH
DG10 9RJ
DG10 9RL
DG10 9RN
DG10 9RQ
DG10 9RR
DG10 9RS
DG10 9RT
DG10 9RU
DG10 9RX
DG10 9RY
DG10 9RZ
DG10 9SA
DG10 9SB
DG10 9SD
DG10 9SE
DG10 9SF
DG10 9SG
DG10 9SH
DG10 9SJ
DG10 9SL
DG10 9SN
DG10 9SP
DG10 9SQ
DG10 9SR
DG10 9ST
DG10 9SW
DG10 9WT
DG11 1AA
DG11 1AB
DG11 1AD
DG11 1AE
DG11 1AF
DG11 1AG
DG11 1AH
DG11 1AJ
DG11 1AL
DG11 1AN
DG11 1AP
DG11 1AQ
DG11 1AR
DG11 1AS
DG11 1AT
DG11 1AU
DG11 1AW
DG11 1AX
DG11 1AY
DG11 1AZ
DG11 1BA
DG11 1BB
DG11 1BD
DG11 1BE
DG11 1BG
DG11 1BJ
DG11 1BL
DG11 1BN
DG11 1BP
DG11 1BS
DG11 1BT
DG11 1BU
DG11 1BW
DG11 1BX
DG11 1BY
DG11 1BZ
DG11 1DA
DG11 1DB
DG11 1DD
DG11 1DE
DG11 1DF
DG11 1DG
DG11 1HD
DG11 1HE
DG11 1HF
DG11 1HG
DG11 1HH
DG11 1HJ
DG11 1HL
DG11 1HN
DG11 1HP
DG11 1HQ
DG11 1HR
DG11 1HS
DG11 1HW
DG11 1HY
DG11 1HZ
DG11 1JA
DG11 1JB
DG11 1JD
DG11 2BA
DG11 2BB
DG11 2BD
DG11 2BE
DG11 2BF
DG11 2BG
DG11 2BH
DG11 2BJ
DG11 2BL
DG11 2BP
DG11 2BQ
DG11 2BT
DG11 2BU
DG11 2BX
DG11 2BY
DG11 2DA
DG11 2DB
DG11 2DE
DG11 2SB
DG11 2SD
DG11 2SE
DG11 2SF
DG11 2SG
DG11 2SH
DG11 2SJ
DG11 2SL
DG11 2SQ
DG11 2SS
DG11 2UA
DG11 3AA
DG11 1JE
DG11 1JF
DG11 1JG
DG11 1JH
DG11 1JJ
DG11 1JL
DG11 1JN
DG11 1JP
DG11 1JQ
DG11 1JR
DG11 1JS
DG11 1JT
DG11 1JU
DG11 1JW
DG11 1JX
DG11 1JY
DG11 1JZ
DG11 1LA
DG11 1LB
DG11 1LD
DG11 1LE
DG11 1LG
DG11 1LH
DG11 1LJ
DG11 1LL
DG11 1LN
DG11 1LP
DG11 1LQ
DG11 1LR
DG11 1LS
DG11 1LT
DG11 1LU
DG11 1LW
DG11 1LX
DG11 1LY
DG11 1LZ
DG11 1NA
DG11 1NB
DG11 1ND
DG11 1NE
DG11 1NF
DG11 1NG
DG11 1NH
DG11 1NJ
DG11 1NL
DG11 1NN
DG11 1NP
DG11 1NQ
DG11 1NR
DG11 1NS
DG11 1NT
DG11 1NU
DG11 1NW
DG11 1NX
DG11 1NY
DG11 1NZ
DG11 1PA
DG11 1PB
DG11 1PD
DG11 1PE
DG11 1PF
DG11 1PG
DG11 1PH
DG11 1PJ
DG11 1PL
DG11 1PN
DG11 1PP
DG11 1PQ
DG11 1PR
DG11 1PS
DG11 1PT
DG11 1PU
DG11 1PW
DG11 1PX
DG11 1PY
DG11 1PZ
DG11 1QA
DG11 1QB
DG11 1QD
DG11 1QE
DG11 1QF
DG11 1QG
DG11 1QH
DG11 1QJ
DG11 1QL
DG11 1QN
DG11 1QP
DG11 1QQ
DG11 1QR
DG11 1QS
DG11 1QT
DG11 1QU
DG11 1QW
DG11 1QX
DG11 1QY
DG11 1QZ
DG11 1RA
DG11 1RB
DG11 1RD
DG11 1RE
DG11 1RF
DG11 1RG
DG11 1RH
DG11 1RJ
DG11 1RL
DG11 1RN
DG11 1RP
DG11 1RQ
DG11 1RR
DG11 1RS
DG11 1RT
DG11 1RU
DG11 1RW
DG11 1RX
DG11 1RY
DG11 1RZ
DG11 1SA
DG11 1SB
DG11 1SE
DG11 1SF
DG11 1SG
DG11 1SH
DG11 1SJ
DG11 1SN
DG11 1SP
DG11 1SQ
DG11 1SR
DG11 1SS
DG11 1ST
DG11 1SU
DG11 1SW
DG11 1SX
DG11 1SY
DG11 1SZ
DG11 1TA
DG11 1TB
DG11 1TD
DG11 1TE
DG11 1TF
DG11 1TG
DG11 1TH
DG11 1TJ
DG11 1TL
DG11 1TN
DG11 1TP
DG11 1TQ
DG11 1TR
DG11 1TS
DG11 1TT
DG11 1TU
DG11 1TW
DG11 1TX
DG11 1TY
DG11 1TZ
DG11 1UB
DG11 1UD
DG11 1UE
DG11 1UG
DG11 1WP
DG11 1WR
DG11 2AA
DG11 2AB
DG11 2AD
DG11 2AE
DG11 2AF
DG11 2AG
DG11 2AH
DG11 2AJ
DG11 2AL
DG11 2AN
DG11 2AP
DG11 2AQ
DG11 2AR
DG11 2AS
DG11 2AT
DG11 2AU
DG11 2AW
DG11 2AX
DG11 2AY
DG11 2AZ
DG11 2DF
DG11 2DG
DG11 2DH
DG11 2DJ
DG11 2DL
DG11 2DN
DG11 2DP
DG11 2DQ
DG11 2DR
DG11 2DS
DG11 2DT
DG11 2DU
DG11 2DW
DG11 2DX
DG11 2DY
DG11 2DZ
DG11 2EA
DG11 2EB
DG11 2ED
DG11 2EE
DG11 2EF
DG11 2EG
DG11 2EH
DG11 2EJ
DG11 2EL
DG11 2EN
DG11 2EP
DG11 2ER
DG11 2ES
DG11 2ET
DG11 2EU
DG11 2EW
DG11 2EX
DG11 2EY
DG11 2EZ
DG11 2FA
DG11 2FB
DG11 2FD
DG11 2FE
DG11 2FF
DG11 2FG
DG11 2FH
DG11 2GA
DG11 2GB
DG11 2GD
DG11 2GZ
DG11 2HA
DG11 2HB
DG11 2HE
DG11 2HF
DG11 2HG
DG11 2HH
DG11 2HJ
DG11 2HL
DG11 2HN
DG11 2HP
DG11 2HQ
DG11 2HR
DG11 2HT
DG11 2HU
DG11 2HW
DG11 2HX
DG11 2HY
DG11 2HZ
DG11 2JA
DG11 2JB
DG11 2JD
DG11 2JE
DG11 2JF
DG11 2JG
DG11 2JH
DG11 2JJ
DG11 2JL
DG11 2JP
DG11 2JQ
DG11 2JR
DG11 2JS
DG11 2JT
DG11 2JU
DG11 2JX
DG11 2JY
DG11 2JZ
DG11 2LA
DG11 2LB
DG11 2LD
DG11 2LE
DG11 2LF
DG11 2LG
DG11 2LH
DG11 2LJ
DG11 2LL
DG11 2LN
DG11 2LP
DG11 2LQ
DG11 2LR
DG11 2LS
DG11 2LU
DG11 2LW
DG11 2LX
DG11 2LY
DG11 2LZ
DG11 2NA
DG11 2NB
DG11 2ND
DG11 2NE
DG11 2NF
DG11 2NG
DG11 2NH
DG11 2NJ
DG11 2NL
DG11 2NN
DG11 2NP
DG11 2NQ
DG11 2NR
DG11 2NS
DG11 2NT
DG11 2NU
DG11 2NW
DG11 2NX
DG11 2NY
DG11 2NZ
DG11 2PA
DG11 2PB
DG11 2PD
DG11 2PE
DG11 2PF
DG11 2PG
DG11 2PH
DG11 2PJ
DG11 2PL
DG11 2PN
DG11 2PP
DG11 2PQ
DG11 2PR
DG11 2PS
DG11 2PT
DG11 2PU
DG11 2PW
DG11 2PX
DG11 2PY
DG11 2PZ
DG11 2QA
DG11 2QB
DG11 2QD
DG11 2QE
DG11 2QF
DG11 2QG
DG11 2QH
DG11 2QJ
DG11 2QL
DG11 2QN
DG11 2QP
DG11 2QQ
DG11 2QR
DG11 2QS
DG11 2QT
DG11 2QU
DG11 2QW
DG11 2QX
DG11 2QY
DG11 2QZ
DG11 2RA
DG11 2RB
DG11 2RF
DG11 2RG
DG11 2RH
DG11 2RJ
DG11 2RL
DG11 2RN
DG11 2RP
DG11 2RQ
DG11 2RR
DG11 2RS
DG11 2RT
DG11 2RU
DG11 2RW
DG11 2RX
DG11 2RY
DG11 2RZ
DG11 2SA
DG11 3AB
DG11 3AD
DG11 3AE
DG11 3AF
DG11 3AG
DG11 3AH
DG11 3AJ
DG11 3AL
DG11 3AN
DG11 3AP
DG11 3AQ
DG11 3AR
DG11 3AS
DG11 3AT
DG11 3AU
DG11 3AW
DG11 3AX
DG11 3AY
DG11 3AZ
DG11 3BA
DG11 3BB
DG11 3BD
DG11 3BE
DG11 3BF
DG11 3BG
DG11 3BH
DG11 3BJ
DG11 3BL
DG11 3BN
DG11 3BQ
DG11 3BT
DG11 3BU
DG11 3BX
DG11 3BY
DG11 3BZ
DG11 3DA
DG11 3DB
DG11 3DD
DG11 3DE
DG11 3DF
DG11 3DG
DG11 3DH
DG11 3DJ
DG11 3DL
DG11 3DN
DG11 3DP
DG11 3DQ
DG11 3DR
DG11 3DS
DG11 3DT
DG11 3DU
DG11 3DW
DG11 3DX
DG11 3DY
DG11 3DZ
DG11 3EA
DG11 3EB
DG11 3ED
DG11 3EE
DG11 3EF
DG11 3EG
DG11 3EH
DG11 3EJ
DG11 3EL
DG11 3EN
DG11 3EP
DG11 3EQ
DG11 3ER
DG11 3ES
DG11 3ET
DG11 3EU
DG11 3EW
DG11 3EX
DG11 3EY
DG11 3EZ
DG11 3FW
DG11 3HA
DG11 3HB
DG11 3HD
DG11 3HE
DG11 3HF
DG11 3HG
DG11 3HH
DG11 3HJ
DG11 3HL
DG11 3HN
DG11 3HP
DG11 3HQ
DG11 3HR
DG11 3HT
DG11 3HW
DG11 3JA
DG11 3JB
DG11 3JD
DG11 3JE
DG11 3JF
DG11 3JG
DG11 3JH
DG11 3JJ
DG11 3JL
DG11 3JN
DG11 3JP
DG11 3JQ
DG11 3JR
DG11 3JS
DG11 3JT
DG11 3JU
DG11 3JW
DG11 3JX
DG11 3JY
DG11 3JZ
DG11 3LE
DG11 3LG
DG11 3LH
DG11 3LJ
DG11 3LL
DG11 3LN
DG11 3LP
DG11 3LQ
DG11 3LR
DG11 3LS
DG11 3LT
DG11 3LU
DG11 3LW
DG11 3LX
DG11 3LY
DG11 3LZ
DG11 3NA
DG11 3NB
DG11 3ND
DG11 3NE
DG11 3NF
DG11 3NG
DG11 3NH
DG11 3NJ
DG11 3NL
DG11 3NN
DG11 3NQ
DG11 3NS
DG11 3NT
DG11 3NU
DG11 3NX
DG11 3NY
DG11 3NZ
DG11 3PA
DG11 3PB
DG11 3PD
DG11 3PE
DG11 3PF
DG11 3PG
DG11 3PH
DG12 5RA
DG12 5RB
DG12 5RD
DG12 5RE
DG12 5RF
DG12 5RG
DG12 5RH
DG12 5RJ
DG12 5RL
DG12 5RN
DG12 5RP
DG12 5RQ
DG12 5RR
DG12 5RS
DG12 5RT
DG12 5YB
DG12 6AA
DG12 6AB
DG12 6AD
DG12 6AG
DG12 6AJ
DG12 6AL
DG12 6AN
DG12 6AP
DG12 6AQ
DG12 6AR
DG12 6AS
DG12 6AT
DG12 6AU
DG12 6AW
DG12 6AY
DG12 6AZ
DG12 6BA
DG12 6BB
DG12 6BD
DG12 6BE
DG12 6BF
DG12 6BG
DG12 6BH
DG12 6BJ
DG12 6BN
DG12 6BQ
DG12 6BR
DG12 6BS
DG12 6BT
DG12 6BU
DG11 3PJ
DG11 3PL
DG11 3PN
DG11 3PP
DG11 3PQ
DG11 3PR
DG11 3PW
DG11 3PZ
DG11 3YE
DG11 9AB
DG12 5AA
DG12 5AB
DG12 5AD
DG12 5AE
DG12 5AG
DG12 5AH
DG12 5AJ
DG12 5AL
DG12 5AN
DG12 5AP
DG12 5AQ
DG12 5AR
DG12 5AS
DG12 5AT
DG12 5AU
DG12 5AW
DG12 5AX
DG12 5AY
DG12 5AZ
DG12 5BA
DG12 5BB
DG12 5BD
DG12 5BE
DG12 5BG
DG12 5BH
DG12 5BJ
DG12 5BL
DG12 5BN
DG12 5BP
DG12 5BQ
DG12 5BW
DG12 5DA
DG12 5DB
DG12 5DD
DG12 5DE
DG12 5DF
DG12 5DG
DG12 5DH
DG12 5DJ
DG12 5DL
DG12 5DN
DG12 5DP
DG12 5DQ
DG12 5DR
DG12 5DS
DG12 5DT
DG12 5DU
DG12 5DW
DG12 5DX
DG12 5DY
DG12 5DZ
DG12 5EA
DG12 5EB
DG12 5ED
DG12 5EE
DG12 5EF
DG12 5EG
DG12 5EH
DG12 5EJ
DG12 5EL
DG12 5EN
DG12 5EP
DG12 5EQ
DG12 5ER
DG12 5ES
DG12 5ET
DG12 5EU
DG12 5EW
DG12 5EX
DG12 5EY
DG12 5EZ
DG12 5FB
DG12 5GZ
DG12 5HA
DG12 5HB
DG12 5HD
DG12 5HE
DG12 5HF
DG12 5HG
DG12 5HH
DG12 5HJ
DG12 5HL
DG12 5HN
DG12 5HP
DG12 5HQ
DG12 5HR
DG12 5HS
DG12 5HT
DG12 5HU
DG12 5HW
DG12 5HX
DG12 5HY
DG12 5HZ
DG12 5JA
DG12 5JB
DG12 5JD
DG12 5JE
DG12 5JF
DG12 5JG
DG12 5JH
DG12 5JJ
DG12 5JL
DG12 5JN
DG12 5JP
DG12 5JQ
DG12 5JR
DG12 5JS
DG12 5JT
DG12 5JU
DG12 5JW
DG12 5JX
DG12 5JY
DG12 5JZ
DG12 5LA
DG12 5LB
DG12 5LD
DG12 5LF
DG12 5LH
DG12 5LJ
DG12 5LL
DG12 5LN
DG12 5LP
DG12 5LQ
DG12 5LR
DG12 5LS
DG12 5LT
DG12 5LU
DG12 5LW
DG12 5LX
DG12 5LY
DG12 5LZ
DG12 5NA
DG12 5NB
DG12 5ND
DG12 5NE
DG12 5NF
DG12 5NG
DG12 5NH
DG12 5NJ
DG12 5NL
DG12 5NN
DG12 5NP
DG12 5NQ
DG12 5NS
DG12 5NW
DG12 5NX
DG12 5NY
DG12 5NZ
DG12 5PA
DG12 5PB
DG12 5PD
DG12 5PE
DG12 5PF
DG12 5PG
DG12 5PH
DG12 5PJ
DG12 5PL
DG12 5PN
DG12 5PQ
DG12 5PR
DG12 5PS
DG12 5PT
DG12 5PU
DG12 5PW
DG12 5PX
DG12 5PY
DG12 5PZ
DG12 5QA
DG12 5QB
DG12 5QD
DG12 5QE
DG12 5QF
DG12 5QG
DG12 5QH
DG12 5QJ
DG12 5QL
DG12 5QN
DG12 5QP
DG12 5QQ
DG12 5QR
DG12 5QS
DG12 5QT
DG12 5QU
DG12 5QW
DG12 5QX
DG12 5QY
DG12 5QZ
DG12 6JS
DG12 6JT
DG12 6JU
DG12 6JW
DG12 6JX
DG12 6JY
DG12 6LA
DG12 6LB
DG12 6LD
DG12 6LE
DG12 6LF
DG12 6LG
DG12 6LH
DG12 6LL
DG12 6LN
DG12 6LP
DG12 6LQ
DG12 6LR
DG12 6LS
DG12 6LT
DG12 6LU
DG12 6LW
DG12 6LX
DG12 6LY
DG12 6LZ
DG12 6NA
DG12 6NB
DG12 6ND
DG12 6NF
DG12 6NG
DG12 6NH
DG12 6NJ
DG12 6NL
DG12 6NN
DG12 6NQ
DG12 6NR
DG12 6NS
DG12 6NT
DG12 6NU
DG12 6BW
DG12 6BX
DG12 6BY
DG12 6BZ
DG12 6DA
DG12 6DB
DG12 6DD
DG12 6DE
DG12 6DF
DG12 6DG
DG12 6DH
DG12 6DJ
DG12 6DL
DG12 6DP
DG12 6DQ
DG12 6DR
DG12 6DS
DG12 6DT
DG12 6DU
DG12 6DW
DG12 6DX
DG12 6DY
DG12 6DZ
DG12 6EA
DG12 6EB
DG12 6ED
DG12 6EE
DG12 6EF
DG12 6EG
DG12 6EH
DG12 6EJ
DG12 6EL
DG12 6EN
DG12 6EP
DG12 6EQ
DG12 6ER
DG12 6ES
DG12 6ET
DG12 6EU
DG12 6EW
DG12 6EX
DG12 6EY
DG12 6EZ
DG12 6FB
DG12 6FD
DG12 6FE
DG12 6FF
DG12 6GA
DG12 6GB
DG12 6GN
DG12 6GS
DG12 6GT
DG12 6GY
DG12 6HA
DG12 6HB
DG12 6HD
DG12 6HE
DG12 6HF
DG12 6HG
DG12 6HH
DG12 6HJ
DG12 6HL
DG12 6HN
DG12 6HP
DG12 6HQ
DG12 6HR
DG12 6HT
DG12 6HU
DG12 6HW
DG12 6HX
DG12 6HY
DG12 6HZ
DG12 6JB
DG12 6JD
DG12 6JE
DG12 6JF
DG12 6JG
DG12 6JH
DG12 6JJ
DG12 6JL
DG12 6JN
DG12 6JP
DG12 6JQ
DG12 6JR
DG12 6RN
DG12 6RP
DG12 6RQ
DG12 6RR
DG12 6RS
DG12 6RU
DG12 6RW
DG12 6RX
DG12 6RY
DG12 6RZ
DG12 6SA
DG12 6SB
DG12 6SD
DG12 6SE
DG12 6SF
DG12 6SG
DG12 6SH
DG12 6SJ
DG12 6SL
DG12 6SN
DG12 6SP
DG12 6SQ
DG12 6SR
DG12 6SS
DG12 6ST
DG12 6SU
DG12 6SW
DG12 6NW
DG12 6NX
DG12 6NY
DG12 6NZ
DG12 6PA
DG12 6PB
DG12 6PD
DG12 6PE
DG12 6PF
DG12 6PG
DG12 6PH
DG12 6PJ
DG12 6PN
DG12 6PP
DG12 6PQ
DG12 6PR
DG12 6PS
DG12 6PT
DG12 6PU
DG12 6PW
DG12 6PX
DG12 6PY
DG12 6PZ
DG12 6QA
DG12 6QB
DG12 6QD
DG12 6QE
DG12 6QF
DG12 6QG
DG12 6QH
DG12 6QJ
DG12 6QN
DG12 6QP
DG12 6QQ
DG12 6QR
DG12 6QS
DG12 6QT
DG12 6QU
DG12 6QW
DG12 6QX
DG12 6QY
DG12 6QZ
DG12 6RA
DG12 6RB
DG12 6RD
DG12 6RF
DG12 6RG
DG12 6RH
DG12 6RJ
DG12 6RL
DG13 0BQ
DG13 0DG
DG13 0DH
DG13 0DJ
DG13 0DL
DG13 0DN
DG13 0DP
DG13 0DQ
DG13 0DR
DG13 0DS
DG13 0DT
DG13 0DU
DG13 0DW
DG13 0DX
DG13 0DY
DG13 0DZ
DG13 0EA
DG13 0EB
DG13 0EE
DG13 0EF
DG13 0EG
DG13 0EH
DG13 0EJ
DG13 0EN
DG12 6SX
DG12 6SY
DG12 6SZ
DG12 6TB
DG12 6TD
DG12 6TE
DG12 6TF
DG12 6TG
DG12 6TH
DG12 6TJ
DG12 6TL
DG12 6TN
DG12 6TP
DG12 6TQ
DG12 6TR
DG12 6TS
DG12 6TT
DG12 6TU
DG12 6TW
DG12 6TX
DG12 6TY
DG12 6YG
DG12 6YL
DG12 9AB
DG12 9AD
DG13 0AA
DG13 0AB
DG13 0AD
DG13 0AE
DG13 0AF
DG13 0AG
DG13 0AH
DG13 0AJ
DG13 0AL
DG13 0AN
DG13 0AP
DG13 0AQ
DG13 0AR
DG13 0AS
DG13 0AT
DG13 0AU
DG13 0AW
DG13 0AX
DG13 0AY
DG13 0AZ
DG13 0BA
DG13 0BB
DG13 0BD
DG13 0BE
DG13 0BF
DG13 0BG
DG13 0BH
DG13 0BJ
DG13 0BL
DG13 0BN
DG13 0LU
DG13 0LW
DG13 0ND
DG13 0NE
DG13 0NF
DG13 0NG
DG13 0NH
DG13 0NJ
DG13 0NL
DG13 0NN
DG13 0NQ
DG13 0NW
DG13 0NX
DG13 0NY
DG13 0NZ
DG13 0PA
DG13 0PB
DG13 0PD
DG13 0PE
DG13 0PF
DG13 0PG
DG13 0PJ
DG13 0PL
DG13 0PN
DG13 0PP
DG13 0PQ
DG13 0PR
DG13 0PS
DG13 0PT
DG13 0PU
DG13 0PW
DG13 0PX
DG13 0PZ
DG13 0QF
DG13 0QG
DG13 0QH
DG16 5EB
DG16 5ED
DG16 5EE
DG16 5EF
DG16 5EG
DG16 5EH
DG16 5EJ
DG16 5EL
DG16 5EN
DG16 5EP
DG16 5EQ
DG16 5ER
DG16 5ES
DG16 5ET
DG16 5EU
DG16 5EW
DG16 5EX
DG16 5EY
DG16 5EZ
DG16 5FA
DG16 5FB
DG16 5FD
DG16 5FE
DG16 5FF
DG16 5FZ
DG16 5GA
DG16 5GD
DG16 5GG
DG16 5HA
DG16 5HB
DG13 0QJ
DG13 0QL
DG13 0QN
DG13 0QP
DG13 0QQ
DG13 0QR
DG13 0QS
DG13 0EP
DG13 0EQ
DG13 0ES
DG13 0ET
DG13 0HG
DG13 0HH
DG13 0HJ
DG13 0HL
DG13 0HN
DG13 0HP
DG13 0HQ
DG13 0HR
DG13 0HS
DG13 0HT
DG13 0HU
DG13 0HW
DG13 0HX
DG13 0JD
DG13 0JE
DG13 0JF
DG13 0JG
DG13 0JH
DG13 0JL
DG13 0JN
DG13 0JP
DG13 0JQ
DG13 0JR
DG13 0JS
DG13 0JT
DG13 0JW
DG13 0JX
DG13 0JY
DG13 0JZ
DG13 0LB
DG13 0LH
DG13 0LJ
DG13 0LL
DG13 0LN
DG13 0LP
DG13 0LR
DG13 0LS
DG13 0LT
DG13 0QT
DG13 0QW
DG14 0RA
DG14 0RD
DG14 0RE
DG14 0RF
DG14 0RL
DG14 0RN
DG14 0RP
DG14 0RQ
DG14 0RR
DG14 0RS
DG14 0RT
DG14 0RW
DG14 0RX
DG14 0RY
DG14 0RZ
DG14 0SA
DG14 0SB
DG14 0SD
DG14 0SE
DG14 0SF
DG14 0SG
DG14 0SH
DG14 0SQ
DG14 0SY
DG14 0SZ
DG14 0TA
DG14 0TB
DG14 0TD
DG14 0TE
DG14 0TF
DG14 0TG
DG14 0TH
DG14 0TJ
DG14 0TL
DG14 0TN
DG14 0TP
DG14 0TQ
DG14 0TR
DG14 0TS
DG14 0TT
DG14 0TU
DG14 0TW
DG14 0TX
DG14 0UP
DG14 0UR
DG14 0UT
DG14 0UU
DG14 0UX
DG14 0UY
DG14 0WX
DG14 0XA
DG14 0XB
DG14 0XD
DG14 0XE
DG14 0XF
DG14 0XH
DG14 0XJ
DG14 0XL
DG14 0XN
DG14 0XP
DG14 0XQ
DG14 0XR
DG14 0XS
DG14 0XT
DG14 0XU
DG14 0XW
DG14 0XX
DG14 0XY
DG14 0XZ
DG14 0YA
DG14 0YB
DG14 0YD
DG14 0YE
DG14 0YF
DG14 0YG
DG16 5AA
DG16 5AB
DG16 5AD
DG16 5AE
DG16 5AF
DG16 5AG
DG16 5AH
DG16 5AJ
DG16 5AL
DG16 5AN
DG16 5AP
DG16 5AQ
DG16 5AR
DG16 5AS
DG16 5AT
DG16 5AU
DG16 5AW
DG16 5AX
DG16 5AY
DG16 5AZ
DG16 5BA
DG16 5BB
DG16 5BD
DG16 5BE
DG16 5BF
DG16 5BG
DG16 5BH
DG16 5BJ
DG16 5BL
DG16 5BN
DG16 5BP
DG16 5BQ
DG16 5BS
DG16 5BT
DG16 5BU
DG16 5BW
DG16 5BX
DG16 5BY
DG16 5BZ
DG16 5DA
DG16 5DB
DG16 5DD
DG16 5DE
DG16 5DF
DG16 5DG
DG16 5DH
DG16 5DJ
DG16 5DL
DG16 5DN
DG16 5DP
DG16 5DQ
DG16 5DR
DG16 5DS
DG16 5DT
DG16 5DU
DG16 5DW
DG16 5DX
DG16 5DY
DG16 5DZ
DG16 5EA
DG16 5HD
DG16 5HE
DG16 5HF
DG16 5HG
DG16 5HH
DG16 5HJ
DG16 5HL
DG16 5HN
DG16 5HP
DG16 5HQ
DG16 5HR
DG16 5HS
DG16 5HW
DG16 5HX
DG16 5HY
DG16 5JD
DG16 5JE
DG16 5JF
DG16 5JG
DG16 5JH
DG16 5JJ
DG16 5JL
DG16 5JN
DG16 5JP
DG16 5JQ
DG16 5JR
DG16 5JS
DG16 5JT
DG16 5JU
DG16 5JW
DG16 5JX
DG16 5JY
DG16 5JZ
DG16 5LA
DG16 5LB
DG16 5LD
DG16 5LE
DG16 5LF
DG16 5LG
DG16 5LN
DG16 5LQ
DG16 5NA
DG16 5NB
DG16 5QA
DG16 5QB
DG16 5UP
DG16 5YL
DG2 0AA
DG2 0AB
DG2 0AD
DG2 0AE
DG2 0AF
DG2 0AG
DG2 0AH
DG2 0AJ
DG2 0AL
DG2 0AN
DG2 0AP
DG2 0AQ
DG2 0AR
DG2 0AS
DG2 0AT
DG2 0AU
DG2 0AW
DG2 0AX
DG2 0AY
DG2 0AZ
DG2 0BA
DG2 0BB
DG2 0BD
DG2 0BE
DG2 0BF
DG2 0BG
DG2 0BH
DG2 0BQ
DG2 0BU
DG2 0BX
DG2 0BY
DG2 0BZ
DG2 0DA
DG2 0DB
DG2 0DD
DG2 0DE
DG2 0DF
DG2 0DG
DG2 0DH
DG2 0DJ
DG2 0DL
DG2 0DQ
DG2 0DS
DG2 0DY
DG2 0DZ
DG2 0EA
DG2 0EB
DG2 0ED
DG2 0EE
DG2 0EF
DG2 0EG
DG2 0EH
DG2 0EJ
DG2 0EL
DG2 0EP
DG2 0EQ
DG2 0ER
DG2 0GZ
DG2 0HA
DG2 0HB
DG2 0HD
DG2 0HE
DG2 0HF
DG2 0HG
DG2 0HH
DG2 0HJ
DG2 0HL
DG2 0HN
DG2 0HP
DG2 0HQ
DG2 0HR
DG2 0HS
DG2 0HT
DG2 0HU
DG2 0HW
DG2 0HX
DG2 0HY
DG2 0HZ
DG2 0JA
DG2 0JB
DG2 0JD
DG2 0JE
DG2 0JF
DG2 0JG
DG2 0JH
DG2 0JJ
DG2 0JL
DG2 0JN
DG2 0JP
DG2 0JQ
DG2 0JR
DG2 0JS
DG2 0JT
DG2 0JU
DG2 0JW
DG2 0JX
DG2 0JY
DG2 0JZ
DG2 0LA
DG2 0LB
DG2 0LD
DG2 0LE
DG2 0LF
DG2 0LG
DG2 0LL
DG2 0LP
DG2 0LQ
DG2 0LR
DG2 0LS
DG2 0LT
DG2 0LU
DG2 0LW
DG2 0LX
DG2 0LY
DG2 0LZ
DG2 0NB
DG2 0ND
DG2 0NE
DG2 0NF
DG2 0NG
DG2 0NH
DG2 0NJ
DG2 0NL
DG2 0NN
DG2 0NP
DG2 0NQ
DG2 0NR
DG2 0NU
DG2 0NW
DG2 0NZ
DG2 0PA
DG2 0PB
DG2 0PD
DG2 0PE
DG2 0PF
DG2 0PG
DG2 0PH
DG2 0PJ
DG2 0PL
DG2 0PN
DG2 0PP
DG2 0PQ
DG2 0PR
DG2 0PS
DG2 0PT
DG2 0PU
DG2 0PW
DG2 0PY
DG2 0PZ
DG2 0QA
DG2 0QB
DG2 0QD
DG2 0QE
DG2 0QF
DG2 0QG
DG2 0QH
DG2 0QJ
DG2 0QL
DG2 0QN
DG2 0QP
DG2 0QQ
DG2 0QR
DG2 0QT
DG2 0QU
DG2 0QX
DG2 0QY
DG2 0QZ
DG2 0RA
DG2 0RB
DG2 0RD
DG2 0RE
DG2 0RF
DG2 0RG
DG2 0RH
DG2 0RJ
DG2 0RL
DG2 0RP
DG2 0RQ
DG2 0RR
DG2 0RS
DG2 0RT
DG2 0RU
DG2 0RW
DG2 0RX
DG2 0RY
DG2 0RZ
DG2 0SA
DG2 0SB
DG2 0SD
DG2 0SE
DG2 0SF
DG2 0SG
DG2 0SN
DG2 0SP
DG2 0SR
DG2 0SS
DG2 0ST
DG2 0SU
DG2 0SW
DG2 0SX
DG2 0SY
DG2 0SZ
DG2 0TA
DG2 0TB
DG2 0TD
DG2 0TE
DG2 0TF
DG2 0TG
DG2 0TJ
DG2 0TL
DG2 0TN
DG2 0TP
DG2 0TQ
DG2 0TR
DG2 0TS
DG2 0UA
DG2 0UB
DG2 0UD
DG2 0UE
DG2 0UF
DG2 0UG
DG2 0UH
DG2 0UJ
DG2 0UL
DG2 0UN
DG2 0UP
DG2 0UR
DG2 0UT
DG2 0UU
DG2 0UW
DG2 0UX
DG2 0UY
DG2 0UZ
DG2 0XA
DG2 0XE
DG2 0XG
DG2 0XH
DG2 0XJ
DG2 0XL
DG2 0XN
DG2 0XP
DG2 0XQ
DG2 0XR
DG2 0XS
DG2 0XT
DG2 0XU
DG2 0XW
DG2 0XX
DG2 0XY
DG2 0XZ
DG2 0YA
DG2 0YB
DG2 0YD
DG2 0YJ
DG2 7AA
DG2 7AB
DG2 7AD
DG2 7AE
DG2 7AF
DG2 7AG
DG2 7AH
DG2 7AJ
DG2 7AL
DG2 7AN
DG2 7AP
DG2 7AQ
DG2 7AR
DG2 7AS
DG2 7AT
DG2 7AU
DG2 7AW
DG2 7AX
DG2 7AY
DG2 7AZ
DG2 7BA
DG2 7BB
DG2 7BD
DG2 7BE
DG2 7BF
DG2 7BG
DG2 7BH
DG2 7BJ
DG2 7BL
DG2 7BP
DG2 7DA
DG2 7DB
DG2 7DD
DG2 7DE
DG2 7DF
DG2 7DG
DG2 7DH
DG2 7DJ
DG2 7DL
DG2 7DN
DG2 7DR
DG2 7DS
DG2 7DT
DG2 7DU
DG2 7DW
DG2 7DX
DG2 7DY
DG2 7DZ
DG2 7EA
DG2 7EB
DG2 7ED
DG2 7EE
DG2 7EF
DG2 7EH
DG2 7EJ
DG2 7EL
DG2 7EN
DG2 7EP
DG2 7EQ
DG2 7ER
DG2 7ES
DG2 7ET
DG2 7EU
DG2 7EW
DG2 7EX
DG2 7EY
DG2 7EZ
DG2 7FB
DG2 7FD
DG2 7FE
DG2 7GZ
DG2 7HA
DG2 7HB
DG2 7HE
DG2 7HF
DG2 7HG
DG2 7HH
DG2 7HJ
DG2 7HL
DG2 7HN
DG2 7HQ
DG2 7HR
DG2 7HS
DG2 7HT
DG2 7HU
DG2 7HX
DG2 7HY
DG2 7HZ
DG2 7JJ
DG2 7JL
DG2 7JN
DG2 7JP
DG2 7JR
DG2 7JS
DG2 7JT
DG2 7JU
DG2 7JW
DG2 7JX
DG2 7JY
DG2 7JZ
DG2 7LA
DG2 7LB
DG2 7LD
DG2 7LE
DG2 7LG
DG2 7LH
DG2 7LJ
DG2 7LP
DG2 7LQ
DG2 7LR
DG2 7LS
DG2 7LT
DG2 7LU
DG2 7LW
DG2 7LZ
DG2 7NA
DG2 7NB
DG2 7ND
DG2 7NE
DG2 7NG
DG2 7NS
DG2 7NT
DG2 7NU
DG2 7NX
DG2 7NY
DG2 7NZ
DG2 7PA
DG2 7PD
DG2 7PE
DG2 7PF
DG2 7PH
DG2 7PJ
DG2 7PL
DG2 7PN
DG2 7PQ
DG2 7PT
DG2 7PU
DG2 7PW
DG2 7PX
DG2 7PY
DG2 7PZ
DG2 7QA
DG2 7QB
DG2 7QD
DG2 7QE
DG2 7QF
DG2 7QG
DG2 7QH
DG2 7QJ
DG2 7QQ
DG2 7QU
DG2 7QX
DG2 7QY
DG2 7QZ
DG2 7RA
DG2 7RD
DG2 7RE
DG2 7RF
DG2 7RG
DG2 7RH
DG2 7RJ
DG2 7RL
DG2 7RN
DG2 7RP
DG2 7RQ
DG2 7RR
DG2 7RS
DG2 7RT
DG2 7RU
DG2 7RW
DG2 7RX
DG2 7RY
DG2 7RZ
DG2 7SA
DG2 7SB
DG2 7SD
DG2 7SE
DG2 7SF
DG2 7SL
DG2 7SN
DG2 7SP
DG2 7SQ
DG2 7SR
DG2 7SS
DG2 7ST
DG2 7SU
DG2 7SW
DG2 7TH
DG2 7TJ
DG2 7TL
DG2 7TN
DG2 8AA
DG2 8AB
DG2 8AD
DG2 8AE
DG2 8AF
DG2 8AG
DG2 8AH
DG2 8AJ
DG2 8AL
DG2 8AN
DG2 8AP
DG2 8AQ
DG2 8AR
DG2 8AS
DG2 8AT
DG2 8AU
DG2 8AW
DG2 8AX
DG2 8AY
DG2 8AZ
DG2 8BB
DG2 8BD
DG2 8BE
DG2 8BG
DG2 8BH
DG2 8BJ
DG2 8BL
DG2 8BN
DG2 8BP
DG2 8BQ
DG2 8BU
DG2 8BW
DG2 8BX
DG2 8BY
DG2 8BZ
DG2 8DA
DG2 8DB
DG2 8DD
DG2 8DE
DG2 8DF
DG2 8DG
DG2 8DH
DG2 8DJ
DG2 8DL
DG2 8DN
DG2 8DP
DG2 8DQ
DG2 8DR
DG2 8DS
DG2 8DT
DG2 8DU
DG2 8DW
DG2 8DX
DG2 8DY
DG2 8DZ
DG2 8EA
DG2 8EB
DG2 8ED
DG2 8EE
DG2 8EF
DG2 8EG
DG2 8EH
DG2 8EJ
DG2 8EN
DG2 8EP
DG2 8EQ
DG2 8ER
DG2 8ES
DG2 8ET
DG2 8EU
DG2 8EW
DG2 8EX
DG2 8EY
DG2 8EZ
DG2 8HA
DG2 8HB
DG2 8HD
DG2 8HE
DG2 8HF
DG2 8HG
DG2 8HH
DG2 8HJ
DG2 8HL
DG2 8HN
DG2 8HP
DG2 8HQ
DG2 8HW
DG2 8HY
DG2 8HZ
DG2 8JA
DG2 8JB
DG2 8JD
DG2 8JE
DG2 8JF
DG2 8JG
DG2 8JH
DG2 8JJ
DG2 8JL
DG2 8JN
DG2 8JP
DG2 8JQ
DG2 8JR
DG2 8JS
DG2 8JT
DG2 8JU
DG2 8JW
DG2 8JX
DG2 8JY
DG2 8JZ
DG2 8LA
DG2 8LB
DG2 8LD
DG2 8LE
DG2 8LF
DG2 8LG
DG2 8LH
DG2 8LJ
DG2 8LL
DG2 8LN
DG2 8LP
DG2 8LQ
DG2 8LR
DG2 8LS
DG2 8LT
DG2 8LW
DG2 8LX
DG2 8LY
DG2 8LZ
DG2 8NA
DG2 8NB
DG2 8ND
DG2 8NF
DG2 8NG
DG2 8NH
DG2 8NJ
DG2 8NL
DG2 8NN
DG2 8NP
DG2 8NQ
DG2 8NR
DG2 8NS
DG2 8NU
DG2 8NW
DG2 8NX
DG2 8NY
DG2 8NZ
DG2 8PA
DG2 8PB
DG2 8PD
DG2 8PE
DG2 8PF
DG2 8PH
DG2 8PJ
DG2 8PN
DG2 8PP
DG2 8PR
DG2 8PS
DG2 8PT
DG2 8PU
DG2 8PX
DG2 8PY
DG2 8PZ
DG2 8QA
DG2 8QB
DG2 8QD
DG2 8QE
DG2 8QF
DG2 8QG
DG2 8QH
DG2 8QL
DG2 8QN
DG2 8QP
DG2 8QQ
DG2 8QR
DG2 8QS
DG2 8QT
DG2 8QU
DG2 8QW
DG2 8QX
DG2 8QY
DG2 8RA
DG2 8RB
DG2 8RD
DG2 8RE
DG2 8RF
DG2 8RG
DG2 8RH
DG2 8RJ
DG2 8RL
DG2 8RN
DG2 8RP
DG2 8RQ
DG2 8RS
DG2 8RT
DG2 8RU
DG2 8RW
DG2 8YA
DG2 8YD
DG2 9AA
DG2 9AB
DG2 9AE
DG2 9AF
DG2 9AG
DG2 9AH
DG2 9AL
DG2 9AN
DG2 9AP
DG2 9AQ
DG2 9AR
DG2 9AS
DG2 9AU
DG2 9AX
DG2 9AY
DG2 9AZ
DG2 9BA
DG2 9BD
DG2 9BE
DG2 9BF
DG2 9BG
DG2 9BH
DG2 9BN
DG2 9BP
DG2 9BQ
DG2 9BS
DG2 9BT
DG2 9BU
DG2 9BW
DG2 9BX
DG2 9BY
DG2 9BZ
DG2 9DB
DG2 9DE
DG2 9DL
DG2 9DN
DG2 9DP
DG2 9DR
DG2 9DS
DG2 9DT
DG2 9DW
DG2 9DX
DG2 9DZ
DG2 9EA
DG2 9EB
DG2 9ED
DG2 9EE
DG2 9EF
DG2 9EG
DG2 9EH
DG2 9EJ
DG2 9EL
DG2 9EN
DG2 9EP
DG2 9ER
DG2 9ES
DG2 9ET
DG2 9EU
DG2 9EW
DG2 9EX
DG2 9EY
DG2 9EZ
DG2 9FB
DG2 9FD
DG2 9FE
DG2 9FF
DG2 9FG
DG2 9FH
DG2 9FJ
DG2 9FL
DG2 9FN
DG3 4DL
DG3 4DQ
DG3 4DS
DG3 4DU
DG3 4DX
DG3 4DZ
DG3 4EA
DG3 4EB
DG3 4ED
DG3 4EE
DG3 4EF
DG3 4EG
DG3 4EH
DG3 4EJ
DG3 4EN
DG3 4EQ
DG3 4ER
DG3 4ES
DG3 4EW
DG3 4GZ
DG3 4HA
DG3 4HB
DG3 4HD
DG3 4HE
DG3 4HF
DG3 4HH
DG3 4HJ
DG3 4HL
DG3 4HN
DG3 4HP
DG3 4HQ
DG3 4HR
DG3 4HS
DG3 4HT
DG3 4HU
DG3 4HW
DG3 4HX
DG3 4HZ
DG3 4JA
DG3 4JB
DG3 4JD
DG3 4JE
DG2 9FP
DG2 9FQ
DG2 9FR
DG2 9HA
DG2 9HB
DG2 9HE
DG2 9HF
DG2 9HG
DG2 9HH
DG2 9HJ
DG2 9HL
DG2 9HP
DG2 9HQ
DG2 9HR
DG2 9HS
DG2 9HT
DG2 9HU
DG2 9HW
DG2 9HX
DG2 9HY
DG2 9HZ
DG2 9JA
DG2 9JB
DG2 9JD
DG2 9JE
DG2 9JF
DG2 9JG
DG2 9JH
DG2 9JJ
DG2 9JL
DG2 9JN
DG2 9JP
DG2 9JQ
DG2 9JR
DG2 9JS
DG2 9JT
DG2 9JU
DG2 9JW
DG2 9JX
DG2 9JY
DG2 9LL
DG2 9LR
DG2 9LS
DG2 9LT
DG2 9LU
DG2 9LX
DG2 9NA
DG2 9NB
DG2 9ND
DG2 9NE
DG2 9NF
DG2 9NG
DG2 9NH
DG2 9NJ
DG2 9NL
DG2 9NN
DG2 9NP
DG2 9NQ
DG2 9NR
DG2 9NS
DG2 9NT
DG2 9NU
DG2 9NW
DG2 9NX
DG2 9NY
DG2 9NZ
DG2 9PA
DG2 9PB
DG2 9PD
DG2 9PE
DG2 9PF
DG2 9PG
DG2 9PN
DG2 9PP
DG2 9PR
DG2 9PS
DG2 9PT
DG2 9PU
DG2 9PW
DG2 9PX
DG2 9PY
DG2 9PZ
DG2 9QA
DG2 9QB
DG2 9QD
DG2 9QE
DG2 9QF
DG2 9QG
DG2 9QH
DG2 9QJ
DG2 9QL
DG2 9QN
DG2 9QP
DG2 9QQ
DG2 9QR
DG2 9QU
DG2 9QW
DG2 9RA
DG2 9RD
DG2 9RF
DG2 9RH
DG2 9RJ
DG2 9RL
DG2 9RN
DG2 9RP
DG2 9RQ
DG2 9RR
DG2 9RS
DG2 9RT
DG2 9RU
DG2 9RW
DG2 9RX
DG2 9RY
DG2 9RZ
DG2 9SA
DG2 9SB
DG2 9SD
DG2 9SE
DG2 9SF
DG2 9SH
DG2 9SJ
DG2 9SL
DG2 9SN
DG2 9SQ
DG2 9SR
DG2 9SS
DG2 9ST
DG2 9TA
DG2 9TG
DG2 9TH
DG2 9TJ
DG2 9TN
DG2 9TP
DG2 9TQ
DG2 9TR
DG2 9TS
DG2 9TT
DG2 9TU
DG2 9TX
DG2 9TY
DG2 9TZ
DG2 9UA
DG2 9UB
DG2 9UD
DG2 9UE
DG2 9UF
DG2 9UG
DG2 9UQ
DG2 9UW
DG3 4AB
DG3 4AD
DG3 4AE
DG3 4AF
DG3 4AG
DG3 4AH
DG3 4AJ
DG3 4AL
DG3 4AN
DG3 4AP
DG3 4AQ
DG3 4AR
DG3 4AS
DG3 4AT
DG3 4AU
DG3 4AW
DG3 4AX
DG3 4BA
DG3 4BD
DG3 4BE
DG3 4BG
DG3 4BH
DG3 4BJ
DG3 4BL
DG3 4BN
DG3 4BP
DG3 4BQ
DG3 4BS
DG3 4BT
DG3 4BU
DG3 4BW
DG3 4BX
DG3 4BY
DG3 4BZ
DG3 4DA
DG3 4DB
DG3 4DD
DG3 4DE
DG3 4DF
DG3 4DG
DG3 4DH
DG3 4DJ
DG3 4JL
DG3 4JN
DG3 4JS
DG3 4JT
DG3 4JU
DG3 4JW
DG3 4JX
DG3 4JY
DG3 4JZ
DG3 4LA
DG3 4LB
DG3 4LD
DG3 4LE
DG3 4LF
DG3 4LN
DG3 4LP
DG3 4LR
DG3 4LU
DG3 4LX
DG3 4LZ
DG3 4NB
DG3 4ND
DG3 4NE
DG3 4NF
DG3 4NG
DG3 4NH
DG3 4NJ
DG3 4NL
DG3 4NN
DG3 4NQ
DG3 4NW
DG3 5AA
DG3 5AB
DG3 5AD
DG3 5AE
DG3 5AF
DG3 5AG
DG3 5AH
DG3 5AJ
DG3 5AL
DG3 5AN
DG3 5AP
DG3 5AQ
DG3 5AR
DG3 5AS
DG3 5AT
DG3 5AU
DG3 5AW
DG3 5AX
DG3 5AY
DG3 5AZ
DG3 5BA
DG3 5BB
DG3 5BD
DG3 5BE
DG3 5BF
DG3 5BG
DG3 5BH
DG3 5BJ
DG3 5BL
DG3 5BN
DG3 5BP
DG3 5BQ
DG3 5BS
DG3 5BT
DG3 5BU
DG3 5BW
DG3 5BX
DG3 5BY
DG3 5BZ
DG3 5DA
DG3 5DB
DG3 5DD
DG3 5DH
DG3 5DJ
DG3 5DL
DG3 5DN
DG3 5DP
DG3 5DR
DG3 5DS
DG3 5DT
DG3 5DW
DG3 5DX
DG3 5DY
DG3 5DZ
DG3 5EA
DG3 5EB
DG3 5ED
DG3 5EE
DG3 5EF
DG3 5EG
DG3 5EH
DG3 5EJ
DG3 5EL
DG3 5EN
DG3 5EQ
DG3 5ER
DG3 5EW
DG3 5EX
DG3 5EY
DG3 5EZ
DG3 5HA
DG3 5HB
DG3 5HD
DG3 5HE
DG3 5HG
DG3 5HH
DG3 5HJ
DG3 5HL
DG3 5HN
DG3 5HP
DG3 5HQ
DG3 5HR
DG3 5HT
DG3 5HU
DG3 5HW
DG3 5HX
DG3 5HY
DG3 5HZ
DG3 5JA
DG3 5JD
DG3 5JE
DG3 5JF
DG3 5JG
DG3 5JH
DG3 5JJ
DG3 5JL
DG3 5JP
DG3 5JQ
DG3 5JR
DG3 5JS
DG3 5JT
DG3 5JU
DG3 5JW
DG3 5JX
DG3 5JY
DG3 5JZ
DG3 5LA
DG3 5LH
DG3 5LJ
DG3 5LL
DG3 5LN
DG3 5LP
DG3 5LS
DG3 5LU
DG3 5LW
DG3 5LX
DG3 5LY
DG3 5LZ
DG3 5NA
DG3 5NB
DG3 5ND
DG3 5NE
DG3 5NF
DG3 5NG
DG3 5NH
DG3 5NJ
DG3 5NL
DG3 5NN
DG3 5NP
DG3 5NQ
DG3 5NR
DG3 5NS
DG3 5NT
DG3 5NU
DG3 5NW
DG3 5NX
DG3 5NY
DG3 5NZ
DG3 5PA
DG3 5PB
DG3 5PD
DG3 5PG
DG3 5PJ
DG3 5PL
DG3 5WA
DG3 5WX
DG4 6AA
DG4 6AB
DG4 6AD
DG4 6AE
DG4 6AF
DG4 6AG
DG4 6AH
DG4 6AJ
DG4 6AL
DG4 6AN
DG4 6AP
DG4 6AQ
DG4 6AR
DG4 6AS
DG4 6AT
DG4 6AU
DG4 6AW
DG4 6AX
DG4 6AY
DG4 6AZ
DG4 6BA
DG4 6BB
DG4 6BD
DG4 6BF
DG4 6BG
DG4 6BH
DG4 6BJ
DG4 6BL
DG4 6BN
DG4 6BP
DG4 6BQ
DG4 6BS
DG4 6BT
DG4 6BU
DG4 6BW
DG4 6BX
DG4 6BY
DG4 6BZ
DG4 6DA
DG4 6DB
DG4 6DE
DG4 6DF
DG4 6DG
DG4 6DH
DG4 6DJ
DG4 6DL
DG4 6DN
DG4 6DP
DG4 6DQ
DG4 6DR
DG4 6DS
DG4 6DT
DG4 6DU
DG4 6DW
DG4 6DX
DG4 6DY
DG4 6DZ
DG4 6EB
DG4 6EF
DG4 6EH
DG4 6EL
DG4 6EN
DG4 6EP
DG4 6ER
DG4 6ES
DG4 6ET
DG4 6EU
DG4 6EW
DG4 6EX
DG4 6EY
DG4 6EZ
DG4 6HA
DG4 6HB
DG4 6HD
DG4 6HE
DG4 6HL
DG4 6HN
DG4 6HR
DG4 6HS
DG4 6HT
DG4 6HU
DG4 6HW
DG4 6HX
DG4 6HY
DG4 6HZ
DG4 6JA
DG4 6JB
DG4 6JD
DG4 6JJ
DG4 6JL
DG4 6JN
DG4 6JP
DG4 6JS
DG4 6JT
DG4 6JU
DG4 6JW
DG4 6JX
DG4 6JY
DG4 6JZ
DG4 6LA
DG4 6LB
DG4 6LD
DG4 6LE
DG4 6LF
DG4 6LG
DG4 6LJ
DG4 6LL
DG4 6LQ
DG4 6LS
DG4 6LU
DG4 6LX
DG4 6LY
DG4 6LZ
DG4 6NA
DG4 6NB
DG4 6ND
DG4 6NE
DG4 6NJ
DG4 6NL
DG4 6NN
DG4 6NP
DG4 6NQ
DG4 6NR
DG4 6NS
DG4 6NU
DG4 6NW
DG4 6NX
DG4 6NY
DG4 6PH
DG4 6PL
DG4 6PN
DG4 6PW
DG4 6PZ
DG4 6QA
DG4 6QB
DG4 6QH
DG4 6QJ
DG4 6QL
DG4 6QN
DG4 6QP
DG4 6QQ
DG4 6QR
DG4 6QS
DG4 6QT
DG4 6QU
DG4 6QW
DG4 6QX
DG4 6QY
DG4 6QZ
DG4 6RA
DG4 6RB
DG4 6RP
DG4 6RR
DG4 6RS
DG4 6RT
DG4 6RX
DG4 6RY
DG4 6RZ
DG4 6SA
DG4 6SB
DG4 6SD
DG4 6SE
DG4 6SG
DG4 6SH
DG4 6SJ
DG4 6SL
DG4 6SN
DG4 6SR
DG4 6SS
DG4 6ST
DG4 6SU
DG4 6SX
DG4 6WW
DG5 4AA
DG5 4AB
DG5 4AD
DG5 4AE
DG5 4AF
DG5 4AG
DG5 4AH
DG5 4AJ
DG5 4AL
DG5 4AN
DG5 4AP
DG5 4AQ
DG5 4AR
DG5 4AS
DG5 4AT
DG5 4AU
DG5 4AW
DG5 4AX
DG5 4AY
DG5 4AZ
DG5 4BA
DG5 4BB
DG5 4BE
DG5 4BF
DG5 4BG
DG5 4BH
DG5 4BJ
DG5 4BN
DG5 4BP
DG5 4BQ
DG5 4BS
DG5 4BT
DG5 4BU
DG5 4BW
DG5 4BX
DG5 4BY
DG5 4BZ
DG5 4DA
DG5 4DB
DG5 4DD
DG5 4DE
DG5 4DF
DG5 4DG
DG5 4DH
DG5 4DJ
DG5 4DL
DG5 4DN
DG5 4DP
DG5 4DQ
DG5 4DR
DG5 4DS
DG5 4DT
DG5 4DU
DG5 4DW
DG5 4DX
DG5 4DY
DG5 4DZ
DG5 4EA
DG5 4EB
DG5 4ED
DG5 4EE
DG5 4EF
DG5 4EG
DG5 4EH
DG5 4EJ
DG5 4EL
DG5 4EN
DG5 4EP
DG5 4EQ
DG5 4ER
DG5 4ES
DG5 4ET
DG5 4EU
DG5 4EW
DG5 4EX
DG5 4EY
DG5 4EZ
DG5 4FA
DG5 4FB
DG5 4GA
DG5 4GH
DG5 4GY
DG5 4GZ
DG5 4HA
DG5 4HB
DG5 4HD
DG5 4HE
DG5 4HF
DG5 4HG
DG5 4HH
DG5 4HJ
DG5 4HL
DG5 4HN
DG5 4HP
DG5 4HQ
DG5 4HR
DG5 4HS
DG5 4HT
DG5 4HU
DG5 4HW
DG5 4HX
DG5 4HY
DG5 4HZ
DG5 4JA
DG5 4JB
DG5 4JD
DG5 4JE
DG5 4JF
DG5 4JG
DG5 4JH
DG5 4JJ
DG5 4JL
DG5 4JN
DG5 4JP
DG5 4JR
DG5 4JS
DG5 4JT
DG5 4JU
DG5 4JW
DG5 4JX
DG5 4JY
DG5 4JZ
DG5 4LA
DG5 4LB
DG5 4LD
DG5 4LE
DG5 4LF
DG5 4LG
DG5 4LH
DG5 4LJ
DG5 4LL
DG5 4LN
DG5 4LP
DG5 4LQ
DG5 4LR
DG5 4LS
DG5 4LT
DG5 4LU
DG5 4LW
DG5 4LX
DG5 4LY
DG5 4LZ
DG5 4NA
DG5 4NB
DG5 4ND
DG5 4NE
DG5 4NF
DG5 4NH
DG5 4NJ
DG5 4NL
DG5 4NN
DG5 4NP
DG5 4NQ
DG5 4NR
DG5 4NS
DG5 4NU
DG5 4NW
DG5 4NX
DG5 4NY
DG5 4NZ
DG5 4PA
DG5 4PB
DG5 4PD
DG5 4PE
DG5 4PF
DG5 4PG
DG5 4PH
DG5 4PJ
DG5 4PL
DG5 4PN
DG5 4PP
DG5 4PQ
DG5 4PR
DG5 4PT
DG5 4PU
DG5 4PW
DG5 4PX
DG5 4PY
DG5 4PZ
DG5 4QA
DG5 4QB
DG5 4QD
DG5 4QE
DG5 4QF
DG5 4QG
DG5 4QH
DG5 4QJ
DG5 4QL
DG5 4QN
DG5 4QP
DG5 4QQ
DG5 4QR
DG5 4QS
DG5 4QT
DG5 4QU
DG5 4QW
DG5 4QX
DG5 4QZ
DG5 4RA
DG5 4RB
DG5 4RD
DG5 4RE
DG5 4RF
DG5 4RG
DG5 4RH
DG5 4RJ
DG5 4TA
DG5 4TB
DG5 4UN
DG5 4UP
DG5 4UQ
DG5 4WA
DG5 4XL
DG6 4AA
DG6 4AD
DG6 4AE
DG6 4AF
DG6 4AH
DG6 4AJ
DG6 4AL
DG6 4AN
DG6 4AP
DG6 4AQ
DG6 4AR
DG6 4AS
DG6 4AT
DG6 4AU
DG6 4AW
DG6 4AX
DG6 4AZ
DG6 4BA
DG6 4BB
DG6 4BD
DG6 4BE
DG6 4BF
DG6 4BG
DG6 4BH
DG6 4BJ
DG6 4BL
DG6 4BN
DG6 4BP
DG6 4BQ
DG6 4BS
DG6 4BT
DG6 4BU
DG6 4BW
DG6 4BX
DG6 4BY
DG6 4BZ
DG6 4DA
DG6 4DB
DG6 4DE
DG6 4DH
DG6 4DJ
DG6 4DL
DG6 4DN
DG6 4DP
DG6 4DQ
DG6 4DR
DG6 4DS
DG6 4DT
DG6 4DU
DG6 4DW
DG6 4DX
DG6 4DY
DG6 4DZ
DG6 4EA
DG6 4EB
DG6 4ED
DG6 4EF
DG6 4EG
DG6 4EH
DG6 4EJ
DG6 4EL
DG6 4EN
DG6 4EP
DG6 4EQ
DG6 4ER
DG6 4ES
DG6 4EU
DG6 4EW
DG6 4EX
DG6 4EY
DG6 4EZ
DG6 4HA
DG6 4HB
DG6 4HD
DG6 4HE
DG6 4HF
DG6 4HG
DG6 4HH
DG6 4HJ
DG6 4HL
DG6 4HN
DG6 4HP
DG6 4HQ
DG6 4HR
DG6 4HS
DG6 4HT
DG6 4HU
DG6 4HW
DG6 4HX
DG6 4HY
DG6 4HZ
DG6 4JA
DG6 4JB
DG6 4JD
DG6 4JE
DG6 4JF
DG6 4JG
DG6 4JH
DG6 4JJ
DG6 4JL
DG6 4JN
DG6 4JP
DG6 4JQ
DG6 4JR
DG6 4JS
DG6 4JT
DG6 4JU
DG6 4JW
DG6 4JX
DG6 4JY
DG6 4JZ
DG6 4LA
DG6 4LB
DG6 4LD
DG6 4LE
DG6 4LF
DG6 4LG
DG6 4LH
DG6 4LJ
DG6 4LL
DG6 4LP
DG6 4LQ
DG6 4LR
DG6 4LS
DG6 4LT
DG6 4LU
DG6 4LW
DG6 4LX
DG6 4LY
DG6 4LZ
DG6 4NA
DG6 4NB
DG6 4ND
DG6 4NE
DG6 4NF
DG6 4NG
DG6 4NH
DG6 4NJ
DG6 4NL
DG6 4NN
DG6 4NP
DG6 4NQ
DG6 4NR
DG6 4NS
DG6 4NT
DG6 4NU
DG6 4NW
DG6 4NX
DG6 4NY
DG6 4NZ
DG6 4PB
DG6 4PD
DG6 4PE
DG6 4PF
DG6 4PG
DG6 4PH
DG6 4PJ
DG6 4PN
DG6 4PP
DG6 4PQ
DG6 4PR
DG6 4PS
DG6 4PT
DG6 4PU
DG6 4PW
DG6 4PX
DG6 4QA
DG6 4QB
DG6 4QD
DG6 4QE
DG6 4QF
DG6 4QG
DG6 4QH
DG6 4QJ
DG6 4QN
DG6 4QP
DG6 4QQ
DG6 4QR
DG6 4QS
DG6 4QT
DG6 4QU
DG6 4QW
DG6 4QY
DG6 4QZ
DG6 4RA
DG6 4RB
DG6 4RD
DG6 4RL
DG6 4RN
DG6 4RP
DG6 4RR
DG6 4RS
DG6 4RT
DG6 4RU
DG6 4RW
DG6 4RX
DG6 4SA
DG6 4SB
DG6 4SD
DG6 4SE
DG6 4SF
DG6 4SG
DG6 4SH
DG6 4SJ
DG6 4SN
DG6 4SP
DG6 4SQ
DG6 4SR
DG6 4SS
DG6 4ST
DG6 4SU
DG6 4SW
DG6 4SX
DG6 4SY
DG6 4SZ
DG6 4TA
DG6 4TH
DG6 4TJ
DG6 4TL
DG6 4TN
DG6 4TP
DG6 4TQ
DG6 4TR
DG6 4TS
DG6 4TT
DG6 4TU
DG6 4TW
DG6 4TX
DG6 4TY
DG6 4TZ
DG6 4UA
DG6 4UB
DG6 4UD
DG6 4UE
DG6 4UG
DG6 4UR
DG6 4UT
DG6 4UU
DG6 4UY
DG6 4UZ
DG6 4WA
DG6 4WS
DG6 4XA
DG6 4XB
DG6 4XD
DG6 4XE
DG6 4XF
DG6 4XG
DG6 4XH
DG6 4XJ
DG6 4XL
DG6 4XN
DG6 4XP
DG6 4XQ
DG6 4XR
DG6 4XS
DG6 4XT
DG6 4XU
DG6 4XW
DG6 4XX
DG6 4XY
DG6 4YH
DG6 4YJ
DG6 4YL
DG7 1AA
DG7 1AB
DG7 1AD
DG7 1AE
DG7 1AF
DG7 1AG
DG7 1AH
DG7 1AJ
DG7 1AL
DG7 1AN
DG7 1AP
DG7 1AQ
DG7 1AR
DG7 1AT
DG7 1AU
DG7 1AX
DG7 1AY
DG7 1AZ
DG7 1BA
DG7 1BB
DG7 1BD
DG7 1BE
DG7 1BG
DG7 1BH
DG7 1BJ
DG7 1BL
DG7 1BN
DG7 1BP
DG7 1BQ
DG7 1BS
DG7 1BT
DG7 1BU
DG7 1BW
DG7 1BX
DG7 1BY
DG7 1BZ
DG7 1DA
DG7 1DB
DG7 1DD
DG7 1DE
DG7 1DG
DG7 1DH
DG7 1DJ
DG7 1DL
DG7 1DN
DG7 1DQ
DG7 1DS
DG7 1DT
DG7 1DU
DG7 1DW
DG7 1DX
DG7 1DZ
DG7 1EA
DG7 1EB
DG7 1ED
DG7 1EE
DG7 1EF
DG7 1EG
DG7 1EH
DG7 1EJ
DG7 1EL
DG7 1EN
DG7 1EP
DG7 1EQ
DG7 1ER
DG7 1ES
DG7 1ET
DG7 1EU
DG7 1EW
DG7 1EX
DG7 1EY
DG7 1EZ
DG7 1FB
DG7 1FD
DG7 1GA
DG7 1GB
DG7 1GD
DG7 1GE
DG7 1GF
DG7 1HA
DG7 1HB
DG7 1HD
DG7 1HE
DG7 1HF
DG7 1HG
DG7 1HH
DG7 1HJ
DG7 1HL
DG7 1HN
DG7 1HP
DG7 1HQ
DG7 1HR
DG7 1HS
DG7 1HU
DG7 1HX
DG7 1HY
DG7 1HZ
DG7 1JA
DG7 1JF
DG7 1JG
DG7 1JH
DG7 1JJ
DG7 1JL
DG7 1JN
DG7 1JP
DG7 1JQ
DG7 1JT
DG7 1JU
DG7 1JX
DG7 1JY
DG7 1JZ
DG7 1LA
DG7 1LB
DG7 1LD
DG7 1LE
DG7 1LG
DG7 1LH
DG7 1LJ
DG7 1LL
DG7 1LN
DG7 1LQ
DG7 1LU
DG7 1LW
DG7 1LX
DG7 1LZ
DG7 1NA
DG7 1NB
DG7 1NF
DG7 1NG
DG7 1NH
DG7 1NJ
DG7 1NL
DG7 1NN
DG7 1NP
DG7 1NQ
DG7 1NR
DG7 1NS
DG7 1NT
DG7 1NU
DG7 1NW
DG7 1NX
DG7 1NY
DG7 1NZ
DG7 1PA
DG7 1PB
DG7 1PD
DG7 1PE
DG7 1PF
DG7 1PG
DG7 1PH
DG7 1PL
DG7 1PN
DG7 1PP
DG7 1PQ
DG7 1PR
DG7 1PS
DG7 1PW
DG7 1QA
DG7 1QB
DG7 1QD
DG7 1QE
DG7 1QF
DG7 1QG
DG7 1QH
DG7 1QJ
DG7 1QL
DG7 1QN
DG7 1QP
DG7 1QQ
DG7 1QR
DG7 1QS
DG7 1QT
DG7 1QU
DG7 1QW
DG7 1QX
DG7 1QY
DG7 1QZ
DG7 1RB
DG7 1RD
DG7 1RE
DG7 1RF
DG7 1RG
DG7 1RH
DG7 1RJ
DG7 1RL
DG7 1RN
DG7 1RQ
DG7 1RR
DG7 1RS
DG7 1RT
DG7 1RU
DG7 1RW
DG7 1RX
DG7 1RY
DG7 1RZ
DG7 1SA
DG7 1SB
DG7 1SD
DG7 1SE
DG7 1SF
DG7 1SG
DG7 1SH
DG7 1SJ
DG7 1SL
DG7 1SN
DG7 1SP
DG7 1SQ
DG7 1SR
DG7 1SS
DG7 1ST
DG7 1SU
DG7 1SW
DG7 1SX
DG7 1SY
DG7 1SZ
DG7 1TA
DG7 1TB
DG7 1TD
DG7 1TH
DG7 1TJ
DG7 1TL
DG7 1TN
DG7 1TP
DG7 1TR
DG7 1TS
DG7 1TT
DG7 1TU
DG7 1TW
DG7 1TX
DG7 1TY
DG7 1TZ
DG7 1UA
DG7 1UB
DG7 1UE
DG7 1UF
DG7 1UG
DG7 1UQ
DG7 2AA
DG7 2AB
DG7 2AD
DG7 2AE
DG7 2AF
DG7 2AG
DG7 2AH
DG7 2AJ
DG7 2AL
DG7 2AN
DG7 2AP
DG7 2AQ
DG7 2AR
DG7 2AS
DG7 2AT
DG7 2AU
DG7 2AW
DG7 2AY
DG7 2AZ
DG7 2BA
DG7 2BB
DG7 2BD
DG7 2BE
DG7 2BG
DG7 2BH
DG7 2BJ
DG7 2BL
DG7 2BN
DG7 2BP
DG7 2BQ
DG7 2BS
DG7 2BW
DG7 2DA
DG7 2DB
DG7 2DD
DG7 2DE
DG7 2DF
DG7 2DG
DG7 2DH
DG7 2DJ
DG7 2DL
DG7 2DN
DG7 2DP
DG7 2DQ
DG7 2DR
DG7 2DS
DG7 2DT
DG7 2DU
DG7 2DW
DG7 2EA
DG7 2EB
DG7 2ED
DG7 2EE
DG7 2EF
DG7 2EG
DG7 2EH
DG7 2EJ
DG7 2EN
DG7 2EP
DG7 2EQ
DG7 2ER
DG7 2ES
DG7 2ET
DG7 2EU
DG7 2EW
DG7 2EX
DG7 2EY
DG7 2EZ
DG7 2FA
DG7 2FB
DG7 2HA
DG7 2HB
DG7 2HD
DG7 2HE
DG7 2HF
DG7 2HG
DG7 2HH
DG7 2HJ
DG7 2HL
DG7 2HP
DG7 2HQ
DG7 2HR
DG7 2HS
DG7 2HT
DG7 2HU
DG7 2HW
DG7 2HX
DG7 2HY
DG7 2HZ
DG7 2JA
DG7 2JB
DG7 2JD
DG7 2JE
DG7 2JF
DG7 2JG
DG7 2JH
DG7 2JJ
DG7 2JL
DG7 2JN
DG7 2JP
DG7 2JQ
DG7 2JR
DG7 2JS
DG7 2JT
DG7 2JU
DG7 2JW
DG7 2JX
DG7 2JY
DG7 2JZ
DG7 2LA
DG7 2LB
DG7 2LD
DG7 2LE
DG7 2LF
DG7 2LG
DG7 2LH
DG7 2LJ
DG7 2LL
DG7 2LN
DG7 2LP
DG7 2LQ
DG7 2LR
DG7 2LS
DG7 2LT
DG7 2LU
DG7 2LW
DG7 2LX
DG7 2LY
DG7 2LZ
DG7 2NA
DG7 2NB
DG7 2ND
DG7 2NE
DG7 2NF
DG7 2NG
DG7 2NJ
DG7 2NL
DG7 2NN
DG7 2NP
DG7 2NQ
DG7 2NR
DG7 2NS
DG7 2NT
DG7 2NU
DG7 2NX
DG7 2NY
DG7 2NZ
DG7 2PA
DG7 2PB
DG7 2PD
DG7 2PE
DG7 2PF
DG7 2PG
DG7 2PH
DG7 2PJ
DG7 2PL
DG7 2PN
DG7 2PP
DG7 2PQ
DG7 2PR
DG7 2PS
DG7 2PT
DG7 2PU
DG7 2PW
DG7 2PX
DG7 2PY
DG7 2PZ
DG7 2WB
DG7 3AA
DG7 3AB
DG7 3AD
DG7 3AE
DG7 3AF
DG7 3AG
DG7 3AH
DG7 3AJ
DG7 3AL
DG7 3AN
DG7 3AP
DG7 3AQ
DG7 3AR
DG7 3AS
DG7 3AT
DG7 3AU
DG7 3AW
DG7 3AX
DG7 3AY
DG7 3AZ
DG7 3BA
DG7 3BB
DG7 3BD
DG7 3BE
DG7 3BF
DG7 3BG
DG7 3BH
DG7 3BJ
DG7 3BL
DG7 3BN
DG7 3BQ
DG7 3BU
DG7 3BX
DG7 3BY
DG7 3BZ
DG7 3DA
DG7 3DB
DG7 3DD
DG7 3DE
DG7 3DF
DG7 3DG
DG7 3DH
DG7 3DJ
DG7 3DL
DG7 3DN
DG7 3DP
DG7 3DQ
DG7 3DR
DG7 3DS
DG7 3DT
DG7 3DU
DG7 3DW
DG7 3DX
DG7 3DY
DG7 3DZ
DG7 3EA
DG7 3EB
DG7 3ED
DG7 3EE
DG7 3EF
DG7 3EG
DG7 3EH
DG7 3EJ
DG7 3EL
DG7 3EN
DG7 3EP
DG7 3ER
DG7 3ES
DG7 3ET
DG7 3EU
DG7 3EW
DG7 3EX
DG7 3EY
DG7 3EZ
DG7 3HA
DG7 3HB
DG7 3HD
DG7 3HE
DG7 3HF
DG7 3HG
DG7 3HL
DG7 3HN
DG7 3HP
DG7 3HQ
DG7 3HR
DG7 3HS
DG7 3HT
DG7 3HU
DG7 3HW
DG7 3HX
DG7 3HY
DG7 3HZ
DG7 3JF
DG7 3JG
DG7 3JH
DG7 3JJ
DG7 3JL
DG7 3JN
DG7 3JQ
DG7 3JT
DG7 3JU
DG7 3JW
DG7 3JX
DG7 3JY
DG7 3JZ
DG7 3LA
DG7 3LB
DG7 3LD
DG7 3LE
DG7 3LF
DG7 3LG
DG7 3LH
DG7 3LJ
DG7 3LL
DG7 3LN
DG7 3LP
DG7 3LQ
DG7 3LR
DG7 3LS
DG7 3LT
DG7 3LU
DG7 3LW
DG7 3LX
DG7 3LY
DG7 3LZ
DG7 3NA
DG7 3NB
DG7 3ND
DG7 3NE
DG7 3NF
DG7 3NG
DG7 3NH
DG7 3NJ
DG7 3NL
DG7 3NN
DG7 3NP
DG7 3NQ
DG7 3NR
DG7 3NS
DG7 3NT
DG7 3NU
DG7 3NW
DG7 3NX
DG7 3NY
DG7 3NZ
DG7 3PA
DG7 3PB
DG7 3PD
DG7 3PE
DG7 3PG
DG7 3PH
DG7 3PJ
DG7 3PL
DG7 3PN
DG7 3PP
DG7 3PR
DG7 3PS
DG7 3PT
DG7 3PU
DG7 3PW
DG7 3PX
DG7 3PY
DG7 3PZ
DG7 3QA
DG7 3QB
DG7 3QD
DG7 3QE
DG7 3QF
DG7 3QG
DG7 3QH
DG7 3QJ
DG7 3QL
DG7 3QN
DG7 3QP
DG7 3QQ
DG7 3QR
DG7 3QS
DG7 3QW
DG8 0NL
DG8 0NN
DG8 0NP
DG8 0NR
DG8 0NS
DG8 0NT
DG8 0NU
DG8 0NW
DG8 0NX
DG8 0NY
DG8 0NZ
DG8 0PA
DG8 0PB
DG8 0PD
DG8 0PE
DG8 0PF
DG8 0PL
DG8 0PN
DG8 0PP
DG8 0PR
DG8 0PS
DG8 0PT
DG8 0PU
DG8 0PW
DG8 0PX
DG8 0PY
DG8 0PZ
DG8 0QA
DG8 0QB
DG8 0QD
DG8 0QE
DG8 0QF
DG8 0QG
DG8 0QH
DG8 0QJ
DG8 0QL
DG8 0QN
DG8 0QP
DG8 0QQ
DG8 0QR
DG8 0QW
DG8 0QY
DG8 0QZ
DG8 1AB
DG8 1AD
DG8 6AA
DG8 6AB
DG8 6AD
DG8 6AE
DG7 3RA
DG7 3RB
DG7 3RD
DG7 3RE
DG7 3RF
DG7 3RG
DG7 3RH
DG7 3RJ
DG7 3RL
DG7 3RN
DG7 3RP
DG7 3RQ
DG7 3RR
DG7 3RS
DG7 3RT
DG7 3RU
DG7 3RW
DG7 3RX
DG7 3RY
DG7 3RZ
DG7 3SA
DG7 3SB
DG7 3SD
DG7 3SE
DG7 3SF
DG7 3SG
DG7 3SH
DG7 3SL
DG7 3SN
DG7 3SP
DG7 3SQ
DG7 3SR
DG7 3SS
DG7 3ST
DG7 3SU
DG7 3SW
DG7 3SX
DG7 3SY
DG7 3SZ
DG7 3TA
DG7 3TB
DG7 3TD
DG7 3TE
DG7 3TF
DG7 3TG
DG7 3TH
DG7 3TJ
DG7 3TQ
DG7 3TR
DG7 3TS
DG7 3TT
DG7 3TU
DG7 3TX
DG7 3TY
DG7 3TZ
DG7 3UA
DG7 3UB
DG7 3UD
DG7 3UE
DG7 3UF
DG7 3UG
DG7 3UP
DG7 3UQ
DG7 3UR
DG7 3UT
DG7 3UU
DG7 3UW
DG7 3UX
DG7 3UY
DG7 3UZ
DG7 3WA
DG7 3XE
DG7 3XF
DG7 3XG
DG7 3XH
DG7 3XJ
DG7 3XL
DG7 3XN
DG7 3XP
DG7 3XQ
DG7 3XR
DG7 3XS
DG7 3XW
DG7 3YA
DG7 3YB
DG7 3YD
DG7 3YE
DG7 9AB
DG8 0AA
DG8 0AB
DG8 0AD
DG8 0AE
DG8 0AF
DG8 0AG
DG8 0AH
DG8 0AJ
DG8 0AL
DG8 0AN
DG8 0AP
DG8 0AQ
DG8 0AT
DG8 0AU
DG8 0AW
DG8 0AX
DG8 0AY
DG8 0AZ
DG8 0BA
DG8 0BH
DG8 0BJ
DG8 0BL
DG8 0BN
DG8 0BS
DG8 0BT
DG8 0BU
DG8 0BW
DG8 0BX
DG8 0BY
DG8 0BZ
DG8 0DA
DG9 8JH
DG9 8JJ
DG9 8JL
DG9 8JN
DG9 8JP
DG9 8JQ
DG9 8JR
DG9 8JS
DG9 8JT
DG9 8JU
DG9 8JW
DG9 8JX
DG9 8JY
DG9 8JZ
DG8 0DB
DG8 0DD
DG8 0DE
DG8 0DF
DG8 0DG
DG8 0DH
DG8 0DJ
DG8 0DL
DG8 0DN
DG8 0DP
DG8 0DQ
DG8 0DR
DG8 0DW
DG8 0EA
DG8 0EB
DG8 0ED
DG8 0EE
DG8 0EF
DG8 0EG
DG8 0EH
DG8 0EJ
DG8 0EL
DG8 0EN
DG8 0EP
DG8 0EQ
DG8 0ER
DG8 0ET
DG8 0HA
DG8 0HB
DG8 0HD
DG8 0HE
DG8 0HF
DG8 0HG
DG8 0HH
DG8 0HJ
DG8 0HL
DG8 0HN
DG8 0HP
DG8 0HQ
DG8 0HR
DG8 0HS
DG8 0HT
DG8 0HU
DG8 0HW
DG8 0HX
DG8 0HY
DG8 0JE
DG8 0JF
DG8 0JG
DG8 0JH
DG8 0JJ
DG8 0JL
DG8 0JN
DG8 0JP
DG8 0JQ
DG8 0JR
DG8 0JS
DG8 0JT
DG8 0JU
DG8 0JW
DG8 0JX
DG8 0JY
DG8 0JZ
DG8 0LA
DG8 0LB
DG8 0LD
DG8 0LE
DG8 0LF
DG8 0LG
DG8 0LP
DG8 0LR
DG8 0LS
DG8 0LT
DG8 0LU
DG8 0LW
DG8 0LX
DG8 0LY
DG8 0LZ
DG8 0NA
DG8 0NB
DG8 0NH
DG8 0NJ
DG8 6AF
DG8 6AG
DG8 6AH
DG8 6AJ
DG8 6AL
DG8 6AN
DG8 6AP
DG8 6AQ
DG8 6AS
DG8 6AT
DG8 6AU
DG8 6AW
DG8 6AX
DG8 6AY
DG8 6AZ
DG8 6BA
DG8 6BB
DG8 6BD
DG8 6BE
DG8 6BF
DG8 6BG
DG8 6BH
DG8 6BJ
DG8 6BL
DG8 6BN
DG8 6BP
DG8 6BQ
DG8 6BS
DG8 6BT
DG8 6BZ
DG8 6DA
DG8 6DB
DG8 6DD
DG8 6DE
DG8 6DF
DG8 6DG
DG8 6DH
DG8 6DJ
DG8 6DL
DG8 6DN
DG8 6DQ
DG8 6DR
DG8 6DS
DG8 6DT
DG8 6DU
DG8 6DW
DG8 6DX
DG8 6DY
DG8 6DZ
DG8 6EA
DG8 6EB
DG8 6EE
DG8 6EF
DG8 6EG
DG8 6EH
DG8 6EJ
DG8 6EL
DG8 6EN
DG8 6EP
DG8 6EQ
DG8 6ER
DG8 6ES
DG8 6ET
DG8 6EU
DG8 6EW
DG8 6EX
DG8 6EY
DG8 6EZ
DG8 6HA
DG8 6HB
DG8 6HD
DG8 6HE
DG8 6HF
DG8 6HG
DG8 6HH
DG8 6HJ
DG8 6HL
DG8 6HN
DG8 6HP
DG8 6HQ
DG8 6HR
DG8 6HT
DG8 6HU
DG8 6HW
DG8 6HX
DG8 6HY
DG8 6HZ
DG8 6JA
DG8 6JB
DG8 6JD
DG8 6JE
DG8 6JF
DG8 6JG
DG8 6JH
DG8 6JJ
DG8 6JL
DG8 6JQ
DG8 6JR
DG8 6JS
DG8 6JT
DG8 6JW
DG8 6JX
DG8 6JY
DG8 6JZ
DG8 6LA
DG8 6LB
DG8 6LD
DG8 6LE
DG8 6LF
DG8 6LG
DG8 6LH
DG8 6LJ
DG8 6LL
DG8 6LN
DG8 6LP
DG8 6LQ
DG8 6LR
DG8 6LS
DG8 6LT
DG8 6LU
DG8 6LW
DG8 6LX
DG8 6LY
DG8 6LZ
DG8 6NA
DG8 6NB
DG8 6ND
DG8 6NE
DG8 6NF
DG8 6NG
DG8 6NH
DG8 6NL
DG8 6NP
DG8 6NQ
DG8 6NR
DG8 6NS
DG8 6NT
DG8 6NU
DG8 6NW
DG8 6NX
DG8 6NY
DG8 6NZ
DG8 6PA
DG8 6PB
DG8 6PD
DG8 6PE
DG8 6PF
DG8 6PG
DG8 6PH
DG8 6PJ
DG8 6PL
DG8 6PN
DG8 6PP
DG8 6PQ
DG8 6PR
DG8 6PS
DG8 6PT
DG8 6PU
DG8 6PW
DG8 6PX
DG8 6PY
DG8 6PZ
DG8 6QA
DG8 6QB
DG8 6QD
DG8 6QE
DG8 6QF
DG8 6QG
DG8 6QH
DG8 6QJ
DG8 6QL
DG8 6QN
DG8 6QP
DG8 6QQ
DG8 6QR
DG8 6QS
DG8 6QT
DG8 6QU
DG8 6QW
DG8 6QY
DG8 6QZ
DG8 6RA
DG8 6RB
DG8 6RD
DG8 6RE
DG8 6RH
DG8 6RJ
DG8 6RL
DG8 6RN
DG8 6RP
DG8 6RR
DG8 6RS
DG8 6RT
DG8 6RU
DG8 6RW
DG8 6RX
DG8 6RY
DG8 6RZ
DG8 6SA
DG8 6SH
DG8 6SJ
DG8 6SL
DG8 6SN
DG8 6SP
DG8 6SR
DG9 8QJ
DG9 8QL
DG9 8QN
DG9 8QP
DG9 8QR
DG9 8QS
DG9 8QT
DG9 8QU
DG9 8QW
DG9 8QX
DG9 8QY
DG9 8QZ
DG9 8RA
DG9 8RB
DG9 8RD
DG9 8RE
DG9 8RF
DG9 8RG
DG9 8RL
DG9 8RN
DG9 8RP
DG9 8RQ
DG9 8RR
DG9 8RS
DG9 8RT
DG9 8RU
DG9 8RX
DG9 8RY
DG8 6SS
DG8 6SU
DG8 6SW
DG8 6SX
DG8 6SY
DG8 6SZ
DG8 6TA
DG8 6TD
DG8 6TH
DG8 6TJ
DG8 6TQ
DG8 6UA
DG8 7AA
DG8 7AB
DG8 7AD
DG8 7AE
DG8 7AF
DG8 7AG
DG8 7AJ
DG8 7AL
DG8 7AN
DG8 7AR
DG8 7AS
DG8 7AT
DG8 7AU
DG8 7AW
DG8 7AX
DG8 7AZ
DG8 7BA
DG8 7BB
DG8 7BD
DG8 7BE
DG8 7BG
DG8 7BH
DG8 7BJ
DG8 7BL
DG8 7BN
DG8 7BQ
DG8 7BW
DG8 7BX
DG8 7BY
DG8 7BZ
DG8 7DA
DG8 7DB
DG8 7DE
DG8 7DF
DG8 7DG
DG8 7DL
DG8 7DN
DG8 7DP
DG8 7DQ
DG8 7DR
DG8 7DS
DG8 7DT
DG8 7DU
DG8 7DW
DG8 7DX
DG8 7DY
DG8 7DZ
DG8 7EA
DG8 7EB
DG8 7EN
DG8 7EP
DG8 7ER
DG8 7ES
DG8 7ET
DG8 7EU
DG8 7EW
DG8 7EX
DG8 7EY
DG8 7HB
DG8 7HF
DG8 7HG
DG8 7HH
DG8 7HJ
DG8 7HL
DG8 7HN
DG8 7HP
DG8 7HQ
DG8 7HR
DG8 7HS
DG8 7HT
DG8 7HU
DG8 7HW
DG8 7HX
DG8 7HY
DG8 7HZ
DG8 7JA
DG8 7JB
DG8 7JE
DG8 7JF
DG8 7JG
DG8 7JH
DG8 7JJ
DG8 7JL
DG8 7JN
DG8 7JP
DG8 7JQ
DG8 7JR
DG8 7JS
DG8 7JT
DG8 7JW
DG8 7JX
DG8 7JY
DG8 8AA
DG8 8AB
DG8 8AD
DG8 8AE
DG8 8AF
DG8 8AG
DG8 8AH
DG8 8AJ
DG8 8AL
DG8 8AN
DG8 8AP
DG8 8AQ
DG8 8AR
DG8 8AS
DG8 8AT
DG8 8AW
DG8 8BA
DG8 8BB
DG8 8BD
DG8 8BE
DG8 8BG
DG8 8BH
DG8 8BJ
DG8 8BL
DG8 8BN
DG8 8BP
DG8 8BQ
DG8 8BR
DG8 8BS
DG8 8BT
DG8 8BU
DG8 8BW
DG8 8BX
DG8 8BY
DG8 8BZ
DG8 8DE
DG8 8DF
DG8 8DG
DG8 8DH
DG8 8DJ
DG8 8DL
DG8 8DN
DG8 8DP
DG8 8DQ
DG8 8DR
DG8 8DS
DG8 8DT
DG8 8DU
DG8 8DW
DG8 8DX
DG8 8DY
DG8 8DZ
DG8 8EG
DG8 8EH
DG8 8EJ
DG8 8EL
DG8 8EN
DG8 8EP
DG8 8EQ
DG8 8ER
DG8 8EW
DG8 8EY
DG8 8EZ
DG8 8HA
DG8 8HB
DG8 8HD
DG8 8HE
DG8 8HF
DG8 8HH
DG8 8HJ
DG8 8HL
DG8 8HN
DG8 8HP
DG8 8HQ
DG8 8HR
DG8 8HS
DG8 8HT
DG8 8HU
DG8 8HW
DG8 8HX
DG8 8HY
DG8 8HZ
DG8 8JA
DG8 8JB
DG8 8JD
DG8 8JE
DG8 8JF
DG8 8JG
DG8 8JH
DG8 8JQ
DG8 8JS
DG8 8JT
DG8 8JU
DG8 8JW
DG8 8JX
DG8 8JY
DG8 8JZ
DG8 8LA
DG8 8LB
DG8 8LD
DG8 8LE
DG8 8LF
DG8 8LG
DG8 8LH
DG8 8LJ
DG8 8LL
DG8 8LN
DG8 8LP
DG8 8LQ
DG8 8LR
DG8 8LS
DG8 8LT
DG8 8LU
DG8 8LX
DG8 8LY
DG8 8LZ
DG8 8NA
DG8 8NB
DG8 8ND
DG8 8NE
DG8 8NF
DG8 8NG
DG8 8NH
DG8 8NJ
DG8 8NL
DG8 8NQ
DG8 8NS
DG8 8NT
DG8 8NU
DG8 8NY
DG8 8NZ
DG8 8PB
DG8 8PD
DG8 8PE
DG8 8PF
DG8 8PG
DG8 8PH
DG8 8PJ
DG8 8PL
DG8 8PN
DG8 8PP
DG8 8PQ
DG8 8PS
DG8 8PT
DG8 8PU
DG8 8PW
DG8 8PY
DG8 8PZ
DG8 8QA
DG8 8QB
DG8 8QE
DG8 8QF
DG8 8QG
DG8 8QH
DG8 8QJ
DG8 8QL
DG8 8QN
DG8 8QP
DG8 8QQ
DG8 8QW
DG8 9AA
DG8 9AB
DG8 9AD
DG8 9AF
DG8 9AG
DG8 9AH
DG8 9AL
DG8 9AN
DG8 9AP
DG8 9AQ
DG8 9AR
DG8 9AS
DG8 9AT
DG8 9AU
DG8 9AW
DG8 9AX
DG8 9AY
DG8 9AZ
DG8 9BA
DG8 9BB
DG8 9BD
DG8 9BE
DG8 9BG
DG8 9BH
DG8 9BJ
DG8 9BL
DG8 9BQ
DG8 9BT
DG8 9BX
DG8 9BY
DG8 9BZ
DG8 9DA
DG8 9DB
DG8 9DD
DG8 9DE
DG8 9DF
DG8 9DH
DG8 9DJ
DG8 9DL
DG8 9DQ
DG8 9DS
DG8 9DT
DG8 9DU
DG8 9DX
DG8 9DY
DG8 9DZ
DG8 9ED
DG8 9EE
DG8 9EF
DG8 9EG
DG8 9EH
DG8 9EJ
DG8 9EL
DG8 9EN
DG8 9EP
DG8 9EQ
DG8 9ER
DG8 9ES
DG8 9ET
DG8 9EU
DG8 9EW
DG8 9HG
DG8 9HH
DG8 9HJ
DG8 9HL
DG8 9HN
DG8 9HP
DG8 9HQ
DG8 9HR
DG8 9HS
DG8 9HT
DG8 9HU
DG8 9HW
DG8 9HX
DG8 9HY
DG8 9HZ
DG8 9JA
DG8 9JB
DG8 9JD
DG8 9JE
DG8 9JF
DG8 9JG
DG8 9JH
DG8 9JJ
DG8 9JL
DG8 9JN
DG8 9JP
DG8 9JQ
DG8 9JS
DG8 9JX
DG8 9JY
DG8 9JZ
DG8 9LA
DG8 9LB
DG8 9LD
DG8 9LE
DG8 9LG
DG8 9LH
DG8 9LJ
DG8 9LL
DG8 9LN
DG8 9LP
DG8 9LQ
DG8 9LR
DG8 9LS
DG8 9LT
DG8 9LU
DG8 9LW
DG8 9LX
DG8 9LY
DG8 9LZ
DG8 9NA
DG8 9NB
DG8 9ND
DG8 9NE
DG8 9NF
DG8 9NG
DG8 9NN
DG8 9NP
DG8 9NR
DG8 9NS
DG8 9NT
DG8 9NU
DG8 9NX
DG8 9NY
DG8 9NZ
DG8 9PA
DG8 9PB
DG8 9PD
DG8 9PE
DG8 9PF
DG8 9PG
DG8 9PH
DG8 9PJ
DG8 9PL
DG8 9PN
DG8 9PP
DG8 9PQ
DG8 9PR
DG8 9PS
DG8 9PT
DG8 9PU
DG8 9PW
DG8 9PX
DG8 9PY
DG8 9QA
DG8 9QB
DG8 9QE
DG8 9QH
DG8 9QJ
DG8 9QL
DG8 9QN
DG8 9QP
DG8 9QR
DG8 9QS
DG8 9QT
DG8 9QU
DG8 9QX
DG8 9QY
DG8 9QZ
DG8 9RA
DG8 9RB
DG8 9RD
DG8 9RE
DG8 9RF
DG8 9RG
DG8 9RH
DG8 9RJ
DG8 9RL
DG8 9RN
DG8 9RP
DG8 9RQ
DG8 9RR
DG8 9RS
DG8 9RT
DG8 9RU
DG8 9SA
DG8 9SB
DG8 9SD
DG8 9SE
DG8 9SG
DG8 9SH
DG8 9SJ
DG8 9SL
DG8 9SN
DG8 9SP
DG8 9SQ
DG8 9SR
DG8 9SW
DG8 9TA
DG8 9TB
DG8 9TD
DG8 9TE
DG8 9TF
DG8 9TG
DG8 9TH
DG8 9TJ
DG8 9TL
DG8 9TQ
DG9 0AA
DG9 0AB
DG9 0AD
DG9 0AE
DG9 0AF
DG9 0AG
DG9 0AH
DG9 0AJ
DG9 0AL
DG9 0AN
DG9 0AP
DG9 0AQ
DG9 0AR
DG9 0AS
DG9 0AT
DG9 0AU
DG9 0AW
DG9 0AX
DG9 0AY
DG9 0AZ
DG9 0BA
DG9 0BB
DG9 0BD
DG9 0BE
DG9 0BG
DG9 0BH
DG9 0BJ
DG9 0BL
DG9 0BN
DG9 0BP
DG9 0BQ
DG9 0BS
DG9 0BT
DG9 0BU
DG9 0BW
DG9 0BX
DG9 0BY
DG9 0BZ
DG9 0DA
DG9 0DB
DG9 0DD
DG9 0DE
DG9 0DF
DG9 0DG
DG9 0DH
DG9 0DJ
DG9 0DL
DG9 0DN
DG9 0DQ
DG9 0DS
DG9 0DT
DG9 0DU
DG9 0DX
DG9 0DY
DG9 0DZ
DG9 0EA
DG9 0EB
DG9 0ED
DG9 0EE
DG9 0EF
DG9 0EG
DG9 0EH
DG9 0EJ
DG9 0EL
DG9 0EN
DG9 0EP
DG9 0EQ
DG9 0ER
DG9 0ES
DG9 0ET
DG9 0EU
DG9 0EW
DG9 0EX
DG9 0EY
DG9 0EZ
DG9 0HA
DG9 0HB
DG9 0HD
DG9 0HE
DG9 0HF
DG9 0HG
DG9 0HH
DG9 0HL
DG9 0HQ
DG9 0HS
DG9 0HW
DG9 0HX
DG9 0HY
DG9 0HZ
DG9 0JA
DG9 0JB
DG9 0JD
DG9 0JE
DG9 0JF
DG9 0JH
DG9 0JJ
DG9 0JL
DG9 0JN
DG9 0JP
DG9 0JQ
DG9 0JT
DG9 0JZ
DG9 0LA
DG9 0LB
DG9 0LD
DG9 0LE
DG9 0LF
DG9 0LG
DG9 0LH
DG9 0LJ
DG9 0LL
DG9 0LN
DG9 0LP
DG9 0LQ
DG9 0LR
DG9 0LS
DG9 0LT
DG9 0LU
DG9 0LW
DG9 0LX
DG9 0LY
DG9 0LZ
DG9 0NA
DG9 0NB
DG9 0ND
DG9 0NE
DG9 0NL
DG9 0NN
DG9 0NP
DG9 0NS
DG9 0NT
DG9 0NU
DG9 0NW
DG9 0NX
DG9 0NY
DG9 0NZ
DG9 0PA
DG9 0PB
DG9 0PD
DG9 0PE
DG9 0PL
DG9 0PN
DG9 0PP
DG9 0PR
DG9 0PS
DG9 0PT
DG9 0PU
DG9 0PW
DG9 0PX
DG9 0PY
DG9 0PZ
DG9 0QA
DG9 0QB
DG9 0QD
DG9 0QE
DG9 0QF
DG9 0QG
DG9 0QP
DG9 0QQ
DG9 0QR
DG9 0QS
DG9 0QT
DG9 0QU
DG9 0QX
DG9 0QY
DG9 0QZ
DG9 0RA
DG9 0RB
DG9 0RD
DG9 0RE
DG9 0RF
DG9 0RG
DG9 0RH
DG9 0RJ
DG9 0RL
DG9 0RN
DG9 0RP
DG9 0RQ
DG9 0RR
DG9 0RS
DG9 0RT
DG9 0RU
DG9 0RW
DG9 0RX
DG9 0RY
DG9 1AB
DG9 7AA
DG9 7AB
DG9 7AD
DG9 7AE
DG9 7AF
DG9 7AG
DG9 7AH
DG9 7AJ
DG9 7AL
DG9 7AN
DG9 7AP
DG9 7AQ
DG9 7AR
DG9 7AS
DG9 7AT
DG9 7AU
DG9 7AW
DG9 7AX
DG9 7AY
DG9 7AZ
DG9 7BA
DG9 7BB
DG9 7BD
DG9 7BE
DG9 7BG
DG9 7BH
DG9 7BJ
DG9 7BL
DG9 7BN
DG9 7BP
DG9 7BQ
DG9 7BS
DG9 7BT
DG9 7BU
DG9 7BW
DG9 7BX
DG9 7BY
DG9 7BZ
DG9 7DA
DG9 7DB
DG9 7DD
DG9 7DE
DG9 7DF
DG9 7DH
DG9 7DL
DG9 7DN
DG9 7DQ
DG9 7DR
DG9 7DS
DG9 7DU
DG9 7DW
DG9 7DX
DG9 7DZ
DG9 7EA
DG9 7EB
DG9 7ED
DG9 7EE
DG9 7EF
DG9 7EG
DG9 7EH
DG9 7EJ
DG9 7EL
DG9 7EN
DG9 7EP
DG9 7ER
DG9 7ES
DG9 7ET
DG9 7EU
DG9 7EW
DG9 7EX
DG9 7EY
DG9 7EZ
DG9 7FB
DG9 7FD
DG9 7FE
DG9 7FG
DG9 7FH
DG9 7FJ
DG9 7GA
DG9 7HA
DG9 7HB
DG9 7HD
DG9 7HE
DG9 7HF
DG9 7HH
DG9 7HJ
DG9 7HL
DG9 7HN
DG9 7HP
DG9 7HR
DG9 7HS
DG9 7HT
DG9 7HU
DG9 7HW
DG9 7HX
DG9 7HY
DG9 7JA
DG9 7JB
DG9 7JE
DG9 7JF
DG9 7JG
DG9 7JH
DG9 7JJ
DG9 7JL
DG9 7JN
DG9 7JP
DG9 7JS
DG9 7JU
DG9 7JW
DG9 7JY
DG9 7JZ
DG9 7LA
DG9 7LB
DG9 7LD
DG9 7LF
DG9 7LG
DG9 7LH
DG9 7LJ
DG9 7LL
DG9 7LN
DG9 7LP
DG9 7LQ
DG9 7LR
DG9 7LS
DG9 7LT
DG9 7LU
DG9 7LW
DG9 7LX
DG9 7LY
DG9 7LZ
DG9 7NA
DG9 7NB
DG9 7ND
DG9 7NE
DG9 7NF
DG9 7NG
DG9 7NH
DG9 7NL
DG9 7NN
DG9 7NP
DG9 7NQ
DG9 7NS
DG9 7NU
DG9 7NW
DG9 7NX
DG9 7NY
DG9 7NZ
DG9 7PA
DG9 7PB
DG9 7PD
DG9 7PG
DG9 7PH
DG9 7PJ
DG9 7PQ
DG9 7PT
DG9 7PU
DG9 7PW
DG9 7PX
DG9 7PY
DG9 7PZ
DG9 7QA
DG9 7QB
DG9 7QD
DG9 7QE
DG9 7QF
DG9 7QG
DG9 7QH
DG9 7QJ
DG9 7QL
DG9 7QN
DG9 7QP
DG9 7QQ
DG9 7QR
DG9 7QS
DG9 7QT
DG9 7QU
DG9 7QW
DG9 7QX
DG9 7QY
DG9 7QZ
DG9 8TQ
DG9 8TR
DG9 8TS
DG9 8TT
DG9 8TW
DG9 8TX
DG9 8TY
DG9 8TZ
DG9 8UA
DG9 8UB
DG9 8WX
DG9 9AA
DG9 9AB
DG9 9AD
DG9 9AE
DG9 9AF
DG9 9AG
DG9 9AH
DG9 9AJ
DG9 9AL
DG9 9AN
DG9 9AP
DG9 9AQ
DG9 9AR
DG9 9AS
DG9 9AT
DG9 9AU
DG9 9AW
DG9 9AX
DG9 9AZ
DG9 9BA
DG9 9BB
DG9 9BD
DG9 9BE
DG9 9BG
DG9 9BH
DG9 9BJ
DG9 9BL
DG9 9BN
DG9 9BP
DG9 9BQ
DG9 9BS
DG9 9BT
DG9 9BU
DG9 9BW
DG9 9BX
DG9 9BY
DG9 9BZ
DG9 9DB
DG9 9DE
DG9 9DF
DG9 9DG
DG9 9DH
DG9 9DJ
DG9 9DL
DG9 9DN
DG9 9DP
DG9 9DQ
DG9 9DR
DG9 9DU
DG9 9DW
DG9 9DX
DG9 9DY
DG9 9DZ
DG9 9EA
DG9 9EB
DG9 9ED
DG9 9EE
DG9 9EF
DG9 9EG
DG9 9EH
DG9 9EN
DG9 9EQ
DG9 9ES
DG9 9ET
DG9 9EU
DG9 9EX
DG9 9HA
DG9 9HB
DG9 9HD
DG9 9HE
DG9 9HF
DG9 9HG
DG9 9HH
DG9 9HJ
DG9 9HL
DG9 9HN
DG9 9HP
DG9 9HQ
DG9 9HR
DG9 9HS
DG9 9HW
DG9 9JA
DG9 9JB
DG9 9JD
DG9 9JE
DG9 9JF
DG9 9JG
DG9 9JH
DG9 9JJ
DG9 9JL
DG9 9JN
DG9 9JP
DG9 9JQ
DG9 9JR
DG9 9JS
DG9 9JT
DG9 9JU
DG9 9JW
DG9 9JX
DG9 9JY
DG9 9JZ
DG9 9LA
DG9 9LB
DG9 9LD
DG9 9LE
DG9 9LF
DG9 9LG
DG9 9LH
DG9 9LJ
DG9 9LL
DG9 9LQ
DG9 9LS
DG9 9LT
DG9 9LU
DG9 9LX
DG9 7RA
DG9 7RB
DG9 7RD
DG9 7RE
DG9 7RF
DG9 7RH
DG9 7RJ
DG9 7RL
DG9 7RN
DG9 7RP
DG9 7RQ
DG9 7RR
DG9 7RS
DG9 7RT
DG9 7RW
DG9 7RX
DG9 7RY
DG9 7RZ
DG9 7SA
DG9 7SB
DG9 7SD
DG9 7SE
DG9 7SF
DG9 7SG
DG9 7SJ
DG9 7SL
DG9 7SN
DG9 7SP
DG9 7SQ
DG9 7SR
DG9 7SS
DG9 7ST
DG9 7SU
DG9 7SW
DG9 7SX
DG9 7SY
DG9 7SZ
DG9 7TA
DG9 7TB
DG9 7TE
DG9 7TF
DG9 7TG
DG9 7TH
DG9 7TJ
DG9 7TL
DG9 7TN
DG9 7TP
DG9 7TQ
DG9 7TR
DG9 7TS
DG9 7TT
DG9 7TU
DG9 7TW
DG9 7TY
DG9 7UD
DG9 7UE
DG9 7UF
DG9 8AA
DG9 8AB
DG9 8AD
DG9 8AE
DG9 8AF
DG9 8AG
DG9 8AH
DG9 8AJ
DG9 8AN
DG9 8AP
DG9 8AQ
DG9 8AR
DG9 8AS
DG9 8AT
DG9 8AU
DG9 8AW
DG9 8AX
DG9 8AY
DG9 8AZ
DG9 8BA
DG9 8BB
DG9 8BD
DG9 8BE
DG9 8BF
DG9 8BG
DG9 8BH
DG9 8BJ
DG9 8BL
DG9 8BN
DG9 8BP
DG9 8BQ
DG9 8BS
DG9 8BT
DG9 8BU
DG9 8BW
DG9 8BX
DG9 8BY
DG9 8BZ
DG9 8DA
DG9 8DB
DG9 8DD
DG9 8DE
DG9 8DF
DG9 8DG
DG9 8DH
DG9 8DJ
DG9 8DL
DG9 8DP
DG9 8DQ
DG9 8DR
DG9 8DS
DG9 8DT
DG9 8DU
DG9 8DX
DG9 8DY
DG9 8ED
DG9 8EE
DG9 8EG
DG9 8EH
DG9 8EJ
DG9 8EP
DG9 8ER
DG9 8ES
DG9 8ET
DG9 8EU
DG9 8EX
DG9 8EY
DG9 8EZ
DG9 8HA
DG9 8HB
DG9 8HG
DG9 8HH
DG9 8HJ
DG9 8HL
DG9 8HN
DG9 8HP
DG9 8HQ
DG9 8HT
DG9 8HU
DG9 8HW
DG9 8HX
DG9 8HY
DG9 8HZ
DG9 8JA
DG9 8JB
DG9 8JD
DG9 8JE
DG9 8JG
DG9 8LA
DG9 8LB
DG9 8LD
DG9 8LE
DG9 8LF
DG9 8LG
DG9 8LH
DG9 8LJ
DG9 8LN
DG9 8LP
DG9 8LQ
DG9 8LR
DG9 8LX
DG9 8LY
DG9 8LZ
DG9 8NA
DG9 8NB
DG9 8ND
DG9 8NE
DG9 8NJ
DG9 8NL
DG9 8NN
DG9 8NP
DG9 8NR
DG9 8NU
DG9 8NW
DG9 8NX
DG9 8NY
DG9 8NZ
DG9 8PA
DG9 8PB
DG9 8PD
DG9 8PF
DG9 8PG
DG9 8PH
DG9 8PJ
DG9 8PL
DG9 8PP
DG9 8PQ
DG9 8PR
DG9 8PS
DG9 8PT
DG9 8PW
DG9 8PX
DG9 8PY
DG9 8PZ
DG9 8QA
DG9 8QB
DG9 8QH
DG9 8SA
DG9 8SB
DG9 8SD
DG9 8SE
DG9 8SF
DG9 8SG
DG9 8SH
DG9 8SJ
DG9 8SL
DG9 8SN
DG9 8SP
DG9 8SQ
DG9 8SR
DG9 8SU
DG9 8SW
DG9 8SX
DG9 8SY
DG9 8SZ
DG9 8TA
DG9 8TB
DG9 8TD
DG9 8TE
DG9 8TF
DG9 8TH
DG9 8TJ
DG9 8TL
DG9 8TN
DG9 8TP
DG9 9LY
DG9 9LZ
DG9 9NA
DG9 9NB
DG9 9ND
DG9 9NE
DG9 9NF
DG9 9NG
DG9 9NH
DG9 9NJ
DG9 9NL
DG9 9NN
DG9 9NP
DG9 9NQ
DG9 9NR
DG9 9NS
DG9 9NT
DG9 9NU
DG9 9NW
DG9 9NX
DG9 9NY
DG9 9NZ
DG9 9PA
DG9 9PB
DG9 9PD
DG9 9PE
DG9 9PF
DG9 9PG
DG9 9PH
DG9 9PJ
DG9 9PQ
DG9 9PS
DG9 9PT
DG9 9PU
DG9 9PX
DG9 9PY
DG9 9PZ
DG9 9QA
DG9 9QD
DG9 9QE
DG9 9QF
DG9 9QG
DG9 9QL
DG9 9QN
DG9 9QP
DG9 9QQ
DG9 9QR
DG9 9QS
DG9 9QT
DG9 9QU
DG9 9QW
DG9 9QX
DG9 9QY
DG9 9QZ
DG9 9RB
DG9 9RD
DG9 9WT
DG9 9WW
ML12 6FR
ML12 6UH
ML12 6UJ
ML12 6UN
ML12 6UP
ML12 6UR
ML12 6UT
ML12 6UU
ML12 6UW
ML12 6UX
ML12 6UY
ML12 6UZ
ML12 6WT
ML12 6XA
ML12 6XB
ML12 6XD
ML12 6XE
ML12 6XF
ML12 6XG
ML12 6XH
ML12 6XJ
ML12 6XL
ML12 6XQ
DG11 3HS
DG12 5LG
DG12 5RW
DG12 6AE
DG12 6AH
DG12 6AX
DG12 6BL
DG12 6BP
DG12 6NP
DG12 6PL
DG12 6RE
DG12 6TA
DG13 0ED
DG13 0EL
DG13 0JA
DG13 0JJ
DG13 0JU
DG13 0LA
DG13 0LX
DG13 0PH
DG14 0RG
DG2 0BS
DG2 0LH
DG2 0LN
DG2 0NA
DG2 0NT
DG2 0NX
DG2 0NY
DG2 0PX
DG2 0RN
DG2 0SQ
DG2 0UQ
DG2 7BN
DG2 7BQ
DG2 7DP
DG2 7EG
DG2 7HD
DG2 7HP
DG2 7HW
DG2 7SG
DG2 8BA
DG2 8NT
DG2 8PG
DG2 8PQ
DG2 9AJ
DG2 9AT
DG2 9BB
DG2 9BJ
DG2 9DA
DG2 9DU
DG2 9DY
DG2 9HD
DG2 9SW
DG3 4AA
DG3 4DT
DG3 4EP
DG3 4HG
DG3 4JH
DG3 4LW
DG3 5DE
DG3 5DU
DG3 5HF
DG3 5HS
DG3 5LR
DG3 5LT
DG4 6BE
DG4 6DD
DG4 6EA
DG4 6EE
DG4 6HP
CA6 5AA
CA6 5AB
CA6 5AD
CA6 5AE
CA6 5AF
CA6 5AG
CA6 5AH
CA6 5AJ
CA6 5AL
CA6 5AN
CA6 5AP
CA6 5AQ
CA6 5AR
CA6 5AS
CA6 5AT
CA6 5AU
CA6 5AW
CA6 5AX
CA6 5AY
CA6 5AZ
CA6 5BA
CA6 5BB
CA6 5BD
CA6 5BE
CA6 5BG
CA6 5BH
CA6 5BJ
CA6 5BL
CA6 5BN
CA6 5BP
CA6 5BQ
CA6 5BS
CA6 5BT
CA6 5BU
CA6 5BW
CA6 5BX
CA6 5BY
CA6 5BZ
CA6 5DA
CA6 5DB
CA6 5DD
CA6 5DE
CA6 5DF
CA6 5DG
CA6 5DH
CA6 5DJ
CA6 5DL
CA6 5DN
CA6 5DP
CA6 5DQ
CA6 5DR
CA6 5DS
CA6 5DT
CA6 5DU
CA6 5DW
CA6 5DX
CA6 5DY
CA6 5DZ
CA6 5EA
CA6 5EB
CA6 5ED
CA6 5EE
CA6 5EF
CA6 5EG
CA6 5EH
CA6 5EJ
CA6 5EL
CA6 5EN
CA6 5EP
CA6 5EQ
CA6 5ER
CA6 5ES
CA6 5ET
CA6 5EU
CA6 5EW
CA6 5EX
CA6 5EY
CA6 5EZ
CA6 5HA
CA6 5HB
CA6 5HD
CA6 5HE
CA6 5HF
CA6 5HG
CA6 5HH
CA6 5HJ
CA6 5HL
CA6 5HN
CA6 5HP
CA6 5HQ
CA6 5HR
CA6 5HS
CA6 5HW
CA6 5HX
CA6 5HY
CA6 5HZ
CA6 5JD
CA6 5JE
CA6 5JF
CA6 5JG
CA6 5JH
CA6 5JJ
CA6 5JL
CA6 5JN
CA6 5JP
CA6 5JQ
CA6 5JR
CA6 5JS
CA6 5JT
CA6 5JU
CA6 5JW
CA6 5JX
CA6 5JZ
CA6 5LA
CA6 5LB
CA6 5LD
CA6 5LE
DG1 1AD
DG1 1AE
DG1 1AU
DG1 1BB
DG1 1BN
DG1 1BQ
DG1 1DH
DG1 1DN
DG1 1DP
DG1 1DQ
DG1 1LY
DG1 1NB
DG1 1ND
DG1 1NU
DG1 1NX
DG1 1PH
DG1 1PQ
DG1 1PY
DG1 1QT
DG1 1SB
DG1 1SN
DG1 1TJ
DG1 2AF
DG1 2AQ
DG1 2AS
DG1 2AU
DG1 2AY
DG1 2BB
DG1 2BL
DG1 2BP
DG1 2BW
DG1 2BX
DG1 2BZ
DG1 2DH
DG1 2DW
DG1 2EA
DG1 2EG
DG1 2ES
DG1 2HW
DG1 2LD
DG1 1DS
DG1 1DY
DG1 1DZ
DG1 1EE
DG1 1EN
DG1 1EQ
DG1 1ER
DG1 1ES
DG1 1EY
DG1 1EZ
DG1 1HQ
DG1 1HT
DG1 1JS
DG1 1LA
DG1 1LL
DG1 1LQ
DG1 1XX
DG1 2LW
DG1 2NW
DG1 2NY
DG1 2PT
DG1 2PU
DG1 2QA
DG1 2QJ
DG1 2QU
DG1 2QY
DG1 2QZ
DG1 2RD
DG1 2RG
DG1 2RJ
DG1 2SB
DG1 3AA
DG1 3AF
DG1 3BG
DG1 3BS
DG1 3DH
DG1 3EH
DG1 3EL
DG1 3ER
DG1 3HD
DG1 3JT
DG1 3PZ
DG1 3QQ
DG1 3TX
DG1 4EL
DG1 4JD
DG1 4SZ
DG1 4US
DG1 4WQ
DG1 4ZA
DG1 4ZB
DG1 4ZG
DG1 4ZH
DG1 4ZY
DG10 9AF
DG10 9AU
DG10 9DZ
DG10 9EN
DG10 9EQ
DG10 9EW
DG10 9GZ
DG10 9NQ
DG10 9QD
DG10 9QY
DG11 1EE
DG11 1SD
DG11 2BN
DG11 2BW
DG11 2DD
DG11 2HD
DG11 2HS
DG11 2JN
DG11 2JW
DG11 2LT
DG4 6JR
DG4 6LH
DG4 6LT
DG4 6NF
DG4 6NG
DG4 6NH
DG4 6PJ
DG4 6PP
DG4 6PS
DG4 6PT
DG4 6PX
DG4 6QD
DG4 6QE
DG4 6QF
DG4 6QG
DG4 6RU
DG4 6RW
DG4 6SQ
DG4 6YT
DG5 4BD
DG5 4JQ
DG5 4NG
DG5 4UL
DG6 4AB
DG6 4AY
DG6 4DG
DG6 4EE
DG6 4ET
DG6 4LN
DG6 4PL
DG6 4QX
DG6 4RY
DG6 4UF
DG6 4UQ
DG7 1AS
DG7 1AW
DG7 1DF
DG7 1DP
DG7 1DY
DG7 1HT
DG7 1HW
DG7 1JR
DG7 1JW
DG7 1PJ
DG7 1PZ
DG7 2HN
DG7 2NH
DG7 3JP
DG7 3JR
DG7 3JS
DG8 0AR
DG8 0BP
DG8 0ES
DG8 0EW
DG8 0LQ
DG8 0PG
DG8 0QS
DG8 6BU
DG8 6BY
DG8 6DP
DG8 6ED
DG8 6HS
DG8 6JP
DG8 6JU
DG8 6NJ
DG8 6NN
DG8 6RF
DG8 6RG
DG8 6SQ
DG8 6ST
DG8 7AP
DG8 7AY
DG8 7DD
DG8 7DJ
DG8 7JD
DG8 8LW
DG8 8NX
DG8 8PA
DG8 8PR
DG8 8QD
DG8 9AE
DG8 9AJ
DG8 9DG
DG8 9LF
DG8 9NQ
DG8 9PZ
DG8 9RW
DG8 9SF
DG9 0HN
DG9 0JG
DG9 0JW
DG9 0RZ
DG9 7DG
DG9 7DJ
DG9 7DP
DG9 7DT
DG9 7DY
DG9 7EQ
DG9 7HG
DG9 7HQ
DG9 7HZ
DG9 7JD
DG9 7JR
DG9 7JT
DG9 7JX
DG9 7LE
DG9 7NJ
DG9 7NR
DG9 7NT
DG9 7PE
DG9 7PF
DG9 7PL
DG9 7PN
DG9 7PP
DG9 7PR
DG9 7PS
DG9 7RG
DG9 7RU
DG9 7SH
DG9 7UA
DG9 7UB
DG9 8EF
DG9 8EL
DG9 8EN
DG9 8EQ
DG9 8JF
DG9 8LL
DG9 8NH
DG9 8PE
DG9 8PN
DG9 8PU
DG9 8RW
DG9 8RZ
DG9 8TG
DG9 8TU
DG9 9DS
DG9 9DT
DG9 9RA
ML12 6XN
DG1 3FJ
DG1 3FR
DG1 3FU
DG1 3GF
DG1 3GJ
DG1 3GW
DG1 9DW
DG11 1UF
DG12 6GX
DG2 9RG
DG3 5JB
DG3 5PE
DG3 9AA
DG5 4FE
DG5 4GX
DG6 4RE
DG6 4RF
DG6 4RG
DG6 4TB
DG7 1LP
DG7 2EL
DG7 3UJ
DG7 3UL
DG9 7TZ
DG1 3QZ
DG2 9RB"""
# Turn the raw postcode blob into parallel lookup structures:
#   array          -- URL-ready query strings (space encoded as '+')
#   arrayPostCodes -- the original human-readable postcodes, kept for CSV output
#   arrayBands     -- per-postcode counts for the eight council-tax bands A-H
array = array.split('\n')
arrayPostCodes = array.copy()
arrayBands = [[0] * 8 for _ in range(len(array))]
charA = 'A'
for pos, code in enumerate(array):
    # saa.gov.uk expects '+' instead of spaces in the SEARCH_TERM parameter
    array[pos] = code.replace(' ', '+')
def Connect2Web(postcode, index):
    """Scrape saa.gov.uk for the council-tax bands of all properties in
    *postcode* and store the per-band counts in ``arrayBands[index]``.

    Args:
        postcode: URL-escaped postcode (spaces already replaced by '+').
        index: row of the module-level ``arrayBands`` table to fill.

    Bug fixed: the original tallied bands with ``range(0, 7)`` (A-G only),
    so band 'H' was never counted.  The HTTP responses are now also closed.
    """
    band_pattern = re.compile(b"<td style='text-align:center;'>[A-H]</td>")

    def fetch(url):
        # Download one results page and return the raw bytes.
        req = urllib.request.Request(url)
        with urllib.request.urlopen(req) as resp:
            return resp.read()

    def extract_bands(page):
        # The band letter sits 6 characters from the end of each matched cell,
        # e.g. "...>B</td>"[-6] == 'B'.
        return [cell.decode("utf-8")[-6] for cell in re.findall(band_pattern, page)]

    web_pg = fetch("http://www.saa.gov.uk/dumgal/search.php?SEARCHED=1&ST&SEARCH_TERM="
                   + postcode
                   + "&ASSESSOR_ID=dumgal&SEARCH_TABLE=council_tax&x=0&y=0&DISPLAY_COUNT=100#results")

    locality = re.findall(re.compile(b"Please select one of the following "), web_pg)
    if len(locality) != 0:
        # Ambiguous postcode: the site returned a locality chooser.  Follow
        # every result link (the first link is not a result) and collect the
        # bands from each sub-page.
        bands = []
        href_pattern = re.compile(rb'\/dumgal\/search\.php\?[^"]*')
        for raw_link in re.findall(href_pattern, web_pg)[1:]:
            link = raw_link.decode("utf-8").replace("&amp;", "&")
            print("http://www.saa.gov.uk" + link)
            bands.extend(extract_bands(fetch("http://www.saa.gov.uk" + link)))
        for band in range(8):  # was range(0, 7): band 'H' was never counted
            arrayBands[index][band] = bands.count(chr(ord('A') + band))
        print(arrayBands[index])
    else:
        bands = extract_bands(web_pg)
        for band in range(8):  # was range(0, 7): band 'H' was never counted
            arrayBands[index][band] = bands.count(chr(ord('A') + band))
# Append one CSV row per postcode: the original postcode followed by the
# number of properties in each council-tax band A-H (filled into the
# module-level arrayBands table by Connect2Web).
with open('bands.csv', 'a', newline='') as fp:
    a = csv.writer(fp, delimiter=',')
    a.writerow(['Postcode','A','B','C','D','E','F','G','H'])
    for i in range(0, len(array)):
        print (i)
        # Fetch band counts for this postcode into arrayBands[i].
        Connect2Web(array[i], i)
        a.writerow([arrayPostCodes[i]] + arrayBands[i]) | [
"re.findall",
"csv.writer",
"re.compile"
] | [((68098, 68148), 're.compile', 're.compile', (["b'Please select one of the following '"], {}), "(b'Please select one of the following ')\n", (68108, 68148), False, 'import re\n'), ((68165, 68200), 're.findall', 're.findall', (['localityPattern', 'web_pg'], {}), '(localityPattern, web_pg)\n', (68175, 68200), False, 'import re\n'), ((69605, 69634), 'csv.writer', 'csv.writer', (['fp'], {'delimiter': '""","""'}), "(fp, delimiter=',')\n", (69615, 69634), False, 'import csv\n'), ((68279, 68326), 're.compile', 're.compile', (['b\'\\\\/dumgal\\\\/search\\\\.php\\\\?[^"]*\''], {}), '(b\'\\\\/dumgal\\\\/search\\\\.php\\\\?[^"]*\')\n', (68289, 68326), False, 'import re\n'), ((68343, 68374), 're.findall', 're.findall', (['hrefPattern', 'web_pg'], {}), '(hrefPattern, web_pg)\n', (68353, 68374), False, 'import re\n'), ((69238, 69294), 're.compile', 're.compile', (['b"<td style=\'text-align:center;\'>[A-H]</td>"'], {}), '(b"<td style=\'text-align:center;\'>[A-H]</td>")\n', (69248, 69294), False, 'import re\n'), ((69308, 69335), 're.findall', 're.findall', (['pattern', 'web_pg'], {}), '(pattern, web_pg)\n', (69318, 69335), False, 'import re\n'), ((68808, 68864), 're.compile', 're.compile', (['b"<td style=\'text-align:center;\'>[A-H]</td>"'], {}), '(b"<td style=\'text-align:center;\'>[A-H]</td>")\n', (68818, 68864), False, 'import re\n'), ((68882, 68909), 're.findall', 're.findall', (['pattern', 'web_pg'], {}), '(pattern, web_pg)\n', (68892, 68909), False, 'import re\n')] |
"""
MIT License
Copyright (c) 2021 <NAME> (benrammok)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import wx
import gui_wx
from wx import Clipboard
class GUI(gui_wx.MyFrame1):
    """Logic for the generated wx frame: builds an ``hcitool`` vendor
    command (opcode 0x3F 0x14) from the widget values.

    Fixes vs. the original:
    * ``transmitt_power_table_index_textboxOnText`` read the *dBm* textbox
      instead of the table-index textbox (copy/paste bug).
    * the clipboard was opened but never closed in
      ``Copy_CommandOnButtonClick``.
    Stray debug prints were removed.
    """

    def __init__(self, parent):
        gui_wx.MyFrame1.__init__(self, parent)
        # Template whose P1..P7 placeholders are substituted with widget values.
        self.command_text_default = "hcitool -i hci0 cmd 0x3F 0x14 P1 P2 P3 P4 P5 P6 P7"
        self.command_text = ""
        self.carrier_frequency = ""
        self.dbm_power = "0x00"
        self.dbm_index = "0x00"
        # Populate the command textbox with the initial widget state.
        self.carrier_frequency_textboxOnText(event=None)

    def generate_hci_command(self):
        """Rebuild the command string from the current widget state and show
        it in the command textbox."""
        self.command_text = self.command_text_default
        carrier_enable = self.carrier_control_combobox.GetValue().split(' - ')[0]
        carrier_frequency = "0x00" if self.carrier_frequency == "" else self.carrier_frequency
        modulation_mode = self.modulation_combobox.GetValue().split(' - ')[0]
        mod_type = '0x00' if modulation_mode == '0x00' else self.modulation_type_combobox.GetValue().split(' - ')[0]
        transmit_power = self.transmitt_power_combobox.GetValue().split(' - ')[0]
        # P6 only applies in dBm mode (0x08); P7 only in table-index mode (0x09).
        transmit_power_dbm = '0x00' if transmit_power != '0x08' \
            else self.dbm_power
        transmit_power_table_index = '0x00' if transmit_power != '0x09' \
            else self.dbm_index
        if carrier_enable == '0x01':
            # Carrier off: only P1 is sent; drop the remaining placeholders.
            self.command_text = self.command_text_default.split('P2')[0].strip(' ')
            self.command_text = self.command_text.replace('P1', carrier_enable)
        else:
            self.command_text = self.command_text.replace('P1', carrier_enable)
            self.command_text = self.command_text.replace('P2', carrier_frequency)
            self.command_text = self.command_text.replace('P3', modulation_mode)
            self.command_text = self.command_text.replace('P4', mod_type)
            self.command_text = self.command_text.replace('P5', transmit_power)
            self.command_text = self.command_text.replace('P6', transmit_power_dbm)
            self.command_text = self.command_text.replace('P7', transmit_power_table_index)
        self.command_textbox.Value = self.command_text

    def carrier_control_comboboxOnCombobox(self, event):
        """Enable/disable the detail widgets depending on carrier on/off."""
        command_value = self.carrier_control_combobox.GetValue().split('-')[0].strip(' ')
        enable = command_value != '0x01'  # 0x01 = carrier off: nothing else applies
        self.carrier_frequency_textbox.Enable(enable)
        self.modulation_combobox.Enable(enable)
        self.modulation_type_combobox.Enable(enable)
        self.transmitt_power_combobox.Enable(enable)
        self.transmitt_power_dbm_textbox.Enable(enable)
        self.transmitt_power_table_index_textbox.Enable(enable)
        self.generate_hci_command()

    def carrier_frequency_textboxOnText(self, event):
        """Accept carrier frequencies 2402-2480 MHz and store the channel
        offset (frequency - 2400) as a hex byte."""
        text = self.carrier_frequency_textbox.GetValue()
        if text.isdigit():
            frequency = int(text)
            if 2402 <= frequency <= 2480:
                self.carrier_frequency = "0x{:02X}".format(frequency - 2400)
        self.generate_hci_command()

    def modulation_comboboxOnCombobox(self, event):
        self.generate_hci_command()

    def modulation_type_comboboxOnCombobox(self, event):
        self.generate_hci_command()

    def transmitt_power_comboboxOnCombobox(self, event):
        self.generate_hci_command()

    def transmitt_power_dbm_textboxOnText(self, event):
        """Store the dBm power byte (0-255) from its textbox."""
        dBm = self.transmitt_power_dbm_textbox.GetValue()
        if dBm.isdigit():
            dBm_digit = int(dBm)
            if 0 <= dBm_digit <= 255:
                self.dbm_power = "0x{:02X}".format(dBm_digit)
        self.generate_hci_command()

    def transmitt_power_table_index_textboxOnText(self, event):
        """Store the power-table index byte (0-255) from its textbox."""
        # BUG FIX: the original read self.transmitt_power_dbm_textbox here.
        index = self.transmitt_power_table_index_textbox.GetValue()
        if index.isdigit():
            index_digit = int(index)
            if 0 <= index_digit <= 255:
                self.dbm_index = "0x{:02X}".format(index_digit)
        self.generate_hci_command()

    def carrier_off_commandOnButtonClick(self, event):
        self.generate_hci_command()

    def Copy_CommandOnButtonClick(self, event):
        """Copy the generated command to the system clipboard."""
        if Clipboard.Open():
            try:
                Clipboard.Clear()
                Clipboard.SetData(wx.TextDataObject(self.command_textbox.GetValue()))
            finally:
                Clipboard.Close()  # the original leaked the open clipboard
# Standard wx bootstrap: create the application, show the frame, run the
# event loop until the window closes.
app = wx.App(False)
gui = GUI(parent=None)
gui.Show(True)
app.MainLoop()
| [
"wx.App",
"wx.Clipboard.Open",
"gui_wx.MyFrame1.__init__",
"wx.Clipboard.Clear"
] | [((5981, 5994), 'wx.App', 'wx.App', (['(False)'], {}), '(False)\n', (5987, 5994), False, 'import wx\n'), ((1230, 1268), 'gui_wx.MyFrame1.__init__', 'gui_wx.MyFrame1.__init__', (['self', 'parent'], {}), '(self, parent)\n', (1254, 1268), False, 'import gui_wx\n'), ((5840, 5856), 'wx.Clipboard.Open', 'Clipboard.Open', ([], {}), '()\n', (5854, 5856), False, 'from wx import Clipboard\n'), ((5871, 5888), 'wx.Clipboard.Clear', 'Clipboard.Clear', ([], {}), '()\n', (5886, 5888), False, 'from wx import Clipboard\n')] |
import re
from .constants import (
UK_POSTCODE_AREA_REGEX,
UK_POSTCODE_DISTRICT_REGEX,
UK_POSTCODE_RULES_LIST,
UK_POSTCODE_SECTOR_REGEX,
UK_POSTCODE_UNIT_REGEX,
UK_POSTCODE_VALIDATION_REGEX,
)
from .exceptions import InvalidPostcode, PostcodeNotValidated
class UKPostcode:
    """A UK postcode that is split into its outward/inward halves once
    :meth:`validate` has been called successfully."""

    raw_postcode = None
    validated_postcode = None
    _outward = None
    _inward = None
    _rules_list = UK_POSTCODE_RULES_LIST

    def __init__(self, postcode):
        self.raw_postcode = f"{postcode}"

    def __str__(self):
        return f"{self.raw_postcode}"

    def _validated_part(self, value):
        # The halves only exist after validate() has run.
        if value is None:
            raise PostcodeNotValidated
        return value

    @property
    def outward(self):
        return self._validated_part(self._outward)

    @property
    def inward(self):
        return self._validated_part(self._inward)

    @property
    def area(self):
        match = re.search(UK_POSTCODE_AREA_REGEX, self.outward)
        return match.group()

    @property
    def district(self):
        match = re.search(UK_POSTCODE_DISTRICT_REGEX, self.outward)
        return match.group()

    @property
    def sector(self):
        match = re.search(UK_POSTCODE_SECTOR_REGEX, self.inward)
        return match.group()

    @property
    def unit(self):
        match = re.search(UK_POSTCODE_UNIT_REGEX, self.inward)
        return match.group()

    def validate(self):
        """Normalise, regex-check and rule-check the raw postcode.

        Raises InvalidPostcode when the overall pattern does not match;
        each configured rule may raise its own validation error.
        """
        candidate = self.raw_postcode.upper()
        matched = UK_POSTCODE_VALIDATION_REGEX.match(candidate)
        if not matched:
            raise InvalidPostcode
        self._outward, self._inward = matched.groups()
        self.validated_postcode = f"{self._outward} {self._inward}"
        for rule_class in self._rules_list:
            rule_class(self).validate()
| [
"re.search"
] | [((902, 949), 're.search', 're.search', (['UK_POSTCODE_AREA_REGEX', 'self.outward'], {}), '(UK_POSTCODE_AREA_REGEX, self.outward)\n', (911, 949), False, 'import re\n'), ((1012, 1063), 're.search', 're.search', (['UK_POSTCODE_DISTRICT_REGEX', 'self.outward'], {}), '(UK_POSTCODE_DISTRICT_REGEX, self.outward)\n', (1021, 1063), False, 'import re\n'), ((1124, 1172), 're.search', 're.search', (['UK_POSTCODE_SECTOR_REGEX', 'self.inward'], {}), '(UK_POSTCODE_SECTOR_REGEX, self.inward)\n', (1133, 1172), False, 'import re\n'), ((1231, 1277), 're.search', 're.search', (['UK_POSTCODE_UNIT_REGEX', 'self.inward'], {}), '(UK_POSTCODE_UNIT_REGEX, self.inward)\n', (1240, 1277), False, 'import re\n')] |
import numpy as num
import scipy.sparse.linalg as alg
import scipy.linalg as algnorm
import scipy.sparse as smat
import random
# Graph operations - maybe move these into a separate module?
def to_adiacency_row(neighbours, n):
    """Return a dense 0/1 adjacency row of length *n* with ones at the
    positions listed in *neighbours*."""
    row = num.zeros(n)
    for neighbour in neighbours:
        row[neighbour] = 1
    return row
def graph_to_matrix(graph):
    """Build a sparse CSR adjacency matrix from an adjacency-list *graph*.

    Every edge (i, j) listed in graph[i] becomes a 1.0 entry at (i, j).
    """
    n = len(graph)
    rows = []
    cols = []
    for vertex, neighbours in enumerate(graph):
        for neighbour in neighbours:
            rows.append(vertex)
            cols.append(neighbour)
    values = [1.0] * len(rows)
    return smat.csr_matrix((values, (rows, cols)), (n, n), 'd')
def matrix_to_graph(A):
    """Convert a sparse matrix into a symmetric adjacency list.

    Every stored entry (row, col) adds col to row's neighbour list and row
    to col's list, skipping duplicates, so the result is symmetric even when
    A is not.

    Args:
        A: scipy sparse matrix (anything exposing .nonzero()/.get_shape()).

    Returns:
        A list of neighbour lists, one per vertex.

    NOTE: the original initialised ``graph = [[]] * n`` - n references to a
    SINGLE shared list - and only worked by accident because every empty
    list was rebound before being appended to.  It also built an unused
    ``vert`` list.  Independent lists are created instead.
    """
    rows, cols = A.nonzero()
    n = A.get_shape()[0]
    graph = [[] for _ in range(n)]
    for row, col in zip(rows, cols):
        if col not in graph[row]:
            graph[row].append(col)
        if row not in graph[col]:
            graph[col].append(row)
    return graph
def __dfs(graph, v, visited):
    """Flood-fill: copy visited[v]'s (non-zero) label onto every unlabelled
    vertex reachable from v.

    Rewritten iteratively: the original recursed once per vertex, which can
    exceed Python's recursion limit on large connected components.  The
    final contents of *visited* are identical.
    """
    stack = [v]
    while stack:
        current = stack.pop()
        for w in graph[current]:
            if visited[w] == 0:
                visited[w] = visited[current]
                stack.append(w)
def extract_connected_component(graph, vertex):
    """Return the connected component containing *vertex* as a new,
    compactly renumbered adjacency list.

    Bug fixed: the original used ``offset -= 1`` together with
    ``number[i] = i - offset``, which *added* the number of skipped vertices
    instead of subtracting it, producing out-of-range neighbour indices
    whenever the graph had more than one component.  The original also
    renumbered the input's neighbour lists in place; copies are renumbered
    here so the caller's graph is left intact.
    """
    n = len(graph)
    member = [0 for v in graph]
    member[vertex] = 1
    __dfs(graph, vertex, member)
    number = [0] * n      # new index of each vertex (-1 for non-members)
    skipped = 0           # vertices outside the component seen so far
    component = []
    for i in range(n):
        if member[i]:
            number[i] = i - skipped
            component.append(list(graph[i]))  # copy: do not mutate the input
        else:
            number[i] = -1
            skipped += 1
    # Rewrite neighbour ids to the component-local numbering.
    for neighbours in component:
        for j in range(len(neighbours)):
            neighbours[j] = number[neighbours[j]]
    return component
def get_all_components(graph):
    """Split *graph* (adjacency list) into its connected components.

    Returns a list of components, each itself an adjacency list whose
    vertices are renumbered 0..len(component)-1 in order of appearance.

    NOTE(review): the neighbour lists appended to each component are the
    *same* list objects as in the input and are renumbered in place below,
    so the caller's graph is mutated - confirm callers expect this.
    """
    n = len(graph)
    member = [0 for v in graph]  # 1-based component id per vertex, 0 = unvisited
    vertex = 0
    comp_id = 1
    # Sweep vertices in order; each unvisited vertex starts a new component,
    # which __dfs then labels completely before the sweep resumes.
    while vertex < n and member[vertex] == 0:
        member[vertex] = comp_id
        comp_id += 1
        __dfs(graph, vertex, member)
        while vertex < n and member[vertex] > 0:
            vertex += 1
    components = []
    number = [0 for v in graph]  # component-local index of each vertex
    index = [0 for c in range(1, comp_id)]  # next free index per component
    for i in range(n):
        comp = member[i]-1
        # Components are first encountered in increasing comp-id order, so
        # appending a new sub-list here keeps list position == comp id.
        if index[comp] == 0:
            components.append([graph[i]])
        else:
            components[comp].append(graph[i])
        number[i] = index[comp]
        index[comp] += 1
    # Rewrite neighbour ids to component-local numbering (in place!).
    for component in components:
        for v in component:
            for i in range(len(v)):
                v[i] = number[v[i]]
    return components
# Computing MERW and SimRanks
def compute_merw(A):
    """Maximal-entropy random walk from adjacency matrix A (legacy, dense loop).

    Returns (P, eigenvector, eigenvalue, stationary distribution) where
    P[i, j] = A[i, j] * v[j] / (lambda * v[i]) and the stationary
    distribution is the squared unit eigenvector.
    """
    n = A.get_shape()[0]
    vals, vecs = alg.eigsh(A, 1)  # input is symmetric: Hermitian solver
    evalue = vals[0]
    evector = vecs[:, 0] / algnorm.norm(vecs[:, 0])
    P = smat.lil_matrix((n, n))
    for row in range(n):
        denom = evalue * evector[row]
        for col in range(n):
            if A[row, col] != 0:
                P[row, col] = A[row, col] * evector[col] / denom
    stationary = [evector[i] * evector[i] for i in range(n)]
    return P, evector, evalue, stationary
def power_method(A, precision=1e-11):
    """Dominant eigenpair of A via power iteration.

    The start vector is random but strictly positive, so for a connected
    non-negative matrix the iteration converges to the Perron vector.  At
    least 20 steps are always taken, then until the convergence measure
    drops below *precision*.

    Returns:
        (unit eigenvector, eigenvalue estimate, iteration count).
    """
    n = A.get_shape()[0]
    vec = num.array([random.random() + .1 for _ in range(n)])
    eps = 1
    steps = 0
    while steps < 20 or eps > precision:
        nxt = vec * A
        steps += 1
        lead = 0
        eps = 0
        for i in range(n):
            if vec[i] == 0:
                continue
            ratio = nxt[i] / vec[i]
            lead = max(lead, ratio)
            # Accumulated spread between the running max ratio and each
            # component's ratio; shrinks to 0 as the iteration converges.
            eps += lead - ratio
        vec = nxt / lead
    return vec / algnorm.norm(vec), lead, steps
def scipy_method(A):
    """Dominant eigenpair of symmetric A via ARPACK (largest algebraic).

    The eigenvector sign is normalised so its first entry is non-negative.
    Returns (unit eigenvector, eigenvalue, 1) to mirror power_method's
    interface (the trailing 1 stands in for an iteration count).
    """
    vals, vecs = alg.eigsh(A, k=1, which='LA')
    vec = vecs[:, 0]
    if vec[0] < 0:
        vec = -vec
    return vec / algnorm.norm(vec), vals[0], 1
def _inv(x, y):
    """Return 1/(x*y), mapping x == 0 to 0.0 (pseudo-inverse of a diagonal
    entry that may be zero)."""
    return 0.0 if x == 0 else 1 / (x * y)
def compute_merw_matrix(A, method=power_method):
    """MERW transition matrix via diagonal similarity scaling.

    Computes P = D^-1 A D / lambda with D = diag(dominant eigenvector).
    Prints the solver's iteration count (no trailing newline).

    Returns:
        (P, eigenvector, eigenvalue, stationary distribution = v squared).
    """
    n = A.get_shape()[0]
    evector, evalue, iterations = method(A)
    print('({} itr.)'.format(iterations), end='')
    right = smat.diags([evector], [0], shape=(n, n), format='csc')
    # Left factor: entrywise pseudo-inverse of (evalue * evector).
    left = smat.diags([[_inv(v, evalue) for v in evector]],
                      [0], shape=(n, n), format='csc')
    return left * A * right, evector, evalue, [v * v for v in evector]
def compute_grw(A):  # transition matrix and stationary distribution of the generic random walk
    """Generic (degree-normalised) random walk on adjacency matrix A.

    Returns (P, pi): P = D^-1 * A is the row-stochastic transition matrix
    and pi is the stationary distribution, found as the eigenvector of P^T
    nearest eigenvalue 1 (shift-invert around 0.9999999) and normalised to
    sum to one (the sign ambiguity cancels in the normalisation).

    NOTE(review): A.sum(axis=0) is fed straight into smat.diags; this relies
    on scipy accepting a (1, n) matrix as a one-diagonal sequence - confirm
    with the scipy version in use.  Degree-0 vertices produce infinities in
    the .power(-1) step.
    """
    n = A.get_shape()[0]
    degrees = smat.diags(A.sum(axis=0), [0], shape=(n, n), format='csr').power(-1)
    P = degrees * A
    vals, stationary = alg.eigs(P.transpose(), k=1, sigma=0.9999999)
    inorm = 1/num.sum(stationary[:, 0]).real
    return P, [x.real * inorm for x in stationary[:, 0]]
def compute_merw_simrank(graph, alpha, precision=1e-5, maxiter=100):
    """SimRank scores under the maximal-entropy random walk.

    Iterates the SimRank recurrence with MERW-weighted neighbour
    contributions R[a, b] / (v[a] * v[b]) for a fixed number of sweeps.

    Args:
        graph: adjacency list.
        alpha: decay factor (rescaled internally by 1/lambda^2).
        precision: unused; kept for interface compatibility.
        maxiter: number of full update sweeps.

    Returns:
        (R, delta): score matrix and the norm of the final update step.

    (The original initialised ``R = num.identity(n)`` twice; the first
    assignment was dead and has been removed.)
    """
    n = len(graph)
    P, v, val, sdist = compute_merw_matrix(graph_to_matrix(graph))
    R = num.identity(n)
    S = num.zeros((n, n))
    denom = [[v[x] * v[y] for x in range(n)] for y in range(n)]
    alpha = alpha / val / val
    for iteration in range(maxiter):
        for y in range(n):
            S[y, y] = 1.0
            for x in range(y):
                # NOTE(review): denom[a][b] can be 0 when the graph is not
                # connected, yielding inf/nan here; the _ofmatrix variant
                # guards against that - confirm inputs are connected graphs.
                S[x, y] = 0.0
                for a in graph[x]:
                    for b in graph[y]:
                        S[x, y] += R[a, b] / denom[a][b]
                S[x, y] *= alpha * denom[x][y]
                S[y, x] = S[x, y]
        # Swap buffers: R becomes the freshly computed scores.
        t = R
        R = S
        S = t
    return R, algnorm.norm(R - S)
def compute_basic_simrank(graph, alpha, precision=1e-5, maxiter=20):
    """Classic SimRank on an adjacency list.

    s(x, x) = 1 and s(x, y) = alpha / (|N(x)| * |N(y)|) * sum of previous
    scores over neighbour pairs.  Runs a fixed number of sweeps; the
    *precision* argument is accepted but not used.

    Returns:
        (R, delta): the score matrix and the norm of the last change.
    """
    n = len(graph)
    previous = num.identity(n)
    current = num.zeros((n, n))
    for _ in range(maxiter):
        for y in range(n):
            current[y, y] = 1.0
            if len(graph[y]) > 0:
                for x in range(y):
                    current[x, y] = 0.0
                    if len(graph[x]) > 0:
                        for a in graph[x]:
                            for b in graph[y]:
                                current[x, y] += previous[a, b]
                        current[x, y] *= alpha / (len(graph[x]) * len(graph[y]))
                    # Mirror: the score matrix is symmetric.
                    current[y, x] = current[x, y]
        previous, current = current, previous
    return previous, algnorm.norm(previous - current)
def compute_merw_simrank_ofmatrix(matrix, alpha, precision=1e-5, maxiter=20, method=power_method):
    """MERW SimRank computed directly from a sparse adjacency matrix.

    Same recurrence as compute_merw_simrank, but derives the adjacency list
    from *matrix* and skips pairs whose eigenvector product is zero (which
    can happen when the graph is disconnected).

    Args:
        matrix: sparse adjacency matrix.
        alpha: decay factor (rescaled by 1/lambda^2 below).
        precision: unused; kept for interface compatibility.
        maxiter: number of full update sweeps.
        method: eigensolver forwarded to compute_merw_matrix.

    Returns:
        (R, delta): score matrix and norm of the final update step.
    """
    graph = matrix_to_graph(matrix)
    n = len(graph)
    P, v, val, sdist = compute_merw_matrix(matrix, method=method)
    R = num.identity(n)
    S = num.zeros((n, n))
    denom = [[v[x]*v[y] for x in range(n)] for y in range(n)]
    alpha = alpha / val / val
    for iteration in range(maxiter):
        #S.fill(0) # S = num.zeros((n, n))
        for y in range(n):
            S[y, y] = 1.0
            for x in range(y):
                if denom[x][y] != 0:  # may be 0 when the graph is not connected
                    # NOTE(review): when the guard is False, S[x, y] keeps a
                    # stale value from two sweeps ago (buffers are swapped
                    # below) - confirm that is acceptable.
                    S[x, y] = 0.0
                    for a in graph[x]:
                        for b in graph[y]:
                            S[x, y] += R[a, b] / denom[a][b]
                    S[x, y] *= alpha * denom[x][y]
                    S[y, x] = S[x, y]
        t = R
        R = S
        S = t
    return R, algnorm.norm(R - S)
def compute_P_distance_iterative(P, alpha=0.8, maxiter=100, precision=1e-6):
    """Truncated Neumann series for (I - alpha*P)^-1 (legacy, inexact).

    Accumulates I + (alpha*P) + (alpha*P)^2 + ... until the norm of the
    current term drops below *precision* or *maxiter* extra terms have been
    added.

    Raises:
        ValueError: if alpha is outside (0, 1].

    Returns:
        (result, eps): the partial sum and the norm of the last term.
    """
    if alpha <= 0 or alpha > 1:
        raise ValueError()
    step = term = P * alpha
    result = smat.identity(P.get_shape()[0], format='csr') + step
    for _ in range(maxiter):
        term = term * step
        result = result + term
        eps = alg.norm(term)
        if eps < precision:
            break
    return result, eps
def compute_P_distance(P, alpha=0.8):
    """Exact fundamental matrix (I - alpha*P)^-1 via a sparse inverse."""
    n = P.get_shape()[0]
    system = smat.identity(n, format='csc') - P * alpha
    return alg.inv(system)
| [
"numpy.identity",
"scipy.sparse.lil_matrix",
"scipy.sparse.linalg.inv",
"scipy.sparse.linalg.eigsh",
"numpy.sum",
"numpy.zeros",
"scipy.linalg.norm",
"scipy.sparse.linalg.norm",
"random.random",
"scipy.sparse.diags",
"scipy.sparse.csr_matrix"
] | [((214, 226), 'numpy.zeros', 'num.zeros', (['n'], {}), '(n)\n', (223, 226), True, 'import numpy as num\n'), ((519, 569), 'scipy.sparse.csr_matrix', 'smat.csr_matrix', (['(data, (rows, cols))', '(n, n)', '"""d"""'], {}), "((data, (rows, cols)), (n, n), 'd')\n", (534, 569), True, 'import scipy.sparse as smat\n'), ((2878, 2893), 'scipy.sparse.linalg.eigsh', 'alg.eigsh', (['A', '(1)'], {}), '(A, 1)\n', (2887, 2893), True, 'import scipy.sparse.linalg as alg\n'), ((3018, 3041), 'scipy.sparse.lil_matrix', 'smat.lil_matrix', (['(n, n)'], {}), '((n, n))\n', (3033, 3041), True, 'import scipy.sparse as smat\n'), ((3834, 3863), 'scipy.sparse.linalg.eigsh', 'alg.eigsh', (['A'], {'k': '(1)', 'which': '"""LA"""'}), "(A, k=1, which='LA')\n", (3843, 3863), True, 'import scipy.sparse.linalg as alg\n'), ((4285, 4339), 'scipy.sparse.diags', 'smat.diags', (['[evector]', '[0]'], {'shape': '(n, n)', 'format': '"""csc"""'}), "([evector], [0], shape=(n, n), format='csc')\n", (4295, 4339), True, 'import scipy.sparse as smat\n'), ((5067, 5082), 'numpy.identity', 'num.identity', (['n'], {}), '(n)\n', (5079, 5082), True, 'import numpy as num\n'), ((5158, 5173), 'numpy.identity', 'num.identity', (['n'], {}), '(n)\n', (5170, 5173), True, 'import numpy as num\n'), ((5182, 5199), 'numpy.zeros', 'num.zeros', (['(n, n)'], {}), '((n, n))\n', (5191, 5199), True, 'import numpy as num\n'), ((5970, 5985), 'numpy.identity', 'num.identity', (['n'], {}), '(n)\n', (5982, 5985), True, 'import numpy as num\n'), ((5994, 6011), 'numpy.zeros', 'num.zeros', (['(n, n)'], {}), '((n, n))\n', (6003, 6011), True, 'import numpy as num\n'), ((6801, 6816), 'numpy.identity', 'num.identity', (['n'], {}), '(n)\n', (6813, 6816), True, 'import numpy as num\n'), ((6825, 6842), 'numpy.zeros', 'num.zeros', (['(n, n)'], {}), '((n, n))\n', (6834, 6842), True, 'import numpy as num\n'), ((8096, 8106), 'scipy.sparse.linalg.inv', 'alg.inv', (['D'], {}), '(D)\n', (8103, 8106), True, 'import scipy.sparse.linalg as alg\n'), ((2988, 
3009), 'scipy.linalg.norm', 'algnorm.norm', (['evector'], {}), '(evector)\n', (3000, 3009), True, 'import scipy.linalg as algnorm\n'), ((5852, 5871), 'scipy.linalg.norm', 'algnorm.norm', (['(R - S)'], {}), '(R - S)\n', (5864, 5871), True, 'import scipy.linalg as algnorm\n'), ((6551, 6570), 'scipy.linalg.norm', 'algnorm.norm', (['(R - S)'], {}), '(R - S)\n', (6563, 6570), True, 'import scipy.linalg as algnorm\n'), ((7514, 7533), 'scipy.linalg.norm', 'algnorm.norm', (['(R - S)'], {}), '(R - S)\n', (7526, 7533), True, 'import scipy.linalg as algnorm\n'), ((7875, 7889), 'scipy.sparse.linalg.norm', 'alg.norm', (['powr'], {}), '(powr)\n', (7883, 7889), True, 'import scipy.sparse.linalg as alg\n'), ((3771, 3787), 'scipy.linalg.norm', 'algnorm.norm', (['v0'], {}), '(v0)\n', (3783, 3787), True, 'import scipy.linalg as algnorm\n'), ((3998, 4019), 'scipy.linalg.norm', 'algnorm.norm', (['evector'], {}), '(evector)\n', (4010, 4019), True, 'import scipy.linalg as algnorm\n'), ((4881, 4906), 'numpy.sum', 'num.sum', (['stationary[:, 0]'], {}), '(stationary[:, 0])\n', (4888, 4906), True, 'import numpy as num\n'), ((3390, 3405), 'random.random', 'random.random', ([], {}), '()\n', (3403, 3405), False, 'import random\n')] |
# WSGI entry point.  Environment defaults are set before Django imports its
# settings module so deployments can still override them.
import os
os.environ.setdefault("DEBUG", "0")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djangocon.settings")
from django.core.wsgi import get_wsgi_application # noqa
from whitenoise.django import DjangoWhiteNoise # noqa
# The WSGI callable served by gunicorn/uwsgi; WhiteNoise wraps it to serve
# static files.  NOTE(review): DjangoWhiteNoise was removed in whitenoise
# 4.x in favour of middleware - confirm the pinned version is < 4.
application = DjangoWhiteNoise(get_wsgi_application())
| [
"os.environ.setdefault",
"django.core.wsgi.get_wsgi_application"
] | [((11, 46), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DEBUG"""', '"""0"""'], {}), "('DEBUG', '0')\n", (32, 46), False, 'import os\n'), ((47, 116), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DJANGO_SETTINGS_MODULE"""', '"""djangocon.settings"""'], {}), "('DJANGO_SETTINGS_MODULE', 'djangocon.settings')\n", (68, 116), False, 'import os\n'), ((264, 286), 'django.core.wsgi.get_wsgi_application', 'get_wsgi_application', ([], {}), '()\n', (284, 286), False, 'from django.core.wsgi import get_wsgi_application\n')] |
# -*- coding: utf-8 -*-
from odoo import http
from odoo.http import request
from odoo.addons.auth_signup_confirmation.controllers.auth_signup_confirmation import (
AuthConfirm,
)
class AuthLead(AuthConfirm):
    """Extends the signup-confirmation controller to open a CRM lead the
    first time a user's account becomes active via the confirmation link."""

    @http.route("/web/signup/confirm", type="http", auth="public")
    def singnup_using_generated_link(self, *args, **kw):
        # Remember whether the user tied to this signup token was already
        # active before the parent controller processes the confirmation.
        user_state_before = (
            request.env["res.users"]
            .sudo()
            .search([("partner_id.signup_token", "=", kw["token"])])
            .active
        )
        res = super(AuthLead, self).singnup_using_generated_link(*args, **kw)
        # Re-read the user: the parent call may have activated the account.
        # NOTE(review): if the parent consumes/clears the signup token, this
        # second search can come back empty - confirm the token survives.
        user = (
            request.env["res.users"]
            .sudo()
            .search([("partner_id.signup_token", "=", kw["token"])])
        )
        if user.active and not user_state_before:
            # First activation: create a lead for the confirmed partner.
            request.env["crm.lead"].sudo().create(
                {
                    "name": user.partner_id.name,
                    "partner_id": user.partner_id.id,
                    "contact_name": user.partner_id.name,
                }
            )
        return res
| [
"odoo.http.route"
] | [((220, 281), 'odoo.http.route', 'http.route', (['"""/web/signup/confirm"""'], {'type': '"""http"""', 'auth': '"""public"""'}), "('/web/signup/confirm', type='http', auth='public')\n", (230, 281), False, 'from odoo import http\n')] |
import keras
import pandas as pd
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.utils import to_categorical
import matplotlib.pyplot as plt
import time
start_time=time.time()  # wall-clock start; elapsed time is printed at the end
# Load the training set and split off the 'typeoffraud' label column.
location="dataforDl.csv"
data=pd.read_csv(location)
data_columns=data.columns
xtrain = data[data_columns[data_columns != 'typeoffraud']]
ytrain=data['typeoffraud']
# Load the test set the same way.
location1="dataforDl1.csv"
data1=pd.read_csv(location1)
data1_columns=data1.columns
xtest = data1[data1_columns[data1_columns != 'typeoffraud']]
ytest=data1['typeoffraud']
# Z-score normalisation.  NOTE(review): each set is normalised with its OWN
# mean/std; usually the training statistics are reused for the test set -
# confirm this is intended.
xtrain_norm = (xtrain - xtrain.mean()) / xtrain.std()
xtest_norm = (xtest - xtest.mean()) / xtest.std()
n_cols = xtrain_norm.shape[1]
# One-hot encode the integer class labels.
ytrain=to_categorical(ytrain)
ytest=to_categorical(ytest)
num_classes=ytrain.shape[1]
print(num_classes)
def classification_model():
    """Build and compile a small dense softmax classifier.

    Uses the module-level n_cols (input width) and num_classes (output
    width): two hidden ReLU layers of 100 units, Adam optimiser,
    categorical cross-entropy loss.
    """
    model = Sequential([
        Dense(100, activation='relu', input_shape=(n_cols,)),
        Dense(100, activation='relu'),
        Dense(num_classes, activation='softmax'),
    ])
    model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
    return model
# build the model
model = classification_model()
# fit the model (validation on the held-out test set each epoch)
model.fit(xtrain_norm, ytrain, validation_data=(xtest_norm, ytest), epochs=10, verbose=1)
# evaluate the model
# test_loss,test_acc=model.evaluate(xtest_norm, ytest)
# Predicted class = argmax over the softmax outputs.
test_labels_p=model.predict(xtest_norm)
test_labels_p=np.argmax(test_labels_p,axis=1)
print('---%s seconds---'%(time.time()-start_time)) | [
"pandas.read_csv",
"numpy.argmax",
"keras.models.Sequential",
"keras.utils.to_categorical",
"keras.layers.Dense",
"time.time"
] | [((213, 224), 'time.time', 'time.time', ([], {}), '()\n', (222, 224), False, 'import time\n'), ((255, 276), 'pandas.read_csv', 'pd.read_csv', (['location'], {}), '(location)\n', (266, 276), True, 'import pandas as pd\n'), ((425, 447), 'pandas.read_csv', 'pd.read_csv', (['location1'], {}), '(location1)\n', (436, 447), True, 'import pandas as pd\n'), ((708, 730), 'keras.utils.to_categorical', 'to_categorical', (['ytrain'], {}), '(ytrain)\n', (722, 730), False, 'from keras.utils import to_categorical\n'), ((737, 758), 'keras.utils.to_categorical', 'to_categorical', (['ytest'], {}), '(ytest)\n', (751, 758), False, 'from keras.utils import to_categorical\n'), ((1478, 1510), 'numpy.argmax', 'np.argmax', (['test_labels_p'], {'axis': '(1)'}), '(test_labels_p, axis=1)\n', (1487, 1510), True, 'import numpy as np\n'), ((867, 879), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (877, 879), False, 'from keras.models import Sequential\n'), ((894, 946), 'keras.layers.Dense', 'Dense', (['(100)'], {'activation': '"""relu"""', 'input_shape': '(n_cols,)'}), "(100, activation='relu', input_shape=(n_cols,))\n", (899, 946), False, 'from keras.layers import Dense\n'), ((961, 990), 'keras.layers.Dense', 'Dense', (['(100)'], {'activation': '"""relu"""'}), "(100, activation='relu')\n", (966, 990), False, 'from keras.layers import Dense\n'), ((1006, 1046), 'keras.layers.Dense', 'Dense', (['num_classes'], {'activation': '"""softmax"""'}), "(num_classes, activation='softmax')\n", (1011, 1046), False, 'from keras.layers import Dense\n'), ((1536, 1547), 'time.time', 'time.time', ([], {}), '()\n', (1545, 1547), False, 'import time\n')] |
from django.contrib import admin
from .models import Profile
# Register your models here.
class ProfileAdmin(admin.ModelAdmin):
    """Admin configuration for the Profile model.

    The original declared an inner ``class Meta`` with ``fields =
    '__all__'`` - that is a ModelForm/serializer idiom which ModelAdmin
    silently ignores, so it has been removed.  With no options set, the
    admin already displays every editable field of the model.
    """


admin.site.register(Profile, ProfileAdmin)
"django.contrib.admin.site.register"
] | [((174, 216), 'django.contrib.admin.site.register', 'admin.site.register', (['Profile', 'ProfileAdmin'], {}), '(Profile, ProfileAdmin)\n', (193, 216), False, 'from django.contrib import admin\n')] |
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 15 15:41:38 2018
@author: steve
"""
import re,types
from HeroLabStatBase import VERBOSITY,Character
OPERATORS = ["<",">","==",">=","<=","<>","!=","is","not","in","and","or"]
class Matcher(object):
"""
Container for attributes and methods related to finding replacement values
from the character and returning those values
Methods:
        getMatch: returns result of evaluating the match value
getKeys: returns the list of possible keywords for templates
Attributes:
        name = (str) derived from character name
type = (str) 'text' or 'image'
matcherDictionary: (dict) provided matcher dictionary
"""
"""
********
Matching
********
The matching method uses keywords surrounded by double wavy brackets and
modified with prefix and suffix elements.
TEXTMATCH
==========
The text match dictionary is for replacing keyworded and bracketed text with
the result of evaluation of the value for each item. Examples of using the
format in a template document are as follows::
{{keyword}}}
{{(keyword)}}
{{head|_keyword}}
{{(head|_keyword)}}
{{head|keyword..}}
{{head|?keyword..}}
{{head|.c.keyword}}
{{(head|_keyword..)}}
{{head|_keyword_..}}
{{head|_keyword__..}}
``keyword``
must have a match in the dictionary to give a value which will be
evaluated. It must also not have a ``..`` (double period) as part of it.
``()``
must be the outer most element, but inside the double brackets. If
the value evaluation results in something parenthesis are placed around it
``head|``
This is replaced with the *head* text if the value evaluation results
        in something. The ``|`` (vertical bar) may not be used anywhere else.
``head|?``
This is replaced with the *head* text if the value evaluation results
in something, however only the head text is returned.
``_``
This is used before a keyword to indicate that instead of the evaluated
        value the parent Feature's abbreviate method should be called with the
final attribute as the argument.
If it is used after the keyword, the parent Feature's describe method
is called and the result returned.
If two underscores follow the keyword, the name is prepended colon
        separated from the description.
``.c.``
This before the keyword is used for tracking item lists. The value
should evaluate to an integer value. The ``c`` can be any single character
        that character will be repeated integer times based on the evaluated value
``..``
This after a keyword is used to indicate a possible list. The value
should evaluate to an attribute from the first element in the list. The
list should be one element up from the attribute. The result will be the
same attribute from all the elements in the list. Any text following
the ``..`` will be used as separators between the items in the list.
The value for each item in the text match dictionary should evaluate to the
text which will replace the keyword in the template document, or as mentioned
above the text for the first attribute in a list.
There are also some simple operations which can be done as part of the value
evaluation. These include multiple attribute evaluation, keyword nesting,
and simple conditionals.
``\x1f``
This is used to indicate that there are multiple attributes references
in the keyword item's value. Each attribute is listed with this
character as the separator and will evaluate as a space separated list
``\x1e``
This is used to nest keywords in the values. The double brackets
are not used. However, all the modifiers can be used. Each is
        separated with this character and will result in a list of values
``\x1d``
This is used to separate a conditional from the following replacement
The conditional can only be very simple with operators as in the global
        OPERATORS, all numbers will be treated as floats, string comparisons
should be listed without any quotes and any attribute replacements
must be very simple.
"""
TEXTMATCH = {
}
"""
IMAGEMATCH
==========
The image match dictionary is for replacing shape placeholders containing
just the keyword as the placeholder. Examples of using the
format in a template document are as follows::
{{keyword}}
{{h_keyword..}}
{{l_keyword..}}
``keyword``
must have a match in the dictionary to give a value which will be
evaluated. It must also not have a ``..`` (double period) as part of it.
``h_ or l_``
This is used to indicate the resolution for the image. The value has to
evaluate to an object with imageHigh and/or imageLow (default) attribute.
``..``
This is used to indicate a possible list. The value should evaluate to
an attribute from the first element in the list. The list should be
one element up from the attribute. The result will be the same attribute
from all the elements in the list.
The value for each item in the image match dictionary should evaluate to an
object with an imageHigh and/or imageLow attribute (default imageHigh). The
value of this attribute is a tuple containing the filename for the image
and the absolute path filename for the image. If the value is the first in
a list and the `..`` modifier is used, imageHigh and/or imageLow is evaluated
for each item in the list and returned as a list of tuples
"""
IMAGEMATCH = {
}
"""
BOOLEANMATCH
==========
The boolean match dictionary is for replacing a keyword with a returned boolean:
True or False based on the value(s) from the evaluated character attribute.
Examples of using the format in a template document are as follows::
{{keyword}}
{{keyword..}}
``keyword``
must have a match in the dictionary to give a value which will be
evaluated. It must also not have a ``..`` (double period) as part of it.
``..``
This is used to indicate a possible list. The value should evaluate to
an attribute from the first element in the list. The list should be
one element up from the attribute. The result will be derived from the
same attribute from all the elements in the list.
The value for each item in the boolean match dictionary should evaluate to a
case insensityve string of yes, no, true, false, on, or off. These are
then interpreted as a boolean and returned either as a single result of a list.
"""
BOOLEANMATCH = {
}
_booleanDict = {
'yes':True,
'no':False,
'true':True,
'false':False,
'on':True,
'off':False,
}
def __init__(self,character,matcherDictionary=TEXTMATCH,matcherType='text',**kwargs):
"""create a matcher give the character and the match dictionary
Args:
character: (Character) instance from which the data is drawn
matcherDictionary: (dict) dictionary of keywords which are matched
and the values are replacement chracter subobjects which when
evaluated return a string, a boolean, or None for 'text' type
matchers. Alternativelty these vales may return a tuple of
(image filename, image absolute path filename) for 'image'
type matchers
matchType: (string) either 'text' or 'image' or 'boolean'
"""
if 'verbosity' not in kwargs: kwargs['verbosity'] = VERBOSITY
self.verbosity = kwargs['verbosity']
assert type(character) == Character, "First argument must be a Character instance: %s" % character
assert type(matcherDictionary) == dict, "Second argument must be dictionary: %s" % matcherDictionary
assert matcherType == 'text' or matcherType == 'image' or matcherType == 'boolean',"matcherType must be either 'text', 'image', or 'boolean': %s"% matcherType
self._character = character
self.name = "%s.%s %s" % (character.myIndex,character.characterIndex,character.name)
self.type = matcherType
self.matcherDictionary = matcherDictionary
def _exists(self,toTest,*args,**kwargs):
"""check if the attribute exists within the character attribute tree
Returns: a size member tuple
isAttr: (boolean) this attribute exists
testObj: value returned from final test object's attribute
lastTestObj: (Feature) final test object
testAttr: (string) final attribute of the feature being tested
testList: (Feature list) if lastTestObj is a member
testAttrIdx (int or str)
"""
toTestList = toTest.split(".")
testObj = self._character
lastTestObj = testObj
testList = []
testAttrIdx = None
attrCount = 0
isAttr = True
testAttr = ''
# loop through each potential object and attribute from the provided
# oject test string. Starting with testObj = self._character
for (attrCount,myAttr) in enumerate(toTestList):
# save the last successful object test
lastTestObj = testObj
# match the attribute string to identify list element attributes
# or methods of the object. Also match the list index or
# method arguments
attrMatch = re.match(r'([^\[\]\(\)]+)([\[\(](.+)?[\)\]])?',myAttr)
if attrMatch:
# next attribute to test without index or arguments
testAttr = attrMatch.group(1)
testAttrIdx = None
# did we match an index/arguments ?
if len(attrMatch.groups()) == 3:
testAttrIdx = attrMatch.group(3)
# first test, does the testObj have the current attribute
isAttr = hasattr(testObj,testAttr)
if not isAttr:
#print(attrMatch.groups(),testObj,testAttr,dir(testObj))
break
# second test, it the attribute a list element or method
if testAttrIdx != None:
testList = getattr(testObj,testAttr)
if type(testList) == list:
if int(testAttrIdx) >= len(testList):
isAttr = False
break
testObj = testList[int(testAttrIdx)]
elif type(testList) == dict:
testObj = testList[testAttrIdx]
elif type(testList) == types.MethodType:
if type(testList(testAttrIdx.split(","))) == types.GeneratorType:
testObj = testList
testList = [i for i in testObj(testAttrIdx.split(","))]
else:
testObj = [i for i in testObj(testAttrIdx.split(","))]
break
else:
isAttr = False
break
else:
testObj = getattr(testObj,testAttr)
else:
isAttr = False
#if self.type == 'image':
# for testAttr in ['imageLow','imageHigh']:
# isAttr = hasattr(testObj,testAttr)
# if isAttr:
# lastTestObj = testObj
# testObj = getattr(testObj,testAttr)
# break
if testList and lastTestObj not in testList: testList = []
if not isAttr: testObj = toTest
return (isAttr,testObj,lastTestObj,testAttr,testList,testAttrIdx)
def getMatch(self,keyText,*args,**kwargs):
"""Return the match from the included character based on keyText
Args:
keyText: (str) keyword from matcherDictionary possibly with modifiers
for head, parenthesis, lists, image resolution, and/or abbreviation
\x1d separate conditional from replacement
\x1e serarate replacement values when using multiple (which will be joined with a space)
\x1f separate keywords in replacement when nesting keywords
"""
# just in case the keyText is passed with the brackets
myKey = re.sub('^\{\{(.*)\}\}$',r'\1',keyText)
# identify any brackets and strip them off the myKey
(pStart,pEnd) = ('','')
pMatch = re.search(r'^\{(.*)\}$',myKey)
if pMatch: (myKey,pStart,pEnd) = (pMatch.group(1),'{','}')
pMatch = re.search(r'^\[(.*)\]$',myKey)
if pMatch: (myKey,pStart,pEnd) = (pMatch.group(1),'[',']')
pMatch = re.search(r'^\((.*)\)$',myKey)
if pMatch: (myKey,pStart,pEnd) = (pMatch.group(1),'(',')')
# identify any header and strip it off the myKey
headText = ''
hMatch = re.search(r'^([^|]+)\|([^|]+)$',myKey)
if hMatch: (headText,myKey) = hMatch.groups()
hOnlyMatch = re.search(r'^[?]',myKey)
if hOnlyMatch: myKey = re.sub(r'^\?','',myKey)
# identify any repeating characters and strip it off the myKey
repeatText = ''
rMatch = re.search(r'^\.(.)\.(.+)$',myKey)
if rMatch: (repeatText,myKey) = rMatch.groups()
# assign flag for abbreviation
abbreviate = False
if re.match(r'^_',myKey) and self.type != 'image': abbreviate = True
# add in image resoultion
imageRes = ''
if self.type == 'image':
imageRes = 'imageLow'
if re.match(r'^h_',myKey):
imageRes = 'imageHigh'
# match for the list option and separator based on flag
listMatch = re.search(r'\.\.(.*)$',myKey)
joiner = ''
if listMatch:
joiner = listMatch.group(1)
# strip off repeat, resolution, and abbreviate flags down to the key
myKey = re.sub(r'\.\..*$','',re.sub(r'^(h_|l_|_)','',myKey))
# match for the description option and strip the flag
nameDescribe = re.search(r'__$',myKey)
describe = re.search(r'_$',myKey)
myKey = re.sub(r'__?$','',myKey)
# some matchers use the striped key, some use the full key
keyWord = myKey in self.matcherDictionary and myKey or keyText
if keyWord not in self.matcherDictionary:
if self.verbosity >= 2:
print("Warning: key is not in Matcher, %s returned" % keyWord)
return keyWord
rtnList = []
myValue = self.matcherDictionary[keyWord]
testedValue = (False,None,None,str(),list(),None)
# if the value is also keys split them up and get the values
if re.search("\x1f",myValue):
for kw in re.split("\x1f",myValue):
rtnList.append(self.getMatch(kw))
else:
# a special type of text match where two values are separated by a group separator
# in this case the first is evaluated as a boolean which determins if the second is
# displayed.
conditional = False
conditionalResult = []
itemCount = 1
if re.search("\x1d",self.matcherDictionary[keyWord]):
conditional = True
(myConditional,myValue) = re.split("\x1d",myValue)
conditionalList = re.split(r' ',myConditional)
# evaluate each part of the conditional which is a feature to its attribute
# each part of the conditional is also then expanded to a list
for (condIdx,condItem) in enumerate(conditionalList):
testedItem = self._exists(condItem)
# if the keyword asks for a list, the attribute exists, and the attribute comes from a list member
if listMatch and testedItem[0] and testedItem[4]:
# go through each feature in the list and get the relavant attribute value
conditionalList[condIdx] = [hasattr(lf,testedItem[3]) and getattr(lf,testedItem[3]) for lf in testedItem[4]]
itemCount = len(conditionalList[condIdx]) > itemCount and len(conditionalList[condIdx]) or itemCount
else:
conditionalList[condIdx] = [testedItem[1]]
# duplicate the last element in the conditional list part until all are the same length
for (condIdx,condItem) in enumerate(conditionalList):
while len(condItem) < itemCount:
condItem.append(condItem[len(condItem)-1])
conditionalList[condIdx] = condItem
# evaluate set of conditionals for each possible list item
for itemIdx in range(itemCount):
tempConditionalList = []
for condIdx in range(len(conditionalList)):
# all numbers are evaluated as floats
try:
float(conditionalList[condIdx][itemIdx])
if type(conditionalList[condIdx][itemIdx]) != types.BooleanType:
tempConditionalList.append("float(%s)" % conditionalList[condIdx][itemIdx])
else:
if conditionalList[condIdx][itemIdx]:
tempConditionalList.append("True")
else:
tempConditionalList.append("False")
except(ValueError):
if conditionalList[condIdx][itemIdx] not in OPERATORS:
tempConditionalList.append('"'+conditionalList[condIdx][itemIdx]+'"')
else:
tempConditionalList.append(conditionalList[condIdx][itemIdx])
try:
conditionalResult.append(eval(" ".join(tempConditionalList)))
except:
print(tempConditionalList)
raise
# I now have a list of boolean stored in conditionalResult, one for each
# attribute in the list, or a list of one for non-list attributes
# Now lets go through all the values.
valueList = []
maxCount = 0
# loop through each of the \x1e separated values
# these will be interleaved as space separated
# values for each one in a list (if it is a list)
for (valCount,myValue) in enumerate(re.split("\x1e",myValue)):
valueList.append(list())
# append imageRes for images or '' for all else
if self.type == 'image':
myValue = re.sub(r'.image(High|Low)','',myValue)
myValue += "." + imageRes
testedValue = self._exists(myValue)
# if it does not exist append empty result to the list
if not testedValue[0]:
if self.verbosity >= 2: print("Warning: key:%s -> %s is not in Character %s, empty text returned" % (keyWord,myValue,self.name))
#if self.type == 'boolean': return False
#if self.type == 'boolean':
# valueList[valCount].append('false')
#elif self.type == 'image':
# valueList[valCount].append(('',''))
#else:
# valueList[valCount].append('')
valueList[valCount].append(None)
continue
# if we have the value add it/them to the list
feature = testedValue[2]
attr = testedValue[3]
featureList = []
if listMatch and testedValue[4]:
featureList = testedValue[4]
else:
featureList = [feature]
for f in featureList:
if listMatch and hasattr(f,attr) or not listMatch:
if abbreviate:
myVal = f.abbreviate(attr)
else:
myVal = getattr(f,attr)
if describe:
if nameDescribe:
myVal = f.describe(attr,myVal)
else:
myVal = f.describe(attr)
valueList[valCount] += [myVal]
# keep track of max values per valCount
maxCount = len(valueList[valCount]) > maxCount and len(valueList[valCount]) or maxCount
for cntr in range(maxCount):
if conditional:
# use the cntr to find the relavant conditional or if they are mismatched
# just use the last conditional
if (cntr >= len(conditionalResult)):
idx = len(conditionalResult)-1
else:
idx = cntr
if not conditionalResult[idx]:
continue
toJoinList = []
for vIdx in range(len(valueList)):
if cntr < len(valueList[vIdx]):
if (valueList[vIdx][cntr]):
if type(valueList[vIdx][cntr]) == types.MethodType:
toJoinList.append(joiner.join([i for i in valueList[vIdx][cntr]()]))
else:
toJoinList.append(valueList[vIdx][cntr])
if self.type == 'text':
rtnList.append(" ".join(toJoinList))
# multiple value separated by \x1e are ignored for boolean and images
else:
rtnList.append(valueList[0][cntr])
# Now we have a return list of strings or tuples
if rMatch:
newList = []
for i in rtnList:
try:
newList.append(repeatText * int(i))
except ValueError:
if self.verbosity >= 2: print("Warning: key:%s -> %s attribute %s was not an integer for Character %s, 1 repeat used" % (keyWord,testedValue[2],testedValue[3],self.name))
newList.append(repeatText)
rtnList = newList[:]
# if this is a boolean, change the list to boolean list
if self.type == 'boolean':
rtnList = [self._booleanDict[b.lower()] for b in rtnList]
# return the result(s)
rtnList = filter(lambda i:i,rtnList)
if len(rtnList) == 0:
if self.verbosity >= 2: print("Warning: key:%s -> nothing stored in %s attribute %s for Character %s, empty text returned" % (keyWord,testedValue[2],testedValue[3],self.name))
if self.type == 'boolean': return False
if self.type == 'image': return ('','')
return ''
if self.type != 'text':
if len(rtnList) == 1:
return rtnList[0]
return rtnList
if hOnlyMatch: rtnList = []
return ''.join([pStart,headText,joiner.join(rtnList),pEnd])
def getKeys(self,*args,**kwargs):
"""return the list of possible keys for this matcher"""
return self.matcherDictionary.keys() | [
"re.split",
"re.sub",
"re.match",
"re.search"
] | [((12702, 12746), 're.sub', 're.sub', (['"""^\\\\{\\\\{(.*)\\\\}\\\\}$"""', '"""\\\\1"""', 'keyText'], {}), "('^\\\\{\\\\{(.*)\\\\}\\\\}$', '\\\\1', keyText)\n", (12708, 12746), False, 'import re, types\n'), ((12851, 12883), 're.search', 're.search', (['"""^\\\\{(.*)\\\\}$"""', 'myKey'], {}), "('^\\\\{(.*)\\\\}$', myKey)\n", (12860, 12883), False, 'import re, types\n'), ((12966, 12998), 're.search', 're.search', (['"""^\\\\[(.*)\\\\]$"""', 'myKey'], {}), "('^\\\\[(.*)\\\\]$', myKey)\n", (12975, 12998), False, 'import re, types\n'), ((13081, 13113), 're.search', 're.search', (['"""^\\\\((.*)\\\\)$"""', 'myKey'], {}), "('^\\\\((.*)\\\\)$', myKey)\n", (13090, 13113), False, 'import re, types\n'), ((13275, 13314), 're.search', 're.search', (['"""^([^|]+)\\\\|([^|]+)$"""', 'myKey'], {}), "('^([^|]+)\\\\|([^|]+)$', myKey)\n", (13284, 13314), False, 'import re, types\n'), ((13389, 13413), 're.search', 're.search', (['"""^[?]"""', 'myKey'], {}), "('^[?]', myKey)\n", (13398, 13413), False, 'import re, types\n'), ((13581, 13616), 're.search', 're.search', (['"""^\\\\.(.)\\\\.(.+)$"""', 'myKey'], {}), "('^\\\\.(.)\\\\.(.+)$', myKey)\n", (13590, 13616), False, 'import re, types\n'), ((14101, 14132), 're.search', 're.search', (['"""\\\\.\\\\.(.*)$"""', 'myKey'], {}), "('\\\\.\\\\.(.*)$', myKey)\n", (14110, 14132), False, 'import re, types\n'), ((14444, 14467), 're.search', 're.search', (['"""__$"""', 'myKey'], {}), "('__$', myKey)\n", (14453, 14467), False, 'import re, types\n'), ((14487, 14509), 're.search', 're.search', (['"""_$"""', 'myKey'], {}), "('_$', myKey)\n", (14496, 14509), False, 'import re, types\n'), ((14526, 14551), 're.sub', 're.sub', (['"""__?$"""', '""""""', 'myKey'], {}), "('__?$', '', myKey)\n", (14532, 14551), False, 'import re, types\n'), ((15091, 15117), 're.search', 're.search', (['"""\x1f"""', 'myValue'], {}), "('\\x1f', myValue)\n", (15100, 15117), False, 'import re, types\n'), ((9806, 9868), 're.match', 're.match', 
(['"""([^\\\\[\\\\]\\\\(\\\\)]+)([\\\\[\\\\(](.+)?[\\\\)\\\\]])?"""', 'myAttr'], {}), "('([^\\\\[\\\\]\\\\(\\\\)]+)([\\\\[\\\\(](.+)?[\\\\)\\\\]])?', myAttr)\n", (9814, 9868), False, 'import re, types\n'), ((13445, 13470), 're.sub', 're.sub', (['"""^\\\\?"""', '""""""', 'myKey'], {}), "('^\\\\?', '', myKey)\n", (13451, 13470), False, 'import re, types\n'), ((13748, 13769), 're.match', 're.match', (['"""^_"""', 'myKey'], {}), "('^_', myKey)\n", (13756, 13769), False, 'import re, types\n'), ((13952, 13974), 're.match', 're.match', (['"""^h_"""', 'myKey'], {}), "('^h_', myKey)\n", (13960, 13974), False, 'import re, types\n'), ((14327, 14358), 're.sub', 're.sub', (['"""^(h_|l_|_)"""', '""""""', 'myKey'], {}), "('^(h_|l_|_)', '', myKey)\n", (14333, 14358), False, 'import re, types\n'), ((15140, 15165), 're.split', 're.split', (['"""\x1f"""', 'myValue'], {}), "('\\x1f', myValue)\n", (15148, 15165), False, 'import re, types\n'), ((15554, 15604), 're.search', 're.search', (['"""\x1d"""', 'self.matcherDictionary[keyWord]'], {}), "('\\x1d', self.matcherDictionary[keyWord])\n", (15563, 15604), False, 'import re, types\n'), ((15682, 15707), 're.split', 're.split', (['"""\x1d"""', 'myValue'], {}), "('\\x1d', myValue)\n", (15690, 15707), False, 'import re, types\n'), ((15741, 15769), 're.split', 're.split', (['""" """', 'myConditional'], {}), "(' ', myConditional)\n", (15749, 15769), False, 'import re, types\n'), ((19015, 19040), 're.split', 're.split', (['"""\x1e"""', 'myValue'], {}), "('\\x1e', myValue)\n", (19023, 19040), False, 'import re, types\n'), ((19218, 19257), 're.sub', 're.sub', (['""".image(High|Low)"""', '""""""', 'myValue'], {}), "('.image(High|Low)', '', myValue)\n", (19224, 19257), False, 'import re, types\n')] |
import subprocess
import json
import os
import csv
import numpy as np
import pandas as pd
import pysam
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
def get_orf(input_genome, output_genome, orf):
    """Trim the first `orf` bases from a FASTA genome to shift its reading frame.

    Args:
        input_genome: path to a single-record FASTA file.
        output_genome: path where the trimmed FASTA record is written.
        orf: reading-frame offset (0, 1 or 2); may arrive as a string.
    """
    offset = int(orf)
    genome = SeqIO.read(input_genome, 'fasta')
    genome.seq = genome.seq[offset:]
    SeqIO.write(genome, output_genome, 'fasta')
def backtranslate(input_nucleotide, input_protein, output_codon):
    """Map gaps from aligned proteins back onto their source nucleotide sequences.

    Records are paired positionally between the two inputs. Each non-gap amino
    acid consumes the next codon from the nucleotide sequence; each gap emits
    '---'. The resulting codon alignment is written to `output_codon`.
    """
    nucleotide_records = SeqIO.parse(input_nucleotide, 'fasta')
    protein_records = SeqIO.parse(input_protein, 'fasta')
    output_records = []
    for protein, nucleotide in zip(protein_records, nucleotide_records):
        codon_index = 0
        pieces = []
        for amino_acid in protein.seq:
            if amino_acid == '-':
                pieces.append('---')
            else:
                codon_start = 3 * codon_index
                pieces.append(str(nucleotide.seq[codon_start:codon_start + 3]))
                codon_index += 1
        output_records.append(SeqRecord(
            Seq(''.join(pieces)),
            id=protein.id,
            description=protein.description
        ))
    SeqIO.write(output_records, output_codon, 'fasta')
def select_simulated_gene(dataset, gene, output):
    """Extract the codon sequence of `gene` from a simulated genome.

    Tries each of the three reading frames of the simulated genome, scores a
    frame by percent identity between the translated genome and the gene
    reference in the pre-computed alignment, and writes the codons of the
    best-scoring frame to `output`.
    """
    aligned_filename = "output/simulation/%s/aligned_%s_orf-%d_codon.fasta"
    nucleotide_genome_filename = "output/simulation/%s/genome.fasta" % dataset
    nucleotide_genome = SeqIO.read(nucleotide_genome_filename, 'fasta')
    max_percent_identity = 0
    for i in range(3):  # the three candidate reading frames
        non_gaps = 0
        matches = 0
        codon_list = []
        # alignment file holds the translated genome first, then the reference
        records = SeqIO.parse(aligned_filename % (dataset, gene, i), 'fasta')
        translated_genome = next(records)
        reference = next(records)
        genome_i = 0  # codon index into the ungapped translated genome
        for j in range(len(reference)):
            if reference[j] != '-':
                non_gaps += 1
                codon = str(nucleotide_genome[3*genome_i+i:3*genome_i+i+3].seq)
                codon_list.append(codon)
                if reference[j] == translated_genome[j]:
                    matches += 1
            if translated_genome[j] != '-':
                genome_i += 1
        percent_identity = matches/non_gaps
        if percent_identity > max_percent_identity:
            max_percent_identity = percent_identity
            desired_codons = ''.join(codon_list)
            # NOTE(review): Seq.ungap was removed in Biopython 1.80+; newer
            # versions need str(...).replace('-', '') -- confirm the pinned
            # Biopython version.
            record = SeqRecord(
                Seq(desired_codons).ungap('-'),
                id=nucleotide_genome.id,
                description=gene
            )
    # NOTE(review): if no frame scores above 0% identity, `record` is never
    # assigned and the next line raises NameError -- confirm this cannot occur.
    SeqIO.write(record, output, 'fasta')
def write_abayesqr_config(sam_filename, reference_filename, output):
    """Write an aBayesQR configuration file for a given SAM/reference pair.

    The template text (including the 'filname' spelling) is what the tool
    parses and is reproduced verbatim; only the two filenames are substituted.
    """
    template = """filename of reference sequence (FASTA) : %s
filname of the aligned reads (sam format) : %s
paired-end (1 = true, 0 = false) : 0
SNV_thres : 0.01
reconstruction_start : 1
reconstruction_stop: 1300
min_mapping_qual : 20
min_read_length : 50
max_insert_length : 250
characteristic zone name : test
seq_err (assumed sequencing error rate(%%)) : 0.1
MEC improvement threshold : 0.0395 """
    with open(output, 'w') as config_file:
        config_file.write(template % (reference_filename, sam_filename))
def parse_abayesqr_output(input_text, output_fasta):
    """Convert aBayesQR text output (alternating header/sequence lines) to FASTA.

    Even lines carry a haplotype frequency as their last whitespace-separated
    token; odd lines carry the sequence itself. Output records are named
    'haplotype-<n>_freq-<f>'.
    """
    with open(input_text) as input_file:
        lines = input_file.readlines()
    records = []
    header = None
    for line_number, line in enumerate(lines):
        if line_number % 2 == 0:
            frequency = float(line.split()[-1])
            header = 'haplotype-%d_freq-%f' % (line_number // 2 + 1, frequency)
        else:
            records.append(
                SeqRecord(Seq(line.strip()), id=header, description='')
            )
    SeqIO.write(records, output_fasta, 'fasta')
def pairwise_distance_csv(fasta_filename, csv_filename):
    """Write pairwise Hamming distances between aligned FASTA records to a CSV.

    Every ordered pair (first, second) is emitted except pairs whose second
    member's name begins with 'quasispecies'.
    """
    records = list(SeqIO.parse(fasta_filename, 'fasta'))
    seq_matrix = np.array(
        [list(str(record.seq)) for record in records],
        dtype='<U1'
    )
    rows = {'first_record': [], 'second_record': [], 'distance': []}
    excluded_prefix = 'quasispecies'
    for i, first in enumerate(records):
        for j, second in enumerate(records):
            if second.name.startswith(excluded_prefix):
                continue
            rows['first_record'].append(first.id)
            rows['second_record'].append(second.id)
            rows['distance'].append((seq_matrix[i] != seq_matrix[j]).sum())
    pd.DataFrame(rows).to_csv(csv_filename)
def add_subtype_information(input_csv, output_csv):
    """Annotate a pairwise-distance CSV with subtypes parsed from sequence IDs.

    The subtype is the portion of each ID before the first '.'; it is added
    as 'Subtype1' / 'Subtype2' next to 'ID1' / 'ID2'.
    """
    table = pd.read_csv(input_csv)
    for id_column, subtype_column in (('ID1', 'Subtype1'), ('ID2', 'Subtype2')):
        table[subtype_column] = table[id_column].str.split('.').str[0]
    table.to_csv(output_csv, index=False)
def extract_truth(
    input_fasta, reference_path, dataset, reference, output_path,
    output_json_path
):
    """Align each true haplotype to a reference and record pairwise distances.

    Runs the external EMBOSS `water` local aligner on every record of
    `input_fasta` against `reference_path`, truncates all alignments to the
    shortest one, writes them to `output_path`, and writes the pairwise
    character disagreements between the truncated sequences to
    `output_json_path`.
    """
    sequences = list(SeqIO.parse(input_fasta, "fasta"))
    aligned_sequences = []
    # scratch directory under the dataset's truth output folder
    output_dir = os.path.join("output", "truth", dataset)
    tmp_dir = os.path.join(
        output_dir, "truth-%s-%s-temp" % (dataset, reference)
    )
    os.mkdir(tmp_dir)
    for sequence in sequences:
        sequence_path = os.path.join(tmp_dir, "ref.fasta")
        alignment_path = os.path.join(tmp_dir, "aligned.fasta")
        SeqIO.write(sequence, sequence_path, "fasta")
        # EMBOSS water: Smith-Waterman local alignment with FASTA output
        command = [
            "water", "-asequence", sequence_path, "-bsequence",
            reference_path, "-gapopen", "10.0", "-gapextend", ".5", "-aformat",
            "fasta", "-outfile", alignment_path
        ]
        subprocess.run(command)
        # take the first record of the water output (presumably the aligned
        # input sequence -- TODO confirm) and drop its gaps
        aligned_sequence = list(SeqIO.parse(alignment_path, "fasta"))[0]
        aligned_sequence.seq = aligned_sequence.seq.ungap('-')
        aligned_sequences.append(aligned_sequence)
        os.remove(sequence_path)
        os.remove(alignment_path)
    os.rmdir(tmp_dir)
    # truncate to the shortest alignment so sequences compare site-wise
    sequence_length = min([len(record.seq) for record in aligned_sequences])
    for record in aligned_sequences:
        record.seq = record.seq[:sequence_length]
    SeqIO.write(aligned_sequences, output_path, "fasta")
    # pairwise character disagreements over all unordered sequence pairs
    pairwise_distances = []
    for i in range(len(aligned_sequences)):
        first_sequence = aligned_sequences[i]
        first_np = np.array(list(first_sequence.seq), dtype='<U1')
        for j in range(i+1, len(aligned_sequences)):
            second_sequence = aligned_sequences[j]
            second_np = np.array(list(second_sequence.seq), dtype='<U1')
            disagreement = int((first_np != second_np).sum())
            pairwise_distances.append({
                'sequenceA': first_sequence.name,
                'sequenceB': second_sequence.name,
                'disagreement': disagreement
            })
    with open(output_json_path, 'w') as json_file:
        json.dump(pairwise_distances, json_file, indent=2)
def covarying_truth(
    input_computed, input_actual, input_reference, output_json
):
    """Score computed covarying sites against the true ones over a reference.

    Classifies every reference position as true/false positive/negative and
    writes the lists plus precision and recall to `output_json`.

    Improvements over the previous version: the JSON site lists are loaded
    into sets so membership tests are O(1) instead of O(n) per position, and
    precision/recall are reported as 0.0 (instead of raising
    ZeroDivisionError) when their denominators are empty.
    """
    reference = SeqIO.read(input_reference, 'fasta')
    reference_length = len(reference.seq)
    with open(input_computed) as input_file:
        computed_sites = set(json.load(input_file))
    with open(input_actual) as input_file:
        actual_sites = set(json.load(input_file))
    tp, fp, tn, fn = [], [], [], []
    for site in range(reference_length):
        in_actual = site in actual_sites
        in_computed = site in computed_sites
        if in_actual and in_computed:
            tp.append(site)
        elif in_actual:
            fn.append(site)
        elif in_computed:
            fp.append(site)
        else:
            tn.append(site)
    # Guard empty denominators so degenerate inputs report 0 rather than crash.
    precision = len(tp) / (len(tp) + len(fp)) if tp or fp else 0.0
    recall = len(tp) / (len(tp) + len(fn)) if tp or fn else 0.0
    result = {
        'true_positives': tp,
        'true_negative': tn,
        'false_positives': fp,
        'false_negatives': fn,
        'precision': precision,
        'recall': recall
    }
    with open(output_json, 'w') as output_file:
        json.dump(result, output_file, indent=2)
def restrict_fasta_to_cvs(input_fasta, input_cvs, output_fasta):
    """Restrict every FASTA record to the covarying sites listed in a JSON file."""
    with open(input_cvs) as json_file:
        sites = json.load(json_file)
    restricted = []
    for record in SeqIO.parse(input_fasta, 'fasta'):
        record.seq = Seq(''.join(record.seq[site] for site in sites))
        restricted.append(record)
    SeqIO.write(restricted, output_fasta, 'fasta')
def downsample_bam(input_bam_path, output_bam_path, downsample_amount):
    """Randomly retain (100 - downsample_amount)%% of reads from a BAM file.

    Args:
        input_bam_path: path to the indexed input BAM.
        output_bam_path: path for the downsampled BAM (also indexed here).
        downsample_amount: percentage of reads to discard (0-100); may be a string.

    The selection is deterministic (fixed RNG seed 1).

    Fixes: `np.int` was removed in NumPy 1.24 (AttributeError); an empty
    selection (downsample_amount == 100) previously raised IndexError on the
    first read.
    """
    keep_fraction = 1 - int(downsample_amount) / 100
    input_bam = pysam.AlignmentFile(input_bam_path, 'rb')
    number_of_reads = input_bam.count()
    # builtin int replaces the removed np.int alias
    number_to_keep = int(np.ceil(keep_fraction * number_of_reads))
    np.random.seed(1)
    keep_indices = np.random.choice(
        number_of_reads, number_to_keep, replace=False
    )
    keep_indices.sort()
    output_bam = pysam.AlignmentFile(
        output_bam_path, 'wb', header=input_bam.header
    )
    keep_pointer = 0
    # guard: when nothing is kept there is no index to compare against
    if len(keep_indices) > 0:
        for i, read in enumerate(input_bam.fetch()):
            if i == keep_indices[keep_pointer]:
                output_bam.write(read)
                keep_pointer += 1
                if keep_pointer == len(keep_indices):
                    break
    output_bam.close()
    pysam.index(output_bam_path)
    input_bam.close()
def pluck_record(input_fasta_path, output_fasta_path, record):
    """Extract a single record (by ID) from a FASTA file into its own file."""
    record_index = SeqIO.to_dict(SeqIO.parse(input_fasta_path, 'fasta'))
    SeqIO.write(record_index[record], output_fasta_path, 'fasta')
def single_mapping_dataset(bam_path, ref_path, output_path):
    """Summarize per-read mapping statistics against a single reference.

    For every mapped read, records mapping quality, query length, the number
    of aligned (matched) pairs, the number of mismatches among them, and the
    resulting percent identity; writes one CSV row per read to `output_path`
    indexed by read name.

    Fix: `np.float` / `np.int` were removed in NumPy 1.24; the builtin
    `float` / `int` are used instead.
    """
    bam = pysam.AlignmentFile(bam_path)
    ref = SeqIO.read(ref_path, 'fasta')
    percent_identity = np.zeros(bam.mapped, dtype=float)
    differences = np.zeros(bam.mapped, dtype=float)
    number_of_aligned_pairs = np.zeros(bam.mapped, dtype=float)
    for i, read in enumerate(bam.fetch()):
        aligned_pairs = read.get_aligned_pairs(matches_only=True)
        aligned_query = np.array([
            read.query[pair[0]] for pair in aligned_pairs
        ], dtype='<U1')
        aligned_reference = np.array([
            ref[pair[1]] for pair in aligned_pairs
        ], dtype='<U1')
        agreement = (aligned_query == aligned_reference).sum()
        number_of_aligned_pairs[i] = len(aligned_pairs)
        differences[i] = number_of_aligned_pairs[i] - agreement
        percent_identity[i] = agreement / number_of_aligned_pairs[i]
    quality = np.array([
        read.mapping_quality for read in bam.fetch()
    ], dtype=int)
    query_length = np.array([
        read.query_length for read in bam.fetch()
    ], dtype=int)
    result = pd.DataFrame({
        'mapping_quality': quality,
        'differences': differences,
        'number_of_aligned_pairs': number_of_aligned_pairs,
        'percent_identity': percent_identity,
        'query_length': query_length
    }, index=[read.query_name for read in bam.fetch()])
    result.to_csv(output_path, index_label='read_id')
def full_fvm_mapping_dataset(dataset_paths, output_csv_path):
    """Concatenate per-reference mapping CSVs, tagging rows with the reference.

    The reference name is taken from the second-to-last '/'-separated
    component of each input path.
    """
    frames = []
    for path in dataset_paths:
        frame = pd.read_csv(path, index_col='read_id')
        frame['reference'] = path.split('/')[-2]
        frames.append(frame)
    combined = pd.concat(frames, axis=0, sort=False, ignore_index=True)
    combined.to_csv(output_csv_path)
def true_covarying_kmers(input_fasta, input_json, output_csv, k):
    """Enumerate the k-mers observed at consecutive covarying sites in the truth.

    For every window of k consecutive covarying sites, records each distinct
    character combination observed across the true haplotypes, one CSV row
    per (window, k-mer) pair with columns index_0..index_{k-1} and
    character_0..character_{k-1}.

    Fixes: `np.int` was removed in NumPy 1.24; the inner position loop no
    longer shadows the window loop variable.
    """
    k = int(k)
    records = np.array([
        list(record.seq)
        for record in SeqIO.parse(input_fasta, 'fasta')
    ], dtype='<U1')
    data = {
        **{'index_%d' % i: [] for i in range(k)},
        **{'character_%d' % i: [] for i in range(k)}
    }
    with open(input_json) as json_file:
        # builtin int replaces the removed np.int alias
        covarying_sites = np.array(json.load(json_file), dtype=int)
    for window_start in range(len(covarying_sites) - k):
        covarying_indices = covarying_sites[window_start:window_start + k]
        covarying_kmers = set()
        for row_index in range(records.shape[0]):
            covarying_kmers.add(''.join(records[row_index, covarying_indices]))
        for covarying_kmer in covarying_kmers:
            for position in range(k):
                data['index_%d' % position].append(covarying_indices[position])
                data['character_%d' % position].append(covarying_kmer[position])
    pd.DataFrame(data).to_csv(output_csv, index=False)
def kmers_in_reads(input_bam, input_csv, output_csv, k):
    """Count read support for each covarying k-mer listed in a CSV.

    For every read, each k-mer whose covarying sites fall entirely within the
    read's reference span is compared against the read's aligned characters
    at those sites; matching k-mers have their 'support' count incremented.
    The augmented table is written to `output_csv`.

    Fix: `np.int` was removed in NumPy 1.24; builtin `int` is used instead.
    """
    k = int(k)
    bam = pysam.AlignmentFile(input_bam)
    df = pd.read_csv(input_csv)
    df['support'] = np.zeros(len(df), dtype=int)
    for read in bam.fetch():
        starts_after = df.index_0 >= read.reference_start
        ends_before = df['index_%d' % (k - 1)] <= read.reference_end
        relevant_kmers = df.loc[starts_after & ends_before, :]
        for row_label, row in relevant_kmers.iterrows():
            site_indices = list(row[['index_%d' % i for i in range(k)]])
            # characters the read aligns to the k-mer's covarying sites
            read_characters = ''.join([
                read.query[pair[0]]
                for pair in read.get_aligned_pairs(matches_only=True)
                if pair[1] in site_indices
            ])
            kmer = ''.join(row[['character_%d' % i for i in range(k)]])
            if read_characters == kmer:
                df.loc[row_label, 'support'] += 1
    df.to_csv(output_csv)
def result_json(distance_csv, output_json):
    """Summarize best quasispecies matches from a pairwise-distance CSV.

    Reads columns first_record, second_record, distance and writes a JSON
    mapping each selected record to its closest quasispecies match and the
    corresponding distance.
    """
    df = pd.read_csv(distance_csv)
    # rows whose first member does NOT start with 'qua'
    not_quasispecies = df.first_record.apply(lambda x: x[:3] != 'qua')
    desired_records = list(set(df.first_record[not_quasispecies]))
    second_is_quasispecies = df.second_record.apply(lambda x: x[:3] == 'qua')
    results = {}
    for record in desired_records:
        # NOTE(review): desired_records contains only names that do NOT start
        # with 'qua', so this guard skips every iteration and `results` is
        # always empty -- confirm which of the two filters is intended.
        if record[:3] != 'qua': continue
        first_is_desired = df.first_record == record
        # closest quasispecies second member for this record
        best_match_index = df.loc[
            first_is_desired & second_is_quasispecies, 'distance'
        ].idxmin()
        results[record] = {
            'best_match': str(df.loc[best_match_index, 'second_record']),
            'distance': int(df.loc[best_match_index, 'distance']),
        }
    with open(output_json, 'w') as json_file:
        json.dump(results, json_file, indent=2)
def covarying_fasta(input_json, input_fasta, output_fasta, end_correction=10):
    """Restrict FASTA records to covarying sites away from the sequence ends.

    Sites within `end_correction` positions of either end of each record are
    excluded.

    Fix: `end_correction` is coerced to int, consistent with the `int(orf)` /
    `int(k)` casts used by the other helpers here; a string value would
    otherwise raise TypeError in the `i > end_correction` comparison.
    """
    end_correction = int(end_correction)
    with open(input_json) as json_file:
        covarying_sites = json.load(json_file)
    records = list(SeqIO.parse(input_fasta, 'fasta'))
    for record in records:
        last_site = len(record.seq) - end_correction
        record.seq = Seq(
            ''.join([
                record.seq[i]
                for i in covarying_sites
                if i > end_correction and i < last_site
            ])
        )
    SeqIO.write(records, output_fasta, 'fasta')
def report(input_files, output_csv, report_type):
    """Summarize per-dataset haplotype results into a single CSV report.

    Each input JSON maps haplotype names to {'best_match', 'distance'}; the
    largest distance per file is reported. Dataset and gene names are taken
    from path components 1 and 4 of each input path. For 'reconstructing'
    reports, a worst distance above 5 raises an exception.
    """
    csvfile = open(output_csv, 'w')
    writer = csv.DictWriter(
        csvfile, ['dataset', 'gene', 'worst_distance', 'report_type']
    )
    writer.writeheader()
    for file_path in input_files:
        with open(file_path) as json_file:
            result_data = json.load(json_file)
        path_parts = file_path.split('/')
        dataset = path_parts[1]
        gene = path_parts[4]
        # max with a 0 floor matches the previous running-maximum loop
        worst_distance = max(
            [0] + [value['distance'] for value in result_data.values()]
        )
        if report_type == 'reconstructing' and worst_distance > 5:
            raise Exception('A reconstruction dataset failed!', dataset)
        writer.writerow({
            'dataset': dataset,
            'gene': gene,
            'worst_distance': worst_distance,
            'report_type': report_type
        })
    csvfile.close()
def haplotyper_report(input_files, output_csv):
    """Summarize per-dataset haplotyper results into a CSV of worst distances.

    Each input path is mapped to its sibling '.csv' file (skipped if absent),
    whose contents are parsed as JSON mapping haplotype names to
    {'distance': ...}.  NOTE(review): loading a '.csv'-named file with
    json.load looks suspicious -- confirm the upstream stage really writes
    JSON there.

    Bug fix: `worst_distance` was previously never initialized, so the first
    comparison raised NameError; it is now reset to 0 for every file.
    """
    csvfile = open(output_csv, 'w')
    field_names = ['dataset', 'worst_distance']
    writer = csv.DictWriter(csvfile, field_names)
    writer.writeheader()
    for file_path in input_files:
        file_path = file_path.split('.')[0] + '.csv'
        if not os.path.exists(file_path):
            continue
        with open(file_path) as json_file:
            result_data = json.load(json_file)
        worst_distance = 0
        for key, value in result_data.items():
            if value['distance'] > worst_distance:
                worst_distance = value['distance']
        writer.writerow({
            'dataset': file_path,
            'worst_distance': worst_distance,
        })
    csvfile.close()
def superread_agreement(input_superreads, input_fasta, input_json, output_csv):
    """Score each superread against the true haplotypes at covarying sites.

    For every superread, reports the closest true haplotype (fewest
    mismatches over the superread's covered span of covarying sites) and the
    smallest single-crossover recombinant distance over any ordered pair of
    true haplotypes, as one CSV row per superread.

    Fix: `np.int` was removed in NumPy 1.24; builtin `int` is used instead.
    The agreement logic itself is unchanged.
    """
    superreads = list(SeqIO.parse(input_superreads, 'fasta'))
    truth = list(SeqIO.parse(input_fasta, 'fasta'))
    with open(input_json) as json_file:
        sites = np.array(json.load(json_file), dtype=int)
    csvfile = open(output_csv, 'w')
    csvwriter = csv.DictWriter(
        csvfile, fieldnames=[
            'superread_id',
            'weight',
            'true_id',
            'smallest_diff',
            'smallest_recomb',
            'start',
            'stop'
        ]
    )
    csvwriter.writeheader()
    n_char = len(sites)
    for superread in superreads:
        smallest_diff = 1e6
        # name format is '<id>_<tag>-<weight>' as parsed here
        superread_id, weight = superread.name.split('_')
        weight = int(weight.split('-')[1])
        # restrict the superread to the covarying sites only
        superread_np = np.array(list(superread.seq), dtype='<U1')[sites]
        # covered span: first non-gap through the next gap after it
        start = (superread_np != '-').argmax()
        # NOTE(review): if there is no gap after `start`, argmax returns 0,
        # making stop <= start and the spans below empty -- confirm intended.
        stop = ((np.arange(n_char) >= start) & (superread_np == '-')).argmax()
        smallest_recomb = 1e6
        for true_sequence_a in truth:
            true_a_np = np.array(list(true_sequence_a.seq), dtype='<U1')
            diff = (superread_np[start:stop] != true_a_np[start:stop]).sum()
            if diff < smallest_diff:
                smallest_diff = diff
                smallest_id = true_sequence_a.name
            for true_sequence_b in truth:
                true_b_np = np.array(list(true_sequence_b.seq), dtype='<U1')
                # try every crossover point i: a-prefix + b-suffix
                for i in range(start, stop):
                    first = true_a_np[start:i] != superread_np[start:i]
                    second = true_b_np[i:stop] != superread_np[i:stop]
                    recomb = first.sum() + second.sum()
                    if recomb < smallest_recomb:
                        smallest_recomb = recomb
        csvwriter.writerow({
            'superread_id': superread_id,
            'weight': weight,
            'true_id': smallest_id,
            'smallest_diff': smallest_diff,
            'smallest_recomb': smallest_recomb,
            'start': start,
            'stop': stop
        })
    csvfile.close()
def superread_scatter_data(superread_path, output_csv):
    """Write scatter-plot input: each superread's weight and vacs length.

    Reads the superread JSON at ``superread_path`` and stores a two-column
    table (``weight``, ``vacs_length``) as CSV at ``output_csv``.
    """
    with open(superread_path) as handle:
        superread_records = json.load(handle)
    weights = []
    vacs_lengths = []
    for record in superread_records:
        weights.append(record['weight'])
        vacs_lengths.append(len(record['vacs']))
    table = pd.DataFrame({
        'weight': weights,
        'vacs_length': vacs_lengths,
    })
    table.to_csv(output_csv)
| [
"csv.DictWriter",
"pandas.read_csv",
"Bio.Seq.Seq",
"pysam.AlignmentFile",
"numpy.array",
"numpy.arange",
"os.remove",
"os.path.exists",
"subprocess.run",
"Bio.SeqIO.read",
"Bio.SeqIO.write",
"os.mkdir",
"numpy.random.seed",
"pandas.DataFrame",
"pysam.index",
"numpy.ceil",
"numpy.ran... | [((267, 300), 'Bio.SeqIO.read', 'SeqIO.read', (['input_genome', '"""fasta"""'], {}), "(input_genome, 'fasta')\n", (277, 300), False, 'from Bio import SeqIO\n'), ((339, 382), 'Bio.SeqIO.write', 'SeqIO.write', (['record', 'output_genome', '"""fasta"""'], {}), "(record, output_genome, 'fasta')\n", (350, 382), False, 'from Bio import SeqIO\n'), ((469, 507), 'Bio.SeqIO.parse', 'SeqIO.parse', (['input_nucleotide', '"""fasta"""'], {}), "(input_nucleotide, 'fasta')\n", (480, 507), False, 'from Bio import SeqIO\n'), ((523, 558), 'Bio.SeqIO.parse', 'SeqIO.parse', (['input_protein', '"""fasta"""'], {}), "(input_protein, 'fasta')\n", (534, 558), False, 'from Bio import SeqIO\n'), ((1158, 1200), 'Bio.SeqIO.write', 'SeqIO.write', (['codons', 'output_codon', '"""fasta"""'], {}), "(codons, output_codon, 'fasta')\n", (1169, 1200), False, 'from Bio import SeqIO\n'), ((1432, 1479), 'Bio.SeqIO.read', 'SeqIO.read', (['nucleotide_genome_filename', '"""fasta"""'], {}), "(nucleotide_genome_filename, 'fasta')\n", (1442, 1479), False, 'from Bio import SeqIO\n'), ((2492, 2528), 'Bio.SeqIO.write', 'SeqIO.write', (['record', 'output', '"""fasta"""'], {}), "(record, output, 'fasta')\n", (2503, 2528), False, 'from Bio import SeqIO\n'), ((3639, 3682), 'Bio.SeqIO.write', 'SeqIO.write', (['records', 'output_fasta', '"""fasta"""'], {}), "(records, output_fasta, 'fasta')\n", (3650, 3682), False, 'from Bio import SeqIO\n'), ((4586, 4608), 'pandas.read_csv', 'pd.read_csv', (['input_csv'], {}), '(input_csv)\n', (4597, 4608), True, 'import pandas as pd\n'), ((5011, 5051), 'os.path.join', 'os.path.join', (['"""output"""', '"""truth"""', 'dataset'], {}), "('output', 'truth', dataset)\n", (5023, 5051), False, 'import os\n'), ((5066, 5133), 'os.path.join', 'os.path.join', (['output_dir', "('truth-%s-%s-temp' % (dataset, reference))"], {}), "(output_dir, 'truth-%s-%s-temp' % (dataset, reference))\n", (5078, 5133), False, 'import os\n'), ((5152, 5169), 'os.mkdir', 'os.mkdir', (['tmp_dir'], {}), 
'(tmp_dir)\n', (5160, 5169), False, 'import os\n'), ((5890, 5907), 'os.rmdir', 'os.rmdir', (['tmp_dir'], {}), '(tmp_dir)\n', (5898, 5907), False, 'import os\n'), ((6076, 6128), 'Bio.SeqIO.write', 'SeqIO.write', (['aligned_sequences', 'output_path', '"""fasta"""'], {}), "(aligned_sequences, output_path, 'fasta')\n", (6087, 6128), False, 'from Bio import SeqIO\n'), ((6981, 7017), 'Bio.SeqIO.read', 'SeqIO.read', (['input_reference', '"""fasta"""'], {}), "(input_reference, 'fasta')\n", (6991, 7017), False, 'from Bio import SeqIO\n'), ((8236, 8279), 'Bio.SeqIO.write', 'SeqIO.write', (['records', 'output_fasta', '"""fasta"""'], {}), "(records, output_fasta, 'fasta')\n", (8247, 8279), False, 'from Bio import SeqIO\n'), ((8431, 8472), 'pysam.AlignmentFile', 'pysam.AlignmentFile', (['input_bam_path', '"""rb"""'], {}), "(input_bam_path, 'rb')\n", (8450, 8472), False, 'import pysam\n'), ((8616, 8633), 'numpy.random.seed', 'np.random.seed', (['(1)'], {}), '(1)\n', (8630, 8633), True, 'import numpy as np\n'), ((8659, 8726), 'numpy.random.choice', 'np.random.choice', (['number_of_reads', 'downsample_number'], {'replace': '(False)'}), '(number_of_reads, downsample_number, replace=False)\n', (8675, 8726), True, 'import numpy as np\n'), ((8813, 8880), 'pysam.AlignmentFile', 'pysam.AlignmentFile', (['output_bam_path', '"""wb"""'], {'header': 'input_bam.header'}), "(output_bam_path, 'wb', header=input_bam.header)\n", (8832, 8880), False, 'import pysam\n'), ((9168, 9196), 'pysam.index', 'pysam.index', (['output_bam_path'], {}), '(output_bam_path)\n', (9179, 9196), False, 'import pysam\n'), ((9302, 9340), 'Bio.SeqIO.parse', 'SeqIO.parse', (['input_fasta_path', '"""fasta"""'], {}), "(input_fasta_path, 'fasta')\n", (9313, 9340), False, 'from Bio import SeqIO\n'), ((9401, 9456), 'Bio.SeqIO.write', 'SeqIO.write', (['desired_record', 'output_fasta_path', '"""fasta"""'], {}), "(desired_record, output_fasta_path, 'fasta')\n", (9412, 9456), False, 'from Bio import SeqIO\n'), ((9530, 9559), 
'pysam.AlignmentFile', 'pysam.AlignmentFile', (['bam_path'], {}), '(bam_path)\n', (9549, 9559), False, 'import pysam\n'), ((9570, 9599), 'Bio.SeqIO.read', 'SeqIO.read', (['ref_path', '"""fasta"""'], {}), "(ref_path, 'fasta')\n", (9580, 9599), False, 'from Bio import SeqIO\n'), ((9623, 9659), 'numpy.zeros', 'np.zeros', (['bam.mapped'], {'dtype': 'np.float'}), '(bam.mapped, dtype=np.float)\n', (9631, 9659), True, 'import numpy as np\n'), ((9678, 9714), 'numpy.zeros', 'np.zeros', (['bam.mapped'], {'dtype': 'np.float'}), '(bam.mapped, dtype=np.float)\n', (9686, 9714), True, 'import numpy as np\n'), ((9745, 9781), 'numpy.zeros', 'np.zeros', (['bam.mapped'], {'dtype': 'np.float'}), '(bam.mapped, dtype=np.float)\n', (9753, 9781), True, 'import numpy as np\n'), ((12474, 12504), 'pysam.AlignmentFile', 'pysam.AlignmentFile', (['input_bam'], {}), '(input_bam)\n', (12493, 12504), False, 'import pysam\n'), ((12514, 12536), 'pandas.read_csv', 'pd.read_csv', (['input_csv'], {}), '(input_csv)\n', (12525, 12536), True, 'import pandas as pd\n'), ((13329, 13354), 'pandas.read_csv', 'pd.read_csv', (['distance_csv'], {}), '(distance_csv)\n', (13340, 13354), True, 'import pandas as pd\n'), ((14616, 14659), 'Bio.SeqIO.write', 'SeqIO.write', (['records', 'output_fasta', '"""fasta"""'], {}), "(records, output_fasta, 'fasta')\n", (14627, 14659), False, 'from Bio import SeqIO\n'), ((14832, 14868), 'csv.DictWriter', 'csv.DictWriter', (['csvfile', 'field_names'], {}), '(csvfile, field_names)\n', (14846, 14868), False, 'import csv\n'), ((15762, 15798), 'csv.DictWriter', 'csv.DictWriter', (['csvfile', 'field_names'], {}), '(csvfile, field_names)\n', (15776, 15798), False, 'import csv\n'), ((16699, 16829), 'csv.DictWriter', 'csv.DictWriter', (['csvfile'], {'fieldnames': "['superread_id', 'weight', 'true_id', 'smallest_diff', 'smallest_recomb',\n 'start', 'stop']"}), "(csvfile, fieldnames=['superread_id', 'weight', 'true_id',\n 'smallest_diff', 'smallest_recomb', 'start', 'stop'])\n", (16713, 
16829), False, 'import csv\n'), ((1615, 1674), 'Bio.SeqIO.parse', 'SeqIO.parse', (['(aligned_filename % (dataset, gene, i))', '"""fasta"""'], {}), "(aligned_filename % (dataset, gene, i), 'fasta')\n", (1626, 1674), False, 'from Bio import SeqIO\n'), ((3761, 3797), 'Bio.SeqIO.parse', 'SeqIO.parse', (['fasta_filename', '"""fasta"""'], {}), "(fasta_filename, 'fasta')\n", (3772, 3797), False, 'from Bio import SeqIO\n'), ((4932, 4965), 'Bio.SeqIO.parse', 'SeqIO.parse', (['input_fasta', '"""fasta"""'], {}), "(input_fasta, 'fasta')\n", (4943, 4965), False, 'from Bio import SeqIO\n'), ((5225, 5259), 'os.path.join', 'os.path.join', (['tmp_dir', '"""ref.fasta"""'], {}), "(tmp_dir, 'ref.fasta')\n", (5237, 5259), False, 'import os\n'), ((5285, 5323), 'os.path.join', 'os.path.join', (['tmp_dir', '"""aligned.fasta"""'], {}), "(tmp_dir, 'aligned.fasta')\n", (5297, 5323), False, 'import os\n'), ((5332, 5377), 'Bio.SeqIO.write', 'SeqIO.write', (['sequence', 'sequence_path', '"""fasta"""'], {}), "(sequence, sequence_path, 'fasta')\n", (5343, 5377), False, 'from Bio import SeqIO\n'), ((5608, 5631), 'subprocess.run', 'subprocess.run', (['command'], {}), '(command)\n', (5622, 5631), False, 'import subprocess\n'), ((5827, 5851), 'os.remove', 'os.remove', (['sequence_path'], {}), '(sequence_path)\n', (5836, 5851), False, 'import os\n'), ((5860, 5885), 'os.remove', 'os.remove', (['alignment_path'], {}), '(alignment_path)\n', (5869, 5885), False, 'import os\n'), ((6814, 6864), 'json.dump', 'json.dump', (['pairwise_distances', 'json_file'], {'indent': '(2)'}), '(pairwise_distances, json_file, indent=2)\n', (6823, 6864), False, 'import json\n'), ((7105, 7126), 'json.load', 'json.load', (['input_file'], {}), '(input_file)\n', (7114, 7126), False, 'import json\n'), ((7189, 7210), 'json.load', 'json.load', (['input_file'], {}), '(input_file)\n', (7198, 7210), False, 'import json\n'), ((7899, 7939), 'json.dump', 'json.dump', (['result', 'output_file'], {'indent': '(2)'}), '(result, output_file, 
indent=2)\n', (7908, 7939), False, 'import json\n'), ((8060, 8080), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (8069, 8080), False, 'import json\n'), ((8100, 8133), 'Bio.SeqIO.parse', 'SeqIO.parse', (['input_fasta', '"""fasta"""'], {}), "(input_fasta, 'fasta')\n", (8111, 8133), False, 'from Bio import SeqIO\n'), ((9362, 9388), 'Bio.SeqIO.to_dict', 'SeqIO.to_dict', (['all_records'], {}), '(all_records)\n', (9375, 9388), False, 'from Bio import SeqIO\n'), ((9915, 9985), 'numpy.array', 'np.array', (['[read.query[pair[0]] for pair in aligned_pairs]'], {'dtype': '"""<U1"""'}), "([read.query[pair[0]] for pair in aligned_pairs], dtype='<U1')\n", (9923, 9985), True, 'import numpy as np\n'), ((10036, 10099), 'numpy.array', 'np.array', (['[ref[pair[1]] for pair in aligned_pairs]'], {'dtype': '"""<U1"""'}), "([ref[pair[1]] for pair in aligned_pairs], dtype='<U1')\n", (10044, 10099), True, 'import numpy as np\n'), ((14070, 14109), 'json.dump', 'json.dump', (['results', 'json_file'], {'indent': '(2)'}), '(results, json_file, indent=2)\n', (14079, 14109), False, 'import json\n'), ((14257, 14277), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (14266, 14277), False, 'import json\n'), ((14297, 14330), 'Bio.SeqIO.parse', 'SeqIO.parse', (['input_fasta', '"""fasta"""'], {}), "(input_fasta, 'fasta')\n", (14308, 14330), False, 'from Bio import SeqIO\n'), ((16454, 16492), 'Bio.SeqIO.parse', 'SeqIO.parse', (['input_superreads', '"""fasta"""'], {}), "(input_superreads, 'fasta')\n", (16465, 16492), False, 'from Bio import SeqIO\n'), ((16511, 16544), 'Bio.SeqIO.parse', 'SeqIO.parse', (['input_fasta', '"""fasta"""'], {}), "(input_fasta, 'fasta')\n", (16522, 16544), False, 'from Bio import SeqIO\n'), ((18586, 18606), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (18595, 18606), False, 'import json\n'), ((3558, 3599), 'Bio.SeqRecord.SeqRecord', 'SeqRecord', (['seq'], {'id': 'header', 'description': '""""""'}), "(seq, id=header, 
description='')\n", (3567, 3599), False, 'from Bio.SeqRecord import SeqRecord\n'), ((4369, 4474), 'pandas.DataFrame', 'pd.DataFrame', (["{'first_record': first_records, 'second_record': second_records, 'distance':\n distances}"], {}), "({'first_record': first_records, 'second_record':\n second_records, 'distance': distances})\n", (4381, 4474), True, 'import pandas as pd\n'), ((8537, 8585), 'numpy.ceil', 'np.ceil', (['(downsample_percentage * number_of_reads)'], {}), '(downsample_percentage * number_of_reads)\n', (8544, 8585), True, 'import numpy as np\n'), ((11275, 11337), 'pandas.concat', 'pd.concat', (['all_datasets'], {'axis': '(0)', 'sort': '(False)', 'ignore_index': '(True)'}), '(all_datasets, axis=0, sort=False, ignore_index=True)\n', (11284, 11337), True, 'import pandas as pd\n'), ((11779, 11799), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (11788, 11799), False, 'import json\n'), ((12339, 12357), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (12351, 12357), True, 'import pandas as pd\n'), ((14997, 15017), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (15006, 15017), False, 'import json\n'), ((15926, 15951), 'os.path.exists', 'os.path.exists', (['file_path'], {}), '(file_path)\n', (15940, 15951), False, 'import os\n'), ((16043, 16063), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (16052, 16063), False, 'import json\n'), ((16611, 16631), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (16620, 16631), False, 'import json\n'), ((2392, 2411), 'Bio.Seq.Seq', 'Seq', (['desired_codons'], {}), '(desired_codons)\n', (2395, 2411), False, 'from Bio.Seq import Seq\n'), ((5664, 5700), 'Bio.SeqIO.parse', 'SeqIO.parse', (['alignment_path', '"""fasta"""'], {}), "(alignment_path, 'fasta')\n", (5675, 5700), False, 'from Bio import SeqIO\n'), ((11040, 11078), 'pandas.read_csv', 'pd.read_csv', (['path'], {'index_col': '"""read_id"""'}), "(path, index_col='read_id')\n", (11051, 11078), 
True, 'import pandas as pd\n'), ((11528, 11561), 'Bio.SeqIO.parse', 'SeqIO.parse', (['input_fasta', '"""fasta"""'], {}), "(input_fasta, 'fasta')\n", (11539, 11561), False, 'from Bio import SeqIO\n'), ((17284, 17301), 'numpy.arange', 'np.arange', (['n_char'], {}), '(n_char)\n', (17293, 17301), True, 'import numpy as np\n')] |
#!/usr/bin/python
"""Launcher for catkin_lint: put the bundled ``src`` tree on the path, run the CLI."""
import sys
import os

# The package lives in ./src next to this script; it must be on sys.path
# before catkin_lint can be imported.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "src"))
from catkin_lint.main import main

# Guard the entry point so importing this module does not run the linter.
if __name__ == "__main__":
    main()
| [
"os.path.dirname",
"catkin_lint.main.main"
] | [((140, 146), 'catkin_lint.main.main', 'main', ([], {}), '()\n', (144, 146), False, 'from catkin_lint.main import main\n'), ((71, 96), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (86, 96), False, 'import os\n')] |
import sys
import pycpabe

# CLI: generate a CP-ABE secret key (sk) for a list of attributes, using an
# existing public key (pk) and master key (mk).
argc = len(sys.argv)
if argc < 6:
    print("Usage: keygen [sk] [pk] [mk] [no] [a1] ... [aN]")
    print("sk: path to generate secret key")
    print("pk: path to public key")
    print("mk: path to master key")
    print("no: number of attributes in generated secret key")
    print("a1 ... aN: attributes")
    sys.exit(1)  # non-zero exit: invocation error
sk = sys.argv[1]
pk = sys.argv[2]
mk = sys.argv[3]
no = int(sys.argv[4])
# Fail with a clear message instead of an IndexError when fewer than `no`
# attributes were supplied.
if argc < 5 + no:
    print("Error: expected %d attributes but only %d were given" % (no, argc - 5))
    sys.exit(1)
attrs = []
for i in range(5, 5 + no):
    print("Attribute %d: %s" % (i - 5, sys.argv[i]))
    attrs.append(sys.argv[i])
# pycpabe signals failure with a -1 return value rather than an exception.
if pycpabe.cpabe_vkeygen(sk, pk, mk, no, attrs) == -1:
    print("Keygen failed!")
    sys.exit(1)  # propagate the failure to the shell
print("Keygen success!")
print("Secret key: %s" % sk)
| [
"pycpabe.cpabe_vkeygen",
"sys.exit"
] | [((321, 331), 'sys.exit', 'sys.exit', ([], {}), '()\n', (329, 331), False, 'import sys\n'), ((520, 564), 'pycpabe.cpabe_vkeygen', 'pycpabe.cpabe_vkeygen', (['sk', 'pk', 'mk', 'no', 'attrs'], {}), '(sk, pk, mk, no, attrs)\n', (541, 564), False, 'import pycpabe\n'), ((598, 608), 'sys.exit', 'sys.exit', ([], {}), '()\n', (606, 608), False, 'import sys\n')] |
#!/usr/bin/env python3
import re
from os import path
import lxml.etree as ET
import lxml.objectify as objectify
from bs4 import BeautifulSoup
from .models import Corpora
class TeiReader:
    """Read TEI XML documents and transform them into ``Corpora`` objects.

    Input is cleaned textually first (namespace declarations stripped, bare
    ampersands escaped), then parsed, passed through the bundled
    ``tei-transform.xsl`` stylesheet, and finally post-processed so that
    line/page beginnings become container elements.
    """
    # Matches xmlns="..." / xmlns:prefix="..." declarations so they can be removed.
    __xmlns = re.compile(r' *xmlns(|\:\w+)="[^"]*"')
    # A bare "&" followed by a space or "<", i.e. one that does not start an entity.
    __invalid_ampersand = re.compile(r'&(?=[ <])')
    # The XSLT stylesheet is parsed and compiled once, shared by all instances.
    __xslt = ET.parse(path.join(path.dirname(__file__), "transform", "tei-transform.xsl"))
    __transform = ET.XSLT(__xslt)
    def __do_transform(self, content: bytes) -> Corpora:
        """Parse ``content``, apply the XSLT transform and wrap the result."""
        try:
            dom = ET.fromstring(content)
        except ET.XMLSyntaxError:
            # fallback to Beautiful Soup if there are some oddities in the XML file
            dom = ET.fromstring(bytes(bytearray(str(BeautifulSoup(content, "xml")), encoding='utf-8')))
        xml = self.__assign_beginnings(self.__transform(dom).getroot())
        return Corpora(xml)
    def __assign_beginnings(self, xml: ET.Element) -> ET.Element:
        """Turn ``lb``/``pb`` milestone elements into containers.

        Siblings following each line/page beginning (up to the next one) are
        moved inside it; the element is then renamed to ``part``, or ``div``
        when a division ended up among the moved content.
        """
        def rename_n_attribute(element, name):
            # Re-label the element's stored attribute keys as e.g. "line"/"page";
            # presumably these hold the TEI numbering produced by the XSLT —
            # verify against tei-transform.xsl.
            for n in element.xpath('attributes/attribute[@key]'):
                n.attrib['key'] = name
        # A div can contain parts, but parts cannot contain divs.
        # It is important to respect this difference, because the
        # corpus2alpinoreader assumes that a sentence/utterance can be
        # split over parts, but not over divs.
        for line in xml.xpath('//lb'):
            rename_n_attribute(line, 'line')
            division = False
            for sibling in line.itersiblings():
                if sibling.tag in ['lb', 'pb']:
                    break
                for descendant in sibling.iterdescendants():
                    if descendant.tag in ['lb', 'pb']:
                        break
                    if descendant.tag == 'div':
                        division = True
                # lxml's append() moves the node out of its old position, so
                # itersiblings() continues with the element that followed it.
                line.append(sibling)
            line.tag = 'div' if division else 'part'
        for page in xml.xpath('//pb'):
            rename_n_attribute(page, 'page')
            division = False
            for sibling in page.itersiblings():
                if sibling.tag == 'pb':
                    break
                for descendant in sibling.iterdescendants():
                    if descendant.tag == 'pb':
                        break
                    if descendant.tag == 'div':
                        division = True
                page.append(sibling)
            page.tag = 'div' if division else 'part'
        return xml
    def __clean_line(self, line: str) -> str:
        """Strip namespace declarations and escape stray ampersands in one line."""
        line = self.__xmlns.sub('', line)
        line = self.__invalid_ampersand.sub('&amp;', line)
        return line
    def __clean_lines(self, lines) -> bytes:
        """Clean every line and return the joined document as UTF-8 bytes."""
        return bytes(bytearray(''.join(self.__clean_line(line) for line in lines), encoding='utf-8'))
    def __clean_file(self, filename) -> bytes:
        """Read ``filename`` as UTF-8 text and return its cleaned byte content."""
        with open(filename, encoding='utf-8') as file:
            return self.__clean_lines(file.readlines())
    def read_file(self, file_name) -> Corpora:
        """Parse the TEI file at ``file_name`` into a ``Corpora`` object."""
        content = self.__clean_file(file_name)
        return self.__do_transform(content)
    def read_string(self, content: str) -> Corpora:
        """Parse TEI ``content`` given as a string into a ``Corpora`` object."""
        cleaned = self.__clean_lines(content.split('\n'))
        return self.__do_transform(cleaned)
| [
"lxml.etree.XSLT",
"re.compile",
"bs4.BeautifulSoup",
"os.path.dirname",
"lxml.etree.fromstring"
] | [((204, 243), 're.compile', 're.compile', (['""" *xmlns(|\\\\:\\\\w+)="[^"]*\\""""'], {}), '(\' *xmlns(|\\\\:\\\\w+)="[^"]*"\')\n', (214, 243), False, 'import re\n'), ((269, 292), 're.compile', 're.compile', (['"""&(?=[ <])"""'], {}), "('&(?=[ <])')\n", (279, 292), False, 'import re\n'), ((403, 418), 'lxml.etree.XSLT', 'ET.XSLT', (['__xslt'], {}), '(__xslt)\n', (410, 418), True, 'import lxml.etree as ET\n'), ((326, 348), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (338, 348), False, 'from os import path\n'), ((490, 512), 'lxml.etree.fromstring', 'ET.fromstring', (['content'], {}), '(content)\n', (503, 512), True, 'import lxml.etree as ET\n'), ((683, 712), 'bs4.BeautifulSoup', 'BeautifulSoup', (['content', '"""xml"""'], {}), "(content, 'xml')\n", (696, 712), False, 'from bs4 import BeautifulSoup\n')] |