code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
from collections import deque
from collections import namedtuple
import sys
# with open('data/day18_test.txt') as f:
# # with open('data/day18.txt') as f:
# data = f.read().splitlines()
# C = len(data[0])
# R = len(data)
# Neighbour offsets (up, right, down, left); DR[i]/DC[i] form one step together.
DR, DC = [-1, 0, 1, 0], [0, 1, 0, -1]
# Pos = namedtuple('Pos', ['r', 'c', 'mykeys', 'd'])
# all_keys = set()
# Q = deque()
# for c in range(C):
# for r in range(R):
# val = data[r][c]
# if val == '@':
# p = Pos(r,c, set(), 0)
# Q.append(p)
# if 'a' <= val <= 'z':
# all_keys.add(val)
# SEEN = set()
# while Q:
# P = Q.popleft()
# k = (P.r, P.c, tuple(sorted(P.mykeys)))
# if k in SEEN:
# continue
# SEEN.add(k)
# # Out of range or wall
# if not (0 <= P.r <= R and 0 <= P.c <= C and data[P.r][P.c] != '#'):
# continue
# # Door without key
# val = data[P.r][P.c]
# if 'A' <= val <= 'Z' and val.lower() not in P.mykeys:
# continue
# new_keys = P.mykeys.copy()
# if 'a' <= val <= 'z':
# new_keys.add(val)
# if new_keys == all_keys:
# print(P.d)
# sys.exit(0)
# # Make sure we don't start from here and come back to it
# # just because we did not add the key
# k = (P.r, P.c, tuple(sorted(new_keys)))
# SEEN.add(k)
# for i in range(4):
# Q.append(Pos(P.r+DR[i], P.c+DC[i], new_keys, P.d+1))
# Part 2
# ------------------------------------
# Part 2: several '@' robots explore the vault together.  BFS over the joint
# state (the moving robot's cell, collected keys, every robot's position);
# the first state that holds all keys prints its distance and exits.
with open('data/day18.txt') as f:
# with open('data/day18_test.txt') as f:
# with open('data/day18_2.txt') as f:
    data = f.read().splitlines()
C = len(data[0])
R = len(data)
# One BFS state: cell (r, c), key set, steps walked so far, which robot
# moved last, and the (id, r, c) tuples of all robots.
Pos = namedtuple('Pos', ['r', 'c', 'mykeys', 'd', 'id', 'others'])
robot_id = 0  # renamed from `id` to stop shadowing the builtin
robots = []
Q = deque()
all_keys = set()
for r in range(R):
    for c in range(C):
        val = data[r][c]
        if val == '@':
            robots.append((robot_id, r, c))
            robot_id += 1
        if 'a' <= val <= 'z':
            all_keys.add(val)
for robot in robots:
    Q.append(Pos(robot[1], robot[2], set(), 0, robot[0], robots))
SEEN = set()
while Q:
    P = Q.popleft()
    k = (P.r, P.c, tuple(sorted(P.mykeys)), tuple(P.others))
    if k in SEEN:
        continue
    SEEN.add(k)
    # Out of range or wall.  Fixed off-by-one: the guard used `<= R`/`<= C`,
    # which let r == R (or c == C) through, so data[P.r][P.c] would raise
    # IndexError instead of the state being rejected.
    if not (0 <= P.r < R and 0 <= P.c < C and data[P.r][P.c] != '#'):
        continue
    # Door without its key: this state is a dead end.
    val = data[P.r][P.c]
    if 'A' <= val <= 'Z' and val.lower() not in P.mykeys:
        continue
    new_keys = P.mykeys.copy()
    others = P.others.copy()
    if 'a' <= val <= 'z':
        new_keys.add(val)
    if new_keys == all_keys:
        print('Part 2: ', P.d)
        sys.exit(0)
    # Record where this robot now stands so the other robots fan out from
    # an up-to-date picture of the board.
    others[P.id] = (P.id, P.r, P.c)
    for robot in others:
        for i in range(4):
            if robot[0] == P.id:
                # The robot that just moved keeps walking from its new cell.
                Q.append(
                    Pos(P.r+DR[i],
                        P.c+DC[i],
                        new_keys,
                        P.d+1,
                        P.id, others))
            else:
                # Alternatively, any other robot may start moving instead.
                Q.append(
                    Pos(robot[1]+DR[i],
                        robot[2]+DC[i],
                        new_keys,
                        P.d+1,
                        robot[0], others))
|
[
"sys.exit",
"collections.namedtuple",
"collections.deque"
] |
[((1683, 1743), 'collections.namedtuple', 'namedtuple', (['"""Pos"""', "['r', 'c', 'mykeys', 'd', 'id', 'others']"], {}), "('Pos', ['r', 'c', 'mykeys', 'd', 'id', 'others'])\n", (1693, 1743), False, 'from collections import namedtuple\n'), ((1767, 1774), 'collections.deque', 'deque', ([], {}), '()\n', (1772, 1774), False, 'from collections import deque\n'), ((2742, 2753), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2750, 2753), False, 'import sys\n')]
|
from fractions import Fraction
from typing import List
from typing import Tuple
from adventofcode.util.helpers import solution_timer
from adventofcode.util.input_helpers import get_input_for_day
def print_board(board: List[List[int]]):
    """Print the board one row per line, rendering zero cells as '.'."""
    for line in board:
        rendered = ["." if cell == 0 else str(cell) for cell in line]
        print("".join(rendered))
def get_slope(x1: int, y1: int, x2: int, y2: int):
    """Return the reduced |dx|:|dy| step ratio between two points.

    Horizontal segments (dy == 0) come back un-reduced as (|dx|, 0);
    everything else is reduced via Fraction, so e.g. (2, 6) -> (1, 3).
    """
    dx, dy = abs(x2 - x1), abs(y2 - y1)
    if dy == 0:
        return (dx, dy)
    ratio = Fraction(dx, dy)
    return (ratio.numerator, ratio.denominator)
def fill_line(line: Tuple[Tuple[int, int], Tuple[int, int]], board: List[List[int]]):
    """Increment every board cell covered by ``line``.

    ``line`` is a pair of (x, y) endpoints; cells are indexed board[y][x].
    Mutates ``board`` in place and returns None.
    """
    x1, y1 = line[0]
    x2, y2 = line[1]
    slope = get_slope(x1, y1, x2, y2)
    # Unit step direction along each axis, towards the second endpoint.
    x_dir = 1 if x1 < x2 else -1
    y_dir = 1 if y1 < y2 else -1
    if slope[0] == 0:
        # Vertical segment: walk y from y1 to y2 inclusive.
        while (y_dir == 1 and y1 <= y2) or (y_dir == -1 and y1 >= y2):
            board[y1][x1] += 1
            y1 += 1 * y_dir
    elif slope[1] == 0:
        # Horizontal segment: walk x from x1 to x2 inclusive.
        while (x_dir == 1 and x1 <= x2) or (x_dir == -1 and x1 >= x2):
            board[y1][x1] += 1
            x1 += 1 * x_dir
    else:
        # Diagonal segment: advance both axes by the reduced slope per step.
        # NOTE(review): the two axis conditions are joined with `or`, so the
        # loop only stops once BOTH axes are past their endpoint.  For the
        # 45-degree lines of this puzzle the axes run out together, but a
        # non-unit slope could index past the board — confirm the inputs.
        while ((y_dir == 1 and y1 <= y2) or (y_dir == -1 and y1 >= y2)) or (
            (x_dir == 1 and x1 <= x2) or (x_dir == -1 and x1 >= x2)
        ):
            board[y1][x1] += 1
            x1 += slope[0] * x_dir
            y1 += slope[1] * y_dir
def map_lines(lines: List[Tuple[Tuple[int, int], Tuple[int, int]]]):
    """Rasterise every segment onto a zero board just large enough to fit.

    Returns the board as a list of rows, i.e. board[y][x] = coverage count.
    """
    max_x = max(max(p1[0], p2[0]) for p1, p2 in lines)
    max_y = max(max(p1[1], p2[1]) for p1, p2 in lines)
    board = [[0] * (max_x + 1) for _ in range(max_y + 1)]
    for segment in lines:
        fill_line(segment, board)
    return board
@solution_timer(2021, 5, 1)
def part_one(input_data: List[str]):
    """Count cells covered by at least two axis-aligned vent lines."""
    segments: List[Tuple[Tuple[int, int], Tuple[int, int]]] = []
    for raw in input_data:
        start, end = raw.split(" -> ")
        x1, y1 = (int(part) for part in start.split(","))
        x2, y2 = (int(part) for part in end.split(","))
        segments.append(((x1, y1), (x2, y2)))
    # Part one ignores diagonals: keep only segments sharing an x or a y.
    segments = [seg for seg in segments if seg[0][0] == seg[1][0] or seg[0][1] == seg[1][1]]
    board = map_lines(segments)
    # print_board(board)
    return sum(1 for row in board for cell in row if cell > 1)
@solution_timer(2021, 5, 2)
def part_two(input_data: List[str]):
    """Count cells covered by at least two vent lines, diagonals included."""
    segments: List[Tuple[Tuple[int, int], Tuple[int, int]]] = []
    for raw in input_data:
        start, end = raw.split(" -> ")
        x1, y1 = (int(part) for part in start.split(","))
        x2, y2 = (int(part) for part in end.split(","))
        segments.append(((x1, y1), (x2, y2)))
    board = map_lines(segments)
    # print_board(board)
    return sum(1 for row in board for cell in row if cell > 1)
if __name__ == "__main__":
    # Fetch the day's puzzle input once and run both parts against it.
    puzzle = get_input_for_day(2021, 5)
    for solver in (part_one, part_two):
        solver(puzzle)
|
[
"adventofcode.util.input_helpers.get_input_for_day",
"fractions.Fraction",
"adventofcode.util.helpers.solution_timer"
] |
[((1673, 1699), 'adventofcode.util.helpers.solution_timer', 'solution_timer', (['(2021)', '(5)', '(1)'], {}), '(2021, 5, 1)\n', (1687, 1699), False, 'from adventofcode.util.helpers import solution_timer\n'), ((2268, 2294), 'adventofcode.util.helpers.solution_timer', 'solution_timer', (['(2021)', '(5)', '(2)'], {}), '(2021, 5, 2)\n', (2282, 2294), False, 'from adventofcode.util.helpers import solution_timer\n'), ((2806, 2832), 'adventofcode.util.input_helpers.get_input_for_day', 'get_input_for_day', (['(2021)', '(5)'], {}), '(2021, 5)\n', (2823, 2832), False, 'from adventofcode.util.input_helpers import get_input_for_day\n'), ((450, 464), 'fractions.Fraction', 'Fraction', (['x', 'y'], {}), '(x, y)\n', (458, 464), False, 'from fractions import Fraction\n')]
|
"""
ffs.contrib.http
An HTTPath implementation on top of ffs.Path.
"""
#
# !!! We should do some further thinking around what constitutes
# an absolute path for http
#
import os
import urlparse
from lxml import html
import requests
import urlhelp
import ffs
from ffs.util import Flike, wraps
class HTTPFlike(Flike):
    """
    A file-like object wrapping an HTTP response body, carrying the
    response headers and originating URL as attributes.
    """
    def __init__(self, *args, **kw):
        # Strip our extra keyword arguments before handing off to Flike.
        self.headers = kw.pop('headers', {})
        self.url = kw.pop('url', None)
        # Parse the body once so ls() can query the DOM later.
        self.dom = html.fromstring(args[0])
        Flike.__init__(self, *args, **kw)

    @property
    def name(self):
        "Final path segment of this resource's URL"
        return self.url.split('/')[-1]

    def ls(self):
        """
        Return a list of links in this HTML document.
        Relative hrefs are made absolute against our own URL.

        Return: list[str]
        Exceptions: None
        """
        links = []
        for anchor in self.dom.cssselect('a'):
            href = anchor.attrib['href']
            if href[0] == '/':
                href = href[1:]
            if not href.startswith('http'):
                href = '/'.join([self.url, href])
            links.append(href)
        return links
class HTTPFilesystem(ffs.filesystem.ReadOnlyFilesystem):
    """
    An implementation of the ffs filesystem interface for HTTP.
    We treat this as a Read-only filesystem.
    """
    sep = '/'
    def __init__(self):
        """
        Set up some initial state please.
        """
        # Current "working directory" URL; None until cd() is first used.
        self.wd = None
    def expanduser(self, resource):
        """
        On disk filesystems the ~ should expand to a user's HOME.
        Over the internet, this is inappropriate, so raise InappropriateError
        Arguments:
        - `resource`: str or Path
        Exceptions: InappropriateError
        """
        raise ffs.exceptions.InappropriateError("Can't expand users on HTTPPaths Larry... ")
    def exists(self, resource):
        """
        Predicate method to determine whether RESOURCE exists.
        Arguments:
        - `resource`: str or Path
        Return: bool
        Exceptions: None
        """
        # HEAD keeps this cheap: no response body is transferred.
        # NOTE(review): anything other than a plain 200 (redirects included)
        # counts as non-existent here — confirm that is intended.
        resp = requests.head(urlhelp.protocolise(resource))
        return resp.status_code == 200
    def getwd(self):
        """
        Get the current "Working directory".
        For this filesystem metaphor, we stretch it a bit, and take
        http://localhost to be a sensible default.
        If we have previously cd()'d somewhere, we remember that.
        Return: str
        Exceptions: None
        """
        if self.wd:
            return self.wd
        return 'http://localhost'
    def ls(self, resource):
        """
        List the contents of RESOURCE.
        In the contents of an HTTP Filesystem, we take this to mean a
        list of the <a> links on the page.
        Arguments:
        - `resource`: str or Path
        Return: list[str]
        Exceptions: None
        """
        return urlhelp.find_links(resource)
    def cd(self, resource):
        """
        Change our working dir to RESOURCE.
        Can be used as a contextmanager that returns us to whatever
        state we were previously in on exit.
        Arguments:
        - `resource`: str or Path
        Return: None
        Exceptions: None
        """
        oldwd = self.wd
        # The directory change happens immediately, so cd() also works when
        # its return value is simply discarded (non-contextmanager use).
        self.wd = urlhelp.protocolise(resource)
        class HTTPCd(object):
            """
            Define this class in a closure to implement the contextmanager
            protocol while remaining able to operate on SELF.
            """
            def __enter__(zelf):
                return
            def __exit__(zelf, msg, val, tb):
                # Restore the previous working directory no matter how
                # the with-block exited; exceptions still propagate.
                self.wd = oldwd
                return
        return HTTPCd()
    def is_abspath(self, resource):
        """
        Predicate function to determine whether RESOURCE is an
        absolute path.
        Arguments:
        - `resource`: str or Path
        Return: bool
        Exceptions: None
        """
        # 'localhost' is special-cased; otherwise anything with a netloc
        # after parsing counts as absolute.
        if resource == 'localhost':
            return True
        parsed = urlparse.urlparse(resource)
        if parsed.netloc:
            return True
        return False
    def abspath(self, resource):
        """
        Return an absolute path for RESOURCE
        Arguments:
        - `resource`: str or Path
        Return: str
        Exceptions: None
        """
        return urlhelp.protocolise(resource)
    def open(self, resource):
        """
        Return a file-like object that represents the contents of
        a HTTP resource
        Arguments:
        - `resource`: str or Path
        Return: File-Like object
        Exceptions: None
        """
        url = urlhelp.protocolise(resource)
        resp = requests.get(url)
        # Wrap the body so callers get .headers and .url alongside read().
        flike = HTTPFlike(resp.content, url=url, headers=resp.headers)
        return flike
    @wraps(ffs.filesystem.BaseFilesystem.parent)
    def parent(self, resource):
        # Everything up to the last separator, trailing separators ignored.
        return os.path.dirname(resource.rstrip(self.sep))
    def is_branch(self, resource):
        """
        For HTTP, we have no canonical way to determine whether RESOURCE
        is a branch or a leaf, so we raise InappropriateError.
        Arguments:
        - `resource`: str or Path
        Exceptions: InappropriateError
        """
        raise ffs.exceptions.InappropriateError("Can't tell if this is a branch Larry... ")
    def is_leaf(self, resource):
        """
        For HTTP, we have no canonical way to determine whether RESOURCE
        is a branch or a leaf, so we raise InappropriateError.
        Arguments:
        - `resource`: str or Path
        Exceptions: InappropriateError
        """
        raise ffs.exceptions.InappropriateError("Can't tell if this is a leaf Larry... ")
class HTTPPath(ffs.path.BasePath):
    """
    An implementation of the ffs path manipulation interface for
    HTTP resources.
    """
    fsflavour = HTTPFilesystem

    def __init__(self, *args, **kw):
        # No cached response until the path is entered as a context manager.
        self._flike = None
        ffs.path.BasePath.__init__(self, *args, **kw)

    def __enter__(self):
        """
        Duck-type this path as a File: fetch the resource and hand back
        its content wrapped in a file-like object.

        Return: Flike
        Exceptions: None
        """
        # Pass the raw value rather than self: urlparse would iterate
        # through sections expecting something stringy, which causes
        # eternal recursion.
        self._flike = self.fs.open(self._value)
        return self._flike

    def __exit__(self, msg, err, tb):
        """
        Drop the cached Flike, closing it even if close() raises.
        """
        flike, self._flike = self._flike, None
        flike.close()

    def __iter__(self):
        """
        Iterate through the lines in the HTTP response content

        Return: iterable
        Exceptions: None
        """
        def line_generator():
            "Yield response lines one at a time"
            with self as response:
                for line in response:
                    yield line
        return line_generator()

    # !!! With both this and other addition, figure out a
    # way to do subdomains
    def __iadd__(self, other):
        """
        Implement SELF += str or Path

        Arguments:
        - `other`: str or Path

        Return: HTTPPath
        Exceptions: TypeError
        """
        return ffs.path.BasePath.__iadd__(self, other)

    def open(self):
        """
        Return the content as a file like object
        """
        return self.fs.open(self._value)
|
[
"ffs.exceptions.InappropriateError",
"ffs.path.BasePath.__iadd__",
"ffs.util.Flike.__init__",
"ffs.path.BasePath.__init__",
"urlhelp.find_links",
"lxml.html.fromstring",
"ffs.util.wraps",
"urlparse.urlparse",
"urlhelp.protocolise",
"requests.get"
] |
[((4943, 4986), 'ffs.util.wraps', 'wraps', (['ffs.filesystem.BaseFilesystem.parent'], {}), '(ffs.filesystem.BaseFilesystem.parent)\n', (4948, 4986), False, 'from ffs.util import Flike, wraps\n'), ((729, 753), 'lxml.html.fromstring', 'html.fromstring', (['args[0]'], {}), '(args[0])\n', (744, 753), False, 'from lxml import html\n'), ((762, 795), 'ffs.util.Flike.__init__', 'Flike.__init__', (['self', '*args'], {}), '(self, *args, **kw)\n', (776, 795), False, 'from ffs.util import Flike, wraps\n'), ((1913, 1991), 'ffs.exceptions.InappropriateError', 'ffs.exceptions.InappropriateError', (['"""Can\'t expand users on HTTPPaths Larry... """'], {}), '("Can\'t expand users on HTTPPaths Larry... ")\n', (1946, 1991), False, 'import ffs\n'), ((3043, 3071), 'urlhelp.find_links', 'urlhelp.find_links', (['resource'], {}), '(resource)\n', (3061, 3071), False, 'import urlhelp\n'), ((3426, 3455), 'urlhelp.protocolise', 'urlhelp.protocolise', (['resource'], {}), '(resource)\n', (3445, 3455), False, 'import urlhelp\n'), ((4163, 4190), 'urlparse.urlparse', 'urlparse.urlparse', (['resource'], {}), '(resource)\n', (4180, 4190), False, 'import urlparse\n'), ((4480, 4509), 'urlhelp.protocolise', 'urlhelp.protocolise', (['resource'], {}), '(resource)\n', (4499, 4509), False, 'import urlhelp\n'), ((4782, 4811), 'urlhelp.protocolise', 'urlhelp.protocolise', (['resource'], {}), '(resource)\n', (4801, 4811), False, 'import urlhelp\n'), ((4827, 4844), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (4839, 4844), False, 'import requests\n'), ((5381, 5458), 'ffs.exceptions.InappropriateError', 'ffs.exceptions.InappropriateError', (['"""Can\'t tell if this is a branch Larry... """'], {}), '("Can\'t tell if this is a branch Larry... ")\n', (5414, 5458), False, 'import ffs\n'), ((5759, 5834), 'ffs.exceptions.InappropriateError', 'ffs.exceptions.InappropriateError', (['"""Can\'t tell if this is a leaf Larry... """'], {}), '("Can\'t tell if this is a leaf Larry... 
")\n', (5792, 5834), False, 'import ffs\n'), ((6077, 6122), 'ffs.path.BasePath.__init__', 'ffs.path.BasePath.__init__', (['self', '*args'], {}), '(self, *args, **kw)\n', (6103, 6122), False, 'import ffs\n'), ((7390, 7429), 'ffs.path.BasePath.__iadd__', 'ffs.path.BasePath.__iadd__', (['self', 'other'], {}), '(self, other)\n', (7416, 7429), False, 'import ffs\n'), ((2242, 2271), 'urlhelp.protocolise', 'urlhelp.protocolise', (['resource'], {}), '(resource)\n', (2261, 2271), False, 'import urlhelp\n')]
|
import unittest
import copy
import os
from unittest.mock import patch, Mock
import weaviate
from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message
from weaviate.connect import REST_METHOD_POST, REST_METHOD_DELETE, REST_METHOD_GET
from weaviate.exceptions import SchemaValidationException, RequestsConnectionError, UnexpectedStatusCodeException
# Hand-written two-class test schema; 'hasEmployee' is a cross-reference
# from 'Company' to the 'Employee' class.
company_test_schema = {
    "classes":
    [
        {
            "class": "Company",
            "description": "A business that acts in the market",
            "properties": [
                {
                    "name": "name",
                    "description": "The name under which the company is known",
                    "dataType": ["text"],
                },
                {
                    "name": "legalBody",
                    "description": "The legal body under which the company maintains its business",
                    "dataType": ["text"],
                },
                {
                    "name": "hasEmployee",
                    "description": "The employees of the company",
                    "dataType": ["Employee"],
                }
            ]
        },
        {
            "class": "Employee",
            "description": "An employee of the company",
            "properties": [
                {
                    "name": "name",
                    "description": "The name of the employee",
                    "dataType": ["text"],
                },
                {
                    "name": "job",
                    "description": "the job description of the employee",
                    "dataType": ["text"],
                },
                {
                    "name": "yearsInTheCompany",
                    "description": "The number of years this employee has worked in the company",
                    "dataType": ["int"],
                }
            ]
        }
    ]
}
# A test schema as it was returned from a real weaviate instance
# ('Group.members' cross-references 'Person').
persons_return_test_schema = {
    "classes": [
        {
            "class": "Person",
            "description": "A person such as humans or personality known through culture",
            "properties": [
                {
                    "dataType": ["text"],
                    "description": "The name of this person",
                    "name": "name"
                }
            ]
        },
        {
            "class": "Group",
            "description": "A set of persons who are associated with each other over some common properties",
            "properties": [
                {
                    "dataType": ["text"],
                    "description": "The name under which this group is known",
                    "name": "name"
                },
                {
                    "dataType": ["Person"],
                    "description": "The persons that are part of this group",
                    "name": "members"
                }
            ]
        }
    ],
}
# In-memory copy used to verify what schema.create() parsed from disk.
schema_company_local = { # NOTE: should be the same as file schema_company.json
    "classes": [
        {
            "class": "Company",
            "description": "A business that acts in the market",
            "properties": [
                {
                    "name": "name",
                    "description": "The name under which the company is known",
                    "dataType": ["text"]
                },
                {
                    "name": "legalBody",
                    "description": "The legal body under which the company maintains its business",
                    "dataType": ["text"]
                },
                {
                    "name": "hasEmployee",
                    "description": "The employees of the company",
                    "dataType": ["Employee"]
                }
            ]
        },
        {
            "class": "Employee",
            "description": "An employee of the company",
            "properties": [
                {
                    "name": "name",
                    "description": "The name of the employee",
                    "dataType": ["text"]
                },
                {
                    "name": "job",
                    "description": "the job description of the employee",
                    "dataType": ["text"]
                },
                {
                    "name": "yearsInTheCompany",
                    "description": "The number of years this employee has worked in the company",
                    "dataType": ["int"]
                }
            ]
        }
    ]
}
class TestSchema(unittest.TestCase):
    """
    Unit tests for the weaviate schema API: create/get/contains/delete plus
    the private helper functions, all run against mocked REST connections.
    """
    def setUp(self):
        # Fresh client per test; the URL is never actually contacted because
        # every test swaps the connection for a mock.
        self.client = weaviate.Client("http://localhost:8080")
    def test_create(self):
        """
        Test the `create` method.
        """
        # mock function calls
        mock_primitive = Mock()
        mock_complex = Mock()
        self.client.schema._create_classes_with_primitives = mock_primitive
        self.client.schema._create_complex_properties_from_classes = mock_complex
        self.client.schema.create("test/schema/schema_company.json") # with read from file
        mock_primitive.assert_called_with(schema_company_local["classes"])
        mock_complex.assert_called_with(schema_company_local["classes"])
    def test_create_class(self):
        """
        Test the `create_class` method.
        """
        # mock function calls
        # NOTE(review): 'premitives' mirrors the production method name.
        mock_primitive = Mock()
        mock_complex = Mock()
        self.client.schema._create_class_with_premitives = mock_primitive
        self.client.schema._create_complex_properties_from_class = mock_complex
        self.client.schema.create_class(company_test_schema["classes"][0])
        mock_primitive.assert_called_with(company_test_schema["classes"][0])
        mock_complex.assert_called_with(company_test_schema["classes"][0])
    def test_get(self):
        """
        Test the `get` method.
        """
        # invalid calls
        requests_error_message = 'Test! Connection error, schema could not be retrieved.'
        mock_conn = mock_run_rest(side_effect=RequestsConnectionError("Test!"))
        replace_connection(self.client, mock_conn)
        with self.assertRaises(RequestsConnectionError) as error:
            self.client.schema.get()
        check_error_message(self, error, requests_error_message)
        mock_conn = mock_run_rest(status_code=404)
        replace_connection(self.client, mock_conn)
        with self.assertRaises(UnexpectedStatusCodeException) as error:
            self.client.schema.get()
        check_startswith_error_message(self, error, "Get schema")
        # valid calls
        connection_mock_file = mock_run_rest(status_code=200, return_json={'Test': 'OK!'})
        replace_connection(self.client, connection_mock_file) # Replace connection with mock
        self.assertEqual(self.client.schema.get(), {'Test': 'OK!'})
        connection_mock_file.run_rest.assert_called_with("/schema", REST_METHOD_GET) # See if mock has been called
    def test_contains(self):
        """
        Test the `contains` method.
        """
        # If a schema is present it should return true otherwise false
        # 1. test schema is present:
        replace_connection(self.client, mock_run_rest(return_json=persons_return_test_schema))
        self.assertTrue(self.client.schema.contains())
        # 2. test no schema is present:
        replace_connection(self.client, mock_run_rest(return_json={"classes": []}))
        self.assertFalse(self.client.schema.contains())
        # 3. test with 'schema' argument
        ## Test weaviate.schema.contains specific schema.
        replace_connection(self.client, mock_run_rest(return_json=persons_return_test_schema))
        self.assertFalse(self.client.schema.contains(company_test_schema))
        # A strict subset of the instance schema should be contained.
        subset_schema = {
            "classes": [
                {
                    "class": "Person",
                    "description": "",
                    "properties": [
                        {
                            "dataType": ["text"],
                            "description": "",
                            "name": "name"
                        }
                    ]
                }
            ]
        }
        self.assertTrue(self.client.schema.contains(subset_schema))
        ## Test weaviate.schema.contains schema from file.
        replace_connection(self.client, mock_run_rest(return_json=persons_return_test_schema))
        schema_json_file = os.path.join(os.path.dirname(__file__), "schema_company.json")
        self.assertFalse(self.client.schema.contains(schema_json_file))
        replace_connection(self.client, mock_run_rest(return_json=company_test_schema))
        self.assertTrue(self.client.schema.contains(schema_json_file))
    def test_delete_class_input(self):
        """
        Test the 'delete_class` method.
        """
        # invalid calls
        type_error_message = lambda t: f"Class name was {t} instead of str"
        requests_error_message = 'Test! Connection error, during deletion of class.'
        with self.assertRaises(TypeError) as error:
            self.client.schema.delete_class(1)
        check_error_message(self, error, type_error_message(int))
        replace_connection(self.client, mock_run_rest(side_effect=RequestsConnectionError('Test!')))
        with self.assertRaises(RequestsConnectionError) as error:
            self.client.schema.delete_class("uuid")
        check_error_message(self, error, requests_error_message)
        replace_connection(self.client, mock_run_rest(status_code=404))
        with self.assertRaises(UnexpectedStatusCodeException) as error:
            self.client.schema.delete_class("uuid")
        check_startswith_error_message(self, error, "Delete class from schema")
        # valid calls
        mock_conn = mock_run_rest(status_code=200)
        replace_connection(self.client, mock_conn)
        self.client.schema.delete_class("uuid")
        mock_conn.run_rest.assert_called_with("/schema/uuid", REST_METHOD_DELETE)
    def test_delete_everything(self):
        """
        Test the `delete_all` method.
        """
        mock_get = mock_run_rest(return_json=company_test_schema)
        replace_connection(self.client, mock_get)
        self.client.schema.delete_all()
        # Two classes in the schema -> two DELETE calls, plus the initial GET.
        self.assertEqual(mock_get.run_rest.call_count, 2 + 1) # + 1 is for the getting the schema
    def test__create_complex_properties_from_classes(self):
        """
        Test the `_create_complex_properties_from_classes` method.
        """
        mock_complex = Mock()
        self.client.schema._create_complex_properties_from_class = mock_complex
        # list("Test!") has 5 elements -> per-class helper called 5 times.
        self.client.schema._create_complex_properties_from_classes(list("Test!"))
        self.assertEqual(mock_complex.call_count, 5)
    def test__create_complex_properties_from_class(self):
        """
        Test the `_create_complex_properties_from_class` method.
        """
        # valid calls
        test_func = self.client.schema._create_complex_properties_from_class
        # Closure over `properties`: asserts one POST carrying the first
        # property of whatever `properties` currently holds.
        def helper_test(nr_calls=1):
            mock_rest = mock_run_rest()
            replace_connection(self.client, mock_rest)
            test_func(properties)
            self.assertEqual(mock_rest.run_rest.call_count, nr_calls)
            mock_rest.run_rest.assert_called_with(
                "/schema/" + properties["class"] + "/properties",
                REST_METHOD_POST,
                properties['properties'][0])
        # no `properties` key
        mock_rest = mock_run_rest()
        replace_connection(self.client, mock_rest)
        test_func({})
        self.assertEqual(mock_rest.run_rest.call_count, 0)
        # no COMPLEX properties
        properties = {
            'properties':[
                {'dataType': ["text"]}
            ]
        }
        test_func(properties)
        self.assertEqual(mock_rest.run_rest.call_count, 0)
        properties = {
            'properties':[
                {'dataType': ["text"]},
                {'dataType': ['string']}
            ]
        }
        test_func(properties)
        self.assertEqual(mock_rest.run_rest.call_count, 0)
        # One complex (cross-reference) property -> exactly one POST.
        properties = {
            'class' : 'TestClass',
            'properties':[
                {
                    'dataType': ["Test"],
                    'description': "test description",
                    'name': 'test_prop'
                },
            ]
        }
        mock_rest = mock_run_rest()
        replace_connection(self.client, mock_rest)
        test_func(properties)
        self.assertEqual(mock_rest.run_rest.call_count, 1)
        properties = {
            'class' : 'TestClass',
            'properties':[
                {
                    'dataType': ["Test"],
                    'description': "test description",
                    'name': 'test_prop'
                },
            ]
        }
        helper_test()
        properties['properties'][0]['indexInverted'] = True
        helper_test()
        properties['properties'][0]['moduleConfig'] = {'test': 'ok!'}
        helper_test()
        properties['properties'].append(properties['properties'][0]) # add another property
        properties['properties'].append(properties['properties'][0]) # add another property
        helper_test(3)
        # invalid calls
        requests_error_message = 'TEST1 Connection error, property may not have been created properly.'
        mock_rest = mock_run_rest(side_effect=RequestsConnectionError('TEST1'))
        replace_connection(self.client, mock_rest)
        with self.assertRaises(RequestsConnectionError) as error:
            test_func(properties)
        check_error_message(self, error, requests_error_message)
        mock_rest = mock_run_rest(status_code=404)
        replace_connection(self.client, mock_rest)
        with self.assertRaises(UnexpectedStatusCodeException) as error:
            test_func(properties)
        check_startswith_error_message(self, error, "Add properties to classes")
    def test__create_class_with_premitives(self):
        """
        Test the `_create_class_with_premitives` method.
        """
        # valid calls
        test_func = self.client.schema._create_class_with_premitives
        # Closure over `test_class`/`test_class_call`: the POSTed payload must
        # contain only the primitive properties (test_class_call).
        def helper_test():
            mock_rest = mock_run_rest()
            replace_connection(self.client, mock_rest)
            test_func(test_class)
            self.assertEqual(mock_rest.run_rest.call_count, 1)
            mock_rest.run_rest.assert_called_with(
                "/schema",
                REST_METHOD_POST,
                test_class_call)
        test_class = {
            "class": "TestClass",
            "properties": [
                {
                    'dataType': ['int'],
                    'name': 'test_prop',
                    'description': 'None'
                },
                {
                    'dataType': ['Test'],
                    'name': 'test_prop',
                    'description': 'None'
                }
            ]
        }
        test_class_call = {
            "class": "TestClass",
            "properties": [
                {
                    'dataType': ['int'],
                    'name': 'test_prop',
                    'description': 'None'
                },
            ]
        }
        helper_test()
        test_class['description'] = 'description'
        test_class_call['description'] = 'description'
        helper_test()
        test_class['description'] = 'description'
        test_class_call['description'] = 'description'
        helper_test()
        test_class['vectorIndexType'] = 'vectorIndexType'
        test_class_call['vectorIndexType'] = 'vectorIndexType'
        helper_test()
        test_class['vectorIndexConfig'] = {'vectorIndexConfig': 'vectorIndexConfig'}
        test_class_call['vectorIndexConfig'] = {'vectorIndexConfig': 'vectorIndexConfig'}
        helper_test()
        test_class['vectorizer'] = 'test_vectorizer'
        test_class_call['vectorizer'] = 'test_vectorizer'
        helper_test()
        test_class['moduleConfig'] = {'moduleConfig': 'moduleConfig'}
        test_class_call['moduleConfig'] = {'moduleConfig': 'moduleConfig'}
        helper_test()
        # multiple properties do not imply multiple `run_rest` calls
        test_class['properties'].append(test_class['properties'][0]) # add another property
        test_class['properties'].append(test_class['properties'][0]) # add another property
        test_class_call['properties'].append(test_class['properties'][0]) # add another property
        test_class_call['properties'].append(test_class['properties'][0]) # add another property
        helper_test()
        # invalid calls
        requests_error_message = 'TEST1 Connection error, class may not have been created properly.'
        mock_rest = mock_run_rest(side_effect=RequestsConnectionError('TEST1'))
        replace_connection(self.client, mock_rest)
        with self.assertRaises(RequestsConnectionError) as error:
            test_func(test_class)
        check_error_message(self, error, requests_error_message)
        mock_rest = mock_run_rest(status_code=404)
        replace_connection(self.client, mock_rest)
        with self.assertRaises(UnexpectedStatusCodeException) as error:
            test_func(test_class)
        check_startswith_error_message(self, error, "Create class")
    def test__create_classes_with_primitives(self):
        """
        Test the `_create_classes_with_primitives` method.
        """
        mock_primitive = Mock()
        self.client.schema._create_class_with_premitives = mock_primitive
        # list("Test!!") has 6 elements -> per-class helper called 6 times.
        self.client.schema._create_classes_with_primitives(list("Test!!"))
        self.assertEqual(mock_primitive.call_count, 6)
    def test__property_is_primitive(self):
        """
        Test the `_property_is_primitive` function.
        """
        test_types_list = ["NOT Primitive", "Neither this one", "Nor This!"]
        self.assertFalse(weaviate.schema.crud_schema._property_is_primitive(test_types_list))
        # A single non-primitive entry makes the whole list non-primitive.
        test_types_list = ["NOT Primitive", "boolean", "text"]
        self.assertFalse(weaviate.schema.crud_schema._property_is_primitive(test_types_list))
        test_types_list = ["text"]
        self.assertTrue(weaviate.schema.crud_schema._property_is_primitive(test_types_list))
        test_types_list = ["int"]
        self.assertTrue(weaviate.schema.crud_schema._property_is_primitive(test_types_list))
        test_types_list = ["number"]
        self.assertTrue(weaviate.schema.crud_schema._property_is_primitive(test_types_list))
        test_types_list = ["string"]
        self.assertTrue(weaviate.schema.crud_schema._property_is_primitive(test_types_list))
        test_types_list = ["boolean"]
        self.assertTrue(weaviate.schema.crud_schema._property_is_primitive(test_types_list))
        test_types_list = ["date"]
        self.assertTrue(weaviate.schema.crud_schema._property_is_primitive(test_types_list))
        test_types_list = ["geoCoordinates"]
        self.assertTrue(weaviate.schema.crud_schema._property_is_primitive(test_types_list))
        test_types_list = ["blob"]
        self.assertTrue(weaviate.schema.crud_schema._property_is_primitive(test_types_list))
        test_types_list = ["string", "int", "boolean", "number", "date", "text", "geoCoordinates", "blob"]
        self.assertTrue(weaviate.schema.crud_schema._property_is_primitive(test_types_list))
    def test__get_primitive_properties(self):
        """
        Test the `_get_primitive_properties` function.
        """
        test_func = weaviate.schema.crud_schema._get_primitive_properties
        properties_list = []
        self.assertEqual(test_func(properties_list), properties_list)
        properties_list = [{'dataType': ["text"]}]
        self.assertEqual(test_func(properties_list), properties_list)
        properties_list = [{'dataType': ["text"]}, {'dataType': ["int"]}]
        self.assertEqual(test_func(properties_list), properties_list)
        # Cross-reference (capitalised) dataTypes are filtered out entirely.
        properties_list = [{'dataType': ["Test1"]}, {'dataType': ["Test2"]}]
        self.assertEqual(test_func(properties_list), [])
        properties_list = [{'dataType': ["text"]}, {'dataType': ["int"]}, {'dataType': ["Test1"]}, {'dataType': ["Test2"]}]
        self.assertEqual(test_func(properties_list), [{'dataType': ["text"]}, {'dataType': ["int"]}])
|
[
"test.util.check_error_message",
"weaviate.schema.crud_schema._property_is_primitive",
"test.util.check_startswith_error_message",
"os.path.dirname",
"unittest.mock.Mock",
"test.util.replace_connection",
"weaviate.exceptions.RequestsConnectionError",
"weaviate.Client",
"test.util.mock_run_rest"
] |
[((4392, 4432), 'weaviate.Client', 'weaviate.Client', (['"""http://localhost:8080"""'], {}), "('http://localhost:8080')\n", (4407, 4432), False, 'import weaviate\n'), ((4575, 4581), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (4579, 4581), False, 'from unittest.mock import patch, Mock\n'), ((4605, 4611), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (4609, 4611), False, 'from unittest.mock import patch, Mock\n'), ((5165, 5171), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (5169, 5171), False, 'from unittest.mock import patch, Mock\n'), ((5195, 5201), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (5199, 5201), False, 'from unittest.mock import patch, Mock\n'), ((5869, 5911), 'test.util.replace_connection', 'replace_connection', (['self.client', 'mock_conn'], {}), '(self.client, mock_conn)\n', (5887, 5911), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((6023, 6079), 'test.util.check_error_message', 'check_error_message', (['self', 'error', 'requests_error_message'], {}), '(self, error, requests_error_message)\n', (6042, 6079), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((6101, 6131), 'test.util.mock_run_rest', 'mock_run_rest', ([], {'status_code': '(404)'}), '(status_code=404)\n', (6114, 6131), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((6140, 6182), 'test.util.replace_connection', 'replace_connection', (['self.client', 'mock_conn'], {}), '(self.client, mock_conn)\n', (6158, 6182), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((6300, 6357), 'test.util.check_startswith_error_message', 'check_startswith_error_message', (['self', 'error', '"""Get schema"""'], {}), "(self, error, 'Get schema')\n", (6330, 6357), False, 'from test.util import 
replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((6412, 6471), 'test.util.mock_run_rest', 'mock_run_rest', ([], {'status_code': '(200)', 'return_json': "{'Test': 'OK!'}"}), "(status_code=200, return_json={'Test': 'OK!'})\n", (6425, 6471), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((6480, 6533), 'test.util.replace_connection', 'replace_connection', (['self.client', 'connection_mock_file'], {}), '(self.client, connection_mock_file)\n', (6498, 6533), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((9227, 9283), 'test.util.check_error_message', 'check_error_message', (['self', 'error', 'requests_error_message'], {}), '(self, error, requests_error_message)\n', (9246, 9283), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((9489, 9560), 'test.util.check_startswith_error_message', 'check_startswith_error_message', (['self', 'error', '"""Delete class from schema"""'], {}), "(self, error, 'Delete class from schema')\n", (9519, 9560), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((9604, 9634), 'test.util.mock_run_rest', 'mock_run_rest', ([], {'status_code': '(200)'}), '(status_code=200)\n', (9617, 9634), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((9643, 9685), 'test.util.replace_connection', 'replace_connection', (['self.client', 'mock_conn'], {}), '(self.client, mock_conn)\n', (9661, 9685), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((9938, 9984), 'test.util.mock_run_rest', 'mock_run_rest', ([], {'return_json': 'company_test_schema'}), '(return_json=company_test_schema)\n', 
(9951, 9984), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((9993, 10034), 'test.util.replace_connection', 'replace_connection', (['self.client', 'mock_get'], {}), '(self.client, mock_get)\n', (10011, 10034), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((10350, 10356), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (10354, 10356), False, 'from unittest.mock import patch, Mock\n'), ((11313, 11328), 'test.util.mock_run_rest', 'mock_run_rest', ([], {}), '()\n', (11326, 11328), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((11337, 11379), 'test.util.replace_connection', 'replace_connection', (['self.client', 'mock_rest'], {}), '(self.client, mock_rest)\n', (11355, 11379), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((12262, 12277), 'test.util.mock_run_rest', 'mock_run_rest', ([], {}), '()\n', (12275, 12277), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((12286, 12328), 'test.util.replace_connection', 'replace_connection', (['self.client', 'mock_rest'], {}), '(self.client, mock_rest)\n', (12304, 12328), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((13351, 13393), 'test.util.replace_connection', 'replace_connection', (['self.client', 'mock_rest'], {}), '(self.client, mock_rest)\n', (13369, 13393), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((13502, 13558), 'test.util.check_error_message', 'check_error_message', (['self', 'error', 'requests_error_message'], {}), '(self, error, requests_error_message)\n', (13521, 13558), False, 'from 
test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((13580, 13610), 'test.util.mock_run_rest', 'mock_run_rest', ([], {'status_code': '(404)'}), '(status_code=404)\n', (13593, 13610), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((13619, 13661), 'test.util.replace_connection', 'replace_connection', (['self.client', 'mock_rest'], {}), '(self.client, mock_rest)\n', (13637, 13661), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((13776, 13848), 'test.util.check_startswith_error_message', 'check_startswith_error_message', (['self', 'error', '"""Add properties to classes"""'], {}), "(self, error, 'Add properties to classes')\n", (13806, 13848), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((16778, 16820), 'test.util.replace_connection', 'replace_connection', (['self.client', 'mock_rest'], {}), '(self.client, mock_rest)\n', (16796, 16820), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((16929, 16985), 'test.util.check_error_message', 'check_error_message', (['self', 'error', 'requests_error_message'], {}), '(self, error, requests_error_message)\n', (16948, 16985), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((17007, 17037), 'test.util.mock_run_rest', 'mock_run_rest', ([], {'status_code': '(404)'}), '(status_code=404)\n', (17020, 17037), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((17046, 17088), 'test.util.replace_connection', 'replace_connection', (['self.client', 'mock_rest'], {}), '(self.client, mock_rest)\n', (17064, 17088), False, 'from test.util import 
replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((17203, 17262), 'test.util.check_startswith_error_message', 'check_startswith_error_message', (['self', 'error', '"""Create class"""'], {}), "(self, error, 'Create class')\n", (17233, 17262), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((17425, 17431), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (17429, 17431), False, 'from unittest.mock import patch, Mock\n'), ((6991, 7044), 'test.util.mock_run_rest', 'mock_run_rest', ([], {'return_json': 'persons_return_test_schema'}), '(return_json=persons_return_test_schema)\n', (7004, 7044), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((7183, 7225), 'test.util.mock_run_rest', 'mock_run_rest', ([], {'return_json': "{'classes': []}"}), "(return_json={'classes': []})\n", (7196, 7225), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((7428, 7481), 'test.util.mock_run_rest', 'mock_run_rest', ([], {'return_json': 'persons_return_test_schema'}), '(return_json=persons_return_test_schema)\n', (7441, 7481), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((8166, 8219), 'test.util.mock_run_rest', 'mock_run_rest', ([], {'return_json': 'persons_return_test_schema'}), '(return_json=persons_return_test_schema)\n', (8179, 8219), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((8261, 8286), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (8276, 8286), False, 'import os\n'), ((8424, 8470), 'test.util.mock_run_rest', 'mock_run_rest', ([], {'return_json': 'company_test_schema'}), '(return_json=company_test_schema)\n', (8437, 8470), False, 'from test.util 
import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((9325, 9355), 'test.util.mock_run_rest', 'mock_run_rest', ([], {'status_code': '(404)'}), '(status_code=404)\n', (9338, 9355), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((10891, 10906), 'test.util.mock_run_rest', 'mock_run_rest', ([], {}), '()\n', (10904, 10906), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((10919, 10961), 'test.util.replace_connection', 'replace_connection', (['self.client', 'mock_rest'], {}), '(self.client, mock_rest)\n', (10937, 10961), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((14132, 14147), 'test.util.mock_run_rest', 'mock_run_rest', ([], {}), '()\n', (14145, 14147), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((14160, 14202), 'test.util.replace_connection', 'replace_connection', (['self.client', 'mock_rest'], {}), '(self.client, mock_rest)\n', (14178, 14202), False, 'from test.util import replace_connection, mock_run_rest, check_error_message, check_startswith_error_message\n'), ((17861, 17928), 'weaviate.schema.crud_schema._property_is_primitive', 'weaviate.schema.crud_schema._property_is_primitive', (['test_types_list'], {}), '(test_types_list)\n', (17911, 17928), False, 'import weaviate\n'), ((18018, 18085), 'weaviate.schema.crud_schema._property_is_primitive', 'weaviate.schema.crud_schema._property_is_primitive', (['test_types_list'], {}), '(test_types_list)\n', (18068, 18085), False, 'import weaviate\n'), ((18146, 18213), 'weaviate.schema.crud_schema._property_is_primitive', 'weaviate.schema.crud_schema._property_is_primitive', (['test_types_list'], {}), '(test_types_list)\n', (18196, 18213), False, 'import weaviate\n'), 
((18273, 18340), 'weaviate.schema.crud_schema._property_is_primitive', 'weaviate.schema.crud_schema._property_is_primitive', (['test_types_list'], {}), '(test_types_list)\n', (18323, 18340), False, 'import weaviate\n'), ((18403, 18470), 'weaviate.schema.crud_schema._property_is_primitive', 'weaviate.schema.crud_schema._property_is_primitive', (['test_types_list'], {}), '(test_types_list)\n', (18453, 18470), False, 'import weaviate\n'), ((18533, 18600), 'weaviate.schema.crud_schema._property_is_primitive', 'weaviate.schema.crud_schema._property_is_primitive', (['test_types_list'], {}), '(test_types_list)\n', (18583, 18600), False, 'import weaviate\n'), ((18664, 18731), 'weaviate.schema.crud_schema._property_is_primitive', 'weaviate.schema.crud_schema._property_is_primitive', (['test_types_list'], {}), '(test_types_list)\n', (18714, 18731), False, 'import weaviate\n'), ((18792, 18859), 'weaviate.schema.crud_schema._property_is_primitive', 'weaviate.schema.crud_schema._property_is_primitive', (['test_types_list'], {}), '(test_types_list)\n', (18842, 18859), False, 'import weaviate\n'), ((18930, 18997), 'weaviate.schema.crud_schema._property_is_primitive', 'weaviate.schema.crud_schema._property_is_primitive', (['test_types_list'], {}), '(test_types_list)\n', (18980, 18997), False, 'import weaviate\n'), ((19058, 19125), 'weaviate.schema.crud_schema._property_is_primitive', 'weaviate.schema.crud_schema._property_is_primitive', (['test_types_list'], {}), '(test_types_list)\n', (19108, 19125), False, 'import weaviate\n'), ((19258, 19325), 'weaviate.schema.crud_schema._property_is_primitive', 'weaviate.schema.crud_schema._property_is_primitive', (['test_types_list'], {}), '(test_types_list)\n', (19308, 19325), False, 'import weaviate\n'), ((5827, 5859), 'weaviate.exceptions.RequestsConnectionError', 'RequestsConnectionError', (['"""Test!"""'], {}), "('Test!')\n", (5850, 5859), False, 'from weaviate.exceptions import SchemaValidationException, RequestsConnectionError, 
UnexpectedStatusCodeException\n'), ((13309, 13341), 'weaviate.exceptions.RequestsConnectionError', 'RequestsConnectionError', (['"""TEST1"""'], {}), "('TEST1')\n", (13332, 13341), False, 'from weaviate.exceptions import SchemaValidationException, RequestsConnectionError, UnexpectedStatusCodeException\n'), ((16736, 16768), 'weaviate.exceptions.RequestsConnectionError', 'RequestsConnectionError', (['"""TEST1"""'], {}), "('TEST1')\n", (16759, 16768), False, 'from weaviate.exceptions import SchemaValidationException, RequestsConnectionError, UnexpectedStatusCodeException\n'), ((9066, 9098), 'weaviate.exceptions.RequestsConnectionError', 'RequestsConnectionError', (['"""Test!"""'], {}), "('Test!')\n", (9089, 9098), False, 'from weaviate.exceptions import SchemaValidationException, RequestsConnectionError, UnexpectedStatusCodeException\n')]
|
# Generated by Django 3.2.4 on 2021-06-25 07:28
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api_telemed', '0002_teste'),
]
operations = [
migrations.DeleteModel(
name='teste',
),
]
|
[
"django.db.migrations.DeleteModel"
] |
[((218, 254), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""teste"""'}), "(name='teste')\n", (240, 254), False, 'from django.db import migrations\n')]
|
"""Build a project using PEP 517 hooks.
"""
import argparse
import logging
import os
import shutil
from .envbuild import BuildEnvironment
from pep517 import Pep517HookCaller
from pep517.pyproject import load_system, validate_system
from .dirtools import tempdir, mkdir_p
log = logging.getLogger(__name__)
def _do_build(hooks, env, dist, dest):
get_requires_name = 'get_requires_for_build_{dist}'.format(**locals())
get_requires = getattr(hooks, get_requires_name)
reqs = get_requires({})
log.info('Got build requires: %s', reqs)
env.pip_install(reqs)
log.info('Installed dynamic build dependencies')
with tempdir() as td:
log.info('Trying to build %s in %s', dist, td)
build_name = 'build_{dist}'.format(**locals())
build = getattr(hooks, build_name)
filename = build(td, {})
source = os.path.join(td, filename)
shutil.move(source, os.path.join(dest, os.path.basename(filename)))
def build(source_dir, dist, dest=None, system=None):
system = system or load_system(source_dir)
dest = os.path.join(source_dir, dest or 'dist')
mkdir_p(dest)
validate_system(system)
hooks = Pep517HookCaller(
source_dir, system['build-backend'], system.get('backend-path')
)
with BuildEnvironment() as env:
env.pip_install(system['requires'])
_do_build(hooks, env, dist, dest)
parser = argparse.ArgumentParser()
parser.add_argument(
'source_dir',
help="A directory containing pyproject.toml",
)
parser.add_argument(
'--binary', '-b',
action='store_true',
default=False,
)
parser.add_argument(
'--source', '-s',
action='store_true',
default=False,
)
parser.add_argument(
'--out-dir', '-o',
help="Destination in which to save the builds relative to source dir",
)
def main(args):
# determine which dists to build
dists = list(filter(None, (
'sdist' if args.source or not args.binary else None,
'wheel' if args.binary or not args.source else None,
)))
for dist in dists:
build(args.source_dir, dist, args.out_dir)
if __name__ == '__main__':
main(parser.parse_args())
|
[
"argparse.ArgumentParser",
"os.path.basename",
"pep517.pyproject.load_system",
"pep517.pyproject.validate_system",
"os.path.join",
"logging.getLogger"
] |
[((279, 306), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (296, 306), False, 'import logging\n'), ((1405, 1430), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1428, 1430), False, 'import argparse\n'), ((1075, 1115), 'os.path.join', 'os.path.join', (['source_dir', "(dest or 'dist')"], {}), "(source_dir, dest or 'dist')\n", (1087, 1115), False, 'import os\n'), ((1139, 1162), 'pep517.pyproject.validate_system', 'validate_system', (['system'], {}), '(system)\n', (1154, 1162), False, 'from pep517.pyproject import load_system, validate_system\n'), ((859, 885), 'os.path.join', 'os.path.join', (['td', 'filename'], {}), '(td, filename)\n', (871, 885), False, 'import os\n'), ((1040, 1063), 'pep517.pyproject.load_system', 'load_system', (['source_dir'], {}), '(source_dir)\n', (1051, 1063), False, 'from pep517.pyproject import load_system, validate_system\n'), ((933, 959), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (949, 959), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import user_management.api.models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='AuthToken',
fields=[
('key', models.CharField(primary_key=True, serialize=False, max_length=40)),
('created', models.DateTimeField(editable=False, default=django.utils.timezone.now)),
('expires', models.DateTimeField(editable=False, default=user_management.api.models.update_expiry)),
],
),
]
|
[
"django.db.models.CharField",
"django.db.models.DateTimeField"
] |
[((365, 431), 'django.db.models.CharField', 'models.CharField', ([], {'primary_key': '(True)', 'serialize': '(False)', 'max_length': '(40)'}), '(primary_key=True, serialize=False, max_length=40)\n', (381, 431), False, 'from django.db import models, migrations\n'), ((462, 533), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'editable': '(False)', 'default': 'django.utils.timezone.now'}), '(editable=False, default=django.utils.timezone.now)\n', (482, 533), False, 'from django.db import models, migrations\n'), ((564, 655), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'editable': '(False)', 'default': 'user_management.api.models.update_expiry'}), '(editable=False, default=user_management.api.models.\n update_expiry)\n', (584, 655), False, 'from django.db import models, migrations\n')]
|
import json
import os
import numpy
import matplotlib.pyplot as plt
import networkx as nx
from scipy.misc import imread
from utils import root
import scipy.spatial
from global_map import plot_map
def graph_from_waypoints(filename):
with open(filename) as f:
car_graph = json.loads(f.read())
G = nx.DiGraph()
for p in car_graph['waypoints']:
n_id = p['id']
n_x = p['x']
n_y = p['y']
G.add_node(n_id, pos=(n_x, n_y))
#all waypoints are marked as fixed,
#this is important for smoothing later on
G.node[n_id]['fixed'] = True
for c in p['connectionIDs']:
G.add_edge(n_id, c)
return G
def test_waypoints(filename):
G = graph_from_waypoints(filename)
plot_map()
pos = nx.get_node_attributes(G, 'pos')
nx.draw(G, pos, node_size=5)
plt.show()
def test_car_waypoints():
filename = os.path.join(root, 'flash', 'fft2',
'export', 'binaryData', '910.bin')
test_waypoints(filename)
def test_human_waypoints():
filename = os.path.join(root, 'flash', 'fft2',
'export', 'binaryData', '911.bin')
test_waypoints(filename)
if __name__ in '__main__':
test_human_waypoints()
|
[
"matplotlib.pyplot.show",
"networkx.draw",
"networkx.get_node_attributes",
"networkx.DiGraph",
"os.path.join",
"global_map.plot_map"
] |
[((319, 331), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (329, 331), True, 'import networkx as nx\n'), ((784, 794), 'global_map.plot_map', 'plot_map', ([], {}), '()\n', (792, 794), False, 'from global_map import plot_map\n'), ((806, 838), 'networkx.get_node_attributes', 'nx.get_node_attributes', (['G', '"""pos"""'], {}), "(G, 'pos')\n", (828, 838), True, 'import networkx as nx\n'), ((843, 871), 'networkx.draw', 'nx.draw', (['G', 'pos'], {'node_size': '(5)'}), '(G, pos, node_size=5)\n', (850, 871), True, 'import networkx as nx\n'), ((880, 890), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (888, 890), True, 'import matplotlib.pyplot as plt\n'), ((933, 1003), 'os.path.join', 'os.path.join', (['root', '"""flash"""', '"""fft2"""', '"""export"""', '"""binaryData"""', '"""910.bin"""'], {}), "(root, 'flash', 'fft2', 'export', 'binaryData', '910.bin')\n", (945, 1003), False, 'import os\n'), ((1106, 1176), 'os.path.join', 'os.path.join', (['root', '"""flash"""', '"""fft2"""', '"""export"""', '"""binaryData"""', '"""911.bin"""'], {}), "(root, 'flash', 'fft2', 'export', 'binaryData', '911.bin')\n", (1118, 1176), False, 'import os\n')]
|
import tkinter as tk
import colors as c
import random
class Game(tk.Frame):
def __init__(self):
tk.Frame.__init__(self)
self.grid()
self.master.title("2048")
self.main_grid = tk.Frame(
self, bg=c.GRID_COLOR, bd=3, width=600, height=600
)
self.main_grid.grid(pady=(100, 0))
self.make_GUI()
self.start_game()
# Binding arrow keys
self.master.bind("<Left>", self.left)
self.master.bind("<Right>", self.right)
self.master.bind("<Up>", self.up)
self.master.bind("<Down>", self.down)
self.mainloop()
def make_GUI(self):
# Make 4x4 grid
self.cells = []
for i in range(4):
row = []
for j in range(4):
cell_frame = tk.Frame(
self.main_grid,
bg=c.EMPTY_CELL_COLOR,
width=150,
height=150
)
cell_frame.grid(row=i, column=j, padx=5, pady=5)
cell_number= tk.Label(self.main_grid, bg=c.EMPTY_CELL_COLOR)
cell_number.grid(row=i, column=j)
cell_data = {"frame": cell_frame, "number": cell_number}
row.append(cell_data)
self.cells.append(row)
# Make score
score_frame = tk.Frame(self)
score_frame.place(relx=0.5, y=45, anchor="center")
tk.Label(
score_frame,
text="Score",
font=c.SCORE_LABEL_FONT
).grid(row=0)
self.score_label = tk.Label(score_frame, text="0", font=c.SCORE_FONT)
self.score_label.grid(row=1)
def start_game(self):
# Create matrix of 0s
self.matrix = [[0] * 4 for _ in range(4)]
# Create 2 2s in random cells
row = random.randint(0, 3)
col = random.randint(0, 3)
self.matrix[row][col] = 2
self.cells[row][col]["frame"].configure(bg=c.CELL_COLORS[2])
self.cells[row][col]["number"].configure(
bg=c.CELL_COLORS[2],
fg=c.CELL_NUMBER_COLORS[2],
font=c.CELL_NUMBER_FONTS[2],
text="2"
)
while(self.matrix[row][col] != 0):
row = random.randint(0, 3)
col = random.randint(0, 3)
self.matrix[row][col] = 2
self.cells[row][col]["frame"].configure(bg=c.CELL_COLORS[2])
self.cells[row][col]["number"].configure(
bg=c.CELL_COLORS[2],
fg=c.CELL_NUMBER_COLORS[2],
font=c.CELL_NUMBER_FONTS[2],
text="2"
)
self.score = 0
# Matrix manipulation
def stack(self):
new_matrix = [[0] * 4 for _ in range(4)]
for i in range(4):
fill_position = 0
for j in range(4):
if self.matrix[i][j] != 0:
new_matrix[i][fill_position] = self.matrix[i][j]
fill_position += 1
self.matrix = new_matrix
def combine(self):
for i in range(4):
for j in range(3):
if self.matrix[i][j] != 0 and self.matrix[i][j] == self.matrix[i][j + 1]:
self.matrix[i][j] *= 2
self.matrix[i][j + 1] = 0
self.score += self.matrix[i][j]
def reverse(self):
new_matrix = []
for i in range(4):
new_matrix.append([])
for j in range(4):
new_matrix[i].append(self.matrix[i][3-j])
self.matrix = new_matrix
def transpose(self):
new_matrix = [[0] * 4 for _ in range(4)]
for i in range(4):
for j in range(4):
new_matrix[i][j] = self.matrix[j][i]
self.matrix = new_matrix
# Add new 2 or 4 tile to empty cell
def add_new_tile(self):
row = random.randint(0, 3)
col = random.randint(0, 3)
while(self.matrix[row][col] != 0):
row = random.randint(0, 3)
col = random.randint(0, 3)
self.matrix[row][col] = random.choice([2, 4])
# Update GUI to match new matrix
def update_GUI(self):
for i in range(4):
for j in range(4):
cell_value = self.matrix[i][j]
if cell_value == 0:
self.cells[i][j]["frame"].configure(bg=c.EMPTY_CELL_COLOR)
self.cells[i][j]["number"].configure(bg=c.EMPTY_CELL_COLOR, text="")
else:
self.cells[i][j]["frame"].configure(bg=c.CELL_COLORS[cell_value])
self.cells[i][j]["number"].configure(
bg=c.CELL_COLORS[cell_value],
fg=c.CELL_NUMBER_COLORS[cell_value],
font=c.CELL_NUMBER_FONTS[cell_value],
text=str(cell_value)
)
self.score_label.configure(text=self.score)
self.update_idletasks()
# Move functions
def left(self, event):
self.stack()
self.combine()
self.stack()
self.add_new_tile()
self.update_GUI()
self.game_over()
def right(self, event):
self.reverse()
self.stack()
self.combine()
self.stack()
self.reverse()
self.add_new_tile()
self.update_GUI()
self.game_over()
def up(self, event):
self.transpose()
self.stack()
self.combine()
self.stack()
self.transpose()
self.add_new_tile()
self.update_GUI()
self.game_over()
def down(self, event):
self.transpose()
self.reverse()
self.stack()
self.combine()
self.stack()
self.reverse()
self.transpose()
self.add_new_tile()
self.update_GUI()
self.game_over()
# Check for possible moves
def horizontal_move_possible(self):
for i in range(4):
for j in range(3):
if self.matrix[i][j] == self.matrix[i][j + 1]:
return True
return False
def vertical_move_possible(self):
for i in range(3):
for j in range(4):
if self.matrix[i][j] == self.matrix[i + 1][j]:
return True
return False
# Game over function Win/Lose
def game_over(self):
if any(2048 in row for row in self.matrix):
game_over_frame = tk.Frame(self.main_grid, borderwidth=2)
game_over_frame.place(relx=0.5, rely=0.5, anchor="center")
tk.Label(
game_over_frame,
text="You Win!",
bg=c.WINNER_BG,
fg=c.GAME_OVER_FONT_COLOR,
font=c.GAME_OVER_FONT
).pack()
elif not any(0 in row for row in self.matrix) and not self.horizontal_move_possible() and not self.vertical_move_possible():
game_over_frame = tk.Frame(self.main_grid, borderwidth=2)
game_over_frame.place(relx=0.5, rely=0.5, anchor="center")
tk.Label(
game_over_frame,
text="Game over!",
bg=c.LOSER_BG,
fg=c.GAME_OVER_FONT_COLOR,
font=c.GAME_OVER_FONT
).pack()
def main():
Game()
if __name__ == "__main__":
main()
|
[
"random.randint",
"tkinter.Frame.__init__",
"random.choice",
"tkinter.Frame",
"tkinter.Label"
] |
[((109, 132), 'tkinter.Frame.__init__', 'tk.Frame.__init__', (['self'], {}), '(self)\n', (126, 132), True, 'import tkinter as tk\n'), ((213, 273), 'tkinter.Frame', 'tk.Frame', (['self'], {'bg': 'c.GRID_COLOR', 'bd': '(3)', 'width': '(600)', 'height': '(600)'}), '(self, bg=c.GRID_COLOR, bd=3, width=600, height=600)\n', (221, 273), True, 'import tkinter as tk\n'), ((1371, 1385), 'tkinter.Frame', 'tk.Frame', (['self'], {}), '(self)\n', (1379, 1385), True, 'import tkinter as tk\n'), ((1599, 1649), 'tkinter.Label', 'tk.Label', (['score_frame'], {'text': '"""0"""', 'font': 'c.SCORE_FONT'}), "(score_frame, text='0', font=c.SCORE_FONT)\n", (1607, 1649), True, 'import tkinter as tk\n'), ((1847, 1867), 'random.randint', 'random.randint', (['(0)', '(3)'], {}), '(0, 3)\n', (1861, 1867), False, 'import random\n'), ((1882, 1902), 'random.randint', 'random.randint', (['(0)', '(3)'], {}), '(0, 3)\n', (1896, 1902), False, 'import random\n'), ((3867, 3887), 'random.randint', 'random.randint', (['(0)', '(3)'], {}), '(0, 3)\n', (3881, 3887), False, 'import random\n'), ((3902, 3922), 'random.randint', 'random.randint', (['(0)', '(3)'], {}), '(0, 3)\n', (3916, 3922), False, 'import random\n'), ((4076, 4097), 'random.choice', 'random.choice', (['[2, 4]'], {}), '([2, 4])\n', (4089, 4097), False, 'import random\n'), ((2262, 2282), 'random.randint', 'random.randint', (['(0)', '(3)'], {}), '(0, 3)\n', (2276, 2282), False, 'import random\n'), ((2301, 2321), 'random.randint', 'random.randint', (['(0)', '(3)'], {}), '(0, 3)\n', (2315, 2321), False, 'import random\n'), ((3984, 4004), 'random.randint', 'random.randint', (['(0)', '(3)'], {}), '(0, 3)\n', (3998, 4004), False, 'import random\n'), ((4023, 4043), 'random.randint', 'random.randint', (['(0)', '(3)'], {}), '(0, 3)\n', (4037, 4043), False, 'import random\n'), ((6482, 6521), 'tkinter.Frame', 'tk.Frame', (['self.main_grid'], {'borderwidth': '(2)'}), '(self.main_grid, borderwidth=2)\n', (6490, 6521), True, 'import tkinter as tk\n'), ((812, 
882), 'tkinter.Frame', 'tk.Frame', (['self.main_grid'], {'bg': 'c.EMPTY_CELL_COLOR', 'width': '(150)', 'height': '(150)'}), '(self.main_grid, bg=c.EMPTY_CELL_COLOR, width=150, height=150)\n', (820, 882), True, 'import tkinter as tk\n'), ((1075, 1122), 'tkinter.Label', 'tk.Label', (['self.main_grid'], {'bg': 'c.EMPTY_CELL_COLOR'}), '(self.main_grid, bg=c.EMPTY_CELL_COLOR)\n', (1083, 1122), True, 'import tkinter as tk\n'), ((1453, 1513), 'tkinter.Label', 'tk.Label', (['score_frame'], {'text': '"""Score"""', 'font': 'c.SCORE_LABEL_FONT'}), "(score_frame, text='Score', font=c.SCORE_LABEL_FONT)\n", (1461, 1513), True, 'import tkinter as tk\n'), ((6979, 7018), 'tkinter.Frame', 'tk.Frame', (['self.main_grid'], {'borderwidth': '(2)'}), '(self.main_grid, borderwidth=2)\n', (6987, 7018), True, 'import tkinter as tk\n'), ((6605, 6718), 'tkinter.Label', 'tk.Label', (['game_over_frame'], {'text': '"""You Win!"""', 'bg': 'c.WINNER_BG', 'fg': 'c.GAME_OVER_FONT_COLOR', 'font': 'c.GAME_OVER_FONT'}), "(game_over_frame, text='You Win!', bg=c.WINNER_BG, fg=c.\n GAME_OVER_FONT_COLOR, font=c.GAME_OVER_FONT)\n", (6613, 6718), True, 'import tkinter as tk\n'), ((7102, 7216), 'tkinter.Label', 'tk.Label', (['game_over_frame'], {'text': '"""Game over!"""', 'bg': 'c.LOSER_BG', 'fg': 'c.GAME_OVER_FONT_COLOR', 'font': 'c.GAME_OVER_FONT'}), "(game_over_frame, text='Game over!', bg=c.LOSER_BG, fg=c.\n GAME_OVER_FONT_COLOR, font=c.GAME_OVER_FONT)\n", (7110, 7216), True, 'import tkinter as tk\n')]
|
#!/bin/env python
import json
from websocket import create_connection # type: ignore
import logging
import sys
import os
## var setup
MYCROFTCL_LOGGING = os.environ.get("MYCROFTCL_LOGGING", logging.WARN)
logging.basicConfig(level=MYCROFTCL_LOGGING)
local_file_path = os.path.dirname(os.path.realpath(__file__))
MYCROFT_ADDR = os.environ.get("MYCROFT_ADDR", "localhost")
MYCROFT_PORT = os.environ.get("MYCROFT_PORT", "8181")
MYCROFT_JSON_DIR = os.environ.get(
"MYCROFT_JSON_DIR", f"{local_file_path}/mycroft-json-messages"
)
LANG = os.environ.get("LANG", "en-us")
logging.debug("ENV VARS SET:")
logging.debug(f"MYCROFT_ADDR = {MYCROFT_ADDR}")
logging.debug(f"MYCROFT_PORT = {MYCROFT_PORT}")
logging.debug(f"MYCROFT_JSON_DIR = {MYCROFT_JSON_DIR}")
logging.debug(f"LANG = {LANG}")
def send_message(message: dict, mycroft_addr=MYCROFT_ADDR, mycroft_port=MYCROFT_PORT):
"""Creates websocket address string, connects and sends fully formed json message"""
url = f"ws://{mycroft_addr}:{mycroft_port}/core"
logging.debug(f"Websocket url: {url}")
ws = create_connection(url)
try:
logging.debug(f"String sent: {message}")
send_status = ws.send(json.dumps(message))
logging.debug(f"Send status: {send_status}")
result = ws.recv()
logging.debug(f"received: {result}")
finally:
ws.close()
def get_mycroft_message(command: str, json_dir=MYCROFT_JSON_DIR) -> dict:
"""Retrives and loads the correct json file for the command given"""
json_file = f"{json_dir}/{command}.json"
logging.debug(f"json_file: {json_file}")
with open(f"{json_file}", "rb") as fh:
message = json.load(fh)
logging.debug(f"json_message: {message}")
return message
def run(command: str, data: list, mycroft_addr=MYCROFT_ADDR, mycroft_port=MYCROFT_PORT):
"""Parses data into expected json fields depending on which command is provided"""
message = get_mycroft_message(command)
if command == "speak":
data_string = " ".join(data)
message["data"]["utterance"] = data_string
elif command == "say-to":
message["data"]["utterances"] = data
message["data"]["lang"] = LANG
elif command == "question-query":
data_string = " ".join(data)
message["data"]["phrase"] = data_string
send_message(message, mycroft_addr, mycroft_port)
if __name__ == "__main__":
if sys.stdin.isatty():
logging.debug(f"Passing args: {sys.argv[2:]}")
run(sys.argv[1], sys.argv[2:])
else:
logging.debug("No args given defaulting to stdin")
for line in sys.stdin:
run(sys.argv[1], [line])
|
[
"json.load",
"logging.debug",
"sys.stdin.isatty",
"logging.basicConfig",
"os.path.realpath",
"json.dumps",
"os.environ.get",
"websocket.create_connection"
] |
[((156, 205), 'os.environ.get', 'os.environ.get', (['"""MYCROFTCL_LOGGING"""', 'logging.WARN'], {}), "('MYCROFTCL_LOGGING', logging.WARN)\n", (170, 205), False, 'import os\n'), ((206, 250), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'MYCROFTCL_LOGGING'}), '(level=MYCROFTCL_LOGGING)\n', (225, 250), False, 'import logging\n'), ((328, 371), 'os.environ.get', 'os.environ.get', (['"""MYCROFT_ADDR"""', '"""localhost"""'], {}), "('MYCROFT_ADDR', 'localhost')\n", (342, 371), False, 'import os\n'), ((387, 425), 'os.environ.get', 'os.environ.get', (['"""MYCROFT_PORT"""', '"""8181"""'], {}), "('MYCROFT_PORT', '8181')\n", (401, 425), False, 'import os\n'), ((445, 523), 'os.environ.get', 'os.environ.get', (['"""MYCROFT_JSON_DIR"""', 'f"""{local_file_path}/mycroft-json-messages"""'], {}), "('MYCROFT_JSON_DIR', f'{local_file_path}/mycroft-json-messages')\n", (459, 523), False, 'import os\n'), ((537, 568), 'os.environ.get', 'os.environ.get', (['"""LANG"""', '"""en-us"""'], {}), "('LANG', 'en-us')\n", (551, 568), False, 'import os\n'), ((569, 599), 'logging.debug', 'logging.debug', (['"""ENV VARS SET:"""'], {}), "('ENV VARS SET:')\n", (582, 599), False, 'import logging\n'), ((600, 647), 'logging.debug', 'logging.debug', (['f"""MYCROFT_ADDR = {MYCROFT_ADDR}"""'], {}), "(f'MYCROFT_ADDR = {MYCROFT_ADDR}')\n", (613, 647), False, 'import logging\n'), ((648, 695), 'logging.debug', 'logging.debug', (['f"""MYCROFT_PORT = {MYCROFT_PORT}"""'], {}), "(f'MYCROFT_PORT = {MYCROFT_PORT}')\n", (661, 695), False, 'import logging\n'), ((696, 751), 'logging.debug', 'logging.debug', (['f"""MYCROFT_JSON_DIR = {MYCROFT_JSON_DIR}"""'], {}), "(f'MYCROFT_JSON_DIR = {MYCROFT_JSON_DIR}')\n", (709, 751), False, 'import logging\n'), ((752, 783), 'logging.debug', 'logging.debug', (['f"""LANG = {LANG}"""'], {}), "(f'LANG = {LANG}')\n", (765, 783), False, 'import logging\n'), ((285, 311), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (301, 311), False, 'import 
os\n'), ((1019, 1057), 'logging.debug', 'logging.debug', (['f"""Websocket url: {url}"""'], {}), "(f'Websocket url: {url}')\n", (1032, 1057), False, 'import logging\n'), ((1067, 1089), 'websocket.create_connection', 'create_connection', (['url'], {}), '(url)\n', (1084, 1089), False, 'from websocket import create_connection\n'), ((1554, 1594), 'logging.debug', 'logging.debug', (['f"""json_file: {json_file}"""'], {}), "(f'json_file: {json_file}')\n", (1567, 1594), False, 'import logging\n'), ((1674, 1715), 'logging.debug', 'logging.debug', (['f"""json_message: {message}"""'], {}), "(f'json_message: {message}')\n", (1687, 1715), False, 'import logging\n'), ((2398, 2416), 'sys.stdin.isatty', 'sys.stdin.isatty', ([], {}), '()\n', (2414, 2416), False, 'import sys\n'), ((1107, 1147), 'logging.debug', 'logging.debug', (['f"""String sent: {message}"""'], {}), "(f'String sent: {message}')\n", (1120, 1147), False, 'import logging\n'), ((1207, 1251), 'logging.debug', 'logging.debug', (['f"""Send status: {send_status}"""'], {}), "(f'Send status: {send_status}')\n", (1220, 1251), False, 'import logging\n'), ((1287, 1323), 'logging.debug', 'logging.debug', (['f"""received: {result}"""'], {}), "(f'received: {result}')\n", (1300, 1323), False, 'import logging\n'), ((1656, 1669), 'json.load', 'json.load', (['fh'], {}), '(fh)\n', (1665, 1669), False, 'import json\n'), ((2426, 2472), 'logging.debug', 'logging.debug', (['f"""Passing args: {sys.argv[2:]}"""'], {}), "(f'Passing args: {sys.argv[2:]}')\n", (2439, 2472), False, 'import logging\n'), ((2530, 2580), 'logging.debug', 'logging.debug', (['"""No args given defaulting to stdin"""'], {}), "('No args given defaulting to stdin')\n", (2543, 2580), False, 'import logging\n'), ((1178, 1197), 'json.dumps', 'json.dumps', (['message'], {}), '(message)\n', (1188, 1197), False, 'import json\n')]
|
import os
import hashlib
import requests
def get_checksums_and_file_names(path):
""" Reads the local checksums file """
with open(path) as in_f:
return zip(*[map(lambda x: x.strip('\n\r\t '), l.strip(" ").split(" ", maxsplit=1)) for l in in_f.readlines()])
def validate_sha256(local_path, sha256):
"""
Computes the md5 checksum of a file and compares it to the passed sha256 hexdigest checksum
"""
file_hash = hashlib.sha256()
with open(local_path, "rb") as in_f:
for chunk in iter(lambda: in_f.read(512 * file_hash.block_size), b''):
file_hash.update(chunk)
return file_hash.hexdigest() == sha256
def download_and_validate(download_url, sha256, out_path):
"""
Download file 'file_name' and validate sha256 checksum against 'sha256'.
Saves the downloaded file to 'out_path'.
If the file already exists, and have a valid sha256, the download is skipped.
"""
if os.path.exists(out_path):
if validate_sha256(out_path, sha256):
print("... skipping (already downloaded with valid sha256)")
return
else:
print("... File exists, but invalid SHA256, re-downloading")
response = requests.get(download_url, allow_redirects=True)
if response.ok:
with open(out_path, "wb") as out_f:
out_f.write(response.content)
else:
raise ValueError("Could not download file from URL {}. "
"Received HTTP response with status code {}".format(download_url,
response.status_code))
if not validate_sha256(out_path, sha256):
os.remove(out_path)
raise ValueError(f"Invalid sha256 for file at {download_url} "
f"(please restart download)")
|
[
"hashlib.sha256",
"os.remove",
"os.path.exists",
"requests.get"
] |
[((446, 462), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (460, 462), False, 'import hashlib\n'), ((954, 978), 'os.path.exists', 'os.path.exists', (['out_path'], {}), '(out_path)\n', (968, 978), False, 'import os\n'), ((1221, 1269), 'requests.get', 'requests.get', (['download_url'], {'allow_redirects': '(True)'}), '(download_url, allow_redirects=True)\n', (1233, 1269), False, 'import requests\n'), ((1696, 1715), 'os.remove', 'os.remove', (['out_path'], {}), '(out_path)\n', (1705, 1715), False, 'import os\n')]
|
import model3 as M
import numpy as np
import tensorflow as tf
import data_reader
class VariationalDrop(M.Model):
# arxiv 1512.05287
def initialize(self, drop_rate):
self.drop_rate = drop_rate
def _get_mask(self, shape):
# (time, batch, dim)
mask = np.random.choice(2, size=(1, shape[1], shape[2]), p=[1-self.drop_rate, self.drop_rate])
return tf.convert_to_tensor(mask)
def forward(self, x):
shape = x.shape()
mask = self._get_mask(shape)
x = x * mask
return x
class predNet(M.Model):
def initialize(self):
self.enc = M.Dense(128)
self.LSTM = M.LSTM(128)
self.dec = M.Dense(17*3)
def forward(self, x):
x = [self.enc(_) for _ in x]
y = self.LSTM(x[:-1])
y = [self.dec(_) for _ in y]
return y
def loss_grad(x, model):
label = x[1:]
with tf.GradientTape() as tape:
out = model(x)
# print(len(out))
sub = [tf.square(o-l) for o,l in zip(out, label)]
loss = tf.reduce_mean(sub)
grad = tape.gradient(loss, model.trainable_variables)
return grad, [loss]
reader = data_reader.data_reader()
model = predNet()
optim = tf.optimizers.Adam(0.001)
saver = M.Saver(model)
ITER = 10000
for i in range(ITER + 1):
data = reader.get_next(32, 16)
grad, ls = loss_grad(data, model)
optim.apply_gradients(zip(grad, model.trainable_variables))
if i%10==0:
print('Iter:%d\tLoss:%.4f'%(i, ls[0]))
if i%2000==0:
saver.save('./model/model.ckpt')
|
[
"model3.Saver",
"model3.LSTM",
"tensorflow.square",
"data_reader.data_reader",
"tensorflow.convert_to_tensor",
"tensorflow.reduce_mean",
"tensorflow.optimizers.Adam",
"model3.Dense",
"numpy.random.choice",
"tensorflow.GradientTape"
] |
[((1014, 1039), 'data_reader.data_reader', 'data_reader.data_reader', ([], {}), '()\n', (1037, 1039), False, 'import data_reader\n'), ((1067, 1092), 'tensorflow.optimizers.Adam', 'tf.optimizers.Adam', (['(0.001)'], {}), '(0.001)\n', (1085, 1092), True, 'import tensorflow as tf\n'), ((1102, 1116), 'model3.Saver', 'M.Saver', (['model'], {}), '(model)\n', (1109, 1116), True, 'import model3 as M\n'), ((261, 354), 'numpy.random.choice', 'np.random.choice', (['(2)'], {'size': '(1, shape[1], shape[2])', 'p': '[1 - self.drop_rate, self.drop_rate]'}), '(2, size=(1, shape[1], shape[2]), p=[1 - self.drop_rate,\n self.drop_rate])\n', (277, 354), True, 'import numpy as np\n'), ((358, 384), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['mask'], {}), '(mask)\n', (378, 384), True, 'import tensorflow as tf\n'), ((548, 560), 'model3.Dense', 'M.Dense', (['(128)'], {}), '(128)\n', (555, 560), True, 'import model3 as M\n'), ((575, 586), 'model3.LSTM', 'M.LSTM', (['(128)'], {}), '(128)\n', (581, 586), True, 'import model3 as M\n'), ((600, 615), 'model3.Dense', 'M.Dense', (['(17 * 3)'], {}), '(17 * 3)\n', (607, 615), True, 'import model3 as M\n'), ((782, 799), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (797, 799), True, 'import tensorflow as tf\n'), ((907, 926), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['sub'], {}), '(sub)\n', (921, 926), True, 'import tensorflow as tf\n'), ((855, 871), 'tensorflow.square', 'tf.square', (['(o - l)'], {}), '(o - l)\n', (864, 871), True, 'import tensorflow as tf\n')]
|
import importlib
import inspect
import pandas as pd
import pymarketstore as pymkts
from collections import defaultdict
from typing import *
from .base_symbol_analyzer import SymbolAnalyzer
class AnalyzersRunner:
def __init__(self, analyzers_module_name, day=None):
self.analyzers_module_name = analyzers_module_name
self.day = day
self.client = pymkts.Client()
def run(self, symbol) -> Dict[str, Dict[str, Union[bool, int, float, str]]]:
"""
returns a dict of dicts
outer dict is keyed by timeframe, inner dict is keyed by column names
"""
analyzers = self.get_analyzers()
results: Dict[str, Dict[str, Union[bool, int, float, str]]] = {}
for timeframe, analyzers in analyzers.items():
df = self.get_data(symbol, timeframe, limits=[
x.lookback for x in analyzers
])
if not len(df):
continue
results[timeframe] = dict(symbol=symbol)
for analyzer in analyzers:
columns = set(results[timeframe].keys())
data = analyzer.run(df)
conflicts = columns & set(data.keys())
if conflicts:
raise Exception('conflicting column name(s) between analyzers: '
', '.join(conflicts))
results[timeframe].update(data)
return results
def get_data(self, symbol, timeframe, limits) -> pd.DataFrame:
limit = None
if all(isinstance(x, int) for x in limits):
limit = max(*limits) if len(limits) > 1 else limits[0]
return self.client.query(
pymkts.Params(symbol, timeframe, attrgroup='OHLCV', limit=limit,
end=pd.Timestamp(self.day.date(), tz='America/New_York'))
).first().df()
def get_analyzers(self) -> Dict[str, List[SymbolAnalyzer]]:
module = importlib.import_module(self.analyzers_module_name)
classes = inspect.getmembers(module, lambda x: (isinstance(x, type)
and issubclass(x, SymbolAnalyzer)
and x != SymbolAnalyzer))
analyzers = defaultdict(list)
for _, analyzer in classes:
for timeframe in analyzer.timeframes:
analyzers[timeframe].append(analyzer)
return analyzers
|
[
"collections.defaultdict",
"importlib.import_module",
"pymarketstore.Client"
] |
[((377, 392), 'pymarketstore.Client', 'pymkts.Client', ([], {}), '()\n', (390, 392), True, 'import pymarketstore as pymkts\n'), ((1950, 2001), 'importlib.import_module', 'importlib.import_module', (['self.analyzers_module_name'], {}), '(self.analyzers_module_name)\n', (1973, 2001), False, 'import importlib\n'), ((2270, 2287), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (2281, 2287), False, 'from collections import defaultdict\n')]
|
import pygame
from settings import *
from os import listdir
class Paddle(pygame.sprite.Sprite):
def __init__(self, groups):
super().__init__(groups)
# Setup
# image is mandatory attribute for pygame sprites.
self.textures = []
for image in listdir(IMGS_DIR / 'paddle'):
texture = pygame.image.load(IMGS_DIR / 'paddle' / image)
texture = pygame.transform.scale(texture, (WINDOW_WIDTH // 10, WINDOW_HEIGHT // 20))
texture.set_colorkey((0, 0, 0))
self.textures.append(texture)
self.cur_img = 0
self.image = self.textures[self.cur_img]
# Initial Parameters
self.rect = self.image.get_rect(midbottom=(WINDOW_WIDTH // 2, WINDOW_HEIGHT - 20))
self.previous_rect = self.rect.copy()
self.direction = pygame.math.Vector2()
self.speed = 10
def input(self):
keys = pygame.key.get_pressed()
if keys[pygame.K_RIGHT] and self.rect.right < WINDOW_WIDTH:
self.direction.x = 1
elif keys[pygame.K_LEFT] and self.rect.left > 0:
self.direction.x = -1
else:
self.direction.x = 0
def update(self):
self.previous_rect = self.rect.copy()
self.input()
self.rect.x += self.direction.x * self.speed
# Paddle Animation
if self.cur_img > len(self.textures):
self.cur_img = 0
self.cur_img += 0.05
self.image = self.textures[int(self.cur_img) % len(self.textures)]
|
[
"pygame.math.Vector2",
"pygame.transform.scale",
"pygame.image.load",
"pygame.key.get_pressed",
"os.listdir"
] |
[((286, 314), 'os.listdir', 'listdir', (["(IMGS_DIR / 'paddle')"], {}), "(IMGS_DIR / 'paddle')\n", (293, 314), False, 'from os import listdir\n'), ((834, 855), 'pygame.math.Vector2', 'pygame.math.Vector2', ([], {}), '()\n', (853, 855), False, 'import pygame\n'), ((917, 941), 'pygame.key.get_pressed', 'pygame.key.get_pressed', ([], {}), '()\n', (939, 941), False, 'import pygame\n'), ((338, 384), 'pygame.image.load', 'pygame.image.load', (["(IMGS_DIR / 'paddle' / image)"], {}), "(IMGS_DIR / 'paddle' / image)\n", (355, 384), False, 'import pygame\n'), ((407, 481), 'pygame.transform.scale', 'pygame.transform.scale', (['texture', '(WINDOW_WIDTH // 10, WINDOW_HEIGHT // 20)'], {}), '(texture, (WINDOW_WIDTH // 10, WINDOW_HEIGHT // 20))\n', (429, 481), False, 'import pygame\n')]
|
# -*- coding: utf-8 -*-
## @file testsuite/python/cvMatTest.py
## @date jan. 2017
## @author PhRG - opticalp.fr
##
## Test the cvMat data generator modules
#
# Copyright (c) 2017 <NAME> / Opticalp
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
def myMain(baseDir):
"""Main function. Run the tests. """
import os
import time
print("Test the basic features of the cvMat data generator modules. ")
from instru import Factory, DataLogger
from instru import bind, dataLoggerClasses, runModule, waitAll
fac = Factory("DataGenFactory")
print("Retrieved factory: " + fac.name)
print("Create module from cvMatDataGen factory")
try:
imgGen = fac.select("cvMat").create("imgGen")
imgGen2 = fac.select("cvMat").create("imgGen2")
except RuntimeError as e:
print("Runtime error: " + str(e))
print("OpenCV is probably not present. Exiting. ")
exit(0)
print("module " + imgGen.name + " created (" + imgGen.internalName + ") ")
print("Set imgGen output value to 127")
imgGen.setParameterValue("value", 127)
print("Set imgGen2 output value to 63")
imgGen2.setParameterValue("value", 63)
print("Run module")
task = runModule(imgGen)
print(task.name + " state is " + task.state())
task.wait()
print(task.name + " state is " + task.state())
print("Attaching a data logger to show the image...")
loggerClasses = dataLoggerClasses() # DataManager::dataLoggerClasses()
print("Available data logger classes: ")
for loggerClass in loggerClasses:
print(" - " + loggerClass + ": " + loggerClasses[loggerClass])
print('Loggers creation using the constructor: DataLogger("ShowImageLogger")')
logger = DataLogger("ShowImageLogger")
logger2 = DataLogger("ShowImageLogger")
print("Logger description: " + logger.description)
print("set logger2 imagePanel to 1")
logger2.setParameterValue("imagePanel",1)
print("bind the loggers")
imgGen.outPort("data").register(logger)
imgGen2.outPort("data").register(logger2)
print("run imgGen and imgGen2")
runModule(imgGen)
waitAll() # to avoid the linux xcb error while not initializing XInitThreads
runModule(imgGen2)
time.sleep(1) # wait 1s in order to show the images
print("Set output value to 128")
imgGen.setParameterValue("value", 128)
print("Add a save image logger")
saver = DataLogger("SaveImageLogger")
imgGen.outPort("data").register(saver)
files = os.listdir(".")
if files.count("img_01.png")>0:
os.remove("img_01.png")
runModule(imgGen)
waitAll()
print("check if the image is present in the current directory")
files = os.listdir(".")
if files.count("img_01.png")!=1:
raise RuntimeError("image img_01.png not created")
print("try SaveImageLogger normalization")
saver.setParameterValue("normalization", "max")
if files.count("img_02.png")>0:
os.remove("img_02.png")
runModule(imgGen)
waitAll()
print("check if the new image is present in the current directory")
files = os.listdir(".")
if files.count("img_02.png")!=1:
raise RuntimeError("image img_02.png not created")
print("End of script cvMatTest.py")
# main body
import sys
import os
from os.path import dirname
if len(sys.argv) >= 1:
# probably called from InstrumentAll
checker = os.path.basename(sys.argv[0])
if checker == "instrumentall" or checker == "instrumentall.exe":
print("current script: ",os.path.realpath(__file__))
baseDir = dirname(dirname(__file__))
myMain(baseDir)
exit(0)
print("Presumably not called from InstrumentAll >> Exiting...")
exit("This script has to be launched from inside InstrumentAll")
|
[
"instru.Factory",
"os.remove",
"os.path.basename",
"os.path.realpath",
"instru.dataLoggerClasses",
"os.path.dirname",
"time.sleep",
"instru.waitAll",
"instru.runModule",
"instru.DataLogger",
"os.listdir"
] |
[((1563, 1588), 'instru.Factory', 'Factory', (['"""DataGenFactory"""'], {}), "('DataGenFactory')\n", (1570, 1588), False, 'from instru import Factory, DataLogger\n'), ((2264, 2281), 'instru.runModule', 'runModule', (['imgGen'], {}), '(imgGen)\n', (2273, 2281), False, 'from instru import bind, dataLoggerClasses, runModule, waitAll\n'), ((2480, 2499), 'instru.dataLoggerClasses', 'dataLoggerClasses', ([], {}), '()\n', (2497, 2499), False, 'from instru import bind, dataLoggerClasses, runModule, waitAll\n'), ((2790, 2819), 'instru.DataLogger', 'DataLogger', (['"""ShowImageLogger"""'], {}), "('ShowImageLogger')\n", (2800, 2819), False, 'from instru import Factory, DataLogger\n'), ((2834, 2863), 'instru.DataLogger', 'DataLogger', (['"""ShowImageLogger"""'], {}), "('ShowImageLogger')\n", (2844, 2863), False, 'from instru import Factory, DataLogger\n'), ((3169, 3186), 'instru.runModule', 'runModule', (['imgGen'], {}), '(imgGen)\n', (3178, 3186), False, 'from instru import bind, dataLoggerClasses, runModule, waitAll\n'), ((3191, 3200), 'instru.waitAll', 'waitAll', ([], {}), '()\n', (3198, 3200), False, 'from instru import bind, dataLoggerClasses, runModule, waitAll\n'), ((3272, 3290), 'instru.runModule', 'runModule', (['imgGen2'], {}), '(imgGen2)\n', (3281, 3290), False, 'from instru import bind, dataLoggerClasses, runModule, waitAll\n'), ((3295, 3308), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3305, 3308), False, 'import time\n'), ((3478, 3507), 'instru.DataLogger', 'DataLogger', (['"""SaveImageLogger"""'], {}), "('SaveImageLogger')\n", (3488, 3507), False, 'from instru import Factory, DataLogger\n'), ((3564, 3579), 'os.listdir', 'os.listdir', (['"""."""'], {}), "('.')\n", (3574, 3579), False, 'import os\n'), ((3653, 3670), 'instru.runModule', 'runModule', (['imgGen'], {}), '(imgGen)\n', (3662, 3670), False, 'from instru import bind, dataLoggerClasses, runModule, waitAll\n'), ((3675, 3684), 'instru.waitAll', 'waitAll', ([], {}), '()\n', (3682, 3684), False, 'from 
instru import bind, dataLoggerClasses, runModule, waitAll\n'), ((3766, 3781), 'os.listdir', 'os.listdir', (['"""."""'], {}), "('.')\n", (3776, 3781), False, 'import os\n'), ((4056, 4073), 'instru.runModule', 'runModule', (['imgGen'], {}), '(imgGen)\n', (4065, 4073), False, 'from instru import bind, dataLoggerClasses, runModule, waitAll\n'), ((4078, 4087), 'instru.waitAll', 'waitAll', ([], {}), '()\n', (4085, 4087), False, 'from instru import bind, dataLoggerClasses, runModule, waitAll\n'), ((4173, 4188), 'os.listdir', 'os.listdir', (['"""."""'], {}), "('.')\n", (4183, 4188), False, 'import os\n'), ((4487, 4516), 'os.path.basename', 'os.path.basename', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (4503, 4516), False, 'import os\n'), ((3624, 3647), 'os.remove', 'os.remove', (['"""img_01.png"""'], {}), "('img_01.png')\n", (3633, 3647), False, 'import os\n'), ((4027, 4050), 'os.remove', 'os.remove', (['"""img_02.png"""'], {}), "('img_02.png')\n", (4036, 4050), False, 'import os\n'), ((4619, 4645), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (4635, 4645), False, 'import os\n'), ((4682, 4699), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (4689, 4699), False, 'from os.path import dirname\n')]
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from ...utils import common_utils
from .roi_head_template import RoIHeadTemplate
from ..model_utils.model_nms_utils import class_agnostic_nms
class CenterROIHead(RoIHeadTemplate):
def __init__(self, input_channels, model_cfg, num_class=1, code_size=7):
super().__init__(num_class=num_class, model_cfg=model_cfg)
self.model_cfg = model_cfg
self.input_channels = input_channels
pre_channel = input_channels
shared_fc_list = []
for k in range(0, self.model_cfg.SHARED_FC.__len__()):
shared_fc_list.extend([
nn.Conv1d(pre_channel, self.model_cfg.SHARED_FC[k], kernel_size=1, bias=False),
nn.BatchNorm1d(self.model_cfg.SHARED_FC[k]),
nn.ReLU()
])
pre_channel = self.model_cfg.SHARED_FC[k]
if k != self.model_cfg.SHARED_FC.__len__() - 1 and self.model_cfg.DP_RATIO > 0:
shared_fc_list.append(nn.Dropout(self.model_cfg.DP_RATIO))
self.shared_fc_layer = nn.Sequential(*shared_fc_list)
self.cls_layers = self.make_fc_layers(
input_channels=pre_channel, output_channels=self.num_class, fc_list=self.model_cfg.CLS_FC
)
self.reg_layers = self.make_fc_layers(
input_channels=pre_channel,
output_channels=code_size * self.num_class,
fc_list=self.model_cfg.REG_FC
)
self.init_weights(weight_init='xavier')
def init_weights(self, weight_init='xavier'):
if weight_init == 'kaiming':
init_func = nn.init.kaiming_normal_
elif weight_init == 'xavier':
init_func = nn.init.xavier_normal_
elif weight_init == 'normal':
init_func = nn.init.normal_
else:
raise NotImplementedError
for m in self.modules():
if isinstance(m, nn.Conv2d) or isinstance(m, nn.Conv1d):
if weight_init == 'normal':
init_func(m.weight, mean=0, std=0.001)
else:
init_func(m.weight)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
nn.init.normal_(self.reg_layers[-1].weight, mean=0, std=0.001)
def reorder_first_stage_pred_and_feature(self, batch_dict, nms_config):
first_pred = batch_dict['pred_dicts']
features = batch_dict['ret_maps']
batch_size = len(first_pred)
box_length = first_pred[0]['pred_boxes'].shape[1]
feature_vector_length = features[0].shape[-1]
rois = first_pred[0]['pred_boxes'].new_zeros((batch_size, nms_config.NMS_POST_MAXSIZE, box_length))
roi_scores = first_pred[0]['pred_scores'].new_zeros((batch_size, nms_config.NMS_POST_MAXSIZE))
roi_labels = first_pred[0]['pred_labels'].new_zeros((batch_size, nms_config.NMS_POST_MAXSIZE), dtype=torch.long)
roi_features = features[0].new_zeros((batch_size, nms_config.NMS_POST_MAXSIZE, feature_vector_length))
for i in range(batch_size):
num_obj = features[i].shape[0]
# print(num_obj)
# basically move rotation to position 6, so now the box is 7 + C . C is 2 for nuscenes to
# include velocity target
box_preds = first_pred[i]['pred_boxes']
cls_preds = first_pred[i]['pred_labels']
scores_preds = first_pred[i]['pred_scores']
# if nms_config.MULTI_CLASSES_NMS:
# raise NotImplementedError
# else:
# selected, selected_scores = class_agnostic_nms(
# box_scores=scores_preds, box_preds=box_preds, nms_config=nms_config
# )
# print(selected)
# print(box_preds.shape)
rois[i, :num_obj] = box_preds
roi_labels[i, :num_obj] = cls_preds
roi_scores[i, :num_obj] = scores_preds
roi_features[i, :num_obj] = features[i]
batch_dict['rois'] = rois
batch_dict['roi_labels'] = roi_labels
batch_dict['roi_scores'] = roi_scores
batch_dict['roi_features'] = roi_features
batch_dict['has_class_labels'] = True
return batch_dict
def get_box_reg_layer_loss(self, forward_ret_dict):
loss_cfgs = self.model_cfg.LOSS_CONFIG
code_size = forward_ret_dict['rcnn_reg'].shape[-1]
reg_valid_mask = forward_ret_dict['reg_valid_mask'].view(-1)
gt_boxes3d_ct = forward_ret_dict['gt_of_rois'][..., 0:code_size]
rcnn_reg = forward_ret_dict['rcnn_reg'] # (rcnn_batch_size, C)
rcnn_batch_size = gt_boxes3d_ct.view(-1, code_size).shape[0]
fg_mask = (reg_valid_mask > 0)
fg_sum = fg_mask.long().sum().item()
tb_dict = {}
if loss_cfgs.REG_LOSS == 'L1':
reg_targets = gt_boxes3d_ct.view(rcnn_batch_size, -1)
rcnn_loss_reg = F.l1_loss(
rcnn_reg.view(rcnn_batch_size, -1),
reg_targets,
reduction='none'
) # [B, M, 7]
rcnn_loss_reg = rcnn_loss_reg * rcnn_loss_reg.new_tensor(loss_cfgs.LOSS_WEIGHTS['code_weights'])
rcnn_loss_reg = (rcnn_loss_reg.view(rcnn_batch_size, -1) * fg_mask.unsqueeze(dim=-1).float()).sum() / max(
fg_sum, 1)
rcnn_loss_reg = rcnn_loss_reg * loss_cfgs.LOSS_WEIGHTS['rcnn_reg_weight']
tb_dict['rcnn_loss_reg'] = rcnn_loss_reg.item()
else:
raise NotImplementedError
return rcnn_loss_reg, tb_dict
def generate_predicted_boxes(self, batch_size, rois, cls_preds, box_preds):
"""
Args:
batch_size:
rois: (B, N, 7)
cls_preds: (BN, num_class)
box_preds: (BN, code_size)
Returns:
"""
code_size = box_preds.shape[-1]
# batch_cls_preds: (B, N, num_class or 1)
batch_cls_preds = cls_preds.view(batch_size, -1, cls_preds.shape[-1])
batch_box_preds = box_preds.view(batch_size, -1, code_size)
roi_ry = rois[:, :, 6].view(-1)
roi_xyz = rois[:, :, 0:3].view(-1, 3)
local_rois = rois.clone().detach()
local_rois[:, :, 0:3] = 0
batch_box_preds = (batch_box_preds + local_rois).view(-1, code_size)
batch_box_preds = common_utils.rotate_points_along_z(
batch_box_preds.unsqueeze(dim=1), roi_ry
).squeeze(dim=1)
batch_box_preds[:, 0:3] += roi_xyz
batch_box_preds = batch_box_preds.view(batch_size, -1, code_size)
return batch_cls_preds, batch_box_preds
def assign_targets(self, batch_dict):
import numpy as np
def limit_period(val, offset=0.5, period=np.pi):
return val - torch.floor(val / period + offset) * period
batch_size = batch_dict['batch_size']
with torch.no_grad():
targets_dict = self.proposal_target_layer.forward(batch_dict)
rois = targets_dict['rois'] # (B, N, 7 + C)
gt_of_rois = targets_dict['gt_of_rois'] # (B, N, 7 + C + 1)
targets_dict['gt_of_rois_src'] = gt_of_rois.clone().detach()
roi_ry = limit_period(rois[:, :, 6], offset=0.5, period=np.pi * 2)
gt_of_rois[:, :, :6] = gt_of_rois[:, :, :6] - rois[:, :, :6]
gt_of_rois[:, :, 6] = gt_of_rois[:, :, 6] - roi_ry
# transfer LiDAR coords to local coords
gt_of_rois = common_utils.rotate_points_along_z(
points=gt_of_rois.view(-1, 1, gt_of_rois.shape[-1]), angle=-roi_ry.view(-1)
).view(batch_size, -1, gt_of_rois.shape[-1])
# flip orientation if rois have opposite orientation
heading_label = gt_of_rois[:, :, 6] % (2 * np.pi) # 0 ~ 2pi
opposite_flag = (heading_label > np.pi * 0.5) & (heading_label < np.pi * 1.5)
heading_label[opposite_flag] = (heading_label[opposite_flag] + np.pi) % (2 * np.pi) # (0 ~ pi/2, 3pi/2 ~ 2pi)
flag = heading_label > np.pi
heading_label[flag] = heading_label[flag] - np.pi * 2 # (-pi/2, pi/2)
heading_label = torch.clamp(heading_label, min=-np.pi / 2, max=np.pi / 2)
gt_of_rois[:, :, 6] = heading_label
targets_dict['gt_of_rois'] = gt_of_rois
return targets_dict
def forward(self, batch_dict):
"""
:param input_data: input dict
:return:
"""
batch_dict = self.reorder_first_stage_pred_and_feature(
batch_dict, nms_config=self.model_cfg.NMS_CONFIG['TRAIN' if self.training else 'TEST']
)
if self.training:
targets_dict = self.assign_targets(batch_dict)
batch_dict['rois'] = targets_dict['rois']
batch_dict['roi_labels'] = targets_dict['roi_labels']
batch_dict['roi_features'] = targets_dict['roi_features']
# RoI aware pooling
pooled_features = batch_dict['roi_features'].reshape(-1, 1, batch_dict['roi_features'].shape[-1]).contiguous() # (BxN, 1, C)
batch_size_rcnn = pooled_features.shape[0]
pooled_features = pooled_features.permute(0, 2, 1).contiguous() # (BxN, C, 1)
shared_features = self.shared_fc_layer(pooled_features.view(batch_size_rcnn, -1, 1))
rcnn_cls = self.cls_layers(shared_features).transpose(1, 2).contiguous().squeeze(dim=1) # (B, 1 or 2)
rcnn_reg = self.reg_layers(shared_features).transpose(1, 2).contiguous().squeeze(dim=1) # (B, C)
if not self.training:
batch_cls_preds, batch_box_preds = self.generate_predicted_boxes(
batch_size=batch_dict['batch_size'], rois=batch_dict['rois'], cls_preds=rcnn_cls, box_preds=rcnn_reg
)
batch_dict['batch_cls_preds'] = batch_cls_preds
batch_dict['batch_box_preds'] = batch_box_preds
batch_dict['cls_preds_normalized'] = False
else:
targets_dict['rcnn_cls'] = rcnn_cls
targets_dict['rcnn_reg'] = rcnn_reg
self.forward_ret_dict = targets_dict
return batch_dict
|
[
"torch.nn.Dropout",
"torch.nn.ReLU",
"torch.nn.Sequential",
"torch.nn.Conv1d",
"torch.nn.BatchNorm1d",
"torch.nn.init.normal_",
"torch.clamp",
"torch.nn.init.constant_",
"torch.floor",
"torch.no_grad"
] |
[((1091, 1121), 'torch.nn.Sequential', 'nn.Sequential', (['*shared_fc_list'], {}), '(*shared_fc_list)\n', (1104, 1121), True, 'import torch.nn as nn\n'), ((2240, 2302), 'torch.nn.init.normal_', 'nn.init.normal_', (['self.reg_layers[-1].weight'], {'mean': '(0)', 'std': '(0.001)'}), '(self.reg_layers[-1].weight, mean=0, std=0.001)\n', (2255, 2302), True, 'import torch.nn as nn\n'), ((8125, 8182), 'torch.clamp', 'torch.clamp', (['heading_label'], {'min': '(-np.pi / 2)', 'max': '(np.pi / 2)'}), '(heading_label, min=-np.pi / 2, max=np.pi / 2)\n', (8136, 8182), False, 'import torch\n'), ((6914, 6929), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (6927, 6929), False, 'import torch\n'), ((655, 733), 'torch.nn.Conv1d', 'nn.Conv1d', (['pre_channel', 'self.model_cfg.SHARED_FC[k]'], {'kernel_size': '(1)', 'bias': '(False)'}), '(pre_channel, self.model_cfg.SHARED_FC[k], kernel_size=1, bias=False)\n', (664, 733), True, 'import torch.nn as nn\n'), ((751, 794), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['self.model_cfg.SHARED_FC[k]'], {}), '(self.model_cfg.SHARED_FC[k])\n', (765, 794), True, 'import torch.nn as nn\n'), ((812, 821), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (819, 821), True, 'import torch.nn as nn\n'), ((1022, 1057), 'torch.nn.Dropout', 'nn.Dropout', (['self.model_cfg.DP_RATIO'], {}), '(self.model_cfg.DP_RATIO)\n', (1032, 1057), True, 'import torch.nn as nn\n'), ((2203, 2231), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (2220, 2231), True, 'import torch.nn as nn\n'), ((6810, 6844), 'torch.floor', 'torch.floor', (['(val / period + offset)'], {}), '(val / period + offset)\n', (6821, 6844), False, 'import torch\n')]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os, time, json, urllib3, mysql.connector
DB_DATA = {
"USER": "DB_admin",
"PASS": "<PASSWORD>",
"DB": "dispositivos"
}
EMULATOR_DATA = {
"IP": "172.22.0.98",
"PORT": "8000"
}
# We need to open a transaction for EACH access so that we see an updated version of the DB
# Otherwise we'll only see changes made in our session... We could try to establish a READ COMMITED
# isolation level but our DBMS (DB Management System) doesn't seem to recognize it...
# DB.start_transaction(isolation_level = "READ COMMITTED")
sensor_types = {"temperature": 0, "humidity": 0, "light": 0, "sound": 0, "motion": 0}
RGB_values = {"red": 0, "green": 0, "blue": 0}
server_addr = "http://" + EMULATOR_DATA["IP"] + ":" + EMULATOR_DATA["PORT"] + "/"
# Save 10 connections in a pooling state: we keep them open to avoid having to reestablishing them with each new request
# This'll let us get data faster as we are making quite a lot of queries...
http = urllib3.PoolManager()
def update_sensors(sen):
DB = mysql.connector.connect(host = "localhost", user = DB_DATA["USER"], passwd = DB_DATA["PASS"], db = DB_DATA["DB"])
cursor = DB.cursor()
try:
response = http.request('GET', server_addr + sen)
data = json.loads(response.data)
value = data[sen]
except ValueError:
print("Error reading sensor data...")
if value != sensor_types[sen]:
try:
sensor_types[sen] = value
cursor.execute ("INSERT INTO sensors(nombre, valor) values(%s, %s)", (sen, value))
DB.commit()
except ValueError:
print('Error al insertar en base de datos')
DB.close()
def update_light_colour():
DB = mysql.connector.connect(host = "localhost", user = DB_DATA["USER"], passwd = DB_DATA["PASS"], db = DB_DATA["DB"])
cursor = DB.cursor()
for colour in RGB_values:
try:
cursor.execute("SELECT valor FROM sensors WHERE nombre='" + colour + "' ORDER BY time DESC LIMIT 1")
cursor_value = int(cursor.fetchone()[0])
if cursor_value != RGB_values[colour]:
RGB_values[colour] = cursor_value
print(colour.capitalize() + ": " + str(cursor_value))
response = http.request("PUT", server_addr + colour + "/" + str(cursor_value))
except ValueError:
print("Error in DB connection or PUT request...")
DB.close()
if __name__ == "__main__":
DB = mysql.connector.connect(host = "localhost", user = DB_DATA["USER"], passwd = DB_DATA["PASS"], db = DB_DATA["DB"])
cursor = DB.cursor()
cursor.execute ("DROP table sensors")
cursor.execute ("create table sensors( time TIMESTAMP DEFAULT CURRENT_TIMESTAMP, nombre VARCHAR(15), valor INTEGER)")
cursor.execute ("INSERT INTO sensors(nombre, valor) values(%s, %s)", ('temperature', 20))
cursor.execute ("INSERT INTO sensors(nombre, valor) values(%s, %s)", ('humidity', 40))
cursor.execute ("INSERT INTO sensors(nombre, valor) values(%s, %s)", ('light', 30))
cursor.execute ("INSERT INTO sensors(nombre, valor) values(%s, %s)", ('sound', 10))
cursor.execute ("INSERT INTO sensors(nombre, valor) values(%s, %s)", ('motion', 0))
cursor.execute ("INSERT INTO sensors(nombre, valor) values(%s, %s)", ('red', 20))
cursor.execute ("INSERT INTO sensors(nombre, valor) values(%s, %s)", ('blue', 20))
cursor.execute ("INSERT INTO sensors(nombre, valor) values(%s, %s)", ('green', 20))
DB.commit()
DB.close()
while True:
for sensor in sensor_types:
update_sensors(sensor)
update_light_colour()
time.sleep(2)
|
[
"time.sleep",
"urllib3.PoolManager",
"json.loads"
] |
[((990, 1011), 'urllib3.PoolManager', 'urllib3.PoolManager', ([], {}), '()\n', (1009, 1011), False, 'import os, time, json, urllib3, mysql.connector\n'), ((1247, 1272), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (1257, 1272), False, 'import os, time, json, urllib3, mysql.connector\n'), ((3398, 3411), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (3408, 3411), False, 'import os, time, json, urllib3, mysql.connector\n')]
|
import io
import locale
from datetime import datetime
strptime = datetime.strptime
from logging import info, exception
from os import mkdir, startfile
from os.path import join, splitext, split, exists
from re import findall, error
from subprocess import Popen, PIPE, STDOUT, getoutput
from tempfile import TemporaryDirectory
from threading import Thread
from PIL import Image
from timerpy import Timer
from cut_videos.commands import audio_options, image_types, digits, input_ext, duration_command, fps_command
from cut_videos.paths import ffmpeg_path, ffprobe_path
time_format = '%H:%M:%S.%f'
zero_time = '00:00:00.000'
def unformat_time(time: str) -> str:
    """
    Convert a short, human-readable time string back to its long form.
    :param time: Short time string, e.g. '12-5.5'
    :return: Long-format 8-digit string, e.g. '00120550'
    """
    if '.' in time:
        time, milli = time.split('.')
        milli = milli.ljust(2, '0')  # pad milliseconds on the right
    else:
        milli = '00'
    # Split into H/M/S components and left-pad with missing '00' groups.
    parts = time.split('-')
    parts = ['00'] * (3 - len(parts)) + parts
    return ''.join(part.rjust(2, '0') for part in parts) + milli
def _format_time(time: str) -> str:
"""
Format time to shortened human readable form
:param time: Time string in long form
:return: Time string in shortened form
"""
time, milli = splitext(time)
time = findall(r'([1-9]\d?)|00', time)
time = '-'.join(time)
time = time.lstrip('-')
milli = milli.rstrip('0')
milli = milli.rstrip('.')
return time + milli
class Task(Thread):
    """Background worker that cuts/converts the selected files with ffmpeg.

    All conversion settings are snapshotted from the GUI `window` at
    construction time; the jobs then run on this daemon thread while
    progress is reported through the `bar` callbacks.
    """

    def __init__(self, window, bar):
        Thread.__init__(self, daemon=True) # Run in new thread
        # View
        # Progress-bar callbacks used to report conversion progress.
        self._set_total_frames = bar.set_total_frames
        self._set_current_frame_nr = bar.set_current_frame_nr
        # Snapshot GUI state so later edits in the window don't affect this run.
        self._start_time = window.start_time
        self._end_time = window.end_time
        self._path = window.path
        self._files = window.files.copy()
        self._video_selection = window.video_selection
        self._audio_selection = window.audio_selection
        self._scale_input = window.scale_input
        self._webm_input = window.webm_input
        self._duration = None
        start_s, start_ms = self._start_time.split('.')
        # Shared ffmpeg command prefix; the '-i "%s"' placeholder is filled
        # with the actual input path later, in _run_command.
        self.static_command = ['"%s"' % ffmpeg_path,
                               '-sn', # '-sn' Automatic stream selection
                               ' -r ' + window.input_framerate if window.input_framerate else '',
                               '-ss ' + start_s if self._start_time != zero_time else '',
                               # Seeking on input file is faster https://trac.ffmpeg.org/wiki/Seeking
                               '-i "%s"',
                               '-ss 0.' + start_ms if self._start_time != zero_time else '',
                               '-to ' + str(strptime(window.end_time, time_format)
                                            - strptime(start_s + '.0', time_format))
                               if self._end_time != zero_time else '' # Cut to end if no input is given
                               ]
        self.static_command = ' '.join(self.static_command)

    def get_output_name(self, i_file):
        """Build the output base name: '_<input-stem>_[<start>_<end>]'."""
        i_file, _ = splitext(i_file) # Remove ext
        start_t = _format_time(self._start_time)
        end_t = _format_time(self._end_time)
        return '_%s_[%s_%s]' % (i_file, start_t, end_t)

    def _convert_frames(self, frames):
        """Convert an image sequence into a single video.

        The frames are first copied to a temp dir under zero-padded numeric
        names so ffmpeg can read them through a printf-style pattern.
        """
        if len(frames) > 1:
            with TemporaryDirectory() as temp_path:
                self.copy_files(temp_path, frames, input_ext)
                # Convert the frames
                self._set_total_frames(len(frames))
                self._convert(join(temp_path, '%%%sd' % digits + input_ext), frames[0])

    def _convert_videos(self, videos):
        """Convert each selected video file, one ffmpeg run per file."""
        # Load videos
        for i_file in videos:
            o_file = self.get_output_name(i_file)
            i_file = join(self._path, i_file)
            # Convert the video
            # Total frames = duration (s) * fps, used to scale the progress bar.
            self._set_total_frames(self._get_duration(i_file) * self._get_video_fps(i_file))
            self._convert(i_file, o_file)

    def run(self):
        """Thread entry point: convert frames, then videos, then open the folder."""
        # Split the selection by extension into image frames vs. videos.
        frames = list(filter(lambda x: splitext(x)[-1].lower() in image_types, self._files))
        videos = list(filter(lambda x: splitext(x)[-1].lower() not in image_types, self._files))
        # Load frames
        self._convert_frames(frames)
        self._convert_videos(videos)
        # Set bar to full
        self._set_total_frames(10)
        self._set_current_frame_nr(11)
        startfile(self._path) # Open directory when finished

    def _get_audio_command(self, file):
        """Return the ffmpeg audio options for the selected output codec."""
        # TODO downmix
        # https://superuser.com/questions/852400/properly-downmix-5-1-to-stereo-using-ffmpeg
        audio_codec = self._get_audio_codec(file)
        info('SELECTED: ' + self._audio_selection)
        info('INPUT: ' + audio_codec)
        # DON'T convert if selected codec is input codec
        audio_command = 'Native format' if audio_codec == self._audio_selection else self._audio_selection
        return audio_options[audio_command]

    def _run_command(self, file, command, new_file):
        """Assemble the full ffmpeg command line and run it, skipping
        conversions whose output file already exists."""
        new_file = join(self._path, new_file)
        if exists(new_file):
            info('ALREADY EXISTS: ' + new_file)
            return
        # Insert selected values into command
        command = command.replace('<res>', self._scale_input)
        command = command.replace('<crf>', self._webm_input)
        # Output directory for frames
        # An empty command means "export frames": the output is a directory.
        if not command:
            directory, _ = split(new_file)
            if exists(directory):
                info('Exists')
                return
            mkdir(directory)
        command = [self.static_command % file,
                   self._get_audio_command(file),
                   command,
                   '"%s"' % new_file]
        command = ' '.join(command)
        info(command)
        # Start process
        # NOTE(review): passing one command *string* with shell=False relies on
        # Windows argument handling (this module already uses os.startfile,
        # which is Windows-only); on POSIX this form would fail.
        with Timer('CONVERT'):
            process = Popen(command, shell=False, stdout=PIPE, stderr=STDOUT)
            self._monitor_process(process)

    def _monitor_process(self, process):
        """Parse ffmpeg's progress lines and forward frame numbers to the bar."""
        # ffmpeg terminates progress lines with '\r', not '\n'.
        reader = io.TextIOWrapper(process.stdout, encoding='UTF-8', newline='\r')
        while line := reader.readline():
            if data := findall(r'frame=\s*(\d+)\s+', line):
                self._set_current_frame_nr(data[0])
        result = process.communicate()
        print(result)

    def copy_files(self, temp_path, files, ext):
        """Copy images into temp_path under fixed-width numeric names.

        Non-RGB images (e.g. RGBA/palette) are converted to RGB before
        saving so they can be written in the target format.
        """
        for i, file in enumerate(sorted(files)):
            file_name = digits * "0" + str(i + 1)
            with Image.open(join(self._path, file)) as image:
                if image.mode != 'RGB':
                    image = image.convert('RGB')
                # Keep the last `digits` characters -> zero-padded index.
                image.save(join(temp_path, file_name[-digits:] + ext), quality=100)

    def _get_audio_codec(self, file):
        """Return the codec name of the first audio stream (via ffprobe)."""
        command = ffprobe_path + ' -v error -select_streams a:0 -show_entries stream=codec_name \
        -of default=noprint_wrappers=1:nokey=1 "%s"' % file
        return getoutput(command)

    def _get_video_fps(self, file):
        """Return the stream frame rate as a float; 1 for audio-only input."""
        output = getoutput(fps_command % (ffprobe_path, file))
        if not output:
            return 1  # in case of audio
        output = output.strip()
        if '/' in output:
            output = output.split('/')
            if len(output) == 2:
                return float(output[0]) / float(output[1])
            elif len(output) == 3:
                # Presumably ffprobe printed extra lines after the fraction;
                # take the first number of the second element. TODO confirm.
                return float(output[0]) / float(output[1].split('\n')[0])
            else:
                exception('GET FPS FAIL %s' % str(output))
                raise NotImplementedError
        # NOTE(review): a plain integer rate (e.g. '25') also lands here and
        # raises — confirm whether ffprobe can emit non-fraction rates.
        error('UNKNOWN FRAMERATE VALUE %s' % output)
        raise NotImplementedError
        # return float(24)

    def _get_duration(self, file):
        """
        Get video duration in seconds
        :param file: Video file
        :return: Duration in seconds
        """
        # Force an English locale so the probed duration string parses with
        # the fixed time_format below.
        locale.setlocale(locale.LC_ALL, 'en_US.utf8')
        result = self._end_time
        if result == zero_time: # Run probe to find video length
            result = getoutput(duration_command % (ffprobe_path, file))
        result = strptime(result, time_format) - strptime(self._start_time, time_format)
        return result.total_seconds()

    def _convert(self, i_file, o_file):
        """Run the user-selected conversion command on one input file."""
        info('CONVERT %s to %s' % (i_file, o_file))
        command, suffix = self._video_selection
        suffix = suffix.replace('%ext', splitext(i_file)[-1]) # COPY keep same ext
        self._run_command(file=i_file,
                          command=command,
                          new_file=o_file + suffix)
        info('DONE')
|
[
"os.mkdir",
"threading.Thread.__init__",
"subprocess.Popen",
"tempfile.TemporaryDirectory",
"re.error",
"timerpy.Timer",
"os.path.exists",
"logging.info",
"re.findall",
"io.TextIOWrapper",
"os.path.splitext",
"locale.setlocale",
"subprocess.getoutput",
"os.path.join",
"os.startfile",
"os.path.split"
] |
[((1392, 1406), 'os.path.splitext', 'splitext', (['time'], {}), '(time)\n', (1400, 1406), False, 'from os.path import join, splitext, split, exists\n'), ((1418, 1449), 're.findall', 'findall', (['"""([1-9]\\\\d?)|00"""', 'time'], {}), "('([1-9]\\\\d?)|00', time)\n", (1425, 1449), False, 'from re import findall, error\n'), ((1656, 1690), 'threading.Thread.__init__', 'Thread.__init__', (['self'], {'daemon': '(True)'}), '(self, daemon=True)\n', (1671, 1690), False, 'from threading import Thread\n'), ((3271, 3287), 'os.path.splitext', 'splitext', (['i_file'], {}), '(i_file)\n', (3279, 3287), False, 'from os.path import join, splitext, split, exists\n'), ((4582, 4603), 'os.startfile', 'startfile', (['self._path'], {}), '(self._path)\n', (4591, 4603), False, 'from os import mkdir, startfile\n'), ((4852, 4894), 'logging.info', 'info', (["('SELECTED: ' + self._audio_selection)"], {}), "('SELECTED: ' + self._audio_selection)\n", (4856, 4894), False, 'from logging import info, exception\n'), ((4903, 4932), 'logging.info', 'info', (["('INPUT: ' + audio_codec)"], {}), "('INPUT: ' + audio_codec)\n", (4907, 4932), False, 'from logging import info, exception\n'), ((5216, 5242), 'os.path.join', 'join', (['self._path', 'new_file'], {}), '(self._path, new_file)\n', (5220, 5242), False, 'from os.path import join, splitext, split, exists\n'), ((5254, 5270), 'os.path.exists', 'exists', (['new_file'], {}), '(new_file)\n', (5260, 5270), False, 'from os.path import join, splitext, split, exists\n'), ((5940, 5953), 'logging.info', 'info', (['command'], {}), '(command)\n', (5944, 5953), False, 'from logging import info, exception\n'), ((6190, 6254), 'io.TextIOWrapper', 'io.TextIOWrapper', (['process.stdout'], {'encoding': '"""UTF-8"""', 'newline': "'\\r'"}), "(process.stdout, encoding='UTF-8', newline='\\r')\n", (6206, 6254), False, 'import io\n'), ((7082, 7100), 'subprocess.getoutput', 'getoutput', (['command'], {}), '(command)\n', (7091, 7100), False, 'from subprocess import Popen, PIPE, 
STDOUT, getoutput\n'), ((7155, 7200), 'subprocess.getoutput', 'getoutput', (['(fps_command % (ffprobe_path, file))'], {}), '(fps_command % (ffprobe_path, file))\n', (7164, 7200), False, 'from subprocess import Popen, PIPE, STDOUT, getoutput\n'), ((7692, 7736), 're.error', 'error', (["('UNKNOWN FRAMERATE VALUE %s' % output)"], {}), "('UNKNOWN FRAMERATE VALUE %s' % output)\n", (7697, 7736), False, 'from re import findall, error\n'), ((7973, 8018), 'locale.setlocale', 'locale.setlocale', (['locale.LC_ALL', '"""en_US.utf8"""'], {}), "(locale.LC_ALL, 'en_US.utf8')\n", (7989, 8018), False, 'import locale\n'), ((8367, 8410), 'logging.info', 'info', (["('CONVERT %s to %s' % (i_file, o_file))"], {}), "('CONVERT %s to %s' % (i_file, o_file))\n", (8371, 8410), False, 'from logging import info, exception\n'), ((8685, 8697), 'logging.info', 'info', (['"""DONE"""'], {}), "('DONE')\n", (8689, 8697), False, 'from logging import info, exception\n'), ((3974, 3998), 'os.path.join', 'join', (['self._path', 'i_file'], {}), '(self._path, i_file)\n', (3978, 3998), False, 'from os.path import join, splitext, split, exists\n'), ((5284, 5319), 'logging.info', 'info', (["('ALREADY EXISTS: ' + new_file)"], {}), "('ALREADY EXISTS: ' + new_file)\n", (5288, 5319), False, 'from logging import info, exception\n'), ((5599, 5614), 'os.path.split', 'split', (['new_file'], {}), '(new_file)\n', (5604, 5614), False, 'from os.path import join, splitext, split, exists\n'), ((5630, 5647), 'os.path.exists', 'exists', (['directory'], {}), '(directory)\n', (5636, 5647), False, 'from os.path import join, splitext, split, exists\n'), ((5715, 5731), 'os.mkdir', 'mkdir', (['directory'], {}), '(directory)\n', (5720, 5731), False, 'from os import mkdir, startfile\n'), ((5992, 6008), 'timerpy.Timer', 'Timer', (['"""CONVERT"""'], {}), "('CONVERT')\n", (5997, 6008), False, 'from timerpy import Timer\n'), ((6032, 6087), 'subprocess.Popen', 'Popen', (['command'], {'shell': '(False)', 'stdout': 'PIPE', 'stderr': 
'STDOUT'}), '(command, shell=False, stdout=PIPE, stderr=STDOUT)\n', (6037, 6087), False, 'from subprocess import Popen, PIPE, STDOUT, getoutput\n'), ((8139, 8189), 'subprocess.getoutput', 'getoutput', (['(duration_command % (ffprobe_path, file))'], {}), '(duration_command % (ffprobe_path, file))\n', (8148, 8189), False, 'from subprocess import Popen, PIPE, STDOUT, getoutput\n'), ((3537, 3557), 'tempfile.TemporaryDirectory', 'TemporaryDirectory', ([], {}), '()\n', (3555, 3557), False, 'from tempfile import TemporaryDirectory\n'), ((5665, 5679), 'logging.info', 'info', (['"""Exists"""'], {}), "('Exists')\n", (5669, 5679), False, 'from logging import info, exception\n'), ((6319, 6356), 're.findall', 'findall', (['"""frame=\\\\s*(\\\\d+)\\\\s+"""', 'line'], {}), "('frame=\\\\s*(\\\\d+)\\\\s+', line)\n", (6326, 6356), False, 'from re import findall, error\n'), ((8499, 8515), 'os.path.splitext', 'splitext', (['i_file'], {}), '(i_file)\n', (8507, 8515), False, 'from os.path import join, splitext, split, exists\n'), ((3753, 3798), 'os.path.join', 'join', (['temp_path', "('%%%sd' % digits + input_ext)"], {}), "(temp_path, '%%%sd' % digits + input_ext)\n", (3757, 3798), False, 'from os.path import join, splitext, split, exists\n'), ((6647, 6669), 'os.path.join', 'join', (['self._path', 'file'], {}), '(self._path, file)\n', (6651, 6669), False, 'from os.path import join, splitext, split, exists\n'), ((6797, 6839), 'os.path.join', 'join', (['temp_path', '(file_name[-digits:] + ext)'], {}), '(temp_path, file_name[-digits:] + ext)\n', (6801, 6839), False, 'from os.path import join, splitext, split, exists\n'), ((4225, 4236), 'os.path.splitext', 'splitext', (['x'], {}), '(x)\n', (4233, 4236), False, 'from os.path import join, splitext, split, exists\n'), ((4318, 4329), 'os.path.splitext', 'splitext', (['x'], {}), '(x)\n', (4326, 4329), False, 'from os.path import join, splitext, split, exists\n')]
|
import pandas as pd
from Bio import SeqIO
fasta_sequences = SeqIO.parse(open('sars2_8thApril2021/msa_0406/msa_0406.fasta'),'fasta')
meta_data = pd.read_csv('sars2_8thApril2021/metadata.tsv', delimiter="\t")
strains = tuple(meta_data['Virus name'])
epi = tuple(meta_data['Accession ID'])
host = tuple(meta_data['Host'])
col_date = tuple(meta_data['Collection date'])
country = tuple(meta_data['Location'])
pango_lin = tuple(meta_data['Pango lineage'])
clade = tuple(meta_data['Clade'])
# Build an accession-ID -> row-index map once. The original did an O(n)
# tuple membership test plus an O(n) .index() call per FASTA record
# (quadratic overall); setdefault keeps the FIRST occurrence, matching
# tuple.index semantics for duplicate accessions.
epi_index = {}
for i, accession in enumerate(epi):
    epi_index.setdefault(accession, i)
for fasta in fasta_sequences:
    faEpi = fasta.description.split("|")
    myIndex = epi_index.get(faEpi[1])
    if myIndex is not None:
        # array starts with 0. position
        # position of 6 bp deletion (HV) in the spike + a bit around: 24212. - 24234. bp
        # position of 3 bp deletion (Y) in the spike + a bit around: 24482. - 24504. bp
        print(strains[myIndex], epi[myIndex], host[myIndex], col_date[myIndex], country[myIndex], pango_lin[myIndex], clade[myIndex], str(fasta.seq)[24212:24234], str(fasta.seq)[24482:24504])
|
[
"pandas.read_csv"
] |
[((145, 207), 'pandas.read_csv', 'pd.read_csv', (['"""sars2_8thApril2021/metadata.tsv"""'], {'delimiter': '"""\t"""'}), "('sars2_8thApril2021/metadata.tsv', delimiter='\\t')\n", (156, 207), True, 'import pandas as pd\n')]
|
import argparse
import os
import sys
from pathlib import Path
from playlist.base import PlaylistGenerator
from playlist.config import settings
from playlist.utils.files import read_file
CWD = Path.cwd()
def read_file_in_root_directory(*names, **kwargs):
    """Read a file located relative to this module's directory."""
    target = os.path.join(os.path.dirname(__file__), *names)
    encoding = kwargs.get("encoding", "utf-8")
    return read_file(target, encoding=encoding)
def main(argv=None):
    """CLI entry point.

    Parses ``argv`` (defaults to ``sys.argv``), builds a PlaylistGenerator
    from the parsed options and invokes the selected sub-command.
    Returns a process exit code: 0 on success, 1 on error or when no
    sub-command was given.
    """
    argv = (argv or sys.argv)[1:]
    parser = argparse.ArgumentParser(usage="%(prog)s [ spotify | inventory ]")
    parser.add_argument(
        "--version",
        action="version",
        version="%(prog)s " + read_file_in_root_directory("VERSION"),
    )
    subparsers = parser.add_subparsers()
    # service
    svc_parser = subparsers.add_parser("service", description="service")
    svc_parser.set_defaults(func="")
    # inventory
    arc_parser = subparsers.add_parser(
        "inventory", description="upsert inventory for a given playlist"
    )
    arc_parser.set_defaults(func="upsert_inventory")
    # inventory required
    arc_required = arc_parser.add_argument_group("required arguments")
    arc_required.add_argument(
        "--playlist",
        type=str,
        nargs="?",
        help="service playlist id",
        required=True,
    )
    # inventory optional
    # For each option below: a configured settings value wins over the
    # hard-coded fallback (settings.get returns a falsy value when unset).
    arc_parser.add_argument(
        "--service",
        action="store",
        choices=["spotify"],
        default=settings.get("inventory.service")
        if settings.get("inventory.service")
        else "spotify",
        help="supported service name",
    )
    arc_parser.add_argument(
        "--type",
        action="store",
        dest="type",
        default=settings.get("inventory.type")
        if settings.get("inventory.type")
        else "json",
        choices=["googlesheet", "json", "value"],
        help="inventory storage type",
    )
    arc_parser.add_argument(
        "--format",
        action="store",
        dest="format",
        default=settings.get("inventory.format")
        if settings.get("inventory.format")
        else "",
        help="output format using python format syntax",
    )
    arc_parser.add_argument(
        "--output",
        action="store",
        dest="output",
        default=settings.get("inventory.output")
        if settings.get("inventory.output")
        else "",
        help="output filename or abspath filename",
    )
    arc_parser.add_argument(
        "--fields",
        action="store",
        dest="fields",
        default=settings.get("inventory.fields")
        if settings.get("inventory.fields")
        else "",
        help="jmespath fields query for the output",
    )
    arc_parser.add_argument(
        "--service-fields",
        action="store",
        dest="service_fields",
        default=settings.get("inventory.service_fields")
        if settings.get("inventory.service_fields")
        else "",
        help="service fields fields for the query",
    )
    # Parse input
    options, args = parser.parse_known_args(argv)
    try:
        logs = PlaylistGenerator(**vars(options))
        if not hasattr(options, "func"):
            # No sub-command selected: show usage instead of dispatching.
            parser.print_help()
            return 1
        getattr(logs, options.func)()
    except Exception:
        import platform
        import traceback
        # Collect environment details into a ready-to-paste bug report.
        options = vars(options)
        issue_info = "\n".join(
            (
                "Version: {0}".format(
                    read_file_in_root_directory("VERSION")
                ),
                "Python: {0}".format(sys.version),
                "Platform: {0}".format(platform.platform()),
                "Args: {0}".format(sys.argv),
                "Config: {0}".format(options),
                "",
                traceback.format_exc(),
            )
        )
        sys.stderr.write(issue_info + "\n")
        return 1
    return 0
# Script entry point: delegate to main() with the default sys.argv.
if __name__ == "__main__":  # pragma: no cover
    main()
|
[
"argparse.ArgumentParser",
"playlist.config.settings.get",
"os.path.dirname",
"platform.platform",
"sys.stderr.write",
"traceback.format_exc",
"pathlib.Path.cwd"
] |
[((194, 204), 'pathlib.Path.cwd', 'Path.cwd', ([], {}), '()\n', (202, 204), False, 'from pathlib import Path\n'), ((500, 565), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'usage': '"""%(prog)s [ spotify | inventory ]"""'}), "(usage='%(prog)s [ spotify | inventory ]')\n", (523, 565), False, 'import argparse\n'), ((336, 361), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (351, 361), False, 'import os\n'), ((3866, 3901), 'sys.stderr.write', 'sys.stderr.write', (["(issue_info + '\\n')"], {}), "(issue_info + '\\n')\n", (3882, 3901), False, 'import sys\n'), ((1514, 1547), 'playlist.config.settings.get', 'settings.get', (['"""inventory.service"""'], {}), "('inventory.service')\n", (1526, 1547), False, 'from playlist.config import settings\n'), ((1469, 1502), 'playlist.config.settings.get', 'settings.get', (['"""inventory.service"""'], {}), "('inventory.service')\n", (1481, 1502), False, 'from playlist.config import settings\n'), ((1768, 1798), 'playlist.config.settings.get', 'settings.get', (['"""inventory.type"""'], {}), "('inventory.type')\n", (1780, 1798), False, 'from playlist.config import settings\n'), ((1726, 1756), 'playlist.config.settings.get', 'settings.get', (['"""inventory.type"""'], {}), "('inventory.type')\n", (1738, 1756), False, 'from playlist.config import settings\n'), ((2072, 2104), 'playlist.config.settings.get', 'settings.get', (['"""inventory.format"""'], {}), "('inventory.format')\n", (2084, 2104), False, 'from playlist.config import settings\n'), ((2028, 2060), 'playlist.config.settings.get', 'settings.get', (['"""inventory.format"""'], {}), "('inventory.format')\n", (2040, 2060), False, 'from playlist.config import settings\n'), ((2342, 2374), 'playlist.config.settings.get', 'settings.get', (['"""inventory.output"""'], {}), "('inventory.output')\n", (2354, 2374), False, 'from playlist.config import settings\n'), ((2298, 2330), 'playlist.config.settings.get', 'settings.get', (['"""inventory.output"""'], 
{}), "('inventory.output')\n", (2310, 2330), False, 'from playlist.config import settings\n'), ((2607, 2639), 'playlist.config.settings.get', 'settings.get', (['"""inventory.fields"""'], {}), "('inventory.fields')\n", (2619, 2639), False, 'from playlist.config import settings\n'), ((2563, 2595), 'playlist.config.settings.get', 'settings.get', (['"""inventory.fields"""'], {}), "('inventory.fields')\n", (2575, 2595), False, 'from playlist.config import settings\n'), ((2897, 2937), 'playlist.config.settings.get', 'settings.get', (['"""inventory.service_fields"""'], {}), "('inventory.service_fields')\n", (2909, 2937), False, 'from playlist.config import settings\n'), ((2845, 2885), 'playlist.config.settings.get', 'settings.get', (['"""inventory.service_fields"""'], {}), "('inventory.service_fields')\n", (2857, 2885), False, 'from playlist.config import settings\n'), ((3810, 3832), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (3830, 3832), False, 'import traceback\n'), ((3650, 3669), 'platform.platform', 'platform.platform', ([], {}), '()\n', (3667, 3669), False, 'import platform\n')]
|
import os.path
import pickle
import re

import regex

from pynab import log, root_dir
# category codes
# these are stored in the db, as well
# Console / game platforms
CAT_GAME_NDS = 1010
CAT_GAME_PSP = 1020
CAT_GAME_WII = 1030
CAT_GAME_XBOX = 1040
CAT_GAME_XBOX360 = 1050
CAT_GAME_WIIWARE = 1060
CAT_GAME_XBOX360DLC = 1070
CAT_GAME_PS3 = 1080
# Movies
CAT_MOVIE_FOREIGN = 2010
CAT_MOVIE_OTHER = 2020
CAT_MOVIE_SD = 2030
CAT_MOVIE_HD = 2040
CAT_MOVIE_BLURAY = 2050
CAT_MOVIE_3D = 2060
# Music
CAT_MUSIC_MP3 = 3010
CAT_MUSIC_VIDEO = 3020
CAT_MUSIC_AUDIOBOOK = 3030
CAT_MUSIC_LOSSLESS = 3040
# PC / software
CAT_PC_0DAY = 4010
CAT_PC_ISO = 4020
CAT_PC_MAC = 4030
CAT_PC_MOBILEOTHER = 4040
CAT_PC_GAMES = 4050
CAT_PC_MOBILEIOS = 4060
CAT_PC_MOBILEANDROID = 4070
# TV
CAT_TV_FOREIGN = 5020
CAT_TV_SD = 5030
CAT_TV_HD = 5040
CAT_TV_OTHER = 5050
CAT_TV_SPORT = 5060
CAT_TV_ANIME = 5070
CAT_TV_DOCU = 5080
# Adult
CAT_XXX_DVD = 6010
CAT_XXX_WMV = 6020
CAT_XXX_XVID = 6030
CAT_XXX_X264 = 6040
CAT_XXX_PACK = 6050
CAT_XXX_IMAGESET = 6060
CAT_XXX_OTHER = 6070
# Books
CAT_BOOK_MAGS = 7010
CAT_BOOK_EBOOK = 7020
CAT_BOOK_COMICS = 7030
# Misc
CAT_MISC_OTHER = 8010
# Parent (top-level) categories: child codes share the parent's thousands digit.
CAT_PARENT_GAME = 1000
CAT_PARENT_MOVIE = 2000
CAT_PARENT_MUSIC = 3000
CAT_PARENT_PC = 4000
CAT_PARENT_TV = 5000
CAT_PARENT_XXX = 6000
CAT_PARENT_BOOK = 7000
CAT_PARENT_MISC = 8000
# Pre-trained classifier shipped with the package.
# NOTE: pickle.load is only safe here because the file is bundled, trusted data.
CATEGORISER = pickle.load(open(os.path.join(root_dir, 'db/release_categoriser.pkl'), 'rb'))
def extract_features(name):
    """Extract the classification features used by the release categoriser.

    :param name: release name string
    :return: dict mapping feature name -> str / bool / int value
    """

    def find(pattern, text):
        """Return sorted, '|'-joined case-insensitive matches, or None."""
        res = re.findall(pattern, text, re.I)
        if res:
            return '|'.join(sorted(res))
        else:
            return None

    # All patterns are raw strings: the original non-raw literals relied on
    # invalid escape sequences like '\w' and '\-' (SyntaxWarning today, an
    # error in future Python).  The patterns are plain `re` syntax, so the
    # stdlib module is used here.
    return {
        'length': len(name),
        'tokens': len(re.findall(r"[\w']+", name)),
        'resolution': find(r'(720|1080)', name),
        'quality': find(
            r'(SDTV|HDTV|PDTV|WEB-?DL|WEBRIP|XVID|DIVX|DVDR|DVD-RIP|x264|dvd|XvidHD|AVC|AAC|VC\-?1|wmvhd|web\-dl|BRRIP|HDRIP|HDDVD|bddvd|BDRIP|webscr|bluray|bd?25|bd?50|blu-ray|BDREMUX)',
            name),
        '3d': bool(find(r'(3D)', name)),
        'subgroup': find(r'\[(\w+)\]', name),
        'filehash': bool(find(r'\[([0-9a-fA-F]{8})\]', name)),
        'season': bool(find(r'(S\d{1,2})', name)),
        'episode': bool(find(r'(E\d{1,2})', name)),
        'airdate': bool(find(r'((?:\d{4}[.-/ ]\d{2}[.-/ ]\d{2})|(?:\d{2}[.-/ ]\d{2}[.-/ ]\d{4}))', name)),
        'year': bool(find(r'[.-/ ](\d{4})[.-/ ]', name)),
        'versus': bool(find(r'[.-/ ](vs?)[.-/ ]', name)),
        'music': bool(find(r'((?:^VA(?:\-|\_|\ ))|(?:MP3|VBR|NMR|CDM|FLAC|\-(?:CDR?|EP|LP|SAT|2CD|FM|VINYL|DE|CABLE|TAPE)\-))', name)),
        'ebook': bool(find(r'(e?\-?book|html|epub|pdf|mobi|azw|doc|isbn)', name)),
        'comic': bool(find(r'(cbr|cbz)', name)),
        'magazine': bool(find(r'(mag(?:s|azine?s?))', name)),
        'sport': find(r'(epl|motogp|bellator|supercup|wtcc|bundesliga|uefa|espn|wwe|wwf|wcw|mma|ucf|fia|pga|nfl|ncaa|fifa|mlb|nrl|nhl|afl|nba|wimbledon|cricket)[\. -_]', name),
        'xxx': bool(find(r'(xxx|imageset|porn|erotica)', name)),
        'game': find(r'(PS3|3DS|NDS|PS4|XBOX|XBONE|WII|DLC|CONSOLE|PSP|X360|PS4)', name),
        'foreign': bool(find(r'(seizoen|staffel|danish|flemish|dutch|Deutsch|nl\.?subbed|nl\.?sub|\.NL|\.ITA|norwegian|swedish|swesub|french|german|spanish|icelandic|finnish|Chinese\.Subbed|vostfr|Hebrew\.Dubbed|\.HEB\.|Nordic|Hebdub|NLSubs|NL\-Subs|NLSub|Deutsch| der |German | NL |\.PL\.)', name)),
        'pc': bool(find(r'((?:v?\d\.\d\.)|(?:x64|32bit|64bit|exe))', name)),
        'documentary': bool(find(r'(documentary|national geographic|natgeo)', name))
    }
def determine_category(name, group_name=''):
    """Categorise release based on release name and group name."""
    feature_map = extract_features(name)
    feature_map.update(name=name, group=group_name)
    predicted = int(CATEGORISER.classify(feature_map))
    log.debug('category: ({}) [{}]: {}'.format(group_name, name, predicted))
    return predicted
|
[
"regex.findall"
] |
[((1401, 1433), 'regex.findall', 'regex.findall', (['reg', 'str', 'regex.I'], {}), '(reg, str, regex.I)\n', (1414, 1433), False, 'import regex\n'), ((1594, 1624), 'regex.findall', 'regex.findall', (['"""[\\\\w\']+"""', 'name'], {}), '("[\\\\w\']+", name)\n', (1607, 1624), False, 'import regex\n')]
|
import unittest
from py_kor.pk_types import *
from py_kor.pk_utilities import Scope
class ScopeTestCase(unittest.TestCase):
    """Tests for the Scope context manager's enter/exit callbacks."""
    def setUp(self) -> None:
        # NOTE(review): `value += 1` rebinds a local unless RInteger defines an
        # in-place __iadd__ that mutates the object; the assertions in the
        # tests below rely on that -- confirm in py_kor.pk_types.
        def increment(value: RInteger) -> None:
            value += 1
            pass
        def decrement(value: RInteger) -> None:
            value -= 1
            pass
        self.increment = increment
        self.decrement = decrement
    def tearDown(self) -> None:
        pass
    def test_empty_scope(self) -> None:
        # A Scope with no callbacks must enter and exit without raising.
        try:
            with Scope() as sc:
                pass
        except BaseException as e:
            self.assertTrue(False, f'Unexpected error: {e}')
    def test_enter_scope(self) -> None:
        # on_enter must fire exactly once when the scope is entered.
        test_value = RInteger(0)
        try:
            with Scope(on_enter=lambda: self.increment(test_value)) as sc:
                pass
        except BaseException as e:
            self.assertTrue(False, f'Unexpected error: {e}')
        self.assertEqual(test_value, 1)
    def test_exit_scope(self) -> None:
        # NOTE(review): despite the name, this passes on_enter= (not on_exit=)
        # with the decrement callback -- likely intended to be on_exit; confirm.
        test_value = RInteger(1)
        try:
            with Scope(on_enter=lambda: self.decrement(test_value)) as sc:
                pass
        except BaseException as e:
            self.assertTrue(False, f'Unexpected error: {e}')
        self.assertEqual(test_value, 0)
class EScopeTestCase(unittest.TestCase):
    """Placeholder suite for EScope; no fixtures are required yet."""

    def setUp(self) -> None:
        """No per-test setup is needed."""

    def tearDown(self) -> None:
        """No per-test cleanup is needed."""
|
[
"py_kor.pk_utilities.Scope"
] |
[((522, 529), 'py_kor.pk_utilities.Scope', 'Scope', ([], {}), '()\n', (527, 529), False, 'from py_kor.pk_utilities import Scope\n')]
|
from core import Variable
from operation import *
# Build a chain of four square operations: e = square(square(square(square(a)))).
a = Variable(2)
b = square(a)
c = square(b)
d = square(c)
e = square(d)
# Backpropagate from e; afterwards a.grad holds de/da.
# NOTE(review): the printed value depends on operation.square's backward rule
# (for y = x**2 chained four times with a = 2 it would be 16 * 2**15) -- confirm.
e.backward()
print(a.grad)
|
[
"core.Variable"
] |
[((55, 66), 'core.Variable', 'Variable', (['(2)'], {}), '(2)\n', (63, 66), False, 'from core import Variable\n')]
|
"""Custom parameter types for the click-based CLI"""
import click
from PIL import ImageColor
class Color(click.ParamType):
    """Parameter type representing a color as name, hex or rgb value"""

    name = "color"

    def convert(self, value, param, ctx):
        """Convert *value* to an RGB tuple, failing with a click usage error."""
        # click 8.0 may pass already parsed parameter values to convert, see
        # https://github.com/pallets/click/issues/1898
        if isinstance(value, tuple):
            return value
        try:
            return ImageColor.getrgb(value)
        except ValueError:
            message = "{} is not a valid color".format(value)
            self.fail(message, param, ctx)
class Ratio(click.ParamType):
    """Parameter type representing a ratio or rational number"""

    name = "ratio"

    def convert(self, value, param, ctx):
        """Parse 'a:b' or a plain number into a strictly positive float ratio.

        Fails via click's usage-error machinery on any malformed input.
        """
        if isinstance(value, float):
            # click 8.0 may pass already parsed parameter values to convert, see
            # https://github.com/pallets/click/issues/1898
            return value
        try:
            a, b = value.split(":")
            a, b = float(a), float(b)
            ratio = a / b
        except ZeroDivisionError:
            # Bug fix: 'a:0' previously escaped as an unhandled
            # ZeroDivisionError instead of a clean parameter failure.
            self.fail("{} is not a valid ratio".format(value), param, ctx)
        except ValueError:
            try:
                ratio = float(value)
            except ValueError:
                self.fail("{} is not a valid ratio".format(value), param, ctx)
        if ratio <= 0:
            self.fail(
                "parsed ratio {} is not strictly positive".format(value), param, ctx
            )
        return ratio
# Module-level singleton instances for use in click option declarations.
COLOR = Color()
RATIO = Ratio()
|
[
"PIL.ImageColor.getrgb"
] |
[((495, 519), 'PIL.ImageColor.getrgb', 'ImageColor.getrgb', (['value'], {}), '(value)\n', (512, 519), False, 'from PIL import ImageColor\n')]
|
from collections import defaultdict
def distance(x1, y1, x2, y2):
    """Return the Manhattan (L1) distance between (x1, y1) and (x2, y2)."""
    return abs(x2 - x1) + abs(y2 - y1)
# Advent of Code 2018 day 6: parse coordinates from the puzzle input.
Cs = []
fname = 'input/day-06.txt'
with open(fname) as f:
    content = f.read().splitlines()
for val in content:
    x, y = [int(c) for c in val.split(',')]
    Cs.append((x,y))
# Bounding box of all coordinates.
xmin = min([x for x,y in Cs])
xmax = max([x for x,y in Cs])
ymin = min([y for x,y in Cs])
ymax = max([y for x,y in Cs])
# Part 1: per coordinate, count the grid cells whose unique nearest
# coordinate (by Manhattan distance) it is.
map1 = defaultdict(int)
for i in range(xmin, xmax):
    for j in range (ymin, ymax):
        ds = [(distance(c[0], c[1], i, j), c) for c in Cs]
        ds.sort()
        # Count only cells with a strictly unique closest coordinate.
        if ds[0][0] < ds[1][0]:
            map1[ds[0][1]] += 1
# Repeat on a much larger window: an area that keeps growing is infinite.
map2 = defaultdict(int)
for i in range(xmin-400, xmax+400):
    for j in range (ymin-400, ymax+400):
        ds = [(distance(c[0], c[1], i, j), c) for c in Cs]
        ds.sort()
        if ds[0][0] < ds[1][0]:
            map2[ds[0][1]] += 1
# Catching points that extend infinitely
# (a count that changed between the two windows means an unbounded area -> 0).
part1 = [(map1[k] if map1[k]==map2[k] else 0, k) for k in map1.keys()]
part1.sort()
print("Part 1:", part1[-1])
# Part 2: size of the region where the summed distance to all coordinates < 10000.
part2 = 0
for i in range(xmin, xmax):
    for j in range (ymin, ymax):
        part2 += int(sum(abs(x - i) + abs(y - j) for x, y in Cs) < 10000)
print("Part 2:", part2)
|
[
"collections.defaultdict"
] |
[((421, 437), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (432, 437), False, 'from collections import defaultdict\n'), ((648, 664), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (659, 664), False, 'from collections import defaultdict\n')]
|
import sqlite3

connection = sqlite3.connect("data.db")
# Return rows as sqlite3.Row so columns can be addressed by name.
connection.row_factory = sqlite3.Row


def create_table():
    """Create the entries table if it does not exist yet."""
    with connection:
        connection.execute(
            "CREATE TABLE IF NOT EXISTS entries (content TEXT, date TEXT);"
        )


def add_entry(entry_content, entry_date):
    """Insert a single journal entry (content + date) inside a transaction."""
    with connection:
        connection.execute(
            "INSERT INTO entries VALUES(?, ?);", (entry_content, entry_date)
        )


def get_entry():
    """Return a cursor over every stored entry."""
    return connection.execute("SELECT * FROM entries;")
|
[
"sqlite3.connect"
] |
[((28, 54), 'sqlite3.connect', 'sqlite3.connect', (['"""data.db"""'], {}), "('data.db')\n", (43, 54), False, 'import sqlite3\n')]
|
from src.shared import db
# Required models
from src.models.account import Account
from src.models.tag import Tag
from src.models.response import Response
from typing import List
from string import ascii_letters, digits
import datetime
class Thread:
def __init__(self, uid: int, author: Account, tags: List[Tag], title: str, created: datetime, last_active: datetime = None, response_count: int = None, responses: List[Response] = None):
self.uid = uid
self.author = author
self.tags = tags
self.title = title
self.created = "{0:%Y/%m/%d %H:%M}".format(created)
self.last_active = "{0:%Y/%m/%d %H:%M}".format(last_active) if last_active else None
self.response_count = response_count - 1 if response_count else None # Thread starter shouldn't count as a response.
self.responses = responses
### Helper methods.
@staticmethod
def validate(title: str, tag_ids: List[int]):
result = []
if title and title[0] not in ascii_letters + digits:
result.append("Title must begin with an alphanumeric character.")
if not (10 <= len(title) <= 80):
result.append("Title must be between 10 and 80 characters.")
if len(tag_ids) > 5:
result.append("You can only select a maximum of 5 tags.")
return result
@staticmethod
def author_for_thread(uid: int) -> int:
rows = db.execute_query(
"""
SELECT author_id FROM Thread
WHERE id = %(id)s;
""",
{'id': uid}
)
try:
row = rows.pop(0)
return row['author_id']
except:
return None
### CRUD actions.
@staticmethod
def find_all() -> List['Thread']:
rows = db.execute_query(
"""
SELECT
Thread.*,
Account.username, Account.display_name,
COUNT(Response.id) as response_count,
MAX(Response.created) as last_active
FROM Thread
INNER JOIN Account ON Account.id = Thread.author_id
LEFT JOIN Response ON Response.thread_id = Thread.id
GROUP BY Thread.id, Account.username, Account.display_name
ORDER BY last_active DESC;
"""
)
result = []
for row in rows:
account = Account(row['author_id'], row['username'], row['display_name'])
tags = Tag.find_by_thread_id(row['id'])
thread = Thread(row['id'], account, tags, row['title'], row['created'], row['last_active'], row['response_count'])
result.append(thread)
return result
@staticmethod
def find_by_id(uid: int) -> 'Thread':
rows = db.execute_query(
"""
SELECT
Thread.*,
Account.username, Account.display_name
FROM Thread
INNER JOIN Account ON Account.id = Thread.author_id
AND Thread.id = %(id)s;
""",
{'id': uid}
)
try:
row = rows.pop(0)
account = Account(row['author_id'], row['username'], row['display_name'])
tags = Tag.find_by_thread_id(row['id'])
responses = Response.find_by_thread_id(row['id'])
thread = Thread(row['id'], account, tags, row['title'], row['created'], responses=responses)
return thread
except:
return None
@staticmethod
def find_by_author_id(author_id: int) -> List['Thread']:
rows = db.execute_query(
"""
SELECT
Thread.id, Thread.title, Thread.created,
COUNT(Response.id) as response_count,
MAX(Response.created) as last_active
FROM Thread
INNER JOIN Account ON Account.id = Thread.author_id
AND Thread.author_id = %(author_id)s
LEFT JOIN Response ON Response.thread_id = Thread.id
GROUP BY Thread.id
ORDER BY Thread.created DESC
LIMIT 5;
""",
{'author_id': author_id}
)
result = []
for row in rows:
tags = Tag.find_by_thread_id(row['id'])
thread = Thread(row['id'], None, tags, row['title'], row['created'], row['last_active'], row['response_count'])
result.append(thread)
return result
@staticmethod
def find_by_tag_id(tag_id: int) -> List['Thread']:
rows = db.execute_query(
"""
SELECT
Thread.*,
Account.username, Account.display_name,
COUNT(Response.id) as response_count,
MAX(Response.created) as last_active
FROM Thread
INNER JOIN ThreadTag ON ThreadTag.thread_id = Thread.id
AND ThreadTag.tag_id = %(tag_id)s
INNER JOIN Account ON Account.id = Thread.author_id
LEFT JOIN Response ON Response.thread_id = Thread.id
GROUP BY Thread.id, Account.username, Account.display_name
ORDER BY last_active DESC;
""",
{'tag_id': tag_id}
)
result = []
for row in rows:
account = Account(row['author_id'], row['username'], row['display_name'])
tags = Tag.find_by_thread_id(row['id'])
thread = Thread(row['id'], account, tags, row['title'], row['created'], row['last_active'], row['response_count'])
result.append(thread)
return result
@staticmethod
def create(author_id: int, title: str, content: str, tag_ids: List[int]):
# Remove whitespace.
title = title.strip()
content = content.strip()
errors = Thread.validate(title, tag_ids)
# Convert tag_ids to integers.
try:
tag_ids = list(int(tag_id) for tag_id in tag_ids)
except:
errors.append("Invalid formatting on tags.")
response_errors = Response.validate(content)
errors.extend(response_errors)
if errors:
return {'errors': errors, 'title': title, 'content': content, 'tag_ids': tag_ids}
result = db.execute_update(
"""
INSERT INTO Thread (author_id, title, created)
VALUES (%(author_id)s, %(title)s, NOW() AT TIME ZONE 'UTC')
RETURNING id, created;
""",
{'author_id': author_id, 'title': title}
)
if not ('id' in result and 'created' in result):
return {'errors': ["Something went wrong."], 'title': title, 'content': content, 'tag_ids': tag_ids}
thread_id = result['id']
created = result['created']
Response.create(author_id, thread_id, content, created)
for tag_id in tag_ids:
db.execute_update(
"""
INSERT INTO ThreadTag (tag_id, thread_id)
VALUES (%(tag_id)s, %(thread_id)s);
""",
{'tag_id': tag_id, 'thread_id': thread_id}
)
# Success.
return {'thread_id': thread_id}
@staticmethod
def update(uid: int, title: str, tag_ids: List[int]):
# Remove whitespace.
title = title.strip()
errors = Thread.validate(title, tag_ids)
# Convert tag_ids to integers.
try:
tag_ids = list(int(tag_id) for tag_id in tag_ids)
except:
errors.append("Invalid formatting on tags.")
if errors:
return {'errors': errors, 'title': title, 'tag_ids': tag_ids}
db.execute_update(
"""
UPDATE Thread SET title = %(title)s
WHERE id = %(id)s;
""",
{'id': uid, 'title': title}
)
existing_tags = Tag.find_by_thread_id(uid)
existing_tag_ids = list(t.uid for t in existing_tags) if existing_tags else []
for tag_id in tag_ids:
if tag_id not in existing_tag_ids:
db.execute_update(
"""
INSERT INTO ThreadTag (tag_id, thread_id)
VALUES (%(tag_id)s, %(thread_id)s);
""",
{'tag_id': tag_id, 'thread_id': uid}
)
for existing_tag_id in existing_tag_ids:
if existing_tag_id not in tag_ids:
db.execute_update(
"""
DELETE FROM ThreadTag
WHERE tag_id = %(tag_id)s
AND thread_id = %(thread_id)s;
""",
{'tag_id': existing_tag_id, 'thread_id': uid}
)
# Success.
return {}
@staticmethod
def delete(uid: int):
db.execute_update(
"""
DELETE FROM Thread
WHERE id = %(id)s;
""",
{'id': uid}
)
|
[
"src.shared.db.execute_query",
"src.shared.db.execute_update",
"src.models.response.Response.find_by_thread_id",
"src.models.response.Response.validate",
"src.models.response.Response.create",
"src.models.account.Account",
"src.models.tag.Tag.find_by_thread_id"
] |
[((1453, 1585), 'src.shared.db.execute_query', 'db.execute_query', (['"""\n SELECT author_id FROM Thread\n WHERE id = %(id)s;\n """', "{'id': uid}"], {}), '(\n """\n SELECT author_id FROM Thread\n WHERE id = %(id)s;\n """\n , {\'id\': uid})\n', (1469, 1585), False, 'from src.shared import db\n'), ((1830, 2351), 'src.shared.db.execute_query', 'db.execute_query', (['"""\n SELECT \n Thread.*, \n Account.username, Account.display_name, \n COUNT(Response.id) as response_count,\n MAX(Response.created) as last_active\n FROM Thread\n INNER JOIN Account ON Account.id = Thread.author_id\n LEFT JOIN Response ON Response.thread_id = Thread.id\n GROUP BY Thread.id, Account.username, Account.display_name\n ORDER BY last_active DESC;\n """'], {}), '(\n """\n SELECT \n Thread.*, \n Account.username, Account.display_name, \n COUNT(Response.id) as response_count,\n MAX(Response.created) as last_active\n FROM Thread\n INNER JOIN Account ON Account.id = Thread.author_id\n LEFT JOIN Response ON Response.thread_id = Thread.id\n GROUP BY Thread.id, Account.username, Account.display_name\n ORDER BY last_active DESC;\n """\n )\n', (1846, 2351), False, 'from src.shared import db\n'), ((2821, 3105), 'src.shared.db.execute_query', 'db.execute_query', (['"""\n SELECT\n Thread.*,\n Account.username, Account.display_name\n FROM Thread\n INNER JOIN Account ON Account.id = Thread.author_id\n AND Thread.id = %(id)s;\n """', "{'id': uid}"], {}), '(\n """\n SELECT\n Thread.*,\n Account.username, Account.display_name\n FROM Thread\n INNER JOIN Account ON Account.id = Thread.author_id\n AND Thread.id = %(id)s;\n """\n , {\'id\': uid})\n', (2837, 3105), False, 'from src.shared import db\n'), ((3645, 4196), 'src.shared.db.execute_query', 'db.execute_query', (['"""\n SELECT\n Thread.id, Thread.title, Thread.created,\n COUNT(Response.id) as response_count,\n MAX(Response.created) as last_active\n FROM Thread\n INNER JOIN Account ON Account.id = Thread.author_id\n AND Thread.author_id = %(author_id)s\n LEFT 
JOIN Response ON Response.thread_id = Thread.id\n GROUP BY Thread.id\n ORDER BY Thread.created DESC\n LIMIT 5;\n """', "{'author_id': author_id}"], {}), '(\n """\n SELECT\n Thread.id, Thread.title, Thread.created,\n COUNT(Response.id) as response_count,\n MAX(Response.created) as last_active\n FROM Thread\n INNER JOIN Account ON Account.id = Thread.author_id\n AND Thread.author_id = %(author_id)s\n LEFT JOIN Response ON Response.thread_id = Thread.id\n GROUP BY Thread.id\n ORDER BY Thread.created DESC\n LIMIT 5;\n """\n , {\'author_id\': author_id})\n', (3661, 4196), False, 'from src.shared import db\n'), ((4598, 5250), 'src.shared.db.execute_query', 'db.execute_query', (['"""\n SELECT\n Thread.*,\n Account.username, Account.display_name,\n COUNT(Response.id) as response_count,\n MAX(Response.created) as last_active\n FROM Thread\n INNER JOIN ThreadTag ON ThreadTag.thread_id = Thread.id\n AND ThreadTag.tag_id = %(tag_id)s\n INNER JOIN Account ON Account.id = Thread.author_id\n LEFT JOIN Response ON Response.thread_id = Thread.id\n GROUP BY Thread.id, Account.username, Account.display_name\n ORDER BY last_active DESC;\n """', "{'tag_id': tag_id}"], {}), '(\n """\n SELECT\n Thread.*,\n Account.username, Account.display_name,\n COUNT(Response.id) as response_count,\n MAX(Response.created) as last_active\n FROM Thread\n INNER JOIN ThreadTag ON ThreadTag.thread_id = Thread.id\n AND ThreadTag.tag_id = %(tag_id)s\n INNER JOIN Account ON Account.id = Thread.author_id\n LEFT JOIN Response ON Response.thread_id = Thread.id\n GROUP BY Thread.id, Account.username, Account.display_name\n ORDER BY last_active DESC;\n """\n , {\'tag_id\': tag_id})\n', (4614, 5250), False, 'from src.shared import db\n'), ((6115, 6141), 'src.models.response.Response.validate', 'Response.validate', (['content'], {}), '(content)\n', (6132, 6141), False, 'from src.models.response import Response\n'), ((6313, 6569), 'src.shared.db.execute_update', 'db.execute_update', (['"""\n INSERT INTO Thread 
(author_id, title, created)\n VALUES (%(author_id)s, %(title)s, NOW() AT TIME ZONE \'UTC\')\n RETURNING id, created;\n """', "{'author_id': author_id, 'title': title}"], {}), '(\n """\n INSERT INTO Thread (author_id, title, created)\n VALUES (%(author_id)s, %(title)s, NOW() AT TIME ZONE \'UTC\')\n RETURNING id, created;\n """\n , {\'author_id\': author_id, \'title\': title})\n', (6330, 6569), False, 'from src.shared import db\n'), ((6844, 6899), 'src.models.response.Response.create', 'Response.create', (['author_id', 'thread_id', 'content', 'created'], {}), '(author_id, thread_id, content, created)\n', (6859, 6899), False, 'from src.models.response import Response\n'), ((7748, 7904), 'src.shared.db.execute_update', 'db.execute_update', (['"""\n UPDATE Thread SET title = %(title)s\n WHERE id = %(id)s;\n """', "{'id': uid, 'title': title}"], {}), '(\n """\n UPDATE Thread SET title = %(title)s\n WHERE id = %(id)s;\n """\n , {\'id\': uid, \'title\': title})\n', (7765, 7904), False, 'from src.shared import db\n'), ((7954, 7980), 'src.models.tag.Tag.find_by_thread_id', 'Tag.find_by_thread_id', (['uid'], {}), '(uid)\n', (7975, 7980), False, 'from src.models.tag import Tag\n'), ((8939, 9062), 'src.shared.db.execute_update', 'db.execute_update', (['"""\n DELETE FROM Thread\n WHERE id = %(id)s;\n """', "{'id': uid}"], {}), '(\n """\n DELETE FROM Thread\n WHERE id = %(id)s;\n """\n , {\'id\': uid})\n', (8956, 9062), False, 'from src.shared import db\n'), ((2433, 2496), 'src.models.account.Account', 'Account', (["row['author_id']", "row['username']", "row['display_name']"], {}), "(row['author_id'], row['username'], row['display_name'])\n", (2440, 2496), False, 'from src.models.account import Account\n'), ((2516, 2548), 'src.models.tag.Tag.find_by_thread_id', 'Tag.find_by_thread_id', (["row['id']"], {}), "(row['id'])\n", (2537, 2548), False, 'from src.models.tag import Tag\n'), ((3196, 3259), 'src.models.account.Account', 'Account', (["row['author_id']", "row['username']", 
"row['display_name']"], {}), "(row['author_id'], row['username'], row['display_name'])\n", (3203, 3259), False, 'from src.models.account import Account\n'), ((3279, 3311), 'src.models.tag.Tag.find_by_thread_id', 'Tag.find_by_thread_id', (["row['id']"], {}), "(row['id'])\n", (3300, 3311), False, 'from src.models.tag import Tag\n'), ((3336, 3373), 'src.models.response.Response.find_by_thread_id', 'Response.find_by_thread_id', (["row['id']"], {}), "(row['id'])\n", (3362, 3373), False, 'from src.models.response import Response\n'), ((4287, 4319), 'src.models.tag.Tag.find_by_thread_id', 'Tag.find_by_thread_id', (["row['id']"], {}), "(row['id'])\n", (4308, 4319), False, 'from src.models.tag import Tag\n'), ((5344, 5407), 'src.models.account.Account', 'Account', (["row['author_id']", "row['username']", "row['display_name']"], {}), "(row['author_id'], row['username'], row['display_name'])\n", (5351, 5407), False, 'from src.models.account import Account\n'), ((5427, 5459), 'src.models.tag.Tag.find_by_thread_id', 'Tag.find_by_thread_id', (["row['id']"], {}), "(row['id'])\n", (5448, 5459), False, 'from src.models.tag import Tag\n'), ((6944, 7150), 'src.shared.db.execute_update', 'db.execute_update', (['"""\n INSERT INTO ThreadTag (tag_id, thread_id)\n VALUES (%(tag_id)s, %(thread_id)s);\n """', "{'tag_id': tag_id, 'thread_id': thread_id}"], {}), '(\n """\n INSERT INTO ThreadTag (tag_id, thread_id)\n VALUES (%(tag_id)s, %(thread_id)s);\n """\n , {\'tag_id\': tag_id, \'thread_id\': thread_id})\n', (6961, 7150), False, 'from src.shared import db\n'), ((8163, 8375), 'src.shared.db.execute_update', 'db.execute_update', (['"""\n INSERT INTO ThreadTag (tag_id, thread_id)\n VALUES (%(tag_id)s, %(thread_id)s);\n """', "{'tag_id': tag_id, 'thread_id': uid}"], {}), '(\n """\n INSERT INTO ThreadTag (tag_id, thread_id)\n VALUES (%(tag_id)s, %(thread_id)s);\n """\n , {\'tag_id\': tag_id, \'thread_id\': uid})\n', (8180, 8375), False, 'from src.shared import db\n'), ((8549, 8791), 
'src.shared.db.execute_update', 'db.execute_update', (['"""\n DELETE FROM ThreadTag\n WHERE tag_id = %(tag_id)s\n AND thread_id = %(thread_id)s;\n """', "{'tag_id': existing_tag_id, 'thread_id': uid}"], {}), '(\n """\n DELETE FROM ThreadTag\n WHERE tag_id = %(tag_id)s\n AND thread_id = %(thread_id)s;\n """\n , {\'tag_id\': existing_tag_id, \'thread_id\': uid})\n', (8566, 8791), False, 'from src.shared import db\n')]
|
import pytest
import numpy as np
from pytsmp import pytsmp
from tests import helpers
class TestMatrixProfile:
    def test_MatrixProfile_init(self):
        # Instantiating MatrixProfile directly must raise TypeError
        # (presumably it is an abstract base -- this test only pins the error).
        series = np.random.rand(1000)
        with pytest.raises(TypeError):
            mp = pytsmp.MatrixProfile(series, window_size=100, verbose=False)
class TestSTAMP:
    """Tests for pytsmp.STAMP: constructor validation, anytime behaviour, and
    matrix/index profile correctness on random and recorded data.

    Fix over the previous revision: the assertion label in
    test_STAMP_compute_matrix_profile_data2 said "data1" (copy-paste slip).
    """

    def test_STAMP_init_incorrect_window_size1(self):
        with pytest.raises(ValueError) as excinfo:
            t = np.random.rand(1000)
            mp = pytsmp.STAMP(t, window_size=0, verbose=False)
        assert str(excinfo.value) == "Incorrect window size specified."

    def test_STAMP_init_incorrect_window_size2(self):
        with pytest.raises(ValueError) as excinfo:
            t = np.random.rand(1000)
            mp = pytsmp.STAMP(t, window_size=2.3, verbose=False)
        assert str(excinfo.value) == "Incorrect window size specified."

    def test_STAMP_init_incorrect_window_size3(self):
        with pytest.raises(ValueError) as excinfo:
            t1 = np.random.rand(1000)
            t2 = np.random.rand(500)
            mp = pytsmp.STAMP(t1, t2, window_size=501, verbose=False)
        assert str(excinfo.value) == "Incorrect window size specified."

    def test_STAMP_init_incorrect_exclusion_zone(self):
        with pytest.raises(ValueError) as excinfo:
            t = np.random.rand(1000)
            mp = pytsmp.STAMP(t, window_size=10, exclusion_zone=-1, verbose=False)
        assert str(excinfo.value) == "Exclusion zone must be non-negative."

    def test_STAMP_init_incorrect_s_size1(self):
        with pytest.raises(ValueError) as excinfo:
            t = np.random.rand(1000)
            mp = pytsmp.STAMP(t, window_size=10, s_size=0, verbose=False)
        assert str(excinfo.value) == "s_size must be between 0 and 1."

    def test_STAMP_init_incorrect_s_size2(self):
        with pytest.raises(ValueError) as excinfo:
            t = np.random.rand(1000)
            mp = pytsmp.STAMP(t, window_size=10, s_size=1.2, verbose=False)
        assert str(excinfo.value) == "s_size must be between 0 and 1."

    def test_STAMP_is_anytime(self):
        t = np.random.rand(1000)
        mp = pytsmp.STAMP(t, window_size=10, s_size=1, verbose=True)  # for coverage purpose
        is_anytime = mp.is_anytime
        assert is_anytime == True, "STAMP_is_anytime: STAMP should be an anytime algorithm."

    def test_STAMP_init_check_mutation(self):
        t1 = np.random.rand(100)
        t2 = np.random.rand(100)
        w = 10
        mp = pytsmp.STAMP(t1, t2, window_size=w, exclusion_zone=0, verbose=False)
        t1[0] = -10
        t2[0] = -10
        assert t1[0] != mp.ts1[0], "STAMP_init_check_mutation: Matrix profile init should leave original array intact."
        assert t2[0] != mp.ts2[0], "STAMP_init_check_mutation: Matrix profile init should leave original array intact."

    def test_STAMP_get_profiles_check_length(self):
        n = np.random.randint(100, 1000)
        m = np.random.randint(100, 1000)
        t1 = np.random.rand(n)
        t2 = np.random.rand(m)
        w = np.random.randint(10, min(n, m))
        mp = pytsmp.STAMP(t1, t2, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        assert len(mpro) == n - w + 1, "STAMP_get_profile_check_length: Matrix profile should have correct length"
        assert len(ipro) == n - w + 1, "STAMP_get_profile_check_length: Index profile should have correct length"

    def test_STAMP_get_profiles_check_mutation(self):
        t = np.random.rand(1000)
        w = 10
        mp = pytsmp.STAMP(t, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        mpro[0] = -1
        ipro[0] = -1
        mpro2, ipro2 = mp.get_profiles()
        assert mpro[0] != mpro2[0], "STAMP_get_profile_check_mutation: " \
                                    "Get profile should return a copy of the matrix profile, not the internal one."
        assert ipro[0] != ipro2[0], "STAMP_get_profile_check_mutation: " \
                                    "Get profile should return a copy of the index profile, not the internal one."

    def test_STAMP_compute_matrix_profile_sanity(self):
        t = np.random.rand(1000)
        w = 10
        mp = pytsmp.STAMP(t, t, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        assert np.allclose(mpro, np.zeros(len(t) - w + 1), atol=1e-5), "STAMP_compute_matrix_profile_sanity: " \
               "Should compute the matrix profile correctly in the trivial case."
        assert np.array_equal(ipro, np.arange(len(t) - w + 1)), "STAMP_compute_matrix_profile_sanity: " \
               "Should compute the index profile correctly in the trivial case."

    def test_STAMP_compute_matrix_profile_same_random_data(self):
        n = np.random.randint(100, 200)  # anything larger will be too time-consuming
        t = np.random.rand(n)
        w = np.random.randint(10, n // 4)
        mp = pytsmp.STAMP(t, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        mp_naive, ip_naive = helpers.naive_matrix_profile(t, window_size=w)
        assert np.allclose(mpro, mp_naive), "STAMP_compute_matrix_profile_same_random_data: " \
               "Should compute the matrix profile correctly."
        assert np.allclose(ipro, ip_naive), "STAMP_compute_matrix_profile_same_random_data: " \
               "Should compute the index profile correctly."

    def test_STAMP_compute_matrix_profile_random_data(self):
        n = np.random.randint(100, 200)
        m = np.random.randint(100, 200)
        t1 = np.random.rand(n)
        t2 = np.random.rand(m)
        w = np.random.randint(10, min(n, m) // 4)
        mp = pytsmp.STAMP(t1, t2, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        mp_naive, ip_naive = helpers.naive_matrix_profile(t1, t2, window_size=w)
        assert np.allclose(mpro, mp_naive), "STAMP_compute_matrix_profile_random_data: " \
               "Should compute the matrix profile correctly."
        assert np.allclose(ipro, ip_naive), "STAMP_compute_matrix_profile_random_data: " \
               "Should compute the index profile correctly."

    def test_STAMP_compute_matrix_profile_data1(self):
        t = np.loadtxt("./data/random_walk_data.csv")
        mpro_ans = np.loadtxt("./data/random_walk_data_mpro.csv")
        ipro_ans = np.loadtxt("./data/random_walk_data_ipro.csv")
        w = 50
        mp = pytsmp.STAMP(t, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        assert np.allclose(mpro, mpro_ans), "STAMP_compute_matrix_profile_data1: " \
               "Should compute the matrix profile correctly. " \
               "Max error is {}".format(np.max(np.abs(mpro - mpro_ans)))
        # assert np.allclose(ipro, ipro_ans), "STAMP_compute_matrix_profile_data1: " \
        #                                     "Should compute the index profile correctly."

    def test_STAMP_compute_matrix_profile_data2(self):
        t = np.loadtxt("./data/candy_production.csv")
        mpro_ans = np.loadtxt("./data/candy_production_mpro.csv")
        ipro_ans = np.loadtxt("./data/candy_production_ipro.csv")
        w = 80
        mp = pytsmp.STAMP(t, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        assert np.allclose(mpro, mpro_ans), "STAMP_compute_matrix_profile_data2: " \
               "Should compute the matrix profile correctly. " \
               "Max error is {}".format(np.max(np.abs(mpro - mpro_ans)))
        assert np.allclose(ipro, ipro_ans), "STAMP_compute_matrix_profile_data2: " \
               "Should compute the index profile correctly."

    def test_STAMP_compute_matrix_profile_data3(self):
        t = np.loadtxt("./data/bitcoin_price.csv")
        mpro_ans = np.loadtxt("./data/bitcoin_price_mpro.csv")
        ipro_ans = np.loadtxt("./data/bitcoin_price_ipro.csv")
        w = 100
        mp = pytsmp.STAMP(t, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        assert np.allclose(mpro, mpro_ans), "STAMP_compute_matrix_profile_data3: " \
               "Should compute the matrix profile correctly. " \
               "Max error is {}".format(np.max(np.abs(mpro - mpro_ans)))
        assert np.allclose(ipro, ipro_ans), "STAMP_compute_matrix_profile_data3: " \
               "Should compute the index profile correctly."
class TestConvFunctions:
    """
    Tests for the incremental update methods (update_ts1/update_ts2) and for
    discord/motif discovery, exercised through the STAMP implementation.

    Fixes over the previous revision: mislabeled assertion messages in the
    "multiple" update tests, "snaity" -> "sanity" typos, the find_motifs_sanity2
    message said "descending" while the assertion checks ascending order, and
    test_find_motifs_incorrect_num_discords1 is renamed to ..._num_motifs1.
    """

    def test_update_ts1_random_data(self):
        n = np.random.randint(200, 1000)
        m = np.random.randint(200, 1000)
        t1 = np.random.rand(n)
        t2 = np.random.rand(m)
        w = np.random.randint(10, min(n, m) // 4)
        mp = pytsmp.STAMP(t1[:-1], t2, window_size=w, verbose=False)
        mp.update_ts1(t1[-1])
        mpro, ipro = mp.get_profiles()
        mp2 = pytsmp.STAMP(t1, t2, window_size=w, verbose=False)
        mpro2, ipro2 = mp2.get_profiles()
        assert np.allclose(mpro, mpro2), "update_ts1_random_data: " \
               "update_ts1 should update the matrix profile properly on random data. " \
               "Max error is {}".format(np.max(np.abs(mpro - mpro2)))
        assert np.allclose(ipro, ipro2), "update_ts1_random_data: " \
               "update_ts1 should update the index profile properly on random data."

    def test_update_ts1_multiple_random_data(self):
        n = np.random.randint(200, 1000)
        m = np.random.randint(200, 1000)
        t1 = np.random.rand(n)
        t2 = np.random.rand(m)
        w = np.random.randint(10, min(n, m) // 4)
        times = np.random.randint(5, 50)
        mp = pytsmp.STAMP(t1[:-times], t2, window_size=w, verbose=False)
        for i in range(-times, 0, 1):
            mp.update_ts1(t1[i])
        mpro, ipro = mp.get_profiles()
        mp2 = pytsmp.STAMP(t1, t2, window_size=w, verbose=False)
        mpro2, ipro2 = mp2.get_profiles()
        assert np.allclose(mpro, mpro2), "update_ts1_multiple_random_data: " \
               "update_ts1 should update the matrix profile multiple times properly on random data. " \
               "Max error is {}".format(np.max(np.abs(mpro - mpro2)))
        assert np.allclose(ipro, ipro2), "update_ts1_multiple_random_data: " \
               "update_ts1 should update the index profile multiple times properly on random data."

    def test_update_ts2_random_data(self):
        n = np.random.randint(200, 1000)
        m = np.random.randint(200, 1000)
        t1 = np.random.rand(n)
        t2 = np.random.rand(m)
        w = np.random.randint(10, min(n, m) // 4)
        mp = pytsmp.STAMP(t1, t2[:-1], window_size=w, verbose=False)
        mp.update_ts2(t2[-1])
        mpro, ipro = mp.get_profiles()
        mp2 = pytsmp.STAMP(t1, t2, window_size=w, verbose=False)
        mpro2, ipro2 = mp2.get_profiles()
        assert np.allclose(mpro, mpro2), "update_ts2_random_data: " \
               "update_ts2 should update the matrix profile properly on random data. " \
               "Max error is {}".format(np.max(np.abs(mpro - mpro2)))
        assert np.allclose(ipro, ipro2), "update_ts2_random_data: " \
               "update_ts2 should update the index profile properly on random data."

    def test_update_ts2_multiple_random_data(self):
        n = np.random.randint(200, 1000)
        m = np.random.randint(200, 1000)
        t1 = np.random.rand(n)
        t2 = np.random.rand(m)
        w = np.random.randint(10, min(n, m) // 4)
        times = np.random.randint(5, 50)
        mp = pytsmp.STAMP(t1, t2[:-times], window_size=w, verbose=False)
        for i in range(-times, 0, 1):
            mp.update_ts2(t2[i])
        mpro, ipro = mp.get_profiles()
        mp2 = pytsmp.STAMP(t1, t2, window_size=w, verbose=False)
        mpro2, ipro2 = mp2.get_profiles()
        assert np.allclose(mpro, mpro2), "update_ts2_multiple_random_data: " \
               "update_ts2 should update the matrix profile multiple times properly on random data. " \
               "Max error is {}".format(np.max(np.abs(mpro - mpro2)))
        assert np.allclose(ipro, ipro2), "update_ts2_multiple_random_data: " \
               "update_ts2 should update the index profile multiple times properly on random data."

    def test_update_interleave_random_data(self):
        n = np.random.randint(200, 1000)
        m = np.random.randint(200, 1000)
        t1 = np.random.rand(n)
        t2 = np.random.rand(m)
        w = np.random.randint(10, min(n, m) // 4)
        times = np.random.randint(5, 25)
        mp = pytsmp.STAMP(t1[:-times], t2[:-times], window_size=w, verbose=False)
        for i in range(-times, 0, 1):
            mp.update_ts1(t1[i])
            mp.update_ts2(t2[i])
        mpro, ipro = mp.get_profiles()
        mp2 = pytsmp.STAMP(t1, t2, window_size=w, verbose=False)
        mpro2, ipro2 = mp2.get_profiles()
        assert np.allclose(mpro, mpro2), "update_interleave_random_data: " \
               "update_ts1 and update_ts2 should update the matrix profile multiple times " \
               "properly on random data. " \
               "Max error is {}".format(np.max(np.abs(mpro - mpro2)))
        assert np.allclose(ipro, ipro2), "update_interleave_random_data: " \
               "update_ts1 and update_ts2 should update the index profile multiple times " \
               "properly on random data."

    def test_update_ts1_same_data(self):
        n = np.random.randint(200, 1000)
        t = np.random.rand(n)
        w = np.random.randint(10, n // 4)
        mp = pytsmp.STAMP(t[:-1], window_size=w, verbose=False)
        mp.update_ts1(t[-1])
        mpro, ipro = mp.get_profiles()
        mp2 = pytsmp.STAMP(t, window_size=w, verbose=False)
        mpro2, ipro2 = mp2.get_profiles()
        assert np.allclose(mpro, mpro2), "update_ts1_same_data: " \
               "update_ts1 should update the matrix profile properly when ts1 == ts2. " \
               "Max error is {}".format(np.max(np.abs(mpro - mpro2)))
        assert np.allclose(ipro, ipro2), "update_ts1_same_data: " \
               "update_ts1 should update the index profile properly when ts1 == ts2."

    def test_update_ts1_multiple_same_data(self):
        n = np.random.randint(200, 1000)
        t = np.random.rand(n)
        w = np.random.randint(10, n // 4)
        times = np.random.randint(5, 50)
        mp = pytsmp.STAMP(t[:-times], window_size=w, verbose=False)
        for i in range(-times, 0, 1):
            mp.update_ts1(t[i])
        mpro, ipro = mp.get_profiles()
        mp2 = pytsmp.STAMP(t, window_size=w, verbose=False)
        mpro2, ipro2 = mp2.get_profiles()
        assert np.allclose(mpro, mpro2), "update_ts1_multiple_same_data: " \
               "update_ts1 should update the matrix profile multiple times properly when ts1 == ts2. " \
               "Max error is {}".format(np.max(np.abs(mpro - mpro2)))
        assert np.allclose(ipro, ipro2), "update_ts1_multiple_same_data: " \
               "update_ts1 should update the index profile multiple times properly when ts1 == ts2."

    def test_update_ts2_same_data(self):
        n = np.random.randint(200, 1000)
        t = np.random.rand(n)
        w = np.random.randint(10, n // 4)
        mp = pytsmp.STAMP(t[:-1], window_size=w, verbose=False)
        mp.update_ts2(t[-1])
        mpro, ipro = mp.get_profiles()
        mp2 = pytsmp.STAMP(t, window_size=w, verbose=False)
        mpro2, ipro2 = mp2.get_profiles()
        assert np.allclose(mpro, mpro2), "update_ts2_same_data: " \
               "update_ts2 should update the matrix profile properly when ts1 == ts2. " \
               "Max error is {}".format(np.max(np.abs(mpro - mpro2)))
        assert np.allclose(ipro, ipro2), "update_ts2_same_data: " \
               "update_ts2 should update the index profile properly when ts1 == ts2."

    def test_update_ts2_multiple_same_data(self):
        n = np.random.randint(200, 1000)
        t = np.random.rand(n)
        w = np.random.randint(10, n // 4)
        times = np.random.randint(5, 50)
        mp = pytsmp.STAMP(t[:-times], window_size=w, verbose=False)
        for i in range(-times, 0, 1):
            mp.update_ts2(t[i])
        mpro, ipro = mp.get_profiles()
        mp2 = pytsmp.STAMP(t, window_size=w, verbose=False)
        mpro2, ipro2 = mp2.get_profiles()
        assert np.allclose(mpro, mpro2), "update_ts2_multiple_same_data: " \
               "update_ts2 should update the matrix profile multiple times properly when ts1 == ts2. " \
               "Max error is {}".format(np.max(np.abs(mpro - mpro2)))
        assert np.allclose(ipro, ipro2), "update_ts2_multiple_same_data: " \
               "update_ts2 should update the index profile multiple times properly when ts1 == ts2."

    def test_update_interleave_same_data(self):
        n = np.random.randint(200, 1000)
        t = np.random.rand(n)
        w = np.random.randint(10, n // 4)
        times = np.random.randint(5, 25)
        mp = pytsmp.STAMP(t[:-times], window_size=w, verbose=False)
        for i in range(-times, 0, 1):
            if i % 2 == 0:
                mp.update_ts1(t[i])
            else:
                mp.update_ts2(t[i])
        mpro, ipro = mp.get_profiles()
        mp2 = pytsmp.STAMP(t, window_size=w, verbose=False)
        mpro2, ipro2 = mp2.get_profiles()
        assert np.allclose(mpro, mpro2), "update_interleave_same_data: " \
               "update_ts1 and update_ts2 should update the matrix profile multiple times " \
               "properly when ts1 == ts2. " \
               "Max error is {}".format(np.max(np.abs(mpro - mpro2)))
        assert np.allclose(ipro, ipro2), "update_interleave_same_data: " \
               "update_ts1 and update_ts2 should update the index profile multiple times " \
               "properly when ts1 == ts2."

    def test_find_discords_incorrect_num_discords1(self):
        with pytest.raises(ValueError) as excinfo:
            t = np.random.rand(1000)
            mp = pytsmp.STAMP(t, window_size=10, verbose=False)
            discords = mp.find_discords(-1)
        assert str(excinfo.value) == "Incorrect num_discords entered."

    def test_find_discords_incorrect_num_discords2(self):
        with pytest.raises(ValueError) as excinfo:
            t = np.random.rand(1000)
            mp = pytsmp.STAMP(t, window_size=10, verbose=False)
            discords = mp.find_discords(4.2)
        assert str(excinfo.value) == "Incorrect num_discords entered."

    def test_find_discords_incorrect_num_discords3(self):
        with pytest.raises(ValueError) as excinfo:
            t = np.random.rand(1000)
            mp = pytsmp.STAMP(t, window_size=10, verbose=False)
            discords = mp.find_discords(0)
        assert str(excinfo.value) == "Incorrect num_discords entered."

    def test_find_discords_incorrect_exclusion_zone(self):
        with pytest.raises(ValueError) as excinfo:
            t = np.random.rand(1000)
            mp = pytsmp.STAMP(t, window_size=10, verbose=False)
            discords = mp.find_discords(3, exclusion_zone=-1)
        assert str(excinfo.value) == "Exclusion zone must be non-negative."

    def test_find_discords_sanity1(self):
        n = np.random.randint(200, 1000)
        t = np.random.rand(n)
        w = np.random.randint(10, n // 4)
        mp = pytsmp.STAMP(t, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        discords = mp.find_discords(n - w + 1, exclusion_zone=0)
        mp_discords = mpro[discords]
        assert len(discords) == n - w + 1, "find_discords_sanity1: find_discords should return the correct number of discords."
        assert (mp_discords[1:] <= mp_discords[:-1]).all(), "find_discords_sanity1: find_discords should return " \
               "discords in descending order of profile values."

    def test_find_discords_sanity2(self):
        n = np.random.randint(200, 1000)
        t = np.random.rand(n)
        w = np.random.randint(10, n // 4)
        mp = pytsmp.STAMP(t, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        discords = mp.find_discords(n - w + 1)  # exclusion_zone=None
        mp_discords = mpro[discords]
        assert (n - w + 1) // w <= len(discords) <= (n - w + 1) // w * 2 + 1, \
            "find_discords_sanity2: find_discords should not return more than the max possible number of discords."
        assert (mp_discords[1:] <= mp_discords[:-1]).all(), "find_discords_sanity2: find_discords should return " \
               "discords in descending order of profile values."

    def test_find_discords_sanity3(self):
        n = np.random.randint(200, 1000)
        t = np.random.rand(n)
        w = np.random.randint(10, n // 5)
        num_discords = 5
        mp = pytsmp.STAMP(t, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        discords = mp.find_discords(num_discords, exclusion_zone=1/2)
        mp_discords = mpro[discords]
        assert len(discords) == num_discords, "find_discords_sanity3: find_discords should return the desired number of discords."
        assert (mp_discords[1:] <= mp_discords[:-1]).all(), "find_discords_sanity3: find_discords should return " \
               "discords in descending order of profile values."

    def test_find_discords_anomaly(self):
        """
        find_discords should be able to locate obvious anomaly.
        """
        n = np.random.randint(200, 500)
        t = np.random.rand(n)
        t = np.tile(t, 4)
        w = np.random.randint(10, n // 4)
        ab = np.random.randint(len(t))
        t[ab] += 5
        mp = pytsmp.STAMP(t, window_size=w, verbose=False)
        discords = np.sort(mp.find_discords(1, exclusion_zone=1/2))
        assert len(discords) == 1, "find_discords_anomaly: find_discords should return the desired number of discords."
        assert np.abs(ab - discords[0]) < w, \
            "find_discords_anomaly: find_discords should be able to locate obvious anomaly."

    def test_find_motifs_incorrect_num_motifs1(self):
        with pytest.raises(ValueError) as excinfo:
            t = np.random.rand(1000)
            mp = pytsmp.STAMP(t, window_size=10, verbose=False)
            motifs = mp.find_motifs(-1)
        assert str(excinfo.value) == "Incorrect num_motifs entered."

    def test_find_motifs_incorrect_num_motifs2(self):
        with pytest.raises(ValueError) as excinfo:
            t = np.random.rand(1000)
            mp = pytsmp.STAMP(t, window_size=10, verbose=False)
            motifs = mp.find_motifs(4.2)
        assert str(excinfo.value) == "Incorrect num_motifs entered."

    def test_find_motifs_incorrect_num_motifs3(self):
        with pytest.raises(ValueError) as excinfo:
            t = np.random.rand(1000)
            mp = pytsmp.STAMP(t, window_size=10, verbose=False)
            motifs = mp.find_motifs(0)
        assert str(excinfo.value) == "Incorrect num_motifs entered."

    def test_find_motifs_incorrect_exclusion_zone(self):
        with pytest.raises(ValueError) as excinfo:
            t = np.random.rand(1000)
            mp = pytsmp.STAMP(t, window_size=10, verbose=False)
            motifs = mp.find_motifs(5, exclusion_zone=-1)
        assert str(excinfo.value) == "Exclusion zone must be non-negative."

    def test_find_motifs_sanity1(self):
        n = np.random.randint(200, 1000)
        t = np.random.rand(n)
        w = np.random.randint(10, n // 4)
        mp = pytsmp.STAMP(t, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        num_motifs = 3
        motifs = mp.find_motifs(num_motifs, exclusion_zone=1/2)
        mp_motifs = mpro[motifs]
        assert len(motifs) == num_motifs, "find_motifs_sanity1: find_motifs should return the desired number of motifs."
        assert (mp_motifs[1:, 0] >= mp_motifs[:-1, 0]).all(), "find_motifs_sanity1: find_motifs should return " \
               "motifs in ascending order of profile values."

    def test_find_motifs_sanity2(self):
        n = np.random.randint(200, 1000)
        t = np.random.rand(n)
        w = np.random.randint(10, n // 4)
        mp = pytsmp.STAMP(t, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        motifs = mp.find_motifs(n - w + 1)  # exclusion_zone=None
        mp_motifs = mpro[motifs]
        assert (n - w + 1) // (2 * w) <= len(motifs) <= (n - w + 1) // w * 2 + 1, \
            "find_motifs_sanity2: find_motifs should not return more than the max possible number of motifs."
        assert (mp_motifs[1:, 0] >= mp_motifs[:-1, 0]).all(), "find_motifs_sanity2: find_motifs should return " \
               "motifs in ascending order of profile values."
class TestSTOMP:
    """Tests for the STOMP matrix-profile algorithm."""

    def test_STOMP_is_anytime(self):
        """STOMP is a batch algorithm, so it must not report itself as anytime."""
        t = np.random.rand(1000)
        mp = pytsmp.STOMP(t, window_size=10, s_size=1, verbose=True)
        is_anytime = mp.is_anytime
        assert is_anytime == False, "STOMP_is_anytime: STOMP should not be an anytime algorithm."

    def test_STOMP_get_profiles_check_length(self):
        """Both profiles must have length n - w + 1, where n = len(t1)."""
        n = np.random.randint(100, 1000)
        m = np.random.randint(100, 1000)
        t1 = np.random.rand(n)
        t2 = np.random.rand(m)
        w = np.random.randint(10, min(n, m))
        mp = pytsmp.STOMP(t1, t2, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        assert len(mpro) == n - w + 1, "STOMP_get_profile_check_length: Matrix profile should have correct length"
        assert len(ipro) == n - w + 1, "STOMP_get_profile_check_length: Index profile should have correct length"

    def test_STOMP_get_profiles_check_mutation(self):
        """get_profiles must return copies, not views of the internal arrays."""
        t = np.random.rand(1000)
        w = 10
        mp = pytsmp.STOMP(t, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        mpro[0] = -1
        ipro[0] = -1
        mpro2, ipro2 = mp.get_profiles()
        assert mpro[0] != mpro2[0], "STOMP_get_profile_check_mutation: " \
                                    "Get profile should return a copy of the matrix profile, not the internal one."
        assert ipro[0] != ipro2[0], "STOMP_get_profile_check_mutation: " \
                                    "Get profile should return a copy of the index profile, not the internal one."

    def test_STOMP_compute_matrix_profile_sanity(self):
        """Joining a series with itself gives a zero profile and identity indices."""
        t = np.random.rand(1000)
        w = 10
        mp = pytsmp.STOMP(t, t, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        assert np.allclose(mpro, np.zeros(len(t) - w + 1), atol=1e-5), "STOMP_compute_matrix_profile_sanity: " \
            "Should compute the matrix profile correctly in the trivial case."
        assert np.array_equal(ipro, np.arange(len(t) - w + 1)), "STOMP_compute_matrix_profile_sanity: " \
            "Should compute the index profile correctly in the trivial case."

    def test_STOMP_compute_matrix_profile_same_random_data(self):
        """STOMP must agree with the naive reference implementation on a self-join."""
        n = np.random.randint(100, 200)  # anything larger will be too time-consuming
        t = np.random.rand(n)
        w = np.random.randint(10, n // 4)
        mp = pytsmp.STOMP(t, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        mp_naive, ip_naive = helpers.naive_matrix_profile(t, window_size=w)
        assert np.allclose(mpro, mp_naive), "STOMP_compute_matrix_profile_same_random_data: " \
            "Should compute the matrix profile correctly."
        assert np.allclose(ipro, ip_naive), "STOMP_compute_matrix_profile_same_random_data: " \
            "Should compute the index profile correctly."

    def test_STOMP_compute_matrix_profile_random_data(self):
        """STOMP must agree with the naive reference implementation on an AB-join."""
        n = np.random.randint(100, 200)
        m = np.random.randint(100, 200)
        t1 = np.random.rand(n)
        t2 = np.random.rand(m)
        w = np.random.randint(10, min(n, m) // 4)
        mp = pytsmp.STOMP(t1, t2, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        mp_naive, ip_naive = helpers.naive_matrix_profile(t1, t2, window_size=w)
        assert np.allclose(mpro, mp_naive), "STOMP_compute_matrix_profile_random_data: " \
            "Should compute the matrix profile correctly."
        assert np.allclose(ipro, ip_naive), "STOMP_compute_matrix_profile_random_data: " \
            "Should compute the index profile correctly."

    def test_STOMP_compute_matrix_profile_data1(self):
        """Regression check against precomputed answers for the random-walk dataset."""
        t = np.loadtxt("./data/random_walk_data.csv")
        mpro_ans = np.loadtxt("./data/random_walk_data_mpro.csv")
        ipro_ans = np.loadtxt("./data/random_walk_data_ipro.csv")
        w = 50
        mp = pytsmp.STOMP(t, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        assert np.allclose(mpro, mpro_ans), "STOMP_compute_matrix_profile_data1: " \
            "Should compute the matrix profile correctly. " \
            "Max error is {}".format(np.max(np.abs(mpro - mpro_ans)))
        # assert np.allclose(ipro, ipro_ans), "STOMP_compute_matrix_profile_data1: " \
        #                                     "Should compute the index profile correctly."

    def test_STOMP_compute_matrix_profile_data2(self):
        """Regression check against precomputed answers for the candy-production dataset."""
        t = np.loadtxt("./data/candy_production.csv")
        mpro_ans = np.loadtxt("./data/candy_production_mpro.csv")
        ipro_ans = np.loadtxt("./data/candy_production_ipro.csv")
        w = 80
        mp = pytsmp.STOMP(t, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        assert np.allclose(mpro, mpro_ans), "STOMP_compute_matrix_profile_data2: " \
            "Should compute the matrix profile correctly. " \
            "Max error is {}".format(np.max(np.abs(mpro - mpro_ans)))
        # Fixed copy-paste label: this is data2, the message previously said data1.
        assert np.allclose(ipro, ipro_ans), "STOMP_compute_matrix_profile_data2: " \
            "Should compute the index profile correctly."

    def test_STOMP_compute_matrix_profile_data3(self):
        """Regression check against precomputed answers for the bitcoin-price dataset."""
        t = np.loadtxt("./data/bitcoin_price.csv")
        mpro_ans = np.loadtxt("./data/bitcoin_price_mpro.csv")
        ipro_ans = np.loadtxt("./data/bitcoin_price_ipro.csv")
        w = 100
        mp = pytsmp.STOMP(t, window_size=w, verbose=False)
        mpro, ipro = mp.get_profiles()
        assert np.allclose(mpro, mpro_ans), "STOMP_compute_matrix_profile_data3: " \
            "Should compute the matrix profile correctly. " \
            "Max error is {}".format(np.max(np.abs(mpro - mpro_ans)))
        assert np.allclose(ipro, ipro_ans), "STOMP_compute_matrix_profile_data3: " \
            "Should compute the index profile correctly."
class TestSCRIMP:
    """Tests for the SCRIMP matrix-profile algorithm (run here with pre_scrimp=0)."""

    def test_SCRIMP_is_anytime(self):
        """SCRIMP refines the profile incrementally, so it must report itself as anytime."""
        t = np.random.rand(1000)
        mp = pytsmp.SCRIMP(t, window_size=10, s_size=1, verbose=True, pre_scrimp=1)
        is_anytime = mp.is_anytime
        assert is_anytime == True, "SCRIMP_is_anytime: SCRIMP should be an anytime algorithm."

    def test_SCRIMP_get_profiles_check_length(self):
        """Both profiles must have length n - w + 1, where n = len(t1)."""
        n = np.random.randint(100, 1000)
        m = np.random.randint(100, 1000)
        t1 = np.random.rand(n)
        t2 = np.random.rand(m)
        w = np.random.randint(10, min(n, m))
        mp = pytsmp.SCRIMP(t1, t2, window_size=w, verbose=False, pre_scrimp=0)
        mpro, ipro = mp.get_profiles()
        assert len(mpro) == n - w + 1, "SCRIMP_get_profile_check_length: Matrix profile should have correct length"
        assert len(ipro) == n - w + 1, "SCRIMP_get_profile_check_length: Index profile should have correct length"

    def test_SCRIMP_get_profiles_check_mutation(self):
        """get_profiles must return copies, not views of the internal arrays."""
        t = np.random.rand(1000)
        w = 10
        mp = pytsmp.SCRIMP(t, window_size=w, verbose=False, pre_scrimp=0)
        mpro, ipro = mp.get_profiles()
        mpro[0] = -1
        ipro[0] = -1
        mpro2, ipro2 = mp.get_profiles()
        assert mpro[0] != mpro2[0], "SCRIMP_get_profile_check_mutation: " \
                                    "Get profile should return a copy of the matrix profile, not the internal one."
        assert ipro[0] != ipro2[0], "SCRIMP_get_profile_check_mutation: " \
                                    "Get profile should return a copy of the index profile, not the internal one."

    def test_SCRIMP_compute_matrix_profile_sanity(self):
        """Joining a series with itself gives a zero profile and identity indices."""
        t = np.random.rand(1000)
        w = 10
        mp = pytsmp.SCRIMP(t, t, window_size=w, verbose=False, pre_scrimp=0)
        mpro, ipro = mp.get_profiles()
        assert np.allclose(mpro, np.zeros(len(t) - w + 1), atol=1e-5), "SCRIMP_compute_matrix_profile_sanity: " \
            "Should compute the matrix profile correctly in the trivial case."
        assert np.array_equal(ipro, np.arange(len(t) - w + 1)), "SCRIMP_compute_matrix_profile_sanity: " \
            "Should compute the index profile correctly in the trivial case."

    def test_SCRIMP_compute_matrix_profile_same_random_data(self):
        """SCRIMP must agree with the naive reference implementation on a self-join."""
        n = np.random.randint(100, 200)  # anything larger will be too time-consuming
        t = np.random.rand(n)
        w = np.random.randint(10, n // 4)
        mp = pytsmp.SCRIMP(t, window_size=w, verbose=False, pre_scrimp=0)
        mpro, ipro = mp.get_profiles()
        mp_naive, ip_naive = helpers.naive_matrix_profile(t, window_size=w)
        assert np.allclose(mpro, mp_naive), "SCRIMP_compute_matrix_profile_same_random_data: " \
            "Should compute the matrix profile correctly."
        assert np.allclose(ipro, ip_naive), "SCRIMP_compute_matrix_profile_same_random_data: " \
            "Should compute the index profile correctly."

    def test_SCRIMP_compute_matrix_profile_random_data(self):
        """SCRIMP must agree with the naive reference implementation on an AB-join."""
        n = np.random.randint(100, 200)
        m = np.random.randint(100, 200)
        t1 = np.random.rand(n)
        t2 = np.random.rand(m)
        w = np.random.randint(10, min(n, m) // 4)
        mp = pytsmp.SCRIMP(t1, t2, window_size=w, verbose=False, pre_scrimp=0)
        mpro, ipro = mp.get_profiles()
        mp_naive, ip_naive = helpers.naive_matrix_profile(t1, t2, window_size=w)
        assert np.allclose(mpro, mp_naive), "SCRIMP_compute_matrix_profile_random_data: " \
            "Should compute the matrix profile correctly."
        assert np.allclose(ipro, ip_naive), "SCRIMP_compute_matrix_profile_random_data: " \
            "Should compute the index profile correctly."

    def test_SCRIMP_compute_matrix_profile_data1(self):
        """Regression check against precomputed answers for the random-walk dataset."""
        t = np.loadtxt("./data/random_walk_data.csv")
        mpro_ans = np.loadtxt("./data/random_walk_data_mpro.csv")
        ipro_ans = np.loadtxt("./data/random_walk_data_ipro.csv")
        w = 50
        mp = pytsmp.SCRIMP(t, window_size=w, verbose=False, pre_scrimp=0)
        mpro, ipro = mp.get_profiles()
        assert np.allclose(mpro, mpro_ans), "SCRIMP_compute_matrix_profile_data1: " \
            "Should compute the matrix profile correctly. " \
            "Max error is {}".format(np.max(np.abs(mpro - mpro_ans)))
        # assert np.allclose(ipro, ipro_ans), "SCRIMP_compute_matrix_profile_data1: " \
        #                                     "Should compute the index profile correctly."

    def test_SCRIMP_compute_matrix_profile_data2(self):
        """Regression check against precomputed answers for the candy-production dataset."""
        t = np.loadtxt("./data/candy_production.csv")
        mpro_ans = np.loadtxt("./data/candy_production_mpro.csv")
        ipro_ans = np.loadtxt("./data/candy_production_ipro.csv")
        w = 80
        mp = pytsmp.SCRIMP(t, window_size=w, verbose=False, pre_scrimp=0)
        mpro, ipro = mp.get_profiles()
        assert np.allclose(mpro, mpro_ans), "SCRIMP_compute_matrix_profile_data2: " \
            "Should compute the matrix profile correctly. " \
            "Max error is {}".format(np.max(np.abs(mpro - mpro_ans)))
        # Fixed copy-paste label: this is data2, the message previously said data1.
        assert np.allclose(ipro, ipro_ans), "SCRIMP_compute_matrix_profile_data2: " \
            "Should compute the index profile correctly."

    def test_SCRIMP_compute_matrix_profile_data3(self):
        """Regression check against precomputed answers for the bitcoin-price dataset."""
        t = np.loadtxt("./data/bitcoin_price.csv")
        mpro_ans = np.loadtxt("./data/bitcoin_price_mpro.csv")
        ipro_ans = np.loadtxt("./data/bitcoin_price_ipro.csv")
        w = 100
        mp = pytsmp.SCRIMP(t, window_size=w, verbose=False, pre_scrimp=0)
        mpro, ipro = mp.get_profiles()
        assert np.allclose(mpro, mpro_ans), "SCRIMP_compute_matrix_profile_data3: " \
            "Should compute the matrix profile correctly. " \
            "Max error is {}".format(np.max(np.abs(mpro - mpro_ans)))
        assert np.allclose(ipro, ipro_ans), "SCRIMP_compute_matrix_profile_data3: " \
            "Should compute the index profile correctly."
class TestPreSCRIMP:
    """Tests for the PreSCRIMP approximate matrix-profile algorithm."""

    def test_PreSCRIMP_is_anytime(self):
        # PreSCRIMP refines an approximate profile and can be stopped early.
        series = np.random.rand(1000)
        profile = pytsmp.PreSCRIMP(series, window_size=10, s_size=1, verbose=True)
        is_anytime = profile.is_anytime
        assert is_anytime == True, "PreSCRIMP_is_anytime: PreSCRIMP should be an anytime algorithm."

    def test_PreSCRIMP_init_incorrect_pre_scrimp1(self):
        # NOTE(review): the name mentions pre_scrimp, but the parameter under
        # test is sample_rate (zero is invalid).
        with pytest.raises(ValueError) as excinfo:
            series = np.random.rand(1000)
            pytsmp.PreSCRIMP(series, window_size=10, verbose=False, sample_rate=0)
        assert str(excinfo.value) == "sample_rate must be positive."

    def test_PreSCRIMP_init_incorrect_pre_scrimp2(self):
        # A negative sample_rate is invalid as well.
        with pytest.raises(ValueError) as excinfo:
            series = np.random.rand(1000)
            pytsmp.PreSCRIMP(series, window_size=10, verbose=False, sample_rate=-2)
        assert str(excinfo.value) == "sample_rate must be positive."

    def test_PreSCRIMP_get_profiles_check_length(self):
        len_a = np.random.randint(100, 1000)
        len_b = np.random.randint(100, 1000)
        series_a = np.random.rand(len_a)
        series_b = np.random.rand(len_b)
        window = np.random.randint(10, min(len_a, len_b))
        profile = pytsmp.PreSCRIMP(series_a, series_b, window_size=window, verbose=False)
        mpro, ipro = profile.get_profiles()
        assert len(mpro) == len_a - window + 1, "PreSCRIMP_get_profile_check_length: Matrix profile should have correct length"
        assert len(ipro) == len_a - window + 1, "PreSCRIMP_get_profile_check_length: Index profile should have correct length"

    def test_PreSCRIMP_get_profiles_check_mutation(self):
        series = np.random.rand(1000)
        window = 10
        profile = pytsmp.PreSCRIMP(series, window_size=window, verbose=False)
        first_mp, first_ip = profile.get_profiles()
        first_mp[0] = -1
        first_ip[0] = -1
        second_mp, second_ip = profile.get_profiles()
        # Mutating the returned arrays must not leak into the internal state.
        assert first_mp[0] != second_mp[0], "PreSCRIMP_get_profile_check_mutation: " \
                                            "Get profile should return a copy of the matrix profile, not the internal one."
        assert first_ip[0] != second_ip[0], "PreSCRIMP_get_profile_check_mutation: " \
                                            "Get profile should return a copy of the index profile, not the internal one."

    def test_PreSCRIMP_compute_matrix_profile_sanity1(self):
        series = np.random.rand(1000)
        window = 10
        profile = pytsmp.PreSCRIMP(series, series, window_size=window, verbose=False)
        mpro, ipro = profile.get_profiles()
        expected_len = len(series) - window + 1
        assert np.allclose(mpro, np.zeros(expected_len), atol=1e-5), "PreSCRIMP_compute_matrix_profile_sanity1: " \
            "Should compute the matrix profile correctly in the trivial case."
        assert np.array_equal(ipro, np.arange(expected_len)), "PreSCRIMP_compute_matrix_profile_sanity1: " \
            "Should compute the index profile correctly in the trivial case."

    def test_PreSCRIMP_compute_matrix_profile_sanity2(self):
        series = np.random.rand(1000)
        window = 50
        approx = pytsmp.PreSCRIMP(series, series, window_size=window, verbose=False)
        approx_mp, _ = approx.get_profiles()
        exact = pytsmp.SCRIMP(series, series, window_size=window, verbose=False, pre_scrimp=0)
        exact_mp, _ = exact.get_profiles()
        assert (approx_mp > exact_mp - 1e-5).all(), "PreSCRIMP_compute_matrix_profile_sanity2: PreSCRIMP should be an " \
                                                    "upper approximation for the actual matrix profile."

    @pytest.mark.skip(reason="Randomized tests on approximate algorithms do not seem a correct thing to do.")
    def test_PreSCRIMP_compute_matrix_profile_same_random_data(self):
        length = np.random.randint(100, 200)  # anything larger will be too time-consuming
        series = np.random.rand(length)
        window = np.random.randint(10, length // 4)
        profile = pytsmp.PreSCRIMP(series, window_size=window, verbose=False)
        mpro, ipro = profile.get_profiles()
        mp_naive, ip_naive = helpers.naive_matrix_profile(series, window_size=window)
        assert np.allclose(mpro, mp_naive), "PreSCRIMP_compute_matrix_profile_same_random_data: " \
            "Should compute the matrix profile correctly."
        assert np.allclose(ipro, ip_naive), "PreSCRIMP_compute_matrix_profile_same_random_data: " \
            "Should compute the index profile correctly."

    @pytest.mark.skip(reason="Randomized tests on approximate algorithms do not seem a correct thing to do.")
    def test_PreSCRIMP_compute_matrix_profile_random_data(self):
        len_a = np.random.randint(100, 200)
        len_b = np.random.randint(100, 200)
        series_a = np.random.rand(len_a)
        series_b = np.random.rand(len_b)
        window = np.random.randint(10, min(len_a, len_b) // 4)
        profile = pytsmp.PreSCRIMP(series_a, series_b, window_size=window, verbose=False)
        mpro, ipro = profile.get_profiles()
        mp_naive, ip_naive = helpers.naive_matrix_profile(series_a, series_b, window_size=window)
        assert np.allclose(mpro, mp_naive), "PreSCRIMP_compute_matrix_profile_random_data: " \
            "Should compute the matrix profile correctly."
        assert np.allclose(ipro, ip_naive), "PreSCRIMP_compute_matrix_profile_random_data: " \
            "Should compute the index profile correctly."
class TestSCRIMP_PreSCRIMP:
    """Tests for SCRIMP run with a PreSCRIMP preprocessing pass (pre_scrimp > 0)."""

    def test_SCRIMP_init_incorrect_pre_scrimp(self):
        """A negative pre_scrimp parameter must be rejected."""
        with pytest.raises(ValueError) as excinfo:
            t = np.random.rand(1000)
            mp = pytsmp.SCRIMP(t, window_size=10, verbose=False, pre_scrimp=-1)
        assert str(excinfo.value) == "pre_scrimp parameter must be non-negative."

    def test_SCRIMP_init_pre_scrimp_zero(self):
        """With pre_scrimp=0, no PreSCRIMP instance should be created."""
        t = np.random.rand(1000)
        mp = pytsmp.SCRIMP(t, window_size=10, s_size=1, verbose=False, pre_scrimp=0)
        assert getattr(mp, "_pre_scrimp_class", None) is None, "SCRIMP_init_pre_scrimp_zero: " \
            "PreSCRIMP should not run if pre_scrimp = 0."

    def test_SCRIMP_init_pre_scrimp_nonzero(self):
        """With pre_scrimp>0, a PreSCRIMP instance should be created."""
        t = np.random.rand(1000)
        mp = pytsmp.SCRIMP(t, window_size=10, s_size=1, verbose=False, pre_scrimp=1/2)
        assert getattr(mp, "_pre_scrimp_class", None) is not None, "SCRIMP_init_pre_scrimp_nonzero: " \
            "PreSCRIMP should run if pre_scrimp > 0."

    def test_SCRIMP_PreSCRIMP_get_profiles_check_length(self):
        """Both profiles must have length n - w + 1, where n = len(t1)."""
        n = np.random.randint(100, 1000)
        m = np.random.randint(100, 1000)
        t1 = np.random.rand(n)
        t2 = np.random.rand(m)
        w = np.random.randint(10, min(n, m))
        mp = pytsmp.SCRIMP(t1, t2, window_size=w, verbose=False, pre_scrimp=1/4)
        mpro, ipro = mp.get_profiles()
        assert len(mpro) == n - w + 1, "SCRIMP_get_profile_check_length: Matrix profile should have correct length"
        assert len(ipro) == n - w + 1, "SCRIMP_get_profile_check_length: Index profile should have correct length"

    def test_SCRIMP_PreSCRIMP_get_profiles_check_mutation(self):
        """get_profiles must return copies, not views of the internal arrays."""
        t = np.random.rand(1000)
        w = 10
        mp = pytsmp.SCRIMP(t, window_size=w, verbose=False, pre_scrimp=1/4)
        mpro, ipro = mp.get_profiles()
        mpro[0] = -1
        ipro[0] = -1
        mpro2, ipro2 = mp.get_profiles()
        assert mpro[0] != mpro2[0], "SCRIMP_get_profile_check_mutation: " \
                                    "Get profile should return a copy of the matrix profile, not the internal one."
        assert ipro[0] != ipro2[0], "SCRIMP_get_profile_check_mutation: " \
                                    "Get profile should return a copy of the index profile, not the internal one."

    def test_SCRIMP_PreSCRIMP_compute_matrix_profile_sanity(self):
        """Joining a series with itself gives a zero profile and identity indices."""
        t = np.random.rand(1000)
        w = 10
        mp = pytsmp.SCRIMP(t, t, window_size=w, verbose=False, pre_scrimp=1/4)
        mpro, ipro = mp.get_profiles()
        assert np.allclose(mpro, np.zeros(len(t) - w + 1), atol=1e-5), "SCRIMP_compute_matrix_profile_sanity: " \
            "Should compute the matrix profile correctly in the trivial case."
        assert np.array_equal(ipro, np.arange(len(t) - w + 1)), "SCRIMP_compute_matrix_profile_sanity: " \
            "Should compute the index profile correctly in the trivial case."

    def test_SCRIMP_PreSCRIMP_compute_matrix_profile_same_random_data(self):
        """SCRIMP+PreSCRIMP must agree with the naive reference on a self-join."""
        n = np.random.randint(100, 200)  # anything larger will be too time-consuming
        t = np.random.rand(n)
        w = np.random.randint(10, n // 4)
        mp = pytsmp.SCRIMP(t, window_size=w, verbose=False, pre_scrimp=1/4)
        mpro, ipro = mp.get_profiles()
        mp_naive, ip_naive = helpers.naive_matrix_profile(t, window_size=w)
        assert np.allclose(mpro, mp_naive), "SCRIMP_compute_matrix_profile_same_random_data: " \
            "Should compute the matrix profile correctly."
        assert np.allclose(ipro, ip_naive), "SCRIMP_compute_matrix_profile_same_random_data: " \
            "Should compute the index profile correctly."

    def test_SCRIMP_PreSCRIMP_compute_matrix_profile_random_data(self):
        """SCRIMP+PreSCRIMP must agree with the naive reference on an AB-join."""
        n = np.random.randint(100, 200)
        m = np.random.randint(100, 200)
        t1 = np.random.rand(n)
        t2 = np.random.rand(m)
        w = np.random.randint(10, min(n, m) // 4)
        mp = pytsmp.SCRIMP(t1, t2, window_size=w, verbose=False, pre_scrimp=1/4)
        mpro, ipro = mp.get_profiles()
        mp_naive, ip_naive = helpers.naive_matrix_profile(t1, t2, window_size=w)
        assert np.allclose(mpro, mp_naive), "SCRIMP_compute_matrix_profile_random_data: " \
            "Should compute the matrix profile correctly."
        assert np.allclose(ipro, ip_naive), "SCRIMP_compute_matrix_profile_random_data: " \
            "Should compute the index profile correctly."

    def test_SCRIMP_PreSCRIMP_compute_matrix_profile_data1(self):
        """Regression check against precomputed answers for the random-walk dataset."""
        t = np.loadtxt("./data/random_walk_data.csv")
        mpro_ans = np.loadtxt("./data/random_walk_data_mpro.csv")
        ipro_ans = np.loadtxt("./data/random_walk_data_ipro.csv")
        w = 50
        mp = pytsmp.SCRIMP(t, window_size=w, verbose=False, pre_scrimp=1/4)
        mpro, ipro = mp.get_profiles()
        assert np.allclose(mpro, mpro_ans), "SCRIMP_compute_matrix_profile_data1: " \
            "Should compute the matrix profile correctly. " \
            "Max error is {}".format(np.max(np.abs(mpro - mpro_ans)))
        # assert np.allclose(ipro, ipro_ans), "SCRIMP_compute_matrix_profile_data1: " \
        #                                     "Should compute the index profile correctly."

    def test_SCRIMP_PreSCRIMP_compute_matrix_profile_data2(self):
        """Regression check against precomputed answers for the candy-production dataset."""
        t = np.loadtxt("./data/candy_production.csv")
        mpro_ans = np.loadtxt("./data/candy_production_mpro.csv")
        ipro_ans = np.loadtxt("./data/candy_production_ipro.csv")
        w = 80
        mp = pytsmp.SCRIMP(t, window_size=w, verbose=False, pre_scrimp=1/4)
        mpro, ipro = mp.get_profiles()
        assert np.allclose(mpro, mpro_ans), "SCRIMP_compute_matrix_profile_data2: " \
            "Should compute the matrix profile correctly. " \
            "Max error is {}".format(np.max(np.abs(mpro - mpro_ans)))
        # Fixed copy-paste label: this is data2, the message previously said data1.
        assert np.allclose(ipro, ipro_ans), "SCRIMP_compute_matrix_profile_data2: " \
            "Should compute the index profile correctly."

    def test_SCRIMP_PreSCRIMP_compute_matrix_profile_data3(self):
        """Regression check against precomputed answers for the bitcoin-price dataset."""
        t = np.loadtxt("./data/bitcoin_price.csv")
        mpro_ans = np.loadtxt("./data/bitcoin_price_mpro.csv")
        ipro_ans = np.loadtxt("./data/bitcoin_price_ipro.csv")
        w = 100
        mp = pytsmp.SCRIMP(t, window_size=w, verbose=False, pre_scrimp=1/4)
        mpro, ipro = mp.get_profiles()
        assert np.allclose(mpro, mpro_ans), "SCRIMP_compute_matrix_profile_data3: " \
            "Should compute the matrix profile correctly. " \
            "Max error is {}".format(np.max(np.abs(mpro - mpro_ans)))
        assert np.allclose(ipro, ipro_ans), "SCRIMP_compute_matrix_profile_data3: " \
            "Should compute the index profile correctly."
|
[
"numpy.abs",
"numpy.allclose",
"tests.helpers.naive_matrix_profile",
"pytsmp.pytsmp.STAMP",
"pytsmp.pytsmp.SCRIMP",
"pytsmp.pytsmp.MatrixProfile",
"pytest.raises",
"numpy.random.randint",
"pytsmp.pytsmp.PreSCRIMP",
"numpy.loadtxt",
"numpy.tile",
"numpy.random.rand",
"pytest.mark.skip",
"pytsmp.pytsmp.STOMP"
] |
[((42167, 42281), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""Randomized tests on approximate algorithms do not seem a correct thing to do."""'}), "(reason=\n 'Randomized tests on approximate algorithms do not seem a correct thing to do.'\n )\n", (42183, 42281), False, 'import pytest\n'), ((43065, 43179), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""Randomized tests on approximate algorithms do not seem a correct thing to do."""'}), "(reason=\n 'Randomized tests on approximate algorithms do not seem a correct thing to do.'\n )\n", (43081, 43179), False, 'import pytest\n'), ((2145, 2165), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (2159, 2165), True, 'import numpy as np\n'), ((2179, 2234), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': '(10)', 's_size': '(1)', 'verbose': '(True)'}), '(t, window_size=10, s_size=1, verbose=True)\n', (2191, 2234), False, 'from pytsmp import pytsmp\n'), ((2447, 2466), 'numpy.random.rand', 'np.random.rand', (['(100)'], {}), '(100)\n', (2461, 2466), True, 'import numpy as np\n'), ((2480, 2499), 'numpy.random.rand', 'np.random.rand', (['(100)'], {}), '(100)\n', (2494, 2499), True, 'import numpy as np\n'), ((2528, 2596), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t1', 't2'], {'window_size': 'w', 'exclusion_zone': '(0)', 'verbose': '(False)'}), '(t1, t2, window_size=w, exclusion_zone=0, verbose=False)\n', (2540, 2596), False, 'from pytsmp import pytsmp\n'), ((2942, 2970), 'numpy.random.randint', 'np.random.randint', (['(100)', '(1000)'], {}), '(100, 1000)\n', (2959, 2970), True, 'import numpy as np\n'), ((2983, 3011), 'numpy.random.randint', 'np.random.randint', (['(100)', '(1000)'], {}), '(100, 1000)\n', (3000, 3011), True, 'import numpy as np\n'), ((3025, 3042), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (3039, 3042), True, 'import numpy as np\n'), ((3056, 3073), 'numpy.random.rand', 'np.random.rand', (['m'], {}), '(m)\n', (3070, 3073), True, 'import 
numpy as np\n'), ((3132, 3182), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t1', 't2'], {'window_size': 'w', 'verbose': '(False)'}), '(t1, t2, window_size=w, verbose=False)\n', (3144, 3182), False, 'from pytsmp import pytsmp\n'), ((3518, 3538), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (3532, 3538), True, 'import numpy as np\n'), ((3567, 3612), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (3579, 3612), False, 'from pytsmp import pytsmp\n'), ((4185, 4205), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (4199, 4205), True, 'import numpy as np\n'), ((4234, 4282), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t', 't'], {'window_size': 'w', 'verbose': '(False)'}), '(t, t, window_size=w, verbose=False)\n', (4246, 4282), False, 'from pytsmp import pytsmp\n'), ((4865, 4892), 'numpy.random.randint', 'np.random.randint', (['(100)', '(200)'], {}), '(100, 200)\n', (4882, 4892), True, 'import numpy as np\n'), ((4951, 4968), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (4965, 4968), True, 'import numpy as np\n'), ((4981, 5010), 'numpy.random.randint', 'np.random.randint', (['(10)', '(n // 4)'], {}), '(10, n // 4)\n', (4998, 5010), True, 'import numpy as np\n'), ((5024, 5069), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (5036, 5069), False, 'from pytsmp import pytsmp\n'), ((5138, 5184), 'tests.helpers.naive_matrix_profile', 'helpers.naive_matrix_profile', (['t'], {'window_size': 'w'}), '(t, window_size=w)\n', (5166, 5184), False, 'from tests import helpers\n'), ((5200, 5227), 'numpy.allclose', 'np.allclose', (['mpro', 'mp_naive'], {}), '(mpro, mp_naive)\n', (5211, 5227), True, 'import numpy as np\n'), ((5387, 5414), 'numpy.allclose', 'np.allclose', (['ipro', 'ip_naive'], {}), '(ipro, ip_naive)\n', (5398, 5414), True, 'import numpy as np\n'), ((5632, 
5659), 'numpy.random.randint', 'np.random.randint', (['(100)', '(200)'], {}), '(100, 200)\n', (5649, 5659), True, 'import numpy as np\n'), ((5672, 5699), 'numpy.random.randint', 'np.random.randint', (['(100)', '(200)'], {}), '(100, 200)\n', (5689, 5699), True, 'import numpy as np\n'), ((5713, 5730), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (5727, 5730), True, 'import numpy as np\n'), ((5744, 5761), 'numpy.random.rand', 'np.random.rand', (['m'], {}), '(m)\n', (5758, 5761), True, 'import numpy as np\n'), ((5825, 5875), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t1', 't2'], {'window_size': 'w', 'verbose': '(False)'}), '(t1, t2, window_size=w, verbose=False)\n', (5837, 5875), False, 'from pytsmp import pytsmp\n'), ((5944, 5995), 'tests.helpers.naive_matrix_profile', 'helpers.naive_matrix_profile', (['t1', 't2'], {'window_size': 'w'}), '(t1, t2, window_size=w)\n', (5972, 5995), False, 'from tests import helpers\n'), ((6011, 6038), 'numpy.allclose', 'np.allclose', (['mpro', 'mp_naive'], {}), '(mpro, mp_naive)\n', (6022, 6038), True, 'import numpy as np\n'), ((6193, 6220), 'numpy.allclose', 'np.allclose', (['ipro', 'ip_naive'], {}), '(ipro, ip_naive)\n', (6204, 6220), True, 'import numpy as np\n'), ((6427, 6468), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/random_walk_data.csv"""'], {}), "('./data/random_walk_data.csv')\n", (6437, 6468), True, 'import numpy as np\n'), ((6488, 6534), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/random_walk_data_mpro.csv"""'], {}), "('./data/random_walk_data_mpro.csv')\n", (6498, 6534), True, 'import numpy as np\n'), ((6554, 6600), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/random_walk_data_ipro.csv"""'], {}), "('./data/random_walk_data_ipro.csv')\n", (6564, 6600), True, 'import numpy as np\n'), ((6629, 6674), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (6641, 6674), False, 'from pytsmp import pytsmp\n'), ((6729, 6756), 
'numpy.allclose', 'np.allclose', (['mpro', 'mpro_ans'], {}), '(mpro, mpro_ans)\n', (6740, 6756), True, 'import numpy as np\n'), ((7264, 7305), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/candy_production.csv"""'], {}), "('./data/candy_production.csv')\n", (7274, 7305), True, 'import numpy as np\n'), ((7325, 7371), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/candy_production_mpro.csv"""'], {}), "('./data/candy_production_mpro.csv')\n", (7335, 7371), True, 'import numpy as np\n'), ((7391, 7437), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/candy_production_ipro.csv"""'], {}), "('./data/candy_production_ipro.csv')\n", (7401, 7437), True, 'import numpy as np\n'), ((7466, 7511), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (7478, 7511), False, 'from pytsmp import pytsmp\n'), ((7566, 7593), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro_ans'], {}), '(mpro, mpro_ans)\n', (7577, 7593), True, 'import numpy as np\n'), ((7847, 7874), 'numpy.allclose', 'np.allclose', (['ipro', 'ipro_ans'], {}), '(ipro, ipro_ans)\n', (7858, 7874), True, 'import numpy as np\n'), ((8075, 8113), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/bitcoin_price.csv"""'], {}), "('./data/bitcoin_price.csv')\n", (8085, 8113), True, 'import numpy as np\n'), ((8133, 8176), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/bitcoin_price_mpro.csv"""'], {}), "('./data/bitcoin_price_mpro.csv')\n", (8143, 8176), True, 'import numpy as np\n'), ((8196, 8239), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/bitcoin_price_ipro.csv"""'], {}), "('./data/bitcoin_price_ipro.csv')\n", (8206, 8239), True, 'import numpy as np\n'), ((8269, 8314), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (8281, 8314), False, 'from pytsmp import pytsmp\n'), ((8369, 8396), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro_ans'], {}), '(mpro, mpro_ans)\n', (8380, 8396), True, 'import 
numpy as np\n'), ((8650, 8677), 'numpy.allclose', 'np.allclose', (['ipro', 'ipro_ans'], {}), '(ipro, ipro_ans)\n', (8661, 8677), True, 'import numpy as np\n'), ((8991, 9019), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (9008, 9019), True, 'import numpy as np\n'), ((9032, 9060), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (9049, 9060), True, 'import numpy as np\n'), ((9074, 9091), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (9088, 9091), True, 'import numpy as np\n'), ((9105, 9122), 'numpy.random.rand', 'np.random.rand', (['m'], {}), '(m)\n', (9119, 9122), True, 'import numpy as np\n'), ((9186, 9241), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t1[:-1]', 't2'], {'window_size': 'w', 'verbose': '(False)'}), '(t1[:-1], t2, window_size=w, verbose=False)\n', (9198, 9241), False, 'from pytsmp import pytsmp\n'), ((9325, 9375), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t1', 't2'], {'window_size': 'w', 'verbose': '(False)'}), '(t1, t2, window_size=w, verbose=False)\n', (9337, 9375), False, 'from pytsmp import pytsmp\n'), ((9433, 9457), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro2'], {}), '(mpro, mpro2)\n', (9444, 9457), True, 'import numpy as np\n'), ((9714, 9738), 'numpy.allclose', 'np.allclose', (['ipro', 'ipro2'], {}), '(ipro, ipro2)\n', (9725, 9738), True, 'import numpy as np\n'), ((9945, 9973), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (9962, 9973), True, 'import numpy as np\n'), ((9986, 10014), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (10003, 10014), True, 'import numpy as np\n'), ((10028, 10045), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (10042, 10045), True, 'import numpy as np\n'), ((10059, 10076), 'numpy.random.rand', 'np.random.rand', (['m'], {}), '(m)\n', (10073, 10076), True, 'import numpy as np\n'), ((10143, 10167), 'numpy.random.randint', 
'np.random.randint', (['(5)', '(50)'], {}), '(5, 50)\n', (10160, 10167), True, 'import numpy as np\n'), ((10181, 10240), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t1[:-times]', 't2'], {'window_size': 'w', 'verbose': '(False)'}), '(t1[:-times], t2, window_size=w, verbose=False)\n', (10193, 10240), False, 'from pytsmp import pytsmp\n'), ((10365, 10415), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t1', 't2'], {'window_size': 'w', 'verbose': '(False)'}), '(t1, t2, window_size=w, verbose=False)\n', (10377, 10415), False, 'from pytsmp import pytsmp\n'), ((10473, 10497), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro2'], {}), '(mpro, mpro2)\n', (10484, 10497), True, 'import numpy as np\n'), ((10778, 10802), 'numpy.allclose', 'np.allclose', (['ipro', 'ipro2'], {}), '(ipro, ipro2)\n', (10789, 10802), True, 'import numpy as np\n'), ((11015, 11043), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (11032, 11043), True, 'import numpy as np\n'), ((11056, 11084), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (11073, 11084), True, 'import numpy as np\n'), ((11098, 11115), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (11112, 11115), True, 'import numpy as np\n'), ((11129, 11146), 'numpy.random.rand', 'np.random.rand', (['m'], {}), '(m)\n', (11143, 11146), True, 'import numpy as np\n'), ((11210, 11265), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t1', 't2[:-1]'], {'window_size': 'w', 'verbose': '(False)'}), '(t1, t2[:-1], window_size=w, verbose=False)\n', (11222, 11265), False, 'from pytsmp import pytsmp\n'), ((11349, 11399), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t1', 't2'], {'window_size': 'w', 'verbose': '(False)'}), '(t1, t2, window_size=w, verbose=False)\n', (11361, 11399), False, 'from pytsmp import pytsmp\n'), ((11457, 11481), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro2'], {}), '(mpro, mpro2)\n', (11468, 11481), True, 'import numpy as np\n'), ((11738, 11762), 
'numpy.allclose', 'np.allclose', (['ipro', 'ipro2'], {}), '(ipro, ipro2)\n', (11749, 11762), True, 'import numpy as np\n'), ((11969, 11997), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (11986, 11997), True, 'import numpy as np\n'), ((12010, 12038), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (12027, 12038), True, 'import numpy as np\n'), ((12052, 12069), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (12066, 12069), True, 'import numpy as np\n'), ((12083, 12100), 'numpy.random.rand', 'np.random.rand', (['m'], {}), '(m)\n', (12097, 12100), True, 'import numpy as np\n'), ((12167, 12191), 'numpy.random.randint', 'np.random.randint', (['(5)', '(50)'], {}), '(5, 50)\n', (12184, 12191), True, 'import numpy as np\n'), ((12205, 12264), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t1', 't2[:-times]'], {'window_size': 'w', 'verbose': '(False)'}), '(t1, t2[:-times], window_size=w, verbose=False)\n', (12217, 12264), False, 'from pytsmp import pytsmp\n'), ((12389, 12439), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t1', 't2'], {'window_size': 'w', 'verbose': '(False)'}), '(t1, t2, window_size=w, verbose=False)\n', (12401, 12439), False, 'from pytsmp import pytsmp\n'), ((12497, 12521), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro2'], {}), '(mpro, mpro2)\n', (12508, 12521), True, 'import numpy as np\n'), ((12802, 12826), 'numpy.allclose', 'np.allclose', (['ipro', 'ipro2'], {}), '(ipro, ipro2)\n', (12813, 12826), True, 'import numpy as np\n'), ((13046, 13074), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (13063, 13074), True, 'import numpy as np\n'), ((13087, 13115), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (13104, 13115), True, 'import numpy as np\n'), ((13129, 13146), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (13143, 13146), True, 'import numpy as np\n'), ((13160, 
13177), 'numpy.random.rand', 'np.random.rand', (['m'], {}), '(m)\n', (13174, 13177), True, 'import numpy as np\n'), ((13244, 13268), 'numpy.random.randint', 'np.random.randint', (['(5)', '(25)'], {}), '(5, 25)\n', (13261, 13268), True, 'import numpy as np\n'), ((13282, 13350), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t1[:-times]', 't2[:-times]'], {'window_size': 'w', 'verbose': '(False)'}), '(t1[:-times], t2[:-times], window_size=w, verbose=False)\n', (13294, 13350), False, 'from pytsmp import pytsmp\n'), ((13508, 13558), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t1', 't2'], {'window_size': 'w', 'verbose': '(False)'}), '(t1, t2, window_size=w, verbose=False)\n', (13520, 13558), False, 'from pytsmp import pytsmp\n'), ((13616, 13640), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro2'], {}), '(mpro, mpro2)\n', (13627, 13640), True, 'import numpy as np\n'), ((13980, 14004), 'numpy.allclose', 'np.allclose', (['ipro', 'ipro2'], {}), '(ipro, ipro2)\n', (13991, 14004), True, 'import numpy as np\n'), ((14283, 14311), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (14300, 14311), True, 'import numpy as np\n'), ((14324, 14341), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (14338, 14341), True, 'import numpy as np\n'), ((14354, 14383), 'numpy.random.randint', 'np.random.randint', (['(10)', '(n // 4)'], {}), '(10, n // 4)\n', (14371, 14383), True, 'import numpy as np\n'), ((14397, 14447), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t[:-1]'], {'window_size': 'w', 'verbose': '(False)'}), '(t[:-1], window_size=w, verbose=False)\n', (14409, 14447), False, 'from pytsmp import pytsmp\n'), ((14530, 14575), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (14542, 14575), False, 'from pytsmp import pytsmp\n'), ((14633, 14657), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro2'], {}), '(mpro, mpro2)\n', (14644, 14657), True, 'import numpy as np\n'), 
((14913, 14937), 'numpy.allclose', 'np.allclose', (['ipro', 'ipro2'], {}), '(ipro, ipro2)\n', (14924, 14937), True, 'import numpy as np\n'), ((15141, 15169), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (15158, 15169), True, 'import numpy as np\n'), ((15182, 15199), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (15196, 15199), True, 'import numpy as np\n'), ((15212, 15241), 'numpy.random.randint', 'np.random.randint', (['(10)', '(n // 4)'], {}), '(10, n // 4)\n', (15229, 15241), True, 'import numpy as np\n'), ((15258, 15282), 'numpy.random.randint', 'np.random.randint', (['(5)', '(50)'], {}), '(5, 50)\n', (15275, 15282), True, 'import numpy as np\n'), ((15296, 15350), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t[:-times]'], {'window_size': 'w', 'verbose': '(False)'}), '(t[:-times], window_size=w, verbose=False)\n', (15308, 15350), False, 'from pytsmp import pytsmp\n'), ((15474, 15519), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (15486, 15519), False, 'from pytsmp import pytsmp\n'), ((15577, 15601), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro2'], {}), '(mpro, mpro2)\n', (15588, 15601), True, 'import numpy as np\n'), ((15881, 15905), 'numpy.allclose', 'np.allclose', (['ipro', 'ipro2'], {}), '(ipro, ipro2)\n', (15892, 15905), True, 'import numpy as np\n'), ((16124, 16152), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (16141, 16152), True, 'import numpy as np\n'), ((16165, 16182), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (16179, 16182), True, 'import numpy as np\n'), ((16195, 16224), 'numpy.random.randint', 'np.random.randint', (['(10)', '(n // 4)'], {}), '(10, n // 4)\n', (16212, 16224), True, 'import numpy as np\n'), ((16238, 16288), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t[:-1]'], {'window_size': 'w', 'verbose': '(False)'}), '(t[:-1], window_size=w, 
verbose=False)\n', (16250, 16288), False, 'from pytsmp import pytsmp\n'), ((16371, 16416), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (16383, 16416), False, 'from pytsmp import pytsmp\n'), ((16474, 16498), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro2'], {}), '(mpro, mpro2)\n', (16485, 16498), True, 'import numpy as np\n'), ((16754, 16778), 'numpy.allclose', 'np.allclose', (['ipro', 'ipro2'], {}), '(ipro, ipro2)\n', (16765, 16778), True, 'import numpy as np\n'), ((16982, 17010), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (16999, 17010), True, 'import numpy as np\n'), ((17023, 17040), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (17037, 17040), True, 'import numpy as np\n'), ((17053, 17082), 'numpy.random.randint', 'np.random.randint', (['(10)', '(n // 4)'], {}), '(10, n // 4)\n', (17070, 17082), True, 'import numpy as np\n'), ((17099, 17123), 'numpy.random.randint', 'np.random.randint', (['(5)', '(50)'], {}), '(5, 50)\n', (17116, 17123), True, 'import numpy as np\n'), ((17137, 17191), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t[:-times]'], {'window_size': 'w', 'verbose': '(False)'}), '(t[:-times], window_size=w, verbose=False)\n', (17149, 17191), False, 'from pytsmp import pytsmp\n'), ((17315, 17360), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (17327, 17360), False, 'from pytsmp import pytsmp\n'), ((17418, 17442), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro2'], {}), '(mpro, mpro2)\n', (17429, 17442), True, 'import numpy as np\n'), ((17722, 17746), 'numpy.allclose', 'np.allclose', (['ipro', 'ipro2'], {}), '(ipro, ipro2)\n', (17733, 17746), True, 'import numpy as np\n'), ((17972, 18000), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (17989, 18000), True, 'import numpy as np\n'), 
((18013, 18030), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (18027, 18030), True, 'import numpy as np\n'), ((18043, 18072), 'numpy.random.randint', 'np.random.randint', (['(10)', '(n // 4)'], {}), '(10, n // 4)\n', (18060, 18072), True, 'import numpy as np\n'), ((18089, 18113), 'numpy.random.randint', 'np.random.randint', (['(5)', '(25)'], {}), '(5, 25)\n', (18106, 18113), True, 'import numpy as np\n'), ((18127, 18181), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t[:-times]'], {'window_size': 'w', 'verbose': '(False)'}), '(t[:-times], window_size=w, verbose=False)\n', (18139, 18181), False, 'from pytsmp import pytsmp\n'), ((18390, 18435), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (18402, 18435), False, 'from pytsmp import pytsmp\n'), ((18493, 18517), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro2'], {}), '(mpro, mpro2)\n', (18504, 18517), True, 'import numpy as np\n'), ((18856, 18880), 'numpy.allclose', 'np.allclose', (['ipro', 'ipro2'], {}), '(ipro, ipro2)\n', (18867, 18880), True, 'import numpy as np\n'), ((20503, 20531), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (20520, 20531), True, 'import numpy as np\n'), ((20544, 20561), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (20558, 20561), True, 'import numpy as np\n'), ((20574, 20603), 'numpy.random.randint', 'np.random.randint', (['(10)', '(n // 4)'], {}), '(10, n // 4)\n', (20591, 20603), True, 'import numpy as np\n'), ((20617, 20662), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (20629, 20662), False, 'from pytsmp import pytsmp\n'), ((21213, 21241), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (21230, 21241), True, 'import numpy as np\n'), ((21254, 21271), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', 
(21268, 21271), True, 'import numpy as np\n'), ((21284, 21313), 'numpy.random.randint', 'np.random.randint', (['(10)', '(n // 4)'], {}), '(10, n // 4)\n', (21301, 21313), True, 'import numpy as np\n'), ((21327, 21372), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (21339, 21372), False, 'from pytsmp import pytsmp\n'), ((21996, 22024), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (22013, 22024), True, 'import numpy as np\n'), ((22037, 22054), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (22051, 22054), True, 'import numpy as np\n'), ((22067, 22096), 'numpy.random.randint', 'np.random.randint', (['(10)', '(n // 5)'], {}), '(10, n // 5)\n', (22084, 22096), True, 'import numpy as np\n'), ((22135, 22180), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (22147, 22180), False, 'from pytsmp import pytsmp\n'), ((22827, 22854), 'numpy.random.randint', 'np.random.randint', (['(200)', '(500)'], {}), '(200, 500)\n', (22844, 22854), True, 'import numpy as np\n'), ((22867, 22884), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (22881, 22884), True, 'import numpy as np\n'), ((22897, 22910), 'numpy.tile', 'np.tile', (['t', '(4)'], {}), '(t, 4)\n', (22904, 22910), True, 'import numpy as np\n'), ((22923, 22952), 'numpy.random.randint', 'np.random.randint', (['(10)', '(n // 4)'], {}), '(10, n // 4)\n', (22940, 22952), True, 'import numpy as np\n'), ((23024, 23069), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (23036, 23069), False, 'from pytsmp import pytsmp\n'), ((24761, 24789), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (24778, 24789), True, 'import numpy as np\n'), ((24802, 24819), 'numpy.random.rand', 
'np.random.rand', (['n'], {}), '(n)\n', (24816, 24819), True, 'import numpy as np\n'), ((24832, 24861), 'numpy.random.randint', 'np.random.randint', (['(10)', '(n // 4)'], {}), '(10, n // 4)\n', (24849, 24861), True, 'import numpy as np\n'), ((24875, 24920), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (24887, 24920), False, 'from pytsmp import pytsmp\n'), ((25475, 25503), 'numpy.random.randint', 'np.random.randint', (['(200)', '(1000)'], {}), '(200, 1000)\n', (25492, 25503), True, 'import numpy as np\n'), ((25516, 25533), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (25530, 25533), True, 'import numpy as np\n'), ((25546, 25575), 'numpy.random.randint', 'np.random.randint', (['(10)', '(n // 4)'], {}), '(10, n // 4)\n', (25563, 25575), True, 'import numpy as np\n'), ((25589, 25634), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (25601, 25634), False, 'from pytsmp import pytsmp\n'), ((26257, 26277), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (26271, 26277), True, 'import numpy as np\n'), ((26291, 26346), 'pytsmp.pytsmp.STOMP', 'pytsmp.STOMP', (['t'], {'window_size': '(10)', 's_size': '(1)', 'verbose': '(True)'}), '(t, window_size=10, s_size=1, verbose=True)\n', (26303, 26346), False, 'from pytsmp import pytsmp\n'), ((26545, 26573), 'numpy.random.randint', 'np.random.randint', (['(100)', '(1000)'], {}), '(100, 1000)\n', (26562, 26573), True, 'import numpy as np\n'), ((26586, 26614), 'numpy.random.randint', 'np.random.randint', (['(100)', '(1000)'], {}), '(100, 1000)\n', (26603, 26614), True, 'import numpy as np\n'), ((26628, 26645), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (26642, 26645), True, 'import numpy as np\n'), ((26659, 26676), 'numpy.random.rand', 'np.random.rand', (['m'], {}), '(m)\n', (26673, 26676), True, 'import numpy as 
np\n'), ((26735, 26785), 'pytsmp.pytsmp.STOMP', 'pytsmp.STOMP', (['t1', 't2'], {'window_size': 'w', 'verbose': '(False)'}), '(t1, t2, window_size=w, verbose=False)\n', (26747, 26785), False, 'from pytsmp import pytsmp\n'), ((27121, 27141), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (27135, 27141), True, 'import numpy as np\n'), ((27170, 27215), 'pytsmp.pytsmp.STOMP', 'pytsmp.STOMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (27182, 27215), False, 'from pytsmp import pytsmp\n'), ((27788, 27808), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (27802, 27808), True, 'import numpy as np\n'), ((27837, 27885), 'pytsmp.pytsmp.STOMP', 'pytsmp.STOMP', (['t', 't'], {'window_size': 'w', 'verbose': '(False)'}), '(t, t, window_size=w, verbose=False)\n', (27849, 27885), False, 'from pytsmp import pytsmp\n'), ((28468, 28495), 'numpy.random.randint', 'np.random.randint', (['(100)', '(200)'], {}), '(100, 200)\n', (28485, 28495), True, 'import numpy as np\n'), ((28554, 28571), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (28568, 28571), True, 'import numpy as np\n'), ((28584, 28613), 'numpy.random.randint', 'np.random.randint', (['(10)', '(n // 4)'], {}), '(10, n // 4)\n', (28601, 28613), True, 'import numpy as np\n'), ((28627, 28672), 'pytsmp.pytsmp.STOMP', 'pytsmp.STOMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (28639, 28672), False, 'from pytsmp import pytsmp\n'), ((28741, 28787), 'tests.helpers.naive_matrix_profile', 'helpers.naive_matrix_profile', (['t'], {'window_size': 'w'}), '(t, window_size=w)\n', (28769, 28787), False, 'from tests import helpers\n'), ((28803, 28830), 'numpy.allclose', 'np.allclose', (['mpro', 'mp_naive'], {}), '(mpro, mp_naive)\n', (28814, 28830), True, 'import numpy as np\n'), ((28990, 29017), 'numpy.allclose', 'np.allclose', (['ipro', 'ip_naive'], {}), '(ipro, ip_naive)\n', (29001, 29017), 
True, 'import numpy as np\n'), ((29235, 29262), 'numpy.random.randint', 'np.random.randint', (['(100)', '(200)'], {}), '(100, 200)\n', (29252, 29262), True, 'import numpy as np\n'), ((29275, 29302), 'numpy.random.randint', 'np.random.randint', (['(100)', '(200)'], {}), '(100, 200)\n', (29292, 29302), True, 'import numpy as np\n'), ((29316, 29333), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (29330, 29333), True, 'import numpy as np\n'), ((29347, 29364), 'numpy.random.rand', 'np.random.rand', (['m'], {}), '(m)\n', (29361, 29364), True, 'import numpy as np\n'), ((29428, 29478), 'pytsmp.pytsmp.STOMP', 'pytsmp.STOMP', (['t1', 't2'], {'window_size': 'w', 'verbose': '(False)'}), '(t1, t2, window_size=w, verbose=False)\n', (29440, 29478), False, 'from pytsmp import pytsmp\n'), ((29547, 29598), 'tests.helpers.naive_matrix_profile', 'helpers.naive_matrix_profile', (['t1', 't2'], {'window_size': 'w'}), '(t1, t2, window_size=w)\n', (29575, 29598), False, 'from tests import helpers\n'), ((29614, 29641), 'numpy.allclose', 'np.allclose', (['mpro', 'mp_naive'], {}), '(mpro, mp_naive)\n', (29625, 29641), True, 'import numpy as np\n'), ((29796, 29823), 'numpy.allclose', 'np.allclose', (['ipro', 'ip_naive'], {}), '(ipro, ip_naive)\n', (29807, 29823), True, 'import numpy as np\n'), ((30030, 30071), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/random_walk_data.csv"""'], {}), "('./data/random_walk_data.csv')\n", (30040, 30071), True, 'import numpy as np\n'), ((30091, 30137), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/random_walk_data_mpro.csv"""'], {}), "('./data/random_walk_data_mpro.csv')\n", (30101, 30137), True, 'import numpy as np\n'), ((30157, 30203), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/random_walk_data_ipro.csv"""'], {}), "('./data/random_walk_data_ipro.csv')\n", (30167, 30203), True, 'import numpy as np\n'), ((30232, 30277), 'pytsmp.pytsmp.STOMP', 'pytsmp.STOMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', 
(30244, 30277), False, 'from pytsmp import pytsmp\n'), ((30332, 30359), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro_ans'], {}), '(mpro, mpro_ans)\n', (30343, 30359), True, 'import numpy as np\n'), ((30845, 30886), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/candy_production.csv"""'], {}), "('./data/candy_production.csv')\n", (30855, 30886), True, 'import numpy as np\n'), ((30906, 30952), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/candy_production_mpro.csv"""'], {}), "('./data/candy_production_mpro.csv')\n", (30916, 30952), True, 'import numpy as np\n'), ((30972, 31018), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/candy_production_ipro.csv"""'], {}), "('./data/candy_production_ipro.csv')\n", (30982, 31018), True, 'import numpy as np\n'), ((31047, 31092), 'pytsmp.pytsmp.STOMP', 'pytsmp.STOMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (31059, 31092), False, 'from pytsmp import pytsmp\n'), ((31147, 31174), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro_ans'], {}), '(mpro, mpro_ans)\n', (31158, 31174), True, 'import numpy as np\n'), ((31428, 31455), 'numpy.allclose', 'np.allclose', (['ipro', 'ipro_ans'], {}), '(ipro, ipro_ans)\n', (31439, 31455), True, 'import numpy as np\n'), ((31656, 31694), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/bitcoin_price.csv"""'], {}), "('./data/bitcoin_price.csv')\n", (31666, 31694), True, 'import numpy as np\n'), ((31714, 31757), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/bitcoin_price_mpro.csv"""'], {}), "('./data/bitcoin_price_mpro.csv')\n", (31724, 31757), True, 'import numpy as np\n'), ((31777, 31820), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/bitcoin_price_ipro.csv"""'], {}), "('./data/bitcoin_price_ipro.csv')\n", (31787, 31820), True, 'import numpy as np\n'), ((31850, 31895), 'pytsmp.pytsmp.STOMP', 'pytsmp.STOMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (31862, 31895), False, 'from pytsmp import pytsmp\n'), ((31950, 
31977), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro_ans'], {}), '(mpro, mpro_ans)\n', (31961, 31977), True, 'import numpy as np\n'), ((32231, 32258), 'numpy.allclose', 'np.allclose', (['ipro', 'ipro_ans'], {}), '(ipro, ipro_ans)\n', (32242, 32258), True, 'import numpy as np\n'), ((32461, 32481), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (32475, 32481), True, 'import numpy as np\n'), ((32495, 32565), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t'], {'window_size': '(10)', 's_size': '(1)', 'verbose': '(True)', 'pre_scrimp': '(1)'}), '(t, window_size=10, s_size=1, verbose=True, pre_scrimp=1)\n', (32508, 32565), False, 'from pytsmp import pytsmp\n'), ((32762, 32790), 'numpy.random.randint', 'np.random.randint', (['(100)', '(1000)'], {}), '(100, 1000)\n', (32779, 32790), True, 'import numpy as np\n'), ((32803, 32831), 'numpy.random.randint', 'np.random.randint', (['(100)', '(1000)'], {}), '(100, 1000)\n', (32820, 32831), True, 'import numpy as np\n'), ((32845, 32862), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (32859, 32862), True, 'import numpy as np\n'), ((32876, 32893), 'numpy.random.rand', 'np.random.rand', (['m'], {}), '(m)\n', (32890, 32893), True, 'import numpy as np\n'), ((32952, 33017), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t1', 't2'], {'window_size': 'w', 'verbose': '(False)', 'pre_scrimp': '(0)'}), '(t1, t2, window_size=w, verbose=False, pre_scrimp=0)\n', (32965, 33017), False, 'from pytsmp import pytsmp\n'), ((33356, 33376), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (33370, 33376), True, 'import numpy as np\n'), ((33405, 33465), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t'], {'window_size': 'w', 'verbose': '(False)', 'pre_scrimp': '(0)'}), '(t, window_size=w, verbose=False, pre_scrimp=0)\n', (33418, 33465), False, 'from pytsmp import pytsmp\n'), ((34041, 34061), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (34055, 34061), True, 'import numpy as 
np\n'), ((34090, 34153), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t', 't'], {'window_size': 'w', 'verbose': '(False)', 'pre_scrimp': '(0)'}), '(t, t, window_size=w, verbose=False, pre_scrimp=0)\n', (34103, 34153), False, 'from pytsmp import pytsmp\n'), ((34739, 34766), 'numpy.random.randint', 'np.random.randint', (['(100)', '(200)'], {}), '(100, 200)\n', (34756, 34766), True, 'import numpy as np\n'), ((34825, 34842), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (34839, 34842), True, 'import numpy as np\n'), ((34855, 34884), 'numpy.random.randint', 'np.random.randint', (['(10)', '(n // 4)'], {}), '(10, n // 4)\n', (34872, 34884), True, 'import numpy as np\n'), ((34898, 34958), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t'], {'window_size': 'w', 'verbose': '(False)', 'pre_scrimp': '(0)'}), '(t, window_size=w, verbose=False, pre_scrimp=0)\n', (34911, 34958), False, 'from pytsmp import pytsmp\n'), ((35027, 35073), 'tests.helpers.naive_matrix_profile', 'helpers.naive_matrix_profile', (['t'], {'window_size': 'w'}), '(t, window_size=w)\n', (35055, 35073), False, 'from tests import helpers\n'), ((35089, 35116), 'numpy.allclose', 'np.allclose', (['mpro', 'mp_naive'], {}), '(mpro, mp_naive)\n', (35100, 35116), True, 'import numpy as np\n'), ((35277, 35304), 'numpy.allclose', 'np.allclose', (['ipro', 'ip_naive'], {}), '(ipro, ip_naive)\n', (35288, 35304), True, 'import numpy as np\n'), ((35524, 35551), 'numpy.random.randint', 'np.random.randint', (['(100)', '(200)'], {}), '(100, 200)\n', (35541, 35551), True, 'import numpy as np\n'), ((35564, 35591), 'numpy.random.randint', 'np.random.randint', (['(100)', '(200)'], {}), '(100, 200)\n', (35581, 35591), True, 'import numpy as np\n'), ((35605, 35622), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (35619, 35622), True, 'import numpy as np\n'), ((35636, 35653), 'numpy.random.rand', 'np.random.rand', (['m'], {}), '(m)\n', (35650, 35653), True, 'import numpy as np\n'), ((35717, 35782), 
'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t1', 't2'], {'window_size': 'w', 'verbose': '(False)', 'pre_scrimp': '(0)'}), '(t1, t2, window_size=w, verbose=False, pre_scrimp=0)\n', (35730, 35782), False, 'from pytsmp import pytsmp\n'), ((35851, 35902), 'tests.helpers.naive_matrix_profile', 'helpers.naive_matrix_profile', (['t1', 't2'], {'window_size': 'w'}), '(t1, t2, window_size=w)\n', (35879, 35902), False, 'from tests import helpers\n'), ((35918, 35945), 'numpy.allclose', 'np.allclose', (['mpro', 'mp_naive'], {}), '(mpro, mp_naive)\n', (35929, 35945), True, 'import numpy as np\n'), ((36101, 36128), 'numpy.allclose', 'np.allclose', (['ipro', 'ip_naive'], {}), '(ipro, ip_naive)\n', (36112, 36128), True, 'import numpy as np\n'), ((36337, 36378), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/random_walk_data.csv"""'], {}), "('./data/random_walk_data.csv')\n", (36347, 36378), True, 'import numpy as np\n'), ((36398, 36444), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/random_walk_data_mpro.csv"""'], {}), "('./data/random_walk_data_mpro.csv')\n", (36408, 36444), True, 'import numpy as np\n'), ((36464, 36510), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/random_walk_data_ipro.csv"""'], {}), "('./data/random_walk_data_ipro.csv')\n", (36474, 36510), True, 'import numpy as np\n'), ((36539, 36599), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t'], {'window_size': 'w', 'verbose': '(False)', 'pre_scrimp': '(0)'}), '(t, window_size=w, verbose=False, pre_scrimp=0)\n', (36552, 36599), False, 'from pytsmp import pytsmp\n'), ((36654, 36681), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro_ans'], {}), '(mpro, mpro_ans)\n', (36665, 36681), True, 'import numpy as np\n'), ((37170, 37211), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/candy_production.csv"""'], {}), "('./data/candy_production.csv')\n", (37180, 37211), True, 'import numpy as np\n'), ((37231, 37277), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/candy_production_mpro.csv"""'], {}), "('./data/candy_production_mpro.csv')\n", 
(37241, 37277), True, 'import numpy as np\n'), ((37297, 37343), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/candy_production_ipro.csv"""'], {}), "('./data/candy_production_ipro.csv')\n", (37307, 37343), True, 'import numpy as np\n'), ((37372, 37432), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t'], {'window_size': 'w', 'verbose': '(False)', 'pre_scrimp': '(0)'}), '(t, window_size=w, verbose=False, pre_scrimp=0)\n', (37385, 37432), False, 'from pytsmp import pytsmp\n'), ((37487, 37514), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro_ans'], {}), '(mpro, mpro_ans)\n', (37498, 37514), True, 'import numpy as np\n'), ((37769, 37796), 'numpy.allclose', 'np.allclose', (['ipro', 'ipro_ans'], {}), '(ipro, ipro_ans)\n', (37780, 37796), True, 'import numpy as np\n'), ((37999, 38037), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/bitcoin_price.csv"""'], {}), "('./data/bitcoin_price.csv')\n", (38009, 38037), True, 'import numpy as np\n'), ((38057, 38100), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/bitcoin_price_mpro.csv"""'], {}), "('./data/bitcoin_price_mpro.csv')\n", (38067, 38100), True, 'import numpy as np\n'), ((38120, 38163), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/bitcoin_price_ipro.csv"""'], {}), "('./data/bitcoin_price_ipro.csv')\n", (38130, 38163), True, 'import numpy as np\n'), ((38193, 38253), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t'], {'window_size': 'w', 'verbose': '(False)', 'pre_scrimp': '(0)'}), '(t, window_size=w, verbose=False, pre_scrimp=0)\n', (38206, 38253), False, 'from pytsmp import pytsmp\n'), ((38308, 38335), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro_ans'], {}), '(mpro, mpro_ans)\n', (38319, 38335), True, 'import numpy as np\n'), ((38590, 38617), 'numpy.allclose', 'np.allclose', (['ipro', 'ipro_ans'], {}), '(ipro, ipro_ans)\n', (38601, 38617), True, 'import numpy as np\n'), ((38827, 38847), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (38841, 38847), True, 'import numpy as np\n'), ((38861, 38920), 
'pytsmp.pytsmp.PreSCRIMP', 'pytsmp.PreSCRIMP', (['t'], {'window_size': '(10)', 's_size': '(1)', 'verbose': '(True)'}), '(t, window_size=10, s_size=1, verbose=True)\n', (38877, 38920), False, 'from pytsmp import pytsmp\n'), ((39731, 39759), 'numpy.random.randint', 'np.random.randint', (['(100)', '(1000)'], {}), '(100, 1000)\n', (39748, 39759), True, 'import numpy as np\n'), ((39772, 39800), 'numpy.random.randint', 'np.random.randint', (['(100)', '(1000)'], {}), '(100, 1000)\n', (39789, 39800), True, 'import numpy as np\n'), ((39814, 39831), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (39828, 39831), True, 'import numpy as np\n'), ((39845, 39862), 'numpy.random.rand', 'np.random.rand', (['m'], {}), '(m)\n', (39859, 39862), True, 'import numpy as np\n'), ((39921, 39975), 'pytsmp.pytsmp.PreSCRIMP', 'pytsmp.PreSCRIMP', (['t1', 't2'], {'window_size': 'w', 'verbose': '(False)'}), '(t1, t2, window_size=w, verbose=False)\n', (39937, 39975), False, 'from pytsmp import pytsmp\n'), ((40323, 40343), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (40337, 40343), True, 'import numpy as np\n'), ((40372, 40421), 'pytsmp.pytsmp.PreSCRIMP', 'pytsmp.PreSCRIMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (40388, 40421), False, 'from pytsmp import pytsmp\n'), ((41007, 41027), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (41021, 41027), True, 'import numpy as np\n'), ((41056, 41108), 'pytsmp.pytsmp.PreSCRIMP', 'pytsmp.PreSCRIMP', (['t', 't'], {'window_size': 'w', 'verbose': '(False)'}), '(t, t, window_size=w, verbose=False)\n', (41072, 41108), False, 'from pytsmp import pytsmp\n'), ((41696, 41716), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (41710, 41716), True, 'import numpy as np\n'), ((41746, 41798), 'pytsmp.pytsmp.PreSCRIMP', 'pytsmp.PreSCRIMP', (['t', 't'], {'window_size': 'w', 'verbose': '(False)'}), '(t, t, window_size=w, verbose=False)\n', 
(41762, 41798), False, 'from pytsmp import pytsmp\n'), ((41854, 41917), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t', 't'], {'window_size': 'w', 'verbose': '(False)', 'pre_scrimp': '(0)'}), '(t, t, window_size=w, verbose=False, pre_scrimp=0)\n', (41867, 41917), False, 'from pytsmp import pytsmp\n'), ((42354, 42381), 'numpy.random.randint', 'np.random.randint', (['(100)', '(200)'], {}), '(100, 200)\n', (42371, 42381), True, 'import numpy as np\n'), ((42440, 42457), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (42454, 42457), True, 'import numpy as np\n'), ((42470, 42499), 'numpy.random.randint', 'np.random.randint', (['(10)', '(n // 4)'], {}), '(10, n // 4)\n', (42487, 42499), True, 'import numpy as np\n'), ((42513, 42562), 'pytsmp.pytsmp.PreSCRIMP', 'pytsmp.PreSCRIMP', (['t'], {'window_size': 'w', 'verbose': '(False)'}), '(t, window_size=w, verbose=False)\n', (42529, 42562), False, 'from pytsmp import pytsmp\n'), ((42631, 42677), 'tests.helpers.naive_matrix_profile', 'helpers.naive_matrix_profile', (['t'], {'window_size': 'w'}), '(t, window_size=w)\n', (42659, 42677), False, 'from tests import helpers\n'), ((42693, 42720), 'numpy.allclose', 'np.allclose', (['mpro', 'mp_naive'], {}), '(mpro, mp_naive)\n', (42704, 42720), True, 'import numpy as np\n'), ((42884, 42911), 'numpy.allclose', 'np.allclose', (['ipro', 'ip_naive'], {}), '(ipro, ip_naive)\n', (42895, 42911), True, 'import numpy as np\n'), ((43247, 43274), 'numpy.random.randint', 'np.random.randint', (['(100)', '(200)'], {}), '(100, 200)\n', (43264, 43274), True, 'import numpy as np\n'), ((43287, 43314), 'numpy.random.randint', 'np.random.randint', (['(100)', '(200)'], {}), '(100, 200)\n', (43304, 43314), True, 'import numpy as np\n'), ((43328, 43345), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (43342, 43345), True, 'import numpy as np\n'), ((43359, 43376), 'numpy.random.rand', 'np.random.rand', (['m'], {}), '(m)\n', (43373, 43376), True, 'import numpy as np\n'), ((43440, 
43494), 'pytsmp.pytsmp.PreSCRIMP', 'pytsmp.PreSCRIMP', (['t1', 't2'], {'window_size': 'w', 'verbose': '(False)'}), '(t1, t2, window_size=w, verbose=False)\n', (43456, 43494), False, 'from pytsmp import pytsmp\n'), ((43563, 43614), 'tests.helpers.naive_matrix_profile', 'helpers.naive_matrix_profile', (['t1', 't2'], {'window_size': 'w'}), '(t1, t2, window_size=w)\n', (43591, 43614), False, 'from tests import helpers\n'), ((43630, 43657), 'numpy.allclose', 'np.allclose', (['mpro', 'mp_naive'], {}), '(mpro, mp_naive)\n', (43641, 43657), True, 'import numpy as np\n'), ((43816, 43843), 'numpy.allclose', 'np.allclose', (['ipro', 'ip_naive'], {}), '(ipro, ip_naive)\n', (43827, 43843), True, 'import numpy as np\n'), ((44384, 44404), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (44398, 44404), True, 'import numpy as np\n'), ((44418, 44489), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t'], {'window_size': '(10)', 's_size': '(1)', 'verbose': '(False)', 'pre_scrimp': '(0)'}), '(t, window_size=10, s_size=1, verbose=False, pre_scrimp=0)\n', (44431, 44489), False, 'from pytsmp import pytsmp\n'), ((44760, 44780), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (44774, 44780), True, 'import numpy as np\n'), ((44794, 44869), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t'], {'window_size': '(10)', 's_size': '(1)', 'verbose': '(False)', 'pre_scrimp': '(1 / 2)'}), '(t, window_size=10, s_size=1, verbose=False, pre_scrimp=1 / 2)\n', (44807, 44869), False, 'from pytsmp import pytsmp\n'), ((45157, 45185), 'numpy.random.randint', 'np.random.randint', (['(100)', '(1000)'], {}), '(100, 1000)\n', (45174, 45185), True, 'import numpy as np\n'), ((45198, 45226), 'numpy.random.randint', 'np.random.randint', (['(100)', '(1000)'], {}), '(100, 1000)\n', (45215, 45226), True, 'import numpy as np\n'), ((45240, 45257), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (45254, 45257), True, 'import numpy as np\n'), ((45271, 45288), 
'numpy.random.rand', 'np.random.rand', (['m'], {}), '(m)\n', (45285, 45288), True, 'import numpy as np\n'), ((45347, 45416), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t1', 't2'], {'window_size': 'w', 'verbose': '(False)', 'pre_scrimp': '(1 / 4)'}), '(t1, t2, window_size=w, verbose=False, pre_scrimp=1 / 4)\n', (45360, 45416), False, 'from pytsmp import pytsmp\n'), ((45763, 45783), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (45777, 45783), True, 'import numpy as np\n'), ((45812, 45876), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t'], {'window_size': 'w', 'verbose': '(False)', 'pre_scrimp': '(1 / 4)'}), '(t, window_size=w, verbose=False, pre_scrimp=1 / 4)\n', (45825, 45876), False, 'from pytsmp import pytsmp\n'), ((46460, 46480), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (46474, 46480), True, 'import numpy as np\n'), ((46509, 46576), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t', 't'], {'window_size': 'w', 'verbose': '(False)', 'pre_scrimp': '(1 / 4)'}), '(t, t, window_size=w, verbose=False, pre_scrimp=1 / 4)\n', (46522, 46576), False, 'from pytsmp import pytsmp\n'), ((47170, 47197), 'numpy.random.randint', 'np.random.randint', (['(100)', '(200)'], {}), '(100, 200)\n', (47187, 47197), True, 'import numpy as np\n'), ((47256, 47273), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (47270, 47273), True, 'import numpy as np\n'), ((47286, 47315), 'numpy.random.randint', 'np.random.randint', (['(10)', '(n // 4)'], {}), '(10, n // 4)\n', (47303, 47315), True, 'import numpy as np\n'), ((47329, 47393), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t'], {'window_size': 'w', 'verbose': '(False)', 'pre_scrimp': '(1 / 4)'}), '(t, window_size=w, verbose=False, pre_scrimp=1 / 4)\n', (47342, 47393), False, 'from pytsmp import pytsmp\n'), ((47460, 47506), 'tests.helpers.naive_matrix_profile', 'helpers.naive_matrix_profile', (['t'], {'window_size': 'w'}), '(t, window_size=w)\n', (47488, 47506), False, 'from tests 
import helpers\n'), ((47522, 47549), 'numpy.allclose', 'np.allclose', (['mpro', 'mp_naive'], {}), '(mpro, mp_naive)\n', (47533, 47549), True, 'import numpy as np\n'), ((47710, 47737), 'numpy.allclose', 'np.allclose', (['ipro', 'ip_naive'], {}), '(ipro, ip_naive)\n', (47721, 47737), True, 'import numpy as np\n'), ((47967, 47994), 'numpy.random.randint', 'np.random.randint', (['(100)', '(200)'], {}), '(100, 200)\n', (47984, 47994), True, 'import numpy as np\n'), ((48007, 48034), 'numpy.random.randint', 'np.random.randint', (['(100)', '(200)'], {}), '(100, 200)\n', (48024, 48034), True, 'import numpy as np\n'), ((48048, 48065), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (48062, 48065), True, 'import numpy as np\n'), ((48079, 48096), 'numpy.random.rand', 'np.random.rand', (['m'], {}), '(m)\n', (48093, 48096), True, 'import numpy as np\n'), ((48160, 48229), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t1', 't2'], {'window_size': 'w', 'verbose': '(False)', 'pre_scrimp': '(1 / 4)'}), '(t1, t2, window_size=w, verbose=False, pre_scrimp=1 / 4)\n', (48173, 48229), False, 'from pytsmp import pytsmp\n'), ((48296, 48347), 'tests.helpers.naive_matrix_profile', 'helpers.naive_matrix_profile', (['t1', 't2'], {'window_size': 'w'}), '(t1, t2, window_size=w)\n', (48324, 48347), False, 'from tests import helpers\n'), ((48363, 48390), 'numpy.allclose', 'np.allclose', (['mpro', 'mp_naive'], {}), '(mpro, mp_naive)\n', (48374, 48390), True, 'import numpy as np\n'), ((48546, 48573), 'numpy.allclose', 'np.allclose', (['ipro', 'ip_naive'], {}), '(ipro, ip_naive)\n', (48557, 48573), True, 'import numpy as np\n'), ((48792, 48833), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/random_walk_data.csv"""'], {}), "('./data/random_walk_data.csv')\n", (48802, 48833), True, 'import numpy as np\n'), ((48853, 48899), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/random_walk_data_mpro.csv"""'], {}), "('./data/random_walk_data_mpro.csv')\n", (48863, 48899), True, 'import numpy as np\n'), 
((48919, 48965), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/random_walk_data_ipro.csv"""'], {}), "('./data/random_walk_data_ipro.csv')\n", (48929, 48965), True, 'import numpy as np\n'), ((48994, 49058), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t'], {'window_size': 'w', 'verbose': '(False)', 'pre_scrimp': '(1 / 4)'}), '(t, window_size=w, verbose=False, pre_scrimp=1 / 4)\n', (49007, 49058), False, 'from pytsmp import pytsmp\n'), ((49111, 49138), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro_ans'], {}), '(mpro, mpro_ans)\n', (49122, 49138), True, 'import numpy as np\n'), ((49637, 49678), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/candy_production.csv"""'], {}), "('./data/candy_production.csv')\n", (49647, 49678), True, 'import numpy as np\n'), ((49698, 49744), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/candy_production_mpro.csv"""'], {}), "('./data/candy_production_mpro.csv')\n", (49708, 49744), True, 'import numpy as np\n'), ((49764, 49810), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/candy_production_ipro.csv"""'], {}), "('./data/candy_production_ipro.csv')\n", (49774, 49810), True, 'import numpy as np\n'), ((49839, 49903), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t'], {'window_size': 'w', 'verbose': '(False)', 'pre_scrimp': '(1 / 4)'}), '(t, window_size=w, verbose=False, pre_scrimp=1 / 4)\n', (49852, 49903), False, 'from pytsmp import pytsmp\n'), ((49956, 49983), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro_ans'], {}), '(mpro, mpro_ans)\n', (49967, 49983), True, 'import numpy as np\n'), ((50238, 50265), 'numpy.allclose', 'np.allclose', (['ipro', 'ipro_ans'], {}), '(ipro, ipro_ans)\n', (50249, 50265), True, 'import numpy as np\n'), ((50478, 50516), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/bitcoin_price.csv"""'], {}), "('./data/bitcoin_price.csv')\n", (50488, 50516), True, 'import numpy as np\n'), ((50536, 50579), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/bitcoin_price_mpro.csv"""'], {}), "('./data/bitcoin_price_mpro.csv')\n", (50546, 50579), 
True, 'import numpy as np\n'), ((50599, 50642), 'numpy.loadtxt', 'np.loadtxt', (['"""./data/bitcoin_price_ipro.csv"""'], {}), "('./data/bitcoin_price_ipro.csv')\n", (50609, 50642), True, 'import numpy as np\n'), ((50672, 50736), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t'], {'window_size': 'w', 'verbose': '(False)', 'pre_scrimp': '(1 / 4)'}), '(t, window_size=w, verbose=False, pre_scrimp=1 / 4)\n', (50685, 50736), False, 'from pytsmp import pytsmp\n'), ((50789, 50816), 'numpy.allclose', 'np.allclose', (['mpro', 'mpro_ans'], {}), '(mpro, mpro_ans)\n', (50800, 50816), True, 'import numpy as np\n'), ((51071, 51098), 'numpy.allclose', 'np.allclose', (['ipro', 'ipro_ans'], {}), '(ipro, ipro_ans)\n', (51082, 51098), True, 'import numpy as np\n'), ((165, 189), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (178, 189), False, 'import pytest\n'), ((207, 227), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (221, 227), True, 'import numpy as np\n'), ((245, 300), 'pytsmp.pytsmp.MatrixProfile', 'pytsmp.MatrixProfile', (['t'], {'window_size': '(100)', 'verbose': '(False)'}), '(t, window_size=100, verbose=False)\n', (265, 300), False, 'from pytsmp import pytsmp\n'), ((386, 411), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (399, 411), False, 'import pytest\n'), ((440, 460), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (454, 460), True, 'import numpy as np\n'), ((478, 523), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': '(0)', 'verbose': '(False)'}), '(t, window_size=0, verbose=False)\n', (490, 523), False, 'from pytsmp import pytsmp\n'), ((668, 693), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (681, 693), False, 'import pytest\n'), ((722, 742), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (736, 742), True, 'import numpy as np\n'), ((760, 807), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': 
'(2.3)', 'verbose': '(False)'}), '(t, window_size=2.3, verbose=False)\n', (772, 807), False, 'from pytsmp import pytsmp\n'), ((952, 977), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (965, 977), False, 'import pytest\n'), ((1007, 1027), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (1021, 1027), True, 'import numpy as np\n'), ((1045, 1064), 'numpy.random.rand', 'np.random.rand', (['(500)'], {}), '(500)\n', (1059, 1064), True, 'import numpy as np\n'), ((1082, 1134), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t1', 't2'], {'window_size': '(501)', 'verbose': '(False)'}), '(t1, t2, window_size=501, verbose=False)\n', (1094, 1134), False, 'from pytsmp import pytsmp\n'), ((1281, 1306), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1294, 1306), False, 'import pytest\n'), ((1335, 1355), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (1349, 1355), True, 'import numpy as np\n'), ((1373, 1438), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': '(10)', 'exclusion_zone': '(-1)', 'verbose': '(False)'}), '(t, window_size=10, exclusion_zone=-1, verbose=False)\n', (1385, 1438), False, 'from pytsmp import pytsmp\n'), ((1582, 1607), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1595, 1607), False, 'import pytest\n'), ((1636, 1656), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (1650, 1656), True, 'import numpy as np\n'), ((1674, 1730), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': '(10)', 's_size': '(0)', 'verbose': '(False)'}), '(t, window_size=10, s_size=0, verbose=False)\n', (1686, 1730), False, 'from pytsmp import pytsmp\n'), ((1869, 1894), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1882, 1894), False, 'import pytest\n'), ((1923, 1943), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (1937, 1943), True, 'import numpy as np\n'), ((1961, 2019), 
'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': '(10)', 's_size': '(1.2)', 'verbose': '(False)'}), '(t, window_size=10, s_size=1.2, verbose=False)\n', (1973, 2019), False, 'from pytsmp import pytsmp\n'), ((19176, 19201), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (19189, 19201), False, 'import pytest\n'), ((19230, 19250), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (19244, 19250), True, 'import numpy as np\n'), ((19268, 19314), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': '(10)', 'verbose': '(False)'}), '(t, window_size=10, verbose=False)\n', (19280, 19314), False, 'from pytsmp import pytsmp\n'), ((19506, 19531), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (19519, 19531), False, 'import pytest\n'), ((19560, 19580), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (19574, 19580), True, 'import numpy as np\n'), ((19598, 19644), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': '(10)', 'verbose': '(False)'}), '(t, window_size=10, verbose=False)\n', (19610, 19644), False, 'from pytsmp import pytsmp\n'), ((19837, 19862), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (19850, 19862), False, 'import pytest\n'), ((19891, 19911), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (19905, 19911), True, 'import numpy as np\n'), ((19929, 19975), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': '(10)', 'verbose': '(False)'}), '(t, window_size=10, verbose=False)\n', (19941, 19975), False, 'from pytsmp import pytsmp\n'), ((20167, 20192), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (20180, 20192), False, 'import pytest\n'), ((20221, 20241), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (20235, 20241), True, 'import numpy as np\n'), ((20259, 20305), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': '(10)', 
'verbose': '(False)'}), '(t, window_size=10, verbose=False)\n', (20271, 20305), False, 'from pytsmp import pytsmp\n'), ((23273, 23297), 'numpy.abs', 'np.abs', (['(ab - discords[0])'], {}), '(ab - discords[0])\n', (23279, 23297), True, 'import numpy as np\n'), ((23468, 23493), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (23481, 23493), False, 'import pytest\n'), ((23522, 23542), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (23536, 23542), True, 'import numpy as np\n'), ((23560, 23606), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': '(10)', 'verbose': '(False)'}), '(t, window_size=10, verbose=False)\n', (23572, 23606), False, 'from pytsmp import pytsmp\n'), ((23788, 23813), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (23801, 23813), False, 'import pytest\n'), ((23842, 23862), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (23856, 23862), True, 'import numpy as np\n'), ((23880, 23926), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': '(10)', 'verbose': '(False)'}), '(t, window_size=10, verbose=False)\n', (23892, 23926), False, 'from pytsmp import pytsmp\n'), ((24109, 24134), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (24122, 24134), False, 'import pytest\n'), ((24163, 24183), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (24177, 24183), True, 'import numpy as np\n'), ((24201, 24247), 'pytsmp.pytsmp.STAMP', 'pytsmp.STAMP', (['t'], {'window_size': '(10)', 'verbose': '(False)'}), '(t, window_size=10, verbose=False)\n', (24213, 24247), False, 'from pytsmp import pytsmp\n'), ((24431, 24456), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (24444, 24456), False, 'import pytest\n'), ((24485, 24505), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (24499, 24505), True, 'import numpy as np\n'), ((24523, 24569), 'pytsmp.pytsmp.STAMP', 
'pytsmp.STAMP', (['t'], {'window_size': '(10)', 'verbose': '(False)'}), '(t, window_size=10, verbose=False)\n', (24535, 24569), False, 'from pytsmp import pytsmp\n'), ((39128, 39153), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (39141, 39153), False, 'import pytest\n'), ((39182, 39202), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (39196, 39202), True, 'import numpy as np\n'), ((39220, 39285), 'pytsmp.pytsmp.PreSCRIMP', 'pytsmp.PreSCRIMP', (['t'], {'window_size': '(10)', 'verbose': '(False)', 'sample_rate': '(0)'}), '(t, window_size=10, verbose=False, sample_rate=0)\n', (39236, 39285), False, 'from pytsmp import pytsmp\n'), ((39430, 39455), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (39443, 39455), False, 'import pytest\n'), ((39484, 39504), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (39498, 39504), True, 'import numpy as np\n'), ((39522, 39588), 'pytsmp.pytsmp.PreSCRIMP', 'pytsmp.PreSCRIMP', (['t'], {'window_size': '(10)', 'verbose': '(False)', 'sample_rate': '(-2)'}), '(t, window_size=10, verbose=False, sample_rate=-2)\n', (39538, 39588), False, 'from pytsmp import pytsmp\n'), ((44082, 44107), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (44095, 44107), False, 'import pytest\n'), ((44136, 44156), 'numpy.random.rand', 'np.random.rand', (['(1000)'], {}), '(1000)\n', (44150, 44156), True, 'import numpy as np\n'), ((44174, 44236), 'pytsmp.pytsmp.SCRIMP', 'pytsmp.SCRIMP', (['t'], {'window_size': '(10)', 'verbose': '(False)', 'pre_scrimp': '(-1)'}), '(t, window_size=10, verbose=False, pre_scrimp=-1)\n', (44187, 44236), False, 'from pytsmp import pytsmp\n'), ((6969, 6992), 'numpy.abs', 'np.abs', (['(mpro - mpro_ans)'], {}), '(mpro - mpro_ans)\n', (6975, 6992), True, 'import numpy as np\n'), ((7806, 7829), 'numpy.abs', 'np.abs', (['(mpro - mpro_ans)'], {}), '(mpro - mpro_ans)\n', (7812, 7829), True, 'import numpy as np\n'), ((8609, 
8632), 'numpy.abs', 'np.abs', (['(mpro - mpro_ans)'], {}), '(mpro - mpro_ans)\n', (8615, 8632), True, 'import numpy as np\n'), ((9676, 9696), 'numpy.abs', 'np.abs', (['(mpro - mpro2)'], {}), '(mpro - mpro2)\n', (9682, 9696), True, 'import numpy as np\n'), ((10740, 10760), 'numpy.abs', 'np.abs', (['(mpro - mpro2)'], {}), '(mpro - mpro2)\n', (10746, 10760), True, 'import numpy as np\n'), ((11700, 11720), 'numpy.abs', 'np.abs', (['(mpro - mpro2)'], {}), '(mpro - mpro2)\n', (11706, 11720), True, 'import numpy as np\n'), ((12764, 12784), 'numpy.abs', 'np.abs', (['(mpro - mpro2)'], {}), '(mpro - mpro2)\n', (12770, 12784), True, 'import numpy as np\n'), ((13942, 13962), 'numpy.abs', 'np.abs', (['(mpro - mpro2)'], {}), '(mpro - mpro2)\n', (13948, 13962), True, 'import numpy as np\n'), ((14875, 14895), 'numpy.abs', 'np.abs', (['(mpro - mpro2)'], {}), '(mpro - mpro2)\n', (14881, 14895), True, 'import numpy as np\n'), ((15843, 15863), 'numpy.abs', 'np.abs', (['(mpro - mpro2)'], {}), '(mpro - mpro2)\n', (15849, 15863), True, 'import numpy as np\n'), ((16716, 16736), 'numpy.abs', 'np.abs', (['(mpro - mpro2)'], {}), '(mpro - mpro2)\n', (16722, 16736), True, 'import numpy as np\n'), ((17684, 17704), 'numpy.abs', 'np.abs', (['(mpro - mpro2)'], {}), '(mpro - mpro2)\n', (17690, 17704), True, 'import numpy as np\n'), ((18818, 18838), 'numpy.abs', 'np.abs', (['(mpro - mpro2)'], {}), '(mpro - mpro2)\n', (18824, 18838), True, 'import numpy as np\n'), ((30572, 30595), 'numpy.abs', 'np.abs', (['(mpro - mpro_ans)'], {}), '(mpro - mpro_ans)\n', (30578, 30595), True, 'import numpy as np\n'), ((31387, 31410), 'numpy.abs', 'np.abs', (['(mpro - mpro_ans)'], {}), '(mpro - mpro_ans)\n', (31393, 31410), True, 'import numpy as np\n'), ((32190, 32213), 'numpy.abs', 'np.abs', (['(mpro - mpro_ans)'], {}), '(mpro - mpro_ans)\n', (32196, 32213), True, 'import numpy as np\n'), ((36895, 36918), 'numpy.abs', 'np.abs', (['(mpro - mpro_ans)'], {}), '(mpro - mpro_ans)\n', (36901, 36918), True, 'import numpy 
as np\n'), ((37728, 37751), 'numpy.abs', 'np.abs', (['(mpro - mpro_ans)'], {}), '(mpro - mpro_ans)\n', (37734, 37751), True, 'import numpy as np\n'), ((38549, 38572), 'numpy.abs', 'np.abs', (['(mpro - mpro_ans)'], {}), '(mpro - mpro_ans)\n', (38555, 38572), True, 'import numpy as np\n'), ((49352, 49375), 'numpy.abs', 'np.abs', (['(mpro - mpro_ans)'], {}), '(mpro - mpro_ans)\n', (49358, 49375), True, 'import numpy as np\n'), ((50197, 50220), 'numpy.abs', 'np.abs', (['(mpro - mpro_ans)'], {}), '(mpro - mpro_ans)\n', (50203, 50220), True, 'import numpy as np\n'), ((51030, 51053), 'numpy.abs', 'np.abs', (['(mpro - mpro_ans)'], {}), '(mpro - mpro_ans)\n', (51036, 51053), True, 'import numpy as np\n')]
|
# Copyright (c) 2018 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import time
from util.speech import Speech
BOUNCE_TIME = 2000 # millisecs
def _current_millis():
return int(round(time.time() * 1000))
class DetectionFSM(object):
    """Debounced detection state machine.

    Keeps reporting "detected" for up to BOUNCE_TIME milliseconds after the
    last positive detection, smoothing over short gaps in the raw signal.
    """

    def __init__(self, play_speech):
        # Timestamp (ms) of the most recent positive detection, or None.
        self._last_detection_time = None
        # Optional speech helper; only constructed when requested.
        self._speech = Speech() if play_speech else None

    def update_status(self, detected):
        """It only reports False after not detected for BOUNCE_TIME."""
        now = _current_millis()
        if detected:
            self._last_detection_time = now
            return True
        last = self._last_detection_time
        # Still within the debounce window: keep reporting True.
        return last is not None and (now - last) < BOUNCE_TIME
|
[
"util.speech.Speech",
"time.time"
] |
[((1418, 1426), 'util.speech.Speech', 'Speech', ([], {}), '()\n', (1424, 1426), False, 'from util.speech import Speech\n'), ((1210, 1221), 'time.time', 'time.time', ([], {}), '()\n', (1219, 1221), False, 'import time\n')]
|
#!/usr/bin/env python2
import zipfile, argparse, sys, threading
def unz(zipf, pw):
    """Try to extract zip file *zipf* with password *pw*.

    On success, prints the password and terminates the whole process.
    On a wrong password or unreadable archive, returns silently so the
    other worker threads keep trying.
    """
    pw = pw.strip('\n')
    try:
        zf = zipfile.ZipFile(zipf)
        # zipfile requires the password as bytes on Python 3 (str == bytes on 2).
        zf.extractall(pwd=pw if isinstance(pw, bytes) else pw.encode())
    except Exception:
        # Wrong password / corrupt archive — not the one we want.
        return
    print("[!] Found! Password = %s" % pw)
    # BUG FIX: the original bare `except:` also caught the SystemExit raised
    # by sys.exit() inside the try block, so the exit was silently swallowed.
    # Raising it outside the try makes the success path actually terminate.
    sys.exit()
if __name__=="__main__":
    # CLI: spawn one thread per candidate password from the wordlist.
    ap = argparse.ArgumentParser(description="Dictionary bruteforce password-protected zip file")
    apr = ap.add_argument_group('Required')
    apr.add_argument("-d", "--dict", type=str, default="", required=True, help="Dictionary file to use as passwords")
    apr.add_argument("-z", "--zipfile", type=str, default="", required=True, help="Zipfile to bruteforce")
    args = ap.parse_args()
    # The `with` block closes the file; the explicit d.close() was redundant.
    with open(args.dict, 'r') as d:
        counter = 0
        for word in d:
            threading.Thread(target=unz, args=(args.zipfile, word)).start()
            counter += 1
            if counter % 100000 == 0:
                # Floor division keeps the progress count an int under Python 3 too.
                print(str(counter // 100000) + " x 100k words")
|
[
"threading.Thread",
"zipfile.ZipFile",
"argparse.ArgumentParser",
"sys.exit"
] |
[((94, 115), 'zipfile.ZipFile', 'zipfile.ZipFile', (['zipf'], {}), '(zipf)\n', (109, 115), False, 'import zipfile, argparse, sys, threading\n'), ((311, 404), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Dictionary bruteforce password-protected zip file"""'}), "(description=\n 'Dictionary bruteforce password-protected zip file')\n", (334, 404), False, 'import zipfile, argparse, sys, threading\n'), ((238, 248), 'sys.exit', 'sys.exit', ([], {}), '()\n', (246, 248), False, 'import zipfile, argparse, sys, threading\n'), ((787, 842), 'threading.Thread', 'threading.Thread', ([], {'target': 'unz', 'args': '(args.zipfile, word)'}), '(target=unz, args=(args.zipfile, word))\n', (803, 842), False, 'import zipfile, argparse, sys, threading\n')]
|
#!/home/andrew/.envs/venv38/bin/python3
import sys
import numpy as np
def get_input():
    """Parse the puzzle input from stdin.

    Looks for a line of the form "target area: x=20..30, y=-10..-5" and
    returns {"x": (xmin, xmax), "y": (ymin, ymax)}.
    """
    for raw in sys.stdin:
        stripped = raw.strip()
        if not stripped:
            continue
        if not stripped.startswith("target area:"):
            continue
        parts = stripped.split()
        xs = parts[2].strip("x=,").split("..")
        ys = parts[3].strip("y=,").split("..")
        return {"x": tuple(int(v) for v in xs),
                "y": tuple(int(v) for v in ys)}
return target
def trial_range(target):
    """Estimate a trial range of velocities."""
    # Target must be to the right of and below the launch point (0, 0).
    assert min(target["x"]) > 0
    assert max(target["y"]) < 0
    lowest_y = min(target["y"])
    return {
        "x": (1, max(target["x"])),
        # An upward shot of +v comes back through y=0 at -v, so |vy| is bounded.
        "y": (lowest_y, -lowest_y),
    }
def fire(v0, target):
    """Simulate a probe launched from (0, 0) with initial velocity v0.

    Returns an (n, 2) array of [x, y] positions along the trajectory,
    simulated until the probe is clearly below the target region while
    still moving downward.
    Example: v0 = {"x": 10, "y": -2}
    """
    assert v0["x"] >= 0
    floor = min(target["y"])
    n = 5
    # Double the number of simulated steps until the last point is past the target.
    while True:
        n *= 2
        vy = np.arange(v0["y"], v0["y"] - n, -1, dtype=int)
        ys = np.cumsum(vy)
        if ys[-1] < floor and vy[-1] < 0:
            break
    # Horizontal velocity decays by drag toward zero, never negative.
    vx = np.maximum(0, np.arange(v0["x"], v0["x"] - n, -1, dtype=int))
    xs = np.cumsum(vx)
    return np.stack([xs, ys], axis=1)
def is_hit(points, target):
    """For each point [x, y] in `points`, True iff it lies inside the target box."""
    xs = points[:, 0]
    ys = points[:, 1]
    inside_x = (xs >= target["x"][0]) & (xs <= target["x"][1])
    inside_y = (ys >= target["y"][0]) & (ys <= target["y"][1])
    return inside_x & inside_y
##########################################################################

# Guard the script body: the original ran at import time and blocked on
# stdin, which breaks importing this module (e.g. for testing).
if __name__ == "__main__":
    target = get_input()
    print("Target region:", target)
    vrange = trial_range(target)
    print("Trial velocity ranges:", vrange)

    max_y = 0  # maximum y of any trajectory that hits the target area
    num_velocities_that_hit = 0
    for vx in range(vrange["x"][0], vrange["x"][1] + 1):
        for vy in range(vrange["y"][0], vrange["y"][1] + 1):
            v0 = {"x": vx, "y": vy}
            points = fire(v0, target)
            any_hits = np.any(is_hit(points, target))
            if any_hits:
                max_y = max(max_y, np.max(points[:, 1]))
                num_velocities_that_hit += 1
                print("v0=%s" % str(v0),
                      "hit=%s" % str(any_hits),
                      "max_y=%d" % np.max(points[:, 1]))

    print("Maximum y of any trajectory that hits the target area:", max_y)
    print("Number of velocities that hit the target area:", num_velocities_that_hit)
|
[
"numpy.maximum",
"numpy.cumsum",
"numpy.max",
"numpy.array",
"numpy.arange"
] |
[((1280, 1333), 'numpy.arange', 'np.arange', (["v0['y']", "(v0['y'] - n_points)", '(-1)'], {'dtype': 'int'}), "(v0['y'], v0['y'] - n_points, -1, dtype=int)\n", (1289, 1333), True, 'import numpy as np\n'), ((1356, 1379), 'numpy.cumsum', 'np.cumsum', (['velocities_y'], {}), '(velocities_y)\n', (1365, 1379), True, 'import numpy as np\n'), ((1516, 1569), 'numpy.arange', 'np.arange', (["v0['x']", "(v0['x'] - n_points)", '(-1)'], {'dtype': 'int'}), "(v0['x'], v0['x'] - n_points, -1, dtype=int)\n", (1525, 1569), True, 'import numpy as np\n'), ((1593, 1620), 'numpy.maximum', 'np.maximum', (['(0)', 'velocities_x'], {}), '(0, velocities_x)\n', (1603, 1620), True, 'import numpy as np\n'), ((1643, 1666), 'numpy.cumsum', 'np.cumsum', (['velocities_x'], {}), '(velocities_x)\n', (1652, 1666), True, 'import numpy as np\n'), ((1697, 1733), 'numpy.array', 'np.array', (['[positions_x, positions_y]'], {}), '([positions_x, positions_y])\n', (1705, 1733), True, 'import numpy as np\n'), ((2662, 2682), 'numpy.max', 'np.max', (['points[:, 1]'], {}), '(points[:, 1])\n', (2668, 2682), True, 'import numpy as np\n'), ((2832, 2852), 'numpy.max', 'np.max', (['points[:, 1]'], {}), '(points[:, 1])\n', (2838, 2852), True, 'import numpy as np\n')]
|
"""Scan for the PhysBryks and return a list of devices found.
"""
import asyncio
from physbrykweb import physbrykweb as pb
bryks = []
async def run():
    """Discover PhysBryk devices and print one line per device found."""
    found = await pb.bryks_discover()
    if len(found) == 0:
        print('No PhysBryk found')
    else:
        for device in found:
            print(f'{device.getName()} found @ {device.getAddress()}')
    # Report how many devices the scan returned.
    print(len(found))
# asyncio.run() (3.7+) creates, runs and closes its own event loop;
# calling get_event_loop() outside a running loop is deprecated since 3.10.
asyncio.run(run())
|
[
"asyncio.get_event_loop",
"physbrykweb.physbrykweb.bryks_discover"
] |
[((377, 401), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (399, 401), False, 'import asyncio\n'), ((172, 191), 'physbrykweb.physbrykweb.bryks_discover', 'pb.bryks_discover', ([], {}), '()\n', (189, 191), True, 'from physbrykweb import physbrykweb as pb\n')]
|
import sys,os
from query import TCIAClient, get_response
import pandas as pd
import traceback
import zipfile
if __name__ == '__main__':
    # Usage: script.py <series-list .csv|.tcia> <download root folder>
    csv_file_path = sys.argv[1]
    root_folder = sys.argv[2]
    if csv_file_path.endswith('.csv'):
        # CSV with study_instance_uid / series_instance_uid columns.
        df = pd.read_csv(csv_file_path)
    # BUG FIX: this must be `elif`. With a plain `if`, a .csv input loaded
    # the dataframe and then fell into the `else` branch below, raising
    # NotImplementedError anyway.
    elif csv_file_path.endswith('.tcia'):
        # TCIA manifest: series UIDs listed after 'ListOfSeriesToDownload='.
        with open(csv_file_path, 'r') as f:
            content = f.read()
        content = [x for x in content.split('\n') if len(x) > 0]
        i = content.index('ListOfSeriesToDownload=')
        series_instance_uid_list = content[i + 1:]
        mylist = []
        for n, uid in enumerate(series_instance_uid_list):
            # Manifests carry no study UID; use the 'na' sentinel.
            mylist.append(dict(study_instance_uid='na', series_instance_uid=uid))
        df = pd.DataFrame(mylist)
    else:
        raise NotImplementedError()
    for n, row in df.iterrows():
        print(n, len(df))
        study_instance_uid = row.study_instance_uid
        series_instance_uid = row.series_instance_uid
        if study_instance_uid == 'na':
            file_path = os.path.join(root_folder, series_instance_uid, 'img.zip')
        else:
            file_path = os.path.join(root_folder, study_instance_uid, series_instance_uid, 'img.zip')
        if os.path.exists(file_path):
            # Already downloaded; skip.
            continue
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        folder = os.path.dirname(file_path)
        basename = os.path.basename(file_path)
        tcia_client = TCIAClient(apiKey=None, baseUrl="https://services.cancerimagingarchive.net/services/v3", resource="TCIA")
        tcia_client.get_image(seriesInstanceUid=series_instance_uid, downloadPath=folder, zipFileName=basename)
|
[
"pandas.DataFrame",
"os.path.basename",
"pandas.read_csv",
"os.path.dirname",
"os.path.exists",
"query.TCIAClient",
"os.path.join"
] |
[((257, 283), 'pandas.read_csv', 'pd.read_csv', (['csv_file_path'], {}), '(csv_file_path)\n', (268, 283), True, 'import pandas as pd\n'), ((756, 776), 'pandas.DataFrame', 'pd.DataFrame', (['mylist'], {}), '(mylist)\n', (768, 776), True, 'import pandas as pd\n'), ((1239, 1264), 'os.path.exists', 'os.path.exists', (['file_path'], {}), '(file_path)\n', (1253, 1264), False, 'import sys, os\n'), ((1388, 1414), 'os.path.dirname', 'os.path.dirname', (['file_path'], {}), '(file_path)\n', (1403, 1414), False, 'import sys, os\n'), ((1434, 1461), 'os.path.basename', 'os.path.basename', (['file_path'], {}), '(file_path)\n', (1450, 1461), False, 'import sys, os\n'), ((1485, 1595), 'query.TCIAClient', 'TCIAClient', ([], {'apiKey': 'None', 'baseUrl': '"""https://services.cancerimagingarchive.net/services/v3"""', 'resource': '"""TCIA"""'}), "(apiKey=None, baseUrl=\n 'https://services.cancerimagingarchive.net/services/v3', resource='TCIA')\n", (1495, 1595), False, 'from query import TCIAClient, get_response\n'), ((1050, 1107), 'os.path.join', 'os.path.join', (['root_folder', 'series_instance_uid', '"""img.zip"""'], {}), "(root_folder, series_instance_uid, 'img.zip')\n", (1062, 1107), False, 'import sys, os\n'), ((1144, 1221), 'os.path.join', 'os.path.join', (['root_folder', 'study_instance_uid', 'series_instance_uid', '"""img.zip"""'], {}), "(root_folder, study_instance_uid, series_instance_uid, 'img.zip')\n", (1156, 1221), False, 'import sys, os\n'), ((1320, 1346), 'os.path.dirname', 'os.path.dirname', (['file_path'], {}), '(file_path)\n', (1335, 1346), False, 'import sys, os\n')]
|
import itertools
import logging
from collections import Counter
from distriopt.constants import AssignmentError, NodeResourceError
_log = logging.getLogger(__name__)
class Solution(object):
    """Represent the output of the placement mapping.

    Examples
    --------
    >>> solution.node_info(u)
    ('t3.2xlarge', 1)
    >>> solution.node_info(v)
    ('t3.2xlarge', 2)
    >>> solution.vm_used((u,v))
    Counter({'t3.2xlarge': 5})
    >>> solution.cost
    1.89
    """

    def __init__(self, nodes_assignment, vm_used, cost):
        # virtual node -> (vm_type, vm_id)
        self.nodes_assignment = nodes_assignment
        # Counter of VM types actually in use
        self.vm_used = vm_used
        # total hourly cost, rounded to cents
        self.cost = cost

    def node_info(self, node):
        """Return the physical node where the virtual node has been placed."""
        return self.nodes_assignment[node]

    def output(self):
        raise NotImplementedError

    @staticmethod
    def verify_solution(virtual, physical, assignment_ec2_instances):
        """Raise if the assignment is incomplete or exceeds instance resources."""
        # Every virtual node must appear on exactly one instance.
        placed = set(itertools.chain(*assignment_ec2_instances.values()))
        if len(placed) != len(virtual.nodes()):
            not_assigned_nodes = virtual.nodes() - placed
            raise AssignmentError(
                f"{not_assigned_nodes} have not been assigned to any physical node"
            )
        # Per-instance capacity checks: CPU first, then memory.
        for vm_type, vm_id in assignment_ec2_instances:
            hosted = assignment_ec2_instances[(vm_type, vm_id)]
            cores_needed = sum(virtual.req_cores(u) for u in hosted)
            memory_needed = sum(virtual.req_memory(u) for u in hosted)
            if cores_needed > physical.cores(vm_type):
                raise NodeResourceError("cpu cores exceeded")
            elif memory_needed > physical.memory(vm_type):
                raise NodeResourceError("memory exceeded")

    @classmethod
    def build_solution(
        cls, virtual, physical, assignment_ec2_instances, check_solution=True
    ):
        """Build a Solution from an instance->nodes assignment, optionally verifying it."""
        if check_solution:
            Solution.verify_solution(virtual, physical, assignment_ec2_instances)
        placement = {}
        for instance_id, nodes in assignment_ec2_instances.items():
            for node in nodes:
                placement[node] = instance_id
        # Count each distinct (vm_type, vm_id) instance once, by type.
        vm_used = Counter(vm_type for vm_type, _ in set(placement.values()))
        hourly = sum(
            physical.hourly_cost(vm_type) for vm_type, _ in assignment_ec2_instances
        )
        return cls(placement, vm_used, round(hourly, 2))

    def __str__(self):
        lines = [
            f"hourly cost = {self.cost} €",
            f"machines used = {self.vm_used}",
        ]
        for node, (vm_type, vm_id) in self.nodes_assignment.items():
            lines.append(f"{node} mapped on {vm_type} with id {vm_id}")
        return "\n".join(lines) + "\n"
|
[
"distriopt.constants.AssignmentError",
"distriopt.constants.NodeResourceError",
"logging.getLogger"
] |
[((140, 167), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (157, 167), False, 'import logging\n'), ((1246, 1335), 'distriopt.constants.AssignmentError', 'AssignmentError', (['f"""{not_assigned_nodes} have not been assigned to any physical node"""'], {}), "(\n f'{not_assigned_nodes} have not been assigned to any physical node')\n", (1261, 1335), False, 'from distriopt.constants import AssignmentError, NodeResourceError\n'), ((1931, 1970), 'distriopt.constants.NodeResourceError', 'NodeResourceError', (['"""cpu cores exceeded"""'], {}), "('cpu cores exceeded')\n", (1948, 1970), False, 'from distriopt.constants import AssignmentError, NodeResourceError\n'), ((2056, 2092), 'distriopt.constants.NodeResourceError', 'NodeResourceError', (['"""memory exceeded"""'], {}), "('memory exceeded')\n", (2073, 2092), False, 'from distriopt.constants import AssignmentError, NodeResourceError\n')]
|
from Plugins.Plugin import PluginDescriptor
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Screens.ChoiceBox import ChoiceBox
import Screens.Standby
from Components.ActionMap import ActionMap
from enigma import eTimer, eServiceCenter, iServiceInformation, eConsoleAppContainer, eEnv
from os import access, chmod, X_OK
recons_path = eEnv.resolve("${libdir}/enigma2/python/Plugins/Extensions/ReconstructApSc/bin/reconstruct_apsc")
def main(session, service, **kwargs):
	"""Movie-list entry point: make sure the helper binary is executable,
	then open the reconstruction choice dialog for the selected service."""
	# Hack to make sure it is executable
	if not access(recons_path, X_OK):
		# 0o755 (rwxr-xr-x) -- same value as the original decimal 493,
		# written as an octal literal so the permission bits are readable.
		chmod(recons_path, 0o755)
	session.open(ReconstructApSc, service, **kwargs)
def Plugins(**kwargs):
	"""Register this plugin in the movie-list context menu (enigma2 hook)."""
	return PluginDescriptor(name="ReconstructApSc", description=_("Reconstruct AP/SC ..."), where = PluginDescriptor.WHERE_MOVIELIST, fnc=main)
class ReconstructApSc(ChoiceBox):
	"""Choice dialog asking what to reconstruct for the selected movie."""
	def __init__(self, session, service):
		self.service = service
		serviceHandler = eServiceCenter.getInstance()
		path = self.service.getPath()
		info = serviceHandler.info(self.service)
		# Fall back to the file path when no service info is available.
		if not info:
			self.name = path
		else:
			self.name = info.getName(self.service)
		# Each entry dispatches to one of the confirmedN callbacks below.
		tlist = [
			(_("Don't reconstruct"), "CALLFUNC", self.confirmed0),
			(_("Reconstruct the .ap and .sc files of the selected movie"), "CALLFUNC", self.confirmed1),
			(_("Reconstruct all missing .ap and .sc files in this directory"), "CALLFUNC", self.confirmed2),
			(_("Check any running reconstruct process"), "CALLFUNC", self.confirmed3),
		]
		ChoiceBox.__init__(self, session, _("What would you like to reconstruct? (\"%s\")") % (self.name), list = tlist, selection = 0)
		self.skinName = "ChoiceBox"
	def confirmed0(self, arg):
		"""User chose not to reconstruct: just close the dialog."""
		self.close()
	def confirmed1(self, arg):
		"""Reconstruct only the selected movie."""
		ReconstructApScSpawn(self.session, self, [recons_path, self.service.getPath()], self.name, _("movie"))
	def confirmed2(self, arg):
		"""Reconstruct all missing files in the movie's directory (-d mode)."""
		dir = self.dirName(self.service.getPath())
		ReconstructApScSpawn(self.session, self, [recons_path, "-d", dir], dir, _("directory"))
	def confirmed3(self, arg):
		"""Show the output of the currently running reconstruction, if any."""
		output = global_recons_queue.checkOutput()
		if output == False:
			mess = "There is no running reconstruction process"
		else:
			mess = "Current reconstruction process output:\n%s" % output
		self.session.openWithCallback(self.close, MessageBox, mess, MessageBox.TYPE_INFO)
	def dirName(self, str):
		"""Return the directory part of a path, with a trailing slash."""
		return '/'.join(str.split('/')[:-1]) + '/'
class ReconstructApScQueue:
	"""Serial work queue running one external reconstruct process at a time
	via eConsoleAppContainer, collecting its stdout."""
	def __init__(self):
		self.container = eConsoleAppContainer()
		# Keep the connection objects alive; dropping them disconnects.
		self.appClosed_conn = self.container.appClosed.connect(self.runDone)
		self.dataAvail_conn = self.container.dataAvail.connect(self.collOutput)
		self.queue = []
		self.output = ""
		self.running = False
	def enqueue(self, cb, cmd):
		"""Queue (callback, command); return True if started immediately."""
		self.queue.append((cb, cmd))
		if not self.running:
			self.runNext()
			return True
		else:
			return False
	def collOutput(self, data):
		# Accumulate process output as it arrives.
		self.output += data
	def checkOutput(self):
		"""Return collected output of the running job, or False if idle."""
		if not self.running:
			return False
		else:
			return self.output
	def runNext(self):
		"""Start the next queued command, or mark the queue idle."""
		self.output = ""
		if not self.queue:
			self.running = False
		else:
			self.running = True
			self.container.execute(*self.queue[0][1])
	def runDone(self, retval):
		"""Process finished: pop the job, invoke its callback, continue."""
		cb = self.queue[0][0]
		self.queue = self.queue[1:]
		cb(retval, self.output)
		self.runNext()
# Index 0 = success message, index 1 = failure message (selected by retval).
global_recons_errors = [_("The %s \"%s\" is successfully processed:\n%s"),
	_("Processing failed for the %s \"%s\":\n%s")]
global_recons_queue = ReconstructApScQueue()
global_recons_block = False
class ReconstructApScSpawn:
	"""Enqueue one reconstruct job and show progress/result message boxes."""
	def __init__(self, session, parent, clist, name, typename):
		global global_recons_queue
		global global_recons_block
		self.session = session
		self.parent = parent
		self.name = name
		self.typename = typename
		# argv[0] is duplicated: first element is the program path for execute().
		self.clist = [clist[0]] + clist
		self.mess = ""
		self.dialog = False
		self.waitTimer = eTimer()
		self.waitTimer_conn = self.waitTimer.timeout.connect(self.doWaitAck)
		if global_recons_queue.enqueue(self.doAck, self.clist):
			mess = _("The %s \"%s\" is processed in the background.") % (self.typename, self.name)
		else:
			mess = _("Another movie or directory is currently processed.\nThe %s \"%s\" will be processed in the background after it.") % (self.typename, self.name)
			global_recons_block = True
		self.dialog = self.session.openWithCallback(self.endc, MessageBox, mess, MessageBox.TYPE_INFO)
	def doAck(self, retval, output):
		"""Queue callback: format the result message and try to display it."""
		global global_recons_errors
		self.mess = global_recons_errors[retval] % (self.typename, self.name, output)
		self.doWaitAck()
	def doWaitAck(self):
		"""Show the result box, or retry every 2s while the UI is busy
		(standby, no exec context, or another spawn's dialog pending)."""
		global global_recons_block
		if Screens.Standby.inStandby or not self.session.in_exec or (global_recons_block and not self.dialog):
			self.waitTimer.start(2000, True)
		else:
			global_recons_block = True
			self.session.openWithCallback(self.endw, MessageBox, self.mess, MessageBox.TYPE_INFO)
	def endw(self, arg = 0):
		"""Result box closed: release the block and close our own dialog."""
		global global_recons_block
		global_recons_block = False
		if self.session.current_dialog == self.dialog:
			self.session.current_dialog.close(True)
		self.endc(arg)
	def endc(self, arg = 0):
		"""Final cleanup: release the block and close the parent ChoiceBox."""
		global global_recons_block
		global_recons_block = False
		self.dialog = False
		self.parent.close()
|
[
"enigma.eEnv.resolve",
"os.chmod",
"enigma.eServiceCenter.getInstance",
"enigma.eConsoleAppContainer",
"enigma.eTimer",
"os.access"
] |
[((367, 473), 'enigma.eEnv.resolve', 'eEnv.resolve', (['"""${libdir}/enigma2/python/Plugins/Extensions/ReconstructApSc/bin/reconstruct_apsc"""'], {}), "(\n '${libdir}/enigma2/python/Plugins/Extensions/ReconstructApSc/bin/reconstruct_apsc'\n )\n", (379, 473), False, 'from enigma import eTimer, eServiceCenter, iServiceInformation, eConsoleAppContainer, eEnv\n'), ((549, 574), 'os.access', 'access', (['recons_path', 'X_OK'], {}), '(recons_path, X_OK)\n', (555, 574), False, 'from os import access, chmod, X_OK\n'), ((578, 601), 'os.chmod', 'chmod', (['recons_path', '(493)'], {}), '(recons_path, 493)\n', (583, 601), False, 'from os import access, chmod, X_OK\n'), ((936, 964), 'enigma.eServiceCenter.getInstance', 'eServiceCenter.getInstance', ([], {}), '()\n', (962, 964), False, 'from enigma import eTimer, eServiceCenter, iServiceInformation, eConsoleAppContainer, eEnv\n'), ((2424, 2446), 'enigma.eConsoleAppContainer', 'eConsoleAppContainer', ([], {}), '()\n', (2444, 2446), False, 'from enigma import eTimer, eServiceCenter, iServiceInformation, eConsoleAppContainer, eEnv\n'), ((3766, 3774), 'enigma.eTimer', 'eTimer', ([], {}), '()\n', (3772, 3774), False, 'from enigma import eTimer, eServiceCenter, iServiceInformation, eConsoleAppContainer, eEnv\n')]
|
import sqlalchemy
import folium
import markdown
import os
from threatmatrix import processing
from bokeh.embed import components
from flask import Flask, redirect, url_for, render_template, send_file, Markup
app = Flask(__name__)  # WSGI application instance
df = processing.get_data(250)  # data set preloaded once at import time (250 rows)
@app.route("/")
def hello():
    """Render the landing page with the project README converted to HTML."""
    path = os.path.abspath('README.md')
    with open(path, 'r') as f:
        content = f.read()
    content = Markup(markdown.markdown(content))
    # Pass the rendered README explicitly; the original `**locals()` also
    # leaked unrelated names (path, f) into the template context.
    return render_template('index.html', content=content)
@app.route("/points_map.html")
def display_points():
    """Serve the previously generated points-map template."""
    return render_template('points_map.html')
@app.route("/choro_map.html")
def display_choro():
    """Serve the previously generated choropleth-map template."""
    return render_template('choro_map.html')
@app.route("/plots.html")
def show_plots():
    """Render the charts page: a Bokeh bar chart plus a data table."""
    plot = processing.create_bar_chart(df)
    # components() splits a Bokeh object into a <script> and a <div> snippet.
    plot_script, plot_div = components(plot)
    table = processing.create_table(df)
    table_script, table_div = components(table)
    return render_template("charts.html", plot_div=plot_div,
                           plot_script=plot_script, table_div=table_div,
                           table_script=table_script)
@app.route('/maps/points.html')
def show_map_points():
    """Regenerate the points map on disk, then send the HTML file."""
    processing.create_map(df, 'points')
    return send_file('./maps/points.html')
@app.route('/maps/choropleth.html')
def show_map_choro():
    """Regenerate the choropleth map on disk, then send the HTML file."""
    processing.create_map(df, 'choropleth')
    return send_file('./maps/choropleth.html')
@app.route('/blog')
def show_blog():
    """Render the sample blog post (Markdown converted to HTML)."""
    # Portable path join; the original hard-coded a Windows-only
    # 'posts\\sample.md' component, which breaks on POSIX systems.
    path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                        'posts', 'sample.md')
    with open(path, 'r') as f:
        content = f.read()
    content = Markup(markdown.markdown(content))
    # Pass the rendered content explicitly instead of dumping locals().
    return render_template('blog.html', content=content)
|
[
"os.path.abspath",
"threatmatrix.processing.get_data",
"threatmatrix.processing.create_bar_chart",
"flask.Flask",
"threatmatrix.processing.create_table",
"markdown.markdown",
"flask.render_template",
"threatmatrix.processing.create_map",
"flask.send_file",
"bokeh.embed.components"
] |
[((216, 231), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (221, 231), False, 'from flask import Flask, redirect, url_for, render_template, send_file, Markup\n'), ((238, 262), 'threatmatrix.processing.get_data', 'processing.get_data', (['(250)'], {}), '(250)\n', (257, 262), False, 'from threatmatrix import processing\n'), ((304, 332), 'os.path.abspath', 'os.path.abspath', (['"""README.md"""'], {}), "('README.md')\n", (319, 332), False, 'import os\n'), ((559, 593), 'flask.render_template', 'render_template', (['"""points_map.html"""'], {}), "('points_map.html')\n", (574, 593), False, 'from flask import Flask, redirect, url_for, render_template, send_file, Markup\n'), ((658, 691), 'flask.render_template', 'render_template', (['"""choro_map.html"""'], {}), "('choro_map.html')\n", (673, 691), False, 'from flask import Flask, redirect, url_for, render_template, send_file, Markup\n'), ((747, 778), 'threatmatrix.processing.create_bar_chart', 'processing.create_bar_chart', (['df'], {}), '(df)\n', (774, 778), False, 'from threatmatrix import processing\n'), ((805, 821), 'bokeh.embed.components', 'components', (['plot'], {}), '(plot)\n', (815, 821), False, 'from bokeh.embed import components\n'), ((832, 859), 'threatmatrix.processing.create_table', 'processing.create_table', (['df'], {}), '(df)\n', (855, 859), False, 'from threatmatrix import processing\n'), ((888, 905), 'bokeh.embed.components', 'components', (['table'], {}), '(table)\n', (898, 905), False, 'from bokeh.embed import components\n'), ((915, 1041), 'flask.render_template', 'render_template', (['"""charts.html"""'], {'plot_div': 'plot_div', 'plot_script': 'plot_script', 'table_div': 'table_div', 'table_script': 'table_script'}), "('charts.html', plot_div=plot_div, plot_script=plot_script,\n table_div=table_div, table_script=table_script)\n", (930, 1041), False, 'from flask import Flask, redirect, url_for, render_template, send_file, Markup\n'), ((1149, 1184), 'threatmatrix.processing.create_map', 
'processing.create_map', (['df', '"""points"""'], {}), "(df, 'points')\n", (1170, 1184), False, 'from threatmatrix import processing\n'), ((1194, 1225), 'flask.send_file', 'send_file', (['"""./maps/points.html"""'], {}), "('./maps/points.html')\n", (1203, 1225), False, 'from flask import Flask, redirect, url_for, render_template, send_file, Markup\n'), ((1288, 1327), 'threatmatrix.processing.create_map', 'processing.create_map', (['df', '"""choropleth"""'], {}), "(df, 'choropleth')\n", (1309, 1327), False, 'from threatmatrix import processing\n'), ((1337, 1372), 'flask.send_file', 'send_file', (['"""./maps/choropleth.html"""'], {}), "('./maps/choropleth.html')\n", (1346, 1372), False, 'from flask import Flask, redirect, url_for, render_template, send_file, Markup\n'), ((412, 438), 'markdown.markdown', 'markdown.markdown', (['content'], {}), '(content)\n', (429, 438), False, 'import markdown\n'), ((1608, 1634), 'markdown.markdown', 'markdown.markdown', (['content'], {}), '(content)\n', (1625, 1634), False, 'import markdown\n'), ((1452, 1477), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1467, 1477), False, 'import os\n')]
|
import socket
import time
def get_time():
    """Return the current local date formatted as ``YYYY-MM-DD``."""
    # localtime() without an argument already uses the current time.
    return time.strftime('%Y-%m-%d', time.localtime())
def get_host_ip():
    """Return the host's primary outbound IPv4 address.

    "Connecting" a UDP socket sends no packets; it only asks the OS which
    local address would be used to reach 8.8.8.8, which we then read back.

    Raises:
        OSError: if the socket cannot be created or no route exists.
    """
    # Bug fix: in the original, if socket() itself raised, the finally
    # clause referenced the unbound name `s` and masked the real error
    # with a NameError. Create the socket before entering try/finally.
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect(('8.8.8.8', 80))
        ip = s.getsockname()[0]
    finally:
        s.close()
    return ip
# Script output: current date and this host's outbound IP (labels are Chinese:
# "current time is" / "your IP is").
print("当前时间是:" + get_time())
print("您的IP是:" + get_host_ip())
|
[
"socket.socket",
"time.time"
] |
[((149, 197), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (162, 197), False, 'import socket\n'), ((94, 105), 'time.time', 'time.time', ([], {}), '()\n', (103, 105), False, 'import time\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2018 All Rights Reserved
#
"""
File: data_processing.py
Author: shileicao(<EMAIL>)
Date: 11/03/2018 9:15 AM
"""
import csv
import argparse
import random
from itertools import chain
from func_utils import MIN_SEQ_LEN, MAX_SEQ_LEN, SIGNAL_WIDTH
def arg_parser():
    """Build the command-line parser for the data-preparation script.

    Returns:
        argparse.ArgumentParser exposing input/output CSV paths and the
        train/test split ratio.
    """
    parser = argparse.ArgumentParser(
        prog='prepare_data',
        formatter_class=argparse.RawTextHelpFormatter,
        description='Prepare data for input of deep model')
    parser.add_argument(
        '-x_data',
        dest='x_data',
        default='Sample.csv',
        type=str,
        help='x data')
    parser.add_argument(
        '-y_data',
        dest='y_data',
        default='Label.csv',
        type=str,
        help='y data')
    parser.add_argument(
        '-train_data',
        dest='train_data',
        default='train_data.csv',
        type=str,
        help='train_data')
    parser.add_argument(
        '-test_data',
        dest='test_data',
        default='test_data.csv',
        type=str,
        help='test_data')
    parser.add_argument(
        '-train_ratio',
        dest='train_ratio',
        default=0.8,
        type=float,
        # Bug fix: help text was a copy-paste of 'test_data'.
        help='train ratio')
    return parser
def main(args):
    """Convert raw signal/label CSVs into padded fixed-length sequences.

    Reads signal rows (args.x_data) and their labels (args.y_data), chops
    them into chunks of random length in [MIN_SEQ_LEN, MAX_SEQ_LEN],
    zero-pads each chunk to MAX_SEQ_LEN (signal values first, then labels),
    and writes a train/test split according to args.train_ratio.
    """
    x_data = []
    with open(args.x_data) as f:
        csv_reader = csv.reader(f)
        for row in csv_reader:
            x_data.append(row)
    assert len(x_data[0]) == SIGNAL_WIDTH, 'Please make sure the signal width in func_utils.py'
    y_data = []
    with open(args.y_data) as f:
        csv_reader = csv.reader(f)
        for row in csv_reader:
            y_data.extend(row)
    min_seq_len = MIN_SEQ_LEN
    max_seq_len = MAX_SEQ_LEN
    formarted_data = []
    i = 0
    while True:
        current_seq_len = random.randint(min_seq_len, max_seq_len)
        # Drop the trailing partial chunk rather than padding it.
        if i + current_seq_len > len(x_data):
            break
        current_formart = list(chain.from_iterable(x_data[i:i+current_seq_len]))
        current_formart += ['0'] * len(x_data[0]) * (max_seq_len-current_seq_len)
        current_formart += y_data[i:i+current_seq_len]
        current_formart += ['0'] * (max_seq_len-current_seq_len)
        formarted_data.append(current_formart)
        i += current_seq_len
    train_len = int(args.train_ratio * len(formarted_data))
    # 'w' with newline='' instead of 'w+': read access was never used, and
    # the csv module requires newline='' so it controls line endings itself
    # (otherwise blank rows appear on Windows).
    with open(args.train_data, 'w', newline='') as f:
        csv_writer = csv.writer(f, delimiter=' ')
        csv_writer.writerows(formarted_data[:train_len])
    with open(args.test_data, 'w', newline='') as f:
        csv_writer = csv.writer(f, delimiter=' ')
        csv_writer.writerows(formarted_data[train_len:])
if __name__ == '__main__':
    # CLI entry point: parse arguments and run the conversion.
    parser = arg_parser()
    args = parser.parse_args()
    main(args)
|
[
"csv.reader",
"csv.writer",
"random.randint",
"argparse.ArgumentParser",
"itertools.chain.from_iterable"
] |
[((343, 491), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""prepare_data"""', 'formatter_class': 'argparse.RawTextHelpFormatter', 'description': '"""Prepare data for input of deep model"""'}), "(prog='prepare_data', formatter_class=argparse.\n RawTextHelpFormatter, description='Prepare data for input of deep model')\n", (366, 491), False, 'import argparse\n'), ((1346, 1359), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (1356, 1359), False, 'import csv\n'), ((1590, 1603), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (1600, 1603), False, 'import csv\n'), ((1804, 1844), 'random.randint', 'random.randint', (['min_seq_len', 'max_seq_len'], {}), '(min_seq_len, max_seq_len)\n', (1818, 1844), False, 'import random\n'), ((2395, 2423), 'csv.writer', 'csv.writer', (['f'], {'delimiter': '""" """'}), "(f, delimiter=' ')\n", (2405, 2423), False, 'import csv\n'), ((2545, 2573), 'csv.writer', 'csv.writer', (['f'], {'delimiter': '""" """'}), "(f, delimiter=' ')\n", (2555, 2573), False, 'import csv\n'), ((1941, 1991), 'itertools.chain.from_iterable', 'chain.from_iterable', (['x_data[i:i + current_seq_len]'], {}), '(x_data[i:i + current_seq_len])\n', (1960, 1991), False, 'from itertools import chain\n')]
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
"""
* Copyright (C) 2017 <NAME>
*
* This file is subject to the terms and conditions of the MIT License
* See the file LICENSE in the top level directory for more details.
"""
from __future__ import absolute_import, print_function, unicode_literals
import textwrap
print('Content-Type: text/html')
print('\n\r')
print (textwrap.dedent("""
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>MyPiDrei API</title>
</head>
<body>
<p>Welcome on the MyPiDrei API. See information about the project <a href="https://github.com/HendrikVE/my_pi_drei">on GitHub</a></p>
</body>
</html>
"""))
|
[
"textwrap.dedent"
] |
[((375, 910), 'textwrap.dedent', 'textwrap.dedent', (['"""\n <!DOCTYPE html>\n <html lang="en">\n <head>\n <meta charset="utf-8">\n <meta name="viewport" content="width=device-width, initial-scale=1">\n \n <title>MyPiDrei API</title>\n </head>\n\n <body>\n \n <p>Welcome on the MyPiDrei API. See information about the project <a href="https://github.com/HendrikVE/my_pi_drei">on GitHub</a></p>\n\n </body>\n </html>\n """'], {}), '(\n """\n <!DOCTYPE html>\n <html lang="en">\n <head>\n <meta charset="utf-8">\n <meta name="viewport" content="width=device-width, initial-scale=1">\n \n <title>MyPiDrei API</title>\n </head>\n\n <body>\n \n <p>Welcome on the MyPiDrei API. See information about the project <a href="https://github.com/HendrikVE/my_pi_drei">on GitHub</a></p>\n\n </body>\n </html>\n """\n )\n', (390, 910), False, 'import textwrap\n')]
|
import pytest
from dotmailer.address_books import AddressBook
from dotmailer.contacts import Contact
@pytest.mark.notdemo
def test_add_contact(sample_address_book):
    """Adding a contact to an existing address book assigns it an id."""
    contact = Contact(email='<EMAIL>')
    sample_address_book.add_contact(contact)
    assert contact.id is not None
    # Clean up by removing the contact afterwards
    contact.delete()
def test_add_contact_invalid_address_book(sample_address_book_data,
                                          sample_contact):
    """An address book built from raw data (never pushed to the API, so it
    has no server-side id) must raise when a contact is added."""
    address_book = AddressBook(**sample_address_book_data)
    with pytest.raises(Exception):
        address_book.add_contact(sample_contact)
|
[
"dotmailer.contacts.Contact",
"pytest.raises",
"dotmailer.address_books.AddressBook"
] |
[((183, 207), 'dotmailer.contacts.Contact', 'Contact', ([], {'email': '"""<EMAIL>"""'}), "(email='<EMAIL>')\n", (190, 207), False, 'from dotmailer.contacts import Contact\n'), ((507, 546), 'dotmailer.address_books.AddressBook', 'AddressBook', ([], {}), '(**sample_address_book_data)\n', (518, 546), False, 'from dotmailer.address_books import AddressBook\n'), ((556, 580), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (569, 580), False, 'import pytest\n')]
|
from django.urls import path
from languages.views import LanguagesListView
# Route the app root ('') to the language list view.
urlpatterns = [
    path('', LanguagesListView.as_view(), name='LanguagesList'),
]
|
[
"languages.views.LanguagesListView.as_view"
] |
[((107, 134), 'languages.views.LanguagesListView.as_view', 'LanguagesListView.as_view', ([], {}), '()\n', (132, 134), False, 'from languages.views import LanguagesListView\n')]
|
import json
from dotenv import load_dotenv
from square.client import Client
import pymongo
import os
import sys
import random
# Make the sibling helper directories importable (no package layout here).
sys.path.insert(1, './discoverpage/')
sys.path.insert(2, './square-api/customer-api/')
sys.path.insert(3, './square-api/payments-api/')
from discoverpage_metrics import get_recommended_posts, get_trending_posts, get_hot_deals
from create_customercard import CustomerCardCreation
# These module names contain dashes, so a plain `import` statement is
# impossible; __import__ loads them by string name instead.
cc = __import__("create-customer")
pc = __import__("create-payment")
# APPOINTMENTS
def getAppointments():
    """Return the (currently empty) appointment list as JSON with HTTP 200."""
    appointment_list = []
    return json.dumps(appointment_list), 200
def addAppointment():
    """Acknowledge an appointment creation with a success payload, HTTP 200."""
    payload = {'success': 'appointment successfully created'}
    return payload, 200
# LOYALTIES
def getLoyalties():
    """Return the (currently empty) loyalty list as JSON with HTTP 200."""
    loyalty_list = []
    return json.dumps(loyalty_list), 200
def updateLoyalty():
    """Acknowledge a loyalty update with a success payload, HTTP 200."""
    payload = {'success': 'loyalty successfully updated'}
    return payload, 200
# SAVED
def getSaved():
    """Return the demo customer's saved businesses as JSON with HTTP 200."""
    load_dotenv()
    client = pymongo.MongoClient(os.getenv("MONGO_NORM_USER"))
    db = client["customer"]["customers"]
    """
    SELECT saved_business FROM customers
    where id = XTGNQH10VCVVS9VBPW61218X0M
    """
    # id for Tanjiwou
    saved = db.find({"id": "XTGNQH10VCVVS9VBPW61218X0M"},
                {"saved_business": 1, "_id": 0 })
    return json.dumps(list(saved), default=str), 200
def addSaved():
    """Append one random business to the demo customer's saved list."""
    load_dotenv()
    client = pymongo.MongoClient(os.getenv("MONGO_NORM_USER"))
    db = client["customer"]["customers"]
    """
    SELECT saved_business FROM customers
    where id = XTGNQH10VCVVS9VBPW61218X0M
    """
    # id for Tanjiwou
    db2 = client["business"]["2020-06-30"]
    mongo_data = list(db2.find({}))
    # Pick one random business document to save.
    r_biz = random.sample(mongo_data, 1)
    data = list(db.find({"id": "XTGNQH10VCVVS9VBPW61218X0M"}))
    db.update_one({"_id": data[0]["_id"]},
        {"$push": {"saved_business": r_biz[0] }})
    return {'success': 'saved item successfully created'}, 200
def removeSaved():
    """Pop the most recently saved business from the demo customer's
    ``saved_business`` array and return a success payload with HTTP 200."""
    load_dotenv()
    client = pymongo.MongoClient(os.getenv("MONGO_NORM_USER"))
    db = client["customer"]["customers"]
    data = list(db.find({"id": "XTGNQH10VCVVS9VBPW61218X0M"}))
    db.update_one({"_id": data[0]["_id"]},
        {"$pop": {"saved_business": 1}})
    # Bug fix: the original returned a *set* ({'success', '...'} -- comma
    # instead of colon) while every sibling endpoint returns a dict, and
    # sets are not JSON-serializable by Flask.
    return {'success': 'saved item successfully removed'}, 200
# BUSINESSES
def getBusinesses():
    """Return trending/recommended/hot business lists as JSON, HTTP 200."""
    load_dotenv()
    client = pymongo.MongoClient(os.getenv("MONGO_NORM_USER"))
    db = client["business"]
    col = db["2020-06-29"]
    data = list(col.find({}))
    test = {
        "trending": get_trending_posts(data, 'n/a', 3),
        "recommendations": get_recommended_posts(data, 'n/a', 4),
        "hot": get_hot_deals(data, 'n/a')
    }
    return json.dumps(test, default=str), 200
# CREDITCARD
def getCards():
    """Fetch the demo customer's cards from the Square sandbox, HTTP 200."""
    load_dotenv()
    square_client = Client(
        access_token=os.getenv("SQUARE_ACCESS_TOKEN"),
        environment="sandbox"
    )
    customers_api = square_client.customers
    # id for UwU OwO
    result = customers_api.retrieve_customer("J9XYVXV5Z4SZ77PT2NRN72XFQC")
    cards = result.body["customer"]["cards"]
    return json.dumps(cards, default=str), 200
def addCard():
    """Attach a hard-coded sandbox card nonce to the demo Square customer."""
    card = CustomerCardCreation(customer_id="J9XYVXV5Z4SZ77PT2NRN72XFQC")
    card.get_customer()
    card.gen_body("cnon:CBASEDmLIA4zG9Q1-4VtReIEmzw")
    card.add_card_to_square()
    card.update_db()
    return {'success': 'card successfully created'}, 200
def removeCard():
    """Delete a hard-coded card from the demo Square sandbox customer."""
    load_dotenv()
    square_client = Client(
        access_token=os.getenv("SQUARE_ACCESS_TOKEN"),
        environment="sandbox"
    )
    customers_api = square_client.customers
    customer_id = "J9XYVXV5Z4SZ77PT2NRN72XFQC"
    card_id = "ccof:8ywRmjQ3OeRVtRjT3GB"
    result = customers_api.delete_customer_card(customer_id, card_id)
    return {'success': 'card successfully removed'}, 200
# Customer Creation
def createCustomer():
    """Create a hard-coded demo customer via the create-customer helper."""
    # new one
    customer = cc.CustomerCreation(
        name="<NAME>",
        address="750 Post St, San Francisco, CA 94109",
        note="User input user"
    )
    customer.gen_body()
    customer.create_payment_dict()
    customer.add_to_db()
    return json.dumps(customer.result, default=str), 200
# PAYMENTS
def sendPayment():
    """Charge a fixed $80 + $5 tip payment for a hard-coded customer."""
    # id for Amelia
    payment = pc.PaymentCreation(customer_id="11MVK1PCRRVY1D9QB72HVT64PM",
                        amount=80,
                        source_id="ccof:cegcXjydMLiVlDQk3GB",
                        tip=5)
    payment.gen_body()
    payment.create_payment_dict()
    return json.dumps(payment.result, default=str), 200
#if __name__ == "__main__":
#print('Hello World')
#getSaved()
|
[
"discoverpage_metrics.get_recommended_posts",
"random.sample",
"create_customercard.CustomerCardCreation",
"sys.path.insert",
"json.dumps",
"dotenv.load_dotenv",
"discoverpage_metrics.get_trending_posts",
"discoverpage_metrics.get_hot_deals",
"os.getenv"
] |
[((126, 163), 'sys.path.insert', 'sys.path.insert', (['(1)', '"""./discoverpage/"""'], {}), "(1, './discoverpage/')\n", (141, 163), False, 'import sys\n'), ((164, 212), 'sys.path.insert', 'sys.path.insert', (['(2)', '"""./square-api/customer-api/"""'], {}), "(2, './square-api/customer-api/')\n", (179, 212), False, 'import sys\n'), ((213, 261), 'sys.path.insert', 'sys.path.insert', (['(3)', '"""./square-api/payments-api/"""'], {}), "(3, './square-api/payments-api/')\n", (228, 261), False, 'import sys\n'), ((867, 880), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (878, 880), False, 'from dotenv import load_dotenv\n'), ((1293, 1306), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (1304, 1306), False, 'from dotenv import load_dotenv\n'), ((1623, 1651), 'random.sample', 'random.sample', (['mongo_data', '(1)'], {}), '(mongo_data, 1)\n', (1636, 1651), False, 'import random\n'), ((1906, 1919), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (1917, 1919), False, 'from dotenv import load_dotenv\n'), ((2286, 2299), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (2297, 2299), False, 'from dotenv import load_dotenv\n'), ((2706, 2719), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (2717, 2719), False, 'from dotenv import load_dotenv\n'), ((3101, 3163), 'create_customercard.CustomerCardCreation', 'CustomerCardCreation', ([], {'customer_id': '"""J9XYVXV5Z4SZ77PT2NRN72XFQC"""'}), "(customer_id='J9XYVXV5Z4SZ77PT2NRN72XFQC')\n", (3121, 3163), False, 'from create_customercard import CustomerCardCreation\n'), ((3373, 3386), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (3384, 3386), False, 'from dotenv import load_dotenv\n'), ((547, 571), 'json.dumps', 'json.dumps', (['appointments'], {}), '(appointments)\n', (557, 571), False, 'import json\n'), ((728, 749), 'json.dumps', 'json.dumps', (['loyalties'], {}), '(loyalties)\n', (738, 749), False, 'import json\n'), ((914, 942), 'os.getenv', 'os.getenv', (['"""MONGO_NORM_USER"""'], {}), 
"('MONGO_NORM_USER')\n", (923, 942), False, 'import os\n'), ((1340, 1368), 'os.getenv', 'os.getenv', (['"""MONGO_NORM_USER"""'], {}), "('MONGO_NORM_USER')\n", (1349, 1368), False, 'import os\n'), ((1953, 1981), 'os.getenv', 'os.getenv', (['"""MONGO_NORM_USER"""'], {}), "('MONGO_NORM_USER')\n", (1962, 1981), False, 'import os\n'), ((2333, 2361), 'os.getenv', 'os.getenv', (['"""MONGO_NORM_USER"""'], {}), "('MONGO_NORM_USER')\n", (2342, 2361), False, 'import os\n'), ((2479, 2513), 'discoverpage_metrics.get_trending_posts', 'get_trending_posts', (['data', '"""n/a"""', '(3)'], {}), "(data, 'n/a', 3)\n", (2497, 2513), False, 'from discoverpage_metrics import get_recommended_posts, get_trending_posts, get_hot_deals\n'), ((2540, 2577), 'discoverpage_metrics.get_recommended_posts', 'get_recommended_posts', (['data', '"""n/a"""', '(4)'], {}), "(data, 'n/a', 4)\n", (2561, 2577), False, 'from discoverpage_metrics import get_recommended_posts, get_trending_posts, get_hot_deals\n'), ((2592, 2618), 'discoverpage_metrics.get_hot_deals', 'get_hot_deals', (['data', '"""n/a"""'], {}), "(data, 'n/a')\n", (2605, 2618), False, 'from discoverpage_metrics import get_recommended_posts, get_trending_posts, get_hot_deals\n'), ((2636, 2665), 'json.dumps', 'json.dumps', (['test'], {'default': 'str'}), '(test, default=str)\n', (2646, 2665), False, 'import json\n'), ((3038, 3068), 'json.dumps', 'json.dumps', (['cards'], {'default': 'str'}), '(cards, default=str)\n', (3048, 3068), False, 'import json\n'), ((4067, 4107), 'json.dumps', 'json.dumps', (['customer.result'], {'default': 'str'}), '(customer.result, default=str)\n', (4077, 4107), False, 'import json\n'), ((4466, 4505), 'json.dumps', 'json.dumps', (['payment.result'], {'default': 'str'}), '(payment.result, default=str)\n', (4476, 4505), False, 'import json\n'), ((2770, 2802), 'os.getenv', 'os.getenv', (['"""SQUARE_ACCESS_TOKEN"""'], {}), "('SQUARE_ACCESS_TOKEN')\n", (2779, 2802), False, 'import os\n'), ((3437, 3469), 'os.getenv', 
'os.getenv', (['"""SQUARE_ACCESS_TOKEN"""'], {}), "('SQUARE_ACCESS_TOKEN')\n", (3446, 3469), False, 'import os\n')]
|
from . import base
from . import mixins
from datetime import date
class TransformedRecord(
        mixins.GenericCompensationMixin,
        mixins.GenericDepartmentMixin, mixins.GenericIdentifierMixin,
        mixins.GenericJobTitleMixin, mixins.GenericPersonMixin,
        mixins.MembershipMixin, mixins.OrganizationMixin, mixins.PostMixin,
        mixins.RaceMixin, mixins.LinkMixin, base.BaseTransformedRecord):
    """Maps one UT Arlington salary spreadsheet row to the site's record
    format. NOTE(review): uses ``unicode`` below, so this is Python 2 code."""
    # Source spreadsheet column -> record field mapping.
    MAP = {
        'first_name': '<NAME>',
        'last_name': '<NAME>',
        'department': 'Department',
        'job_title': 'Job Title',
        'hire_date': 'Start Date',
        'compensation': 'Annual Rt',
        'status': 'FTE',
        'gender': 'Gender',
        'nationality': 'Ethnicity',
    }
    NAME_FIELDS = ('first_name', 'last_name', )
    # Source ethnicity codes -> display names ('' handles blank cells).
    race_map = {
        'AMIND': 'American Indian',
        'WHITE': 'White',
        'HISPA': 'Hispanic',
        'ASIAN': 'Asian',
        '2+RACE': 'Mixed race',
        'PACIF': 'Pacific Islander',
        'BLACK': 'Black',
        'NSPEC': 'Not specified',
        '': 'Not given',
    }
    # The name of the organization this WILL SHOW UP ON THE SITE, so double check it!
    ORGANIZATION_NAME = 'University of Texas at Arlington'
    # What type of organization is this? This MUST match what we use on the site, double check against salaries.texastribune.org
    ORGANIZATION_CLASSIFICATION = 'University'
    # How would you describe the compensation field? We try to respect how they use their system.
    description = 'Annual rate'
    # When did you receive the data? NOT when we added it to the site.
    DATE_PROVIDED = date(2019, 2, 5)
    # The URL to find the raw data in our S3 bucket.
    URL = ('http://raw.texastribune.org.s3.amazonaws.com/ut_arlington/salaries/2019-02/UTA_HR_TX_TRIBUNE_2019.xlsx')
    REJECT_ALL_IF_INVALID_RECORD_EXISTS = False
    # This is how the loader checks for valid people. Defaults to checking to see if `last_name` is empty.
    @property
    def is_valid(self):
        """A row is valid when it has a non-blank last name."""
        # Adjust to return False on invalid fields. For example:
        return self.last_name.strip() != ''
    @property
    def race(self):
        """Translate the source ethnicity code via race_map."""
        return {
            'name': self.race_map[self.nationality.strip()]
        }
    @property
    def compensation_type(self):
        """FTE >= 1 counts as full-time ('FT'), anything less as 'PT'."""
        status = self.get_mapped_value('status')
        if float(status) >= 1:
            return 'FT'
        return 'PT'
    @property
    def person(self):
        """Build the person dict expected by the loader from the parsed name."""
        name = self.get_name()
        r = {
            'family_name': name.last,
            'given_name': name.first,
            'additional_name': name.middle,
            'name': unicode(name),
            'gender': self.gender.strip()
        }
        return r
# Module-level transform callable used by the loader framework.
transform = base.transform_factory(TransformedRecord)
|
[
"datetime.date"
] |
[((1621, 1637), 'datetime.date', 'date', (['(2019)', '(2)', '(5)'], {}), '(2019, 2, 5)\n', (1625, 1637), False, 'from datetime import date\n')]
|
from src.TreeNode import TreeNode
from operator import itemgetter
class TreeCrawler:
    """Repeatedly merges the closest pair of the root's children until at
    most one remains (agglomerative clustering over a TreeNode tree)."""
    # Class-level defaults; root is replaced per instance in __init__.
    epsilon = 0.0
    root = None
    def __init__(self, root):
        # print("made TreeNode")
        # NOTE(review): on a type mismatch this only warns and leaves
        # self.root as None, so startMerging() would fail later.
        if (not isinstance(root, TreeNode)):
            print("WARNING: root is not of type TreeNode")
            return
        self.root = root
    def startMerging(self):
        """Merge the two closest child nodes per iteration until <= 1 left."""
        index = 0
        # Make a list with the shortest distance of each child-node of root
        shortesDistances = sorted([{"distance": n.getFirstDistance(), "Node": n} for n in self.root.children],
                                  key=itemgetter('distance'))
        # While the queue is not empty we can merge 2 nodes
        while len(shortesDistances) > 1:
            print("at index " + str(index) + " num of children in root: " + str(len(shortesDistances)))
            # If a node has no more distances stored we recalculate all distances
            if not self.root.allChildrenHaveDistances():
                self.root.calcDistances()
            index += 1
            # The node with the shortest distance to another node will be merged first
            newNode = TreeNode.mergeNodes(shortesDistances[0]["Node"], self.root)
            # Remake the list of shortest distances
            shortesDistances = sorted([{"distance": n.getFirstDistance(), "Node": n} for n in self.root.children],
                                      key=itemgetter('distance'))
|
[
"src.TreeNode.TreeNode.mergeNodes",
"operator.itemgetter"
] |
[((1154, 1213), 'src.TreeNode.TreeNode.mergeNodes', 'TreeNode.mergeNodes', (["shortesDistances[0]['Node']", 'self.root'], {}), "(shortesDistances[0]['Node'], self.root)\n", (1173, 1213), False, 'from src.TreeNode import TreeNode\n'), ((608, 630), 'operator.itemgetter', 'itemgetter', (['"""distance"""'], {}), "('distance')\n", (618, 630), False, 'from operator import itemgetter\n'), ((1424, 1446), 'operator.itemgetter', 'itemgetter', (['"""distance"""'], {}), "('distance')\n", (1434, 1446), False, 'from operator import itemgetter\n')]
|
import collections
import fractions
import unittest
import utils
# O(n) time. O(1) space. Two pointers.
class Solution:
    def reverseOnlyLetters(self, s):
        """Return *s* with its letters reversed in place while every
        non-letter character keeps its original position.

        :type s: str
        :rtype: str
        """
        chars = list(s)
        left, right = 0, len(chars) - 1
        # Two pointers converge; non-letters are skipped on either side,
        # letters are swapped pairwise. O(n) time, O(n) for the char list.
        while left < right:
            if not chars[left].isalpha():
                left += 1
            elif not chars[right].isalpha():
                right -= 1
            else:
                chars[left], chars[right] = chars[right], chars[left]
                left += 1
                right -= 1
        return ''.join(chars)
class Test(unittest.TestCase):
    """Data-driven test: cases come from a JSON file next to this module."""
    def test(self):
        cases = utils.load_test_json(__file__).test_cases
        for case in cases:
            args = str(case.args)
            actual = Solution().reverseOnlyLetters(**case.args.__dict__)
            self.assertEqual(case.expected, actual, msg=args)
if __name__ == '__main__':
    unittest.main()
|
[
"unittest.main",
"utils.load_test_json"
] |
[((998, 1013), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1011, 1013), False, 'import unittest\n'), ((726, 756), 'utils.load_test_json', 'utils.load_test_json', (['__file__'], {}), '(__file__)\n', (746, 756), False, 'import utils\n')]
|
import os
# Parent directory of this file's folder, joined with '/' separators.
_path_parts = os.path.dirname(__file__).split(os.sep)
PROJECT_ROOT_DIRECTORY = '/'.join(_path_parts[:-1])
# If you didn't install TestEngine in this project's root folder, change
# this to the path of your .dll folder.
TEST_ENGINE_DIRECTORY = PROJECT_ROOT_DIRECTORY + '/Neo.TestEngine'
|
[
"os.path.dirname"
] |
[((45, 70), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (60, 70), False, 'import os\n')]
|
"""
Given an undirected graph with n vertices and m edges, check whether it is bipartite.
An undirected graph is called bipartite if its vertices can be split into two parts such that each edge of the
graph joins to vertices from different parts. Bipartite graphs arise naturally in applications where a graph
is used to model connections between objects of two different types (say, boys and girls; or students and
dormitories).
"""
from collections import deque
def is_bipartite(g):
"""
Returns True if graph is bipartite
"""
def __switch_color(color):
if color == 1:
return 2
if color == 2:
return 1
raise ValueError()
color = {}
for x in g.get_vertices():
color[x] = -1
not_visited = set(g.get_vertices())
def __next_not_colored_vertex():
return next(iter(not_visited)) if not_visited else None
while True:
s = __next_not_colored_vertex()
if s is None:
break
color[s] = 1
q = deque()
q.append(s)
while q:
x = q.popleft()
not_visited.remove(x)
for e in g.get_edges(x):
if color[e.end] < 0:
color[e.end] = __switch_color(color[x])
q.append(e.end)
elif color[e.end] != __switch_color(color[x]):
return False
return True
|
[
"collections.deque"
] |
[((1031, 1038), 'collections.deque', 'deque', ([], {}), '()\n', (1036, 1038), False, 'from collections import deque\n')]
|
from flask import Blueprint, request, g
from .. import api
inference_api = Blueprint('inference_api', __name__)
@inference_api.route('/v1/object_detection', methods=['POST'])
def get_object_detection_prediction():
confidence_thresh = request.json.get('confidence_threshold', 0.5)
attr_thresh = request.json.get('attributer_threshold', 0.5)
request_id = request.json.get("request_id")
if request_id:
g.request_id = request_id
image = request.json.get('image', {})
image_b64, image_url = None, None
if 'b64' in image:
image_b64 = image.get("b64")
if 'url' in image:
image_url = image.get("url")
if not image_b64 and not image_url:
raise ValueError("Image url or base64 should be provided")
model = request.json.get('model', None)
cls_model_name = request.json.get('cls_model_name', None)
attr_model_name = request.json.get('attr_model_name', None)
predictions = api.inference.get_object_detection_prediction(
model,
image_b64=image_b64,
image_url=image_url,
confidence_thresh=confidence_thresh,
attr_thresh=attr_thresh,
cls_model_name=cls_model_name,
attr_model_name=attr_model_name,
)
results = {"predictions": predictions}
if request_id:
results["request_id"] = request_id
return api.base.get_json_response(results)
@inference_api.route("/v1/image_tagger", methods=["POST"])
def get_image_tagger_prediction():
confidence_thresh = request.json.get("confidence_threshold", 0.5)
request_id = request.json.get("request_id")
if request_id:
g.request_id = request_id
image = request.json.get("image", {})
image_b64, image_url = None, None
if "b64" in image:
image_b64 = image.get("b64")
if "url" in image:
image_url = image.get("url")
if not image_b64 and not image_url:
raise ValueError("Image url or base64 should be provided")
model = request.json.get("model", None)
predictions = api.inference.get_image_tagger_prediction(
model,
image_b64=image_b64,
image_url=image_url,
confidence_thresh=confidence_thresh,
)
results = {"predictions": predictions}
if request_id:
results["request_id"] = request_id
return api.base.get_json_response(results)
@inference_api.route("/v1/semantic_segmentor", methods=["POST"])
def get_semantic_segmentor_prediction():
request_id = request.json.get("request_id")
if request_id:
g.request_id = request_id
image = request.json.get("image", {})
image_b64, image_url = None, None
if "b64" in image:
image_b64 = image.get("b64")
if "url" in image:
image_url = image.get("url")
if not image_b64 and not image_url:
raise ValueError("Image url or base64 should be provided")
model = request.json.get("model", None)
predictions = api.inference.get_semantic_segmentor_prediction(
model,
image_b64=image_b64,
image_url=image_url,
)
results = {"predictions": predictions}
if request_id:
results["request_id"] = request_id
return api.base.get_json_response(results)
|
[
"flask.request.json.get",
"flask.Blueprint"
] |
[((77, 113), 'flask.Blueprint', 'Blueprint', (['"""inference_api"""', '__name__'], {}), "('inference_api', __name__)\n", (86, 113), False, 'from flask import Blueprint, request, g\n'), ((242, 287), 'flask.request.json.get', 'request.json.get', (['"""confidence_threshold"""', '(0.5)'], {}), "('confidence_threshold', 0.5)\n", (258, 287), False, 'from flask import Blueprint, request, g\n'), ((306, 351), 'flask.request.json.get', 'request.json.get', (['"""attributer_threshold"""', '(0.5)'], {}), "('attributer_threshold', 0.5)\n", (322, 351), False, 'from flask import Blueprint, request, g\n'), ((369, 399), 'flask.request.json.get', 'request.json.get', (['"""request_id"""'], {}), "('request_id')\n", (385, 399), False, 'from flask import Blueprint, request, g\n'), ((465, 494), 'flask.request.json.get', 'request.json.get', (['"""image"""', '{}'], {}), "('image', {})\n", (481, 494), False, 'from flask import Blueprint, request, g\n'), ((772, 803), 'flask.request.json.get', 'request.json.get', (['"""model"""', 'None'], {}), "('model', None)\n", (788, 803), False, 'from flask import Blueprint, request, g\n'), ((825, 865), 'flask.request.json.get', 'request.json.get', (['"""cls_model_name"""', 'None'], {}), "('cls_model_name', None)\n", (841, 865), False, 'from flask import Blueprint, request, g\n'), ((888, 929), 'flask.request.json.get', 'request.json.get', (['"""attr_model_name"""', 'None'], {}), "('attr_model_name', None)\n", (904, 929), False, 'from flask import Blueprint, request, g\n'), ((1504, 1549), 'flask.request.json.get', 'request.json.get', (['"""confidence_threshold"""', '(0.5)'], {}), "('confidence_threshold', 0.5)\n", (1520, 1549), False, 'from flask import Blueprint, request, g\n'), ((1567, 1597), 'flask.request.json.get', 'request.json.get', (['"""request_id"""'], {}), "('request_id')\n", (1583, 1597), False, 'from flask import Blueprint, request, g\n'), ((1663, 1692), 'flask.request.json.get', 'request.json.get', (['"""image"""', '{}'], {}), "('image', 
{})\n", (1679, 1692), False, 'from flask import Blueprint, request, g\n'), ((1970, 2001), 'flask.request.json.get', 'request.json.get', (['"""model"""', 'None'], {}), "('model', None)\n", (1986, 2001), False, 'from flask import Blueprint, request, g\n'), ((2464, 2494), 'flask.request.json.get', 'request.json.get', (['"""request_id"""'], {}), "('request_id')\n", (2480, 2494), False, 'from flask import Blueprint, request, g\n'), ((2560, 2589), 'flask.request.json.get', 'request.json.get', (['"""image"""', '{}'], {}), "('image', {})\n", (2576, 2589), False, 'from flask import Blueprint, request, g\n'), ((2867, 2898), 'flask.request.json.get', 'request.json.get', (['"""model"""', 'None'], {}), "('model', None)\n", (2883, 2898), False, 'from flask import Blueprint, request, g\n')]
|
import numpy as np
class NeuralNetwork():
def __init__(self):
# DO NOT CHANGE PARAMETERS
self.input_to_hidden_weights = np.matrix('1 1; 1 1; 1 1')
self.hidden_to_output_weights = np.matrix('1 1 1')
self.biases = np.matrix('0; 0; 0')
self.learning_rate = .001
self.epochs_to_train = 10
self.training_points = [((2, 1), 10), ((3, 3), 21), ((4, 5), 32), ((6, 6), 42)]
self.testing_points = [(1,1), (2,2), (3,3), (5,5), (10,10)]
def train(self, x1, x2, y):
def rectified_linear_unit(x):
return np.maximum(0, x)
def rectified_linear_unit_derivative(x):
if x>0:
return 1
else:
return 0
### Forward propagation ###
input_values = np.matrix([[x1],[x2]]) # 2 by 1
input_values = np.array([[x1],[x2]]).astype('float64')
##Converting to numpy array
self_input_to_hidden_weights = np.array(self.input_to_hidden_weights).astype('float64')
self_hidden_to_output_weights = np.array(self.hidden_to_output_weights).astype('float64')
self_biases = np.array(self.biases).astype('float64')
#Activation functions
relu= np.vectorize(rectified_linear_unit)
relu_dev = np.vectorize(rectified_linear_unit_derivative)
# Calculate the input and activation of the hidden layer
hidden_layer_weighted_input = np.dot(self_input_to_hidden_weights, input_values) + self_biases # TODO (3 by 1 matrix)
hidden_layer_activation = relu(hidden_layer_weighted_input) # TODO (3 by 1 matrix)
hidden_to_output= np.dot(self_hidden_to_output_weights, hidden_layer_activation) # TODO 1x1
output = hidden_to_output # TODO 1x1
activated_output = relu(hidden_to_output) # TODO 1X1
### Backpropagation ###
# Compute gradients
output_layer_error = (y- activated_output) * relu_dev(hidden_to_output)
hidden_layer_error = output_layer_error * relu_dev(hidden_layer_weighted_input) # TODO (3 by 1 matrix)
bias_gradients = (y- activated_output)* relu_dev(hidden_layer_weighted_input)
hidden_to_output_weight_gradients = output * (y- activated_output)* relu_dev(activated_output)
input_to_hidden_weight_gradients = (np.dot(np.dot(relu_dev(hidden_layer_weighted_input), input_values.T).T, relu_dev(self_hidden_to_output_weights.T)) * relu_dev(hidden_to_output) * (y- activated_output)).T
# Use gradients to adjust weights and biases using gradient descent
new_biases = self_biases - (self.learning_rate*bias_gradients)
new_input_to_hidden_weights = self_input_to_hidden_weights - (self.learning_rate*input_to_hidden_weight_gradients)
new_hidden_to_output_weights = self_hidden_to_output_weights - (self.learning_rate*hidden_to_output_weight_gradients)
##This goes at the end to convert back the matrices
#And at the end, I convert back after calculating the new biases and weights:
self.biases = np.matrix(new_biases)
self.input_to_hidden_weights = np.matrix(new_input_to_hidden_weights)
self.hidden_to_output_weights = np.matrix(new_hidden_to_output_weights)
# Run this to train your neural network once you complete the train method
def train_neural_network(self):
print('Training pairs: ', self.training_points)
print('Starting params: ')
print('')
print('(Input --> Hidden Layer) Weights: ', self.input_to_hidden_weights)
print('(Hidden --> Output Layer) Weights: ', self.hidden_to_output_weights)
print('Biases: ', self.biases)
for epoch in range(self.epochs_to_train):
print('')
print('Epoch ', epoch)
for x,y in self.training_points:
self.train(x[0], x[1], y)
print('(Input --> Hidden Layer) Weights: ', self.input_to_hidden_weights)
print('(Hidden --> Output Layer) Weights: ', self.hidden_to_output_weights)
print('Biases: ', self.biases)
##Testing the Neural Network
x = NeuralNetwork()
x.train_neural_network()
|
[
"numpy.matrix",
"numpy.vectorize",
"numpy.maximum",
"numpy.array",
"numpy.dot"
] |
[((142, 168), 'numpy.matrix', 'np.matrix', (['"""1 1; 1 1; 1 1"""'], {}), "('1 1; 1 1; 1 1')\n", (151, 168), True, 'import numpy as np\n'), ((209, 227), 'numpy.matrix', 'np.matrix', (['"""1 1 1"""'], {}), "('1 1 1')\n", (218, 227), True, 'import numpy as np\n'), ((250, 270), 'numpy.matrix', 'np.matrix', (['"""0; 0; 0"""'], {}), "('0; 0; 0')\n", (259, 270), True, 'import numpy as np\n'), ((821, 844), 'numpy.matrix', 'np.matrix', (['[[x1], [x2]]'], {}), '([[x1], [x2]])\n', (830, 844), True, 'import numpy as np\n'), ((1253, 1288), 'numpy.vectorize', 'np.vectorize', (['rectified_linear_unit'], {}), '(rectified_linear_unit)\n', (1265, 1288), True, 'import numpy as np\n'), ((1308, 1354), 'numpy.vectorize', 'np.vectorize', (['rectified_linear_unit_derivative'], {}), '(rectified_linear_unit_derivative)\n', (1320, 1354), True, 'import numpy as np\n'), ((1673, 1735), 'numpy.dot', 'np.dot', (['self_hidden_to_output_weights', 'hidden_layer_activation'], {}), '(self_hidden_to_output_weights, hidden_layer_activation)\n', (1679, 1735), True, 'import numpy as np\n'), ((3090, 3111), 'numpy.matrix', 'np.matrix', (['new_biases'], {}), '(new_biases)\n', (3099, 3111), True, 'import numpy as np\n'), ((3151, 3189), 'numpy.matrix', 'np.matrix', (['new_input_to_hidden_weights'], {}), '(new_input_to_hidden_weights)\n', (3160, 3189), True, 'import numpy as np\n'), ((3231, 3270), 'numpy.matrix', 'np.matrix', (['new_hidden_to_output_weights'], {}), '(new_hidden_to_output_weights)\n', (3240, 3270), True, 'import numpy as np\n'), ((593, 609), 'numpy.maximum', 'np.maximum', (['(0)', 'x'], {}), '(0, x)\n', (603, 609), True, 'import numpy as np\n'), ((1467, 1517), 'numpy.dot', 'np.dot', (['self_input_to_hidden_weights', 'input_values'], {}), '(self_input_to_hidden_weights, input_values)\n', (1473, 1517), True, 'import numpy as np\n'), ((876, 898), 'numpy.array', 'np.array', (['[[x1], [x2]]'], {}), '([[x1], [x2]])\n', (884, 898), True, 'import numpy as np\n'), ((992, 1030), 'numpy.array', 
'np.array', (['self.input_to_hidden_weights'], {}), '(self.input_to_hidden_weights)\n', (1000, 1030), True, 'import numpy as np\n'), ((1089, 1128), 'numpy.array', 'np.array', (['self.hidden_to_output_weights'], {}), '(self.hidden_to_output_weights)\n', (1097, 1128), True, 'import numpy as np\n'), ((1169, 1190), 'numpy.array', 'np.array', (['self.biases'], {}), '(self.biases)\n', (1177, 1190), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
"""
blogger
-------
A personal blog framework with flask and sqlite3
:copyright: (c) 2017 by cjhang.
:license: MIT, see LICENSE for details.
"""
import os
import sqlite3
import codecs
from datetime import date
import markdown
from flask import Flask, request, g, redirect, url_for, render_template,\
flash, send_from_directory
# create the application
app = Flask(__name__)
# configuration
DATABASE = os.path.join(app.root_path, 'blogger.db')
BLOGS = os.path.join(app.root_path, 'blogs')
DEBUG = True
SECRET_KEY='<KEY>'
PER_PAGE = 10
app.config.from_object(__name__)
app.config.from_envvar('BLOGGER_SETTINGS', silent=True)
extensions = ['markdown.extensions.meta',
'markdown.extensions.toc', 'pymdownx.githubemoji', 'pymdownx.tasklist',
'markdown.extensions.extra', 'markdown.extensions.codehilite']
md = markdown.Markdown(extensions = extensions);
def connect_db():
rv = sqlite3.connect(app.config['DATABASE'])
rv.row_factory = sqlite3.Row
return rv
def get_db():
if not hasattr(g, 'sqlite_db'):
g.sqlite_db = connect_db()
return g.sqlite_db
@app.teardown_appcontext
def close_db(error):
if hasattr(g, 'sqlite_db'):
g.sqlite_db.close()
def init_db():
db = get_db()
with app.open_resource('schema.sql', mode='r') as f:
db.cursor().executescript(f.read())
db.commit()
def query_db(query, args=(), method='r'):
'''
Queries the database and returns a list of dictionaries.
'''
db = get_db()
cur = db.execute(query, args)
if method == 'w':
db.commit()
return 0
elif method == 'r':
rv = cur.fetchall()
return rv
def readMarkdown(filename):
with codecs.open(filename, encoding='utf-8') as htmlSource:
body = md.convert(htmlSource.read())
toc = md.toc
meta = md.Meta
name = meta['name'][0]
title_zh = meta['title_zh'][0]
author = meta['author'][0] # TODO multi author surpport
# try:
# release = str(datetime.strftime(datetime.strptime(meta['release'][0],
# '%Y-%m-%d'),'%Y年%m月%d日'))
# except:
# release = ""
# try:
# revise = str(datetime.strftime(datetime.strptime(meta['revise'][0],
# '%Y-%m-%d'),'%Y年%m月%d日'))
# except:
# revise = ""
release = meta['release'][0]
revise = meta['revise'][0]
tags = meta['tags'][0] # TODO: multi tag surpport, should modify database
abstract = meta['abstract'][0]
return name, title_zh, author, release, revise, tags, abstract, toc, body
@app.cli.command('initdb')
def initdb_command():
init_db()
print('Initialized the database')
@app.cli.command('updateblog')
def updatedb_command():
blogs_fs = os.listdir(BLOGS)
blogs_db = query_db('select name, revise from blogs')
blogs_name_db = []
for item in blogs_db:
blogs_name_db.append(item['name'])
for item in blogs_fs:
print(os.path.join(BLOGS, item, item+'.blog'))
name, title_zh, author, release, revise, tags, abstract, toc,content \
= readMarkdown(os.path.join(BLOGS, item, item+'.blog'))
if name in blogs_name_db:
query_db('''update blogs set revise = ?, abstract = ?, toc = ?,
content = ? where name = ? and revise < ?''',
[revise, abstract, toc, content, name, revise],
method='w')
else:
# create in blogs table
query_db('''insert into blogs (name, title_zh, author, release,
revise, tags, abstract, toc, content)
values (?, ?, ?, ?, ?, ?, ?, ?, ?)''',[name, title_zh,
author, release, revise, tags, abstract, toc, content],
method='w')
@app.route('/')
def show_updates():
blogs=query_db('''
select name, title_zh, author, revise, tags, abstract from blogs
where release != "" order by revise desc limit 30''')
return render_template('update.html', blogs=blogs)
@app.route('/blogs/<blogname>', methods=['GET', 'POST'])
def show_article(blogname):
if request.method == 'GET':
article = query_db('''
select title_zh, author, release, revise, toc, content
from blogs where name = ?''', [blogname])
comments = query_db('''
select user_name, comment_date, comment_detail from comments
where blog_name = ?''', [blogname])
return render_template('article.html', article = article,
comments = comments)
elif request.method == 'POST':
comment_date = date.today().isoformat()
query_db('''
insert into comments(blog_name, user_name, comment_date,
contact_detail ,comment_detail) values(?, ?, ?, ?, ?)''',
[blogname, request.form['user_name'], comment_date,
request.form['contact_detail'],
request.form['comment_detail']], method = 'w')
flash("Comments was successfully posted")
return redirect(url_for('show_article', blogname = blogname))
else:
print('Unsurpported request!')
return 0
@app.route('/images/<path:filename>')
def get_image(filename):
# blog_name = request.args.get('name')
# pic_name = request.args.get('pic')
# file_type = request.args.get('type')
# file_name = blog_name+'/'+pic_name+'.'+file_type
# return send_file(os.path.join(BLOGS, filename), mimetype='image/'+'file_type')
return send_from_directory(BLOGS, filename)
@app.route('/add', methods=['GET', 'POST'])
# Currently, the add post only used for test
def add_blog():
if request.method == 'POST':
content = md.convert(request.form['content'])
toc = md.toc
query_db('''insert into blogs (name, title_zh, author, release,
tags, abstract, toc, content)
values (?, ?, ?, ?, ?, ?, ?, ?)''',
[request.form['name'], request.form['title_zh'],
request.form['author'], request.form['release'],
request.form['tags'], request.form['abstract'],
toc, content], method='w')
flash('Blog was successfully posted')
return redirect(url_for('show_updates'))
return render_template('add.html')
if __name__ == '__main__':
app.run()
|
[
"flask.flash",
"codecs.open",
"flask.Flask",
"datetime.date.today",
"flask.url_for",
"sqlite3.connect",
"flask.render_template",
"markdown.Markdown",
"flask.send_from_directory",
"flask.g.sqlite_db.close",
"os.path.join",
"os.listdir"
] |
[((414, 429), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (419, 429), False, 'from flask import Flask, request, g, redirect, url_for, render_template, flash, send_from_directory\n'), ((458, 499), 'os.path.join', 'os.path.join', (['app.root_path', '"""blogger.db"""'], {}), "(app.root_path, 'blogger.db')\n", (470, 499), False, 'import os\n'), ((508, 544), 'os.path.join', 'os.path.join', (['app.root_path', '"""blogs"""'], {}), "(app.root_path, 'blogs')\n", (520, 544), False, 'import os\n'), ((872, 912), 'markdown.Markdown', 'markdown.Markdown', ([], {'extensions': 'extensions'}), '(extensions=extensions)\n', (889, 912), False, 'import markdown\n'), ((945, 984), 'sqlite3.connect', 'sqlite3.connect', (["app.config['DATABASE']"], {}), "(app.config['DATABASE'])\n", (960, 984), False, 'import sqlite3\n'), ((2759, 2776), 'os.listdir', 'os.listdir', (['BLOGS'], {}), '(BLOGS)\n', (2769, 2776), False, 'import os\n'), ((4020, 4063), 'flask.render_template', 'render_template', (['"""update.html"""'], {'blogs': 'blogs'}), "('update.html', blogs=blogs)\n", (4035, 4063), False, 'from flask import Flask, request, g, redirect, url_for, render_template, flash, send_from_directory\n'), ((5555, 5591), 'flask.send_from_directory', 'send_from_directory', (['BLOGS', 'filename'], {}), '(BLOGS, filename)\n', (5574, 5591), False, 'from flask import Flask, request, g, redirect, url_for, render_template, flash, send_from_directory\n'), ((6327, 6354), 'flask.render_template', 'render_template', (['"""add.html"""'], {}), "('add.html')\n", (6342, 6354), False, 'from flask import Flask, request, g, redirect, url_for, render_template, flash, send_from_directory\n'), ((1228, 1247), 'flask.g.sqlite_db.close', 'g.sqlite_db.close', ([], {}), '()\n', (1245, 1247), False, 'from flask import Flask, request, g, redirect, url_for, render_template, flash, send_from_directory\n'), ((1739, 1778), 'codecs.open', 'codecs.open', (['filename'], {'encoding': '"""utf-8"""'}), "(filename, 
encoding='utf-8')\n", (1750, 1778), False, 'import codecs\n'), ((4503, 4570), 'flask.render_template', 'render_template', (['"""article.html"""'], {'article': 'article', 'comments': 'comments'}), "('article.html', article=article, comments=comments)\n", (4518, 4570), False, 'from flask import Flask, request, g, redirect, url_for, render_template, flash, send_from_directory\n'), ((6229, 6266), 'flask.flash', 'flash', (['"""Blog was successfully posted"""'], {}), "('Blog was successfully posted')\n", (6234, 6266), False, 'from flask import Flask, request, g, redirect, url_for, render_template, flash, send_from_directory\n'), ((2965, 3006), 'os.path.join', 'os.path.join', (['BLOGS', 'item', "(item + '.blog')"], {}), "(BLOGS, item, item + '.blog')\n", (2977, 3006), False, 'import os\n'), ((3108, 3149), 'os.path.join', 'os.path.join', (['BLOGS', 'item', "(item + '.blog')"], {}), "(BLOGS, item, item + '.blog')\n", (3120, 3149), False, 'import os\n'), ((5034, 5075), 'flask.flash', 'flash', (['"""Comments was successfully posted"""'], {}), "('Comments was successfully posted')\n", (5039, 5075), False, 'from flask import Flask, request, g, redirect, url_for, render_template, flash, send_from_directory\n'), ((6291, 6314), 'flask.url_for', 'url_for', (['"""show_updates"""'], {}), "('show_updates')\n", (6298, 6314), False, 'from flask import Flask, request, g, redirect, url_for, render_template, flash, send_from_directory\n'), ((5100, 5142), 'flask.url_for', 'url_for', (['"""show_article"""'], {'blogname': 'blogname'}), "('show_article', blogname=blogname)\n", (5107, 5142), False, 'from flask import Flask, request, g, redirect, url_for, render_template, flash, send_from_directory\n'), ((4650, 4662), 'datetime.date.today', 'date.today', ([], {}), '()\n', (4660, 4662), False, 'from datetime import date\n')]
|
from django.utils.text import slugify
from faker import Faker
import factory
from factory.django import DjangoModelFactory
from video.models import Videos
from areas.tests.faker_data import CityTownFactory
import random
fake = Faker()
class Climbing_Name_Random():
climbs = ['The Pocket', 'Ground Zero', 'Samurai', 'The Engineer',
'Footless Traverse', 'Beam Me Up', 'Jaws', 'Bricklayer']
num_climbs = len(climbs) - 1
random_num = random.randint(0, num_climbs)
climb_name = climbs[random_num]
class VideoFactory(DjangoModelFactory):
class Meta:
model = Videos
name = Climbing_Name_Random.climb_name
city = factory.SubFactory(CityTownFactory)
author = factory.Faker('user_name')
thumbnail = factory.Faker('image_url')
description = factory.Faker('sentence')
slug = slugify(name)
|
[
"factory.Faker",
"random.randint",
"faker.Faker",
"factory.SubFactory",
"django.utils.text.slugify"
] |
[((232, 239), 'faker.Faker', 'Faker', ([], {}), '()\n', (237, 239), False, 'from faker import Faker\n'), ((463, 492), 'random.randint', 'random.randint', (['(0)', 'num_climbs'], {}), '(0, num_climbs)\n', (477, 492), False, 'import random\n'), ((665, 700), 'factory.SubFactory', 'factory.SubFactory', (['CityTownFactory'], {}), '(CityTownFactory)\n', (683, 700), False, 'import factory\n'), ((714, 740), 'factory.Faker', 'factory.Faker', (['"""user_name"""'], {}), "('user_name')\n", (727, 740), False, 'import factory\n'), ((757, 783), 'factory.Faker', 'factory.Faker', (['"""image_url"""'], {}), "('image_url')\n", (770, 783), False, 'import factory\n'), ((802, 827), 'factory.Faker', 'factory.Faker', (['"""sentence"""'], {}), "('sentence')\n", (815, 827), False, 'import factory\n'), ((839, 852), 'django.utils.text.slugify', 'slugify', (['name'], {}), '(name)\n', (846, 852), False, 'from django.utils.text import slugify\n')]
|
from networkx import DiGraph
from tests.data.data_registry import PATH_INFO_GRAPH, PATH_INFO_GRAPH_OUTPUT
from thucydides.datasets.info_graph_dataset import InfoGraphDataSet
def test_info_graph_dataset_save() -> None:
if PATH_INFO_GRAPH_OUTPUT.is_file():
PATH_INFO_GRAPH_OUTPUT.unlink()
nx_g = DiGraph()
nx_g.add_nodes_from([(0, {"dummy_attr_1": "dummy_value"})])
info_graph_dataset = InfoGraphDataSet(filepath=PATH_INFO_GRAPH_OUTPUT)
info_graph_dataset.save(nx_g=nx_g)
assert PATH_INFO_GRAPH_OUTPUT.is_file()
def test_info_graph_dataset_load() -> None:
info_graph_dataset = InfoGraphDataSet(filepath=PATH_INFO_GRAPH)
info_graph = info_graph_dataset.load()
assert info_graph.number_of_nodes() > 0
|
[
"networkx.DiGraph",
"tests.data.data_registry.PATH_INFO_GRAPH_OUTPUT.is_file",
"tests.data.data_registry.PATH_INFO_GRAPH_OUTPUT.unlink",
"thucydides.datasets.info_graph_dataset.InfoGraphDataSet"
] |
[((228, 260), 'tests.data.data_registry.PATH_INFO_GRAPH_OUTPUT.is_file', 'PATH_INFO_GRAPH_OUTPUT.is_file', ([], {}), '()\n', (258, 260), False, 'from tests.data.data_registry import PATH_INFO_GRAPH, PATH_INFO_GRAPH_OUTPUT\n'), ((314, 323), 'networkx.DiGraph', 'DiGraph', ([], {}), '()\n', (321, 323), False, 'from networkx import DiGraph\n'), ((414, 463), 'thucydides.datasets.info_graph_dataset.InfoGraphDataSet', 'InfoGraphDataSet', ([], {'filepath': 'PATH_INFO_GRAPH_OUTPUT'}), '(filepath=PATH_INFO_GRAPH_OUTPUT)\n', (430, 463), False, 'from thucydides.datasets.info_graph_dataset import InfoGraphDataSet\n'), ((515, 547), 'tests.data.data_registry.PATH_INFO_GRAPH_OUTPUT.is_file', 'PATH_INFO_GRAPH_OUTPUT.is_file', ([], {}), '()\n', (545, 547), False, 'from tests.data.data_registry import PATH_INFO_GRAPH, PATH_INFO_GRAPH_OUTPUT\n'), ((619, 661), 'thucydides.datasets.info_graph_dataset.InfoGraphDataSet', 'InfoGraphDataSet', ([], {'filepath': 'PATH_INFO_GRAPH'}), '(filepath=PATH_INFO_GRAPH)\n', (635, 661), False, 'from thucydides.datasets.info_graph_dataset import InfoGraphDataSet\n'), ((270, 301), 'tests.data.data_registry.PATH_INFO_GRAPH_OUTPUT.unlink', 'PATH_INFO_GRAPH_OUTPUT.unlink', ([], {}), '()\n', (299, 301), False, 'from tests.data.data_registry import PATH_INFO_GRAPH, PATH_INFO_GRAPH_OUTPUT\n')]
|
import pytest
from tekdrive import TekDrive
from tekdrive.exceptions import (
ClientException,
TekDriveAPIException,
FileGoneAPIException,
)
from .base import UnitTest
class TestTekDrive(UnitTest):
FAKE_KEY = "abc123"
def test_access_key_required(self):
with pytest.raises(ClientException) as e:
TekDrive(access_key=None)
assert str(e.value).startswith("Missing required attribute 'access_key'.")
def test_custom_base_url(self):
base_url = "https://drive.dev-api.tekcloud.com"
td = TekDrive(
access_key="abc123",
base_url=base_url,
)
assert td._session._request_wrapper.base_url == base_url
class TestParser(UnitTest):
def test_parser_no_content(self):
data = None
assert self.tekdrive._parser.parse(data) is None
def test_parse_error_no_error_code(self):
for data in ({}, {"a": 1}):
assert self.tekdrive._parser.parse_error(data, headers=None) is None
def test_parse_error_has_error_code(self):
data = {"errorCode": "SOME_ERROR_CODE"}
error = self.tekdrive._parser.parse_error(data, headers=None)
assert isinstance(error, TekDriveAPIException)
def test_parse_error_maps_correctly(self):
data = {"errorCode": "FILE_GONE", "message": "File is in the trash."}
error = self.tekdrive._parser.parse_error(data, headers=None)
assert isinstance(error, FileGoneAPIException)
|
[
"pytest.raises",
"tekdrive.TekDrive"
] |
[((556, 604), 'tekdrive.TekDrive', 'TekDrive', ([], {'access_key': '"""abc123"""', 'base_url': 'base_url'}), "(access_key='abc123', base_url=base_url)\n", (564, 604), False, 'from tekdrive import TekDrive\n'), ((292, 322), 'pytest.raises', 'pytest.raises', (['ClientException'], {}), '(ClientException)\n', (305, 322), False, 'import pytest\n'), ((341, 366), 'tekdrive.TekDrive', 'TekDrive', ([], {'access_key': 'None'}), '(access_key=None)\n', (349, 366), False, 'from tekdrive import TekDrive\n')]
|
from typing import Any, Dict, Optional
from apii.machine_calls import Caller, MachineCalls
class SDICalls:
"""
SDI_Calls class is the first link in the chain or responsibility. Retrieving calls from the APII, it either prepares
those calls for the Caller or passes it to the Machine_Calls class. Calls prepared here change the SDI settings.
"""
def __init__(self, caller: Caller) -> None:
"""
Sets and passes the Caller reference, as well as defines the API calls that this class is responsible for.
"""
self.__caller = caller
self.__machine_calls = MachineCalls(caller)
self.__known_calls = ["storage_general", "get_users", "get_sdis", "create_sdi", "edit_sdi", "run_sdi", "configure_sdi"]
def get_sdi_id(self) -> str:
return self.__machine_calls.get_sdi_id()
def make_call(self, api_call: str, args: Dict[str, Any] = {}) -> Optional[Any]:
"""
The make_call method, called by the APII, prepares arguments for the Caller. If the API call needed is to be
handled here, the argument list is converted from a single dict to two dictionaries, for URL extensions and
API arguments. These are then passed to the caller for API interaction. If an SDI is created, the id is saved in
the Machine_Calls class. If the call is not known by this class, it is passed to the next class in the chain.
"""
if api_call in self.__known_calls:
extension_dict = dict()
other_dict = dict()
for arg, val in args.items():
if arg == "user_pk" or arg == "sdi_id": # URL extensions only need these two params.
extension_dict[arg] = val
else: # Everything else passed as arguments.
other_dict[arg] = val
response = self.__caller.make_call(api_call, extension_dict, other_dict)
if api_call == "create_sdi" and response is not None:
self.__machine_calls.set_sdi_id(response["sdi_id"])
return response
else:
return self.__machine_calls.make_call(api_call, args)
|
[
"apii.machine_calls.MachineCalls"
] |
[((614, 634), 'apii.machine_calls.MachineCalls', 'MachineCalls', (['caller'], {}), '(caller)\n', (626, 634), False, 'from apii.machine_calls import Caller, MachineCalls\n')]
|
#!/usr/bin/env python3
import itertools
import multiprocessing as mp
def isValid0(l):
return sum(l[0:3]) == 38 and sum(l[3:7]) == 38 and sum(l[7:12]) == 38 and sum(l[12:16]) == 38 and sum(l[16:19]) == 38
def rotate(it, indices):
return map(lambda index: it[index], indices)
rotation1 = [2, 6, 11, 1, 5, 10, 15, 0, 4, 9, 14, 18, 3, 8, 13, 17, 7, 12, 16]
rotation2 = [11, 15, 18, 6, 10, 14, 17, 2, 5, 9, 13, 16, 1, 4, 8, 12, 0, 3, 7]
def isValid(l):
return isValid0(l) and isValid0(list(rotate(l, rotation1))) and isValid0(list(rotate(l, rotation2)))
def validOrEmpty(l):
if isValid(l):
return l
else:
return []
if __name__ == '__main__':
numbers = range(1, 20)
# result = next(filter(isValid, itertools.permutations(numbers)))
# print(result)
with mp.Pool(mp.cpu_count()) as pool:
result = list(filter(lambda l: len(l) > 0, pool.imap(validOrEmpty, itertools.permutations(numbers), 10000)))
for r in result:
print(r)
|
[
"itertools.permutations",
"multiprocessing.cpu_count"
] |
[((814, 828), 'multiprocessing.cpu_count', 'mp.cpu_count', ([], {}), '()\n', (826, 828), True, 'import multiprocessing as mp\n'), ((914, 945), 'itertools.permutations', 'itertools.permutations', (['numbers'], {}), '(numbers)\n', (936, 945), False, 'import itertools\n')]
|
from matplotlib import pyplot as plt
import networkx as nx
graph = nx.Graph()
with open('model.out') as f:
for line in f:
line = line.split()
if line:
graph.add_edge(int(line[0]), int(line[1]))
# end if
# end for
# end with
with open('match.out') as f:
matched = [int(i) for i in f.read().split()]
# end with
not_matched = [v for v in graph.nodes() if v not in matched]
pos = nx.shell_layout(graph)
plt.xticks([]), plt.yticks([])
nx.draw_networkx_nodes(graph, pos, nodelist=not_matched,
alpha=0.4, node_color='blue', node_size=1300)
nx.draw_networkx_nodes(graph, pos, nodelist=matched,
alpha=0.8, node_color='red', node_size=1300)
nx.draw_networkx_labels(graph, pos, font_family='sans-serif', font_size=15)
nx.draw_networkx_edges(graph, pos, width=1.5, alpha=0.5)
plt.show()
|
[
"matplotlib.pyplot.show",
"networkx.draw_networkx_edges",
"matplotlib.pyplot.yticks",
"networkx.draw_networkx_nodes",
"networkx.Graph",
"networkx.draw_networkx_labels",
"matplotlib.pyplot.xticks",
"networkx.shell_layout"
] |
[((68, 78), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (76, 78), True, 'import networkx as nx\n'), ((428, 450), 'networkx.shell_layout', 'nx.shell_layout', (['graph'], {}), '(graph)\n', (443, 450), True, 'import networkx as nx\n'), ((482, 588), 'networkx.draw_networkx_nodes', 'nx.draw_networkx_nodes', (['graph', 'pos'], {'nodelist': 'not_matched', 'alpha': '(0.4)', 'node_color': '"""blue"""', 'node_size': '(1300)'}), "(graph, pos, nodelist=not_matched, alpha=0.4,\n node_color='blue', node_size=1300)\n", (504, 588), True, 'import networkx as nx\n'), ((608, 710), 'networkx.draw_networkx_nodes', 'nx.draw_networkx_nodes', (['graph', 'pos'], {'nodelist': 'matched', 'alpha': '(0.8)', 'node_color': '"""red"""', 'node_size': '(1300)'}), "(graph, pos, nodelist=matched, alpha=0.8, node_color=\n 'red', node_size=1300)\n", (630, 710), True, 'import networkx as nx\n'), ((729, 804), 'networkx.draw_networkx_labels', 'nx.draw_networkx_labels', (['graph', 'pos'], {'font_family': '"""sans-serif"""', 'font_size': '(15)'}), "(graph, pos, font_family='sans-serif', font_size=15)\n", (752, 804), True, 'import networkx as nx\n'), ((805, 861), 'networkx.draw_networkx_edges', 'nx.draw_networkx_edges', (['graph', 'pos'], {'width': '(1.5)', 'alpha': '(0.5)'}), '(graph, pos, width=1.5, alpha=0.5)\n', (827, 861), True, 'import networkx as nx\n'), ((862, 872), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (870, 872), True, 'from matplotlib import pyplot as plt\n'), ((451, 465), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[]'], {}), '([])\n', (461, 465), True, 'from matplotlib import pyplot as plt\n'), ((467, 481), 'matplotlib.pyplot.yticks', 'plt.yticks', (['[]'], {}), '([])\n', (477, 481), True, 'from matplotlib import pyplot as plt\n')]
|
#
# Copyright 2016-2017 Games Creators Club
#
# MIT License
#
import sys
import time
import pygame
import pyros
import pyros.gcc
import pyros.gccui
import pyros.agent
import pyros.pygamehelper
# UI colour constant.
WHITE = (255, 255, 255)
# Seconds before a ping to the agent is considered lost.
MAX_PING_TIMEOUT = 1
# Initial tuning values pushed to the maze agent on connect.
INITIAL_SPEED = 40
INITIAL_SIDE_GAIN = 0.4
INITIAL_FORWARD_GAIN = 1.0
INITIAL_DISTANCE_GAIN = 1.7
INITIAL_CORNER_GAIN = 1.3
# Mutable copies of the tuning values; adjusted at runtime via key presses.
sideGain = INITIAL_SIDE_GAIN
forwardGain = INITIAL_FORWARD_GAIN
distanceGain = INITIAL_DISTANCE_GAIN
cornerGain = INITIAL_CORNER_GAIN
speed = INITIAL_SPEED
pingLastTime = 0
# Sensor-read angle selected with the o/p keys (-90..90 degrees).
angle = 0
# Latest distance readings reported by the agent (-1 = no data yet).
distanceDeg1 = -1
distanceDeg2 = -1
distance1 = -1
distance2 = -1
avgDistance1 = -1
avgDistance2 = -1
gyroAngle = 0
corridorWidth = 0
idealDistance = 0
# True while the robot has been told to run.
run = False
driveAngle = 0
# Initialise the pygame-based GCC UI (600x600 window).
screen = pyros.gccui.initAll((600, 600), True)
font = pyros.gccui.font
bigFont = pyros.gccui.bigFont
def connected():
    """Callback invoked once the pyros connection is up.

    Deploys the maze agent onto the robot, then pushes every current
    tuning value to it and makes sure the robot starts stopped.
    """
    pyros.agent.init(pyros.client, "maze-agent.py")
    pyros.publish("maze/ping", "")
    # Push all tuning parameters in the same order the originals were sent.
    for topic, value in (("maze/sideGain", sideGain),
                         ("maze/forwardGain", forwardGain),
                         ("maze/distanceGain", distanceGain),
                         ("maze/cornerGain", cornerGain),
                         ("maze/speed", speed)):
        pyros.publish(topic, str(value))
    stop()
def handleDistances(topic, message, groups):
    """Parse a distances message and store the first two readings.

    The message has the form "deg:dist;avg,deg:dist;avg,..."; the first
    entry fills distanceDeg1/distance1/avgDistance1 and the second fills
    the *2 counterparts. Malformed entries are skipped.
    """
    global distanceDeg1, distanceDeg2, distance1, distance2, avgDistance1, avgDistance2
    index = 0
    for entry in message.split(","):
        parts = entry.split(":")
        if len(parts) != 2:
            continue
        degrees = int(parts[0])
        values = parts[1].split(";")
        if len(values) == 2:
            reading = float(values[0])
            average = float(values[1])
            if index == 0:
                distanceDeg1 = degrees
                distance1 = reading
                avgDistance1 = average
            elif index == 1:
                distanceDeg2 = degrees
                distance2 = reading
                avgDistance2 = average
        index += 1
def handleGyro(topic, message, groups):
    """Store the latest gyro angle (degrees) reported by the agent."""
    global gyroAngle
    gyroAngle = float(message)
def handleDataCorridor(topic, message, groups):
    """Store the latest corridor width reported by the agent."""
    global corridorWidth
    corridorWidth = float(message)
def handleDataIdealDistance(topic, message, groups):
    """Store the latest ideal wall distance reported by the agent."""
    global idealDistance
    idealDistance = float(message)
def _send_command(command):
    # Internal helper: forward a command verb to the maze agent.
    pyros.publish("maze/command", command)
def stop():
    """Stop the robot and mark the client as not running."""
    global run
    _send_command("stop")
    run = False
def quickstart():
    """Begin a run using the agent's quick-start sequence."""
    global run
    _send_command("quickstart")
    run = True
def start():
    """Begin a normal run."""
    global run
    _send_command("start")
    run = True
def scanWidth():
    """Ask the agent to scan the corridor width."""
    _send_command("scan")
def onKeyDown(key):
    """Keyboard dispatcher for the maze client.

    ESC/SPACE stop the robot; RETURN/BACKSLASH start it (quick or normal);
    s scans the corridor; r/o/p read the distance sensor at an angle;
    UP/DOWN change speed; LEFT/RIGHT and the bracket keys tune the four
    gains (shift selects between the two gains of each pair); g recalibrates
    the gyro. Connection keys are handled by pyros.gcc first.
    """
    global run, angle, speed, gyroAngle, sideGain, forwardGain, distanceGain, cornerGain
    if pyros.gcc.handleConnectKeyDown(key):
        pass
    elif key == pygame.K_ESCAPE:
        stop()
        # Give the stop command time to be delivered before anything else.
        pyros.loop(0.7)
    elif key == pygame.K_SPACE:
        stop()
    elif key == pygame.K_RETURN:
        print("** Starting...")
        run = True
        quickstart()
    elif key == pygame.K_BACKSLASH:
        print("** Starting...")
        run = True
        start()
    elif key == pygame.K_s:
        scanWidth()
        # pyros.publish("sensor/distance/scan", "")
        # print("** Asked for scan")
    elif key == pygame.K_r:
        pyros.publish("sensor/distance/read", str(angle))
        print("** Asked for distance")
    elif key == pygame.K_o:
        # Step the sensor angle left, clamped to -90 degrees.
        angle -= 22.5
        if angle < -90:
            angle = -90
        pyros.publish("sensor/distance/read", str(angle))
        print("** Asked for distance")
    elif key == pygame.K_p:
        # Step the sensor angle right, clamped to +90 degrees.
        angle += 22.5
        if angle > 90:
            angle = 90
        pyros.publish("sensor/distance/read", str(angle))
        print("** Asked for distance")
    elif key == pygame.K_DOWN:
        # Decrease speed with a coarser step at higher speeds; minimum 1.
        if speed > 100:
            speed -= 50
        elif speed > 50:
            speed -= 5
        else:
            speed -= 1
        if speed < 1:
            speed = 1
        # NOTE(review): publishes an int here while connected() publishes
        # str(speed) — confirm the agent accepts both payload types.
        pyros.publish("maze/speed", int(speed))
    elif key == pygame.K_UP:
        # Increase speed with a coarser step at higher speeds; maximum 300.
        if speed >= 100:
            speed += 50
        elif speed >= 50:
            speed += 10
        else:
            speed += 1
        if speed > 300:
            speed = 300
        pyros.publish("maze/speed", int(speed))
    elif key == pygame.K_LEFT and (pyros.gcc.rshift or pyros.gcc.lshift):
        # Shift+LEFT/RIGHT tune the side gain, clamped to [0.1, 10].
        sideGain -= 0.1
        if sideGain < 0.1:
            sideGain = 0.1
        pyros.publish("maze/sideGain", str(float(round(sideGain, 1))))
    elif key == pygame.K_RIGHT and (pyros.gcc.rshift or pyros.gcc.lshift):
        sideGain += 0.1
        if sideGain > 10:
            sideGain = 10
        pyros.publish("maze/sideGain", str(float(round(sideGain, 1))))
    elif key == pygame.K_LEFT and not pyros.gcc.rshift and not pyros.gcc.lshift:
        # Plain LEFT/RIGHT tune the forward gain, clamped to [0.1, 10].
        forwardGain -= 0.1
        if forwardGain < 0.1:
            forwardGain = 0.1
        pyros.publish("maze/forwardGain", str(float(round(forwardGain, 1))))
    elif key == pygame.K_RIGHT and not pyros.gcc.rshift and not pyros.gcc.lshift:
        forwardGain += 0.1
        if forwardGain > 10:
            forwardGain = 10
        pyros.publish("maze/forwardGain", str(float(round(forwardGain, 1))))
    elif key == pygame.K_LEFTBRACKET and (pyros.gcc.rshift or pyros.gcc.lshift):
        # Shift+brackets tune the distance gain, clamped to [0.1, 10].
        distanceGain -= 0.1
        if distanceGain < 0.1:
            distanceGain = 0.1
        pyros.publish("maze/distanceGain", str(float(round(distanceGain, 1))))
    elif key == pygame.K_RIGHTBRACKET and (pyros.gcc.rshift or pyros.gcc.lshift):
        distanceGain += 0.1
        if distanceGain > 10:
            distanceGain = 10
        pyros.publish("maze/distanceGain", str(float(round(distanceGain, 1))))
    elif key == pygame.K_LEFTBRACKET and not pyros.gcc.rshift and not pyros.gcc.lshift:
        # Plain brackets tune the corner gain, clamped to [0.1, 10].
        cornerGain -= 0.1
        if cornerGain < 0.1:
            cornerGain = 0.1
        pyros.publish("maze/cornerGain", str(float(round(cornerGain, 1))))
    elif key == pygame.K_RIGHTBRACKET and not pyros.gcc.rshift and not pyros.gcc.lshift:
        cornerGain += 0.1
        if cornerGain > 10:
            cornerGain = 10
        pyros.publish("maze/cornerGain", str(float(round(cornerGain, 1))))
    elif key == pygame.K_g:
        # Recalibrate the gyro (50 samples) and reset the local angle.
        pyros.publish("sensor/gyro/continuous", "calibrate,50")
        gyroAngle = 0
def onKeyUp(key):
    """Forward key releases to the connect-key handler; return value unused."""
    pyros.gcc.handleConnectKeyUp(key)
# Subscribe to the agent's telemetry topics before connecting.
pyros.subscribe("maze/data/distances", handleDistances)
pyros.subscribe("maze/data/gyro", handleGyro)
pyros.subscribe("maze/data/corridor", handleDataCorridor)
pyros.subscribe("maze/data/idealDistance", handleDataIdealDistance)
pyros.init("maze-client-#", unique=True, host=pyros.gcc.getHost(), port=pyros.gcc.getPort(), waitToConnect=False, onConnected=connected)
# Main UI loop: pump pygame events, service pyros, and redraw telemetry.
while True:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            pygame.quit()
            sys.exit()
    pyros.pygamehelper.processKeys(onKeyDown, onKeyUp)
    pyros.loop(0.03)
    pyros.gccui.background(True)
    pyros.agent.keepAgents()
    # Draw the key/value telemetry column; hpos tracks the vertical offset.
    hpos = 50
    hpos = pyros.gccui.drawKeyValue("Stopped", str(not run), 8, hpos)
    hpos = pyros.gccui.drawKeyValue("Angle", str(angle), 8, hpos)
    hpos = pyros.gccui.drawKeyValue("Speed", str(speed), 8, hpos)
    avgDistance1String = str(format(avgDistance1, '.2f'))
    avgDistance2String = str(format(avgDistance2, '.2f'))
    hpos +=40
    hpos = pyros.gccui.drawKeyValue("Dist @ " + str(distanceDeg1), str(distance1) + ", avg: " + avgDistance1String, 8, hpos)
    hpos = pyros.gccui.drawKeyValue("Dist @ " + str(distanceDeg2), str(distance2) + ", avg: " + avgDistance2String, 8, hpos)
    hpos = pyros.gccui.drawKeyValue("Gyro angle", str(round(gyroAngle, 2)), 8, hpos)
    hpos +=40
    hpos = pyros.gccui.drawKeyValue("Selected", str(driveAngle), 8, hpos)
    hpos = pyros.gccui.drawKeyValue("Side Gain", str(round(sideGain, 1)), 8, hpos)
    hpos = pyros.gccui.drawKeyValue("Fwd Gain", str(round(forwardGain, 1)), 8, hpos)
    hpos = pyros.gccui.drawKeyValue("Dist Gain", str(round(distanceGain, 1)), 8, hpos)
    hpos = pyros.gccui.drawKeyValue("Cor Gain", str(round(cornerGain, 1)), 8, hpos)
    hpos = pyros.gccui.drawKeyValue("Corridor", str(round(corridorWidth, 1)), 8, hpos)
    hpos = pyros.gccui.drawKeyValue("Ideal dist", str(round(idealDistance, 1)), 8, hpos)
    # Help line pinned to the bottom edge of the window.
    pyros.gccui.drawSmallText("g-calibrate, s-scan, r-read, o/p-change angle, DOWN/UP-speed, LEFT/RIGHT-gain, SPACE-stop, RETURN-start", (8, screen.get_height() - pyros.gccui.smallFont.get_height()))
    pyros.gcc.drawConnection()
    pyros.gccui.frameEnd()
|
[
"pygame.quit",
"pyros.agent.keepAgents",
"pyros.gccui.initAll",
"pyros.gcc.drawConnection",
"pygame.event.get",
"pyros.publish",
"pyros.pygamehelper.processKeys",
"pyros.gccui.background",
"pyros.gcc.handleConnectKeyDown",
"pyros.loop",
"pyros.gccui.frameEnd",
"pyros.gcc.getPort",
"pyros.gcc.getHost",
"pyros.agent.init",
"pyros.gccui.smallFont.get_height",
"sys.exit",
"pyros.subscribe",
"pyros.gcc.handleConnectKeyUp"
] |
[((746, 783), 'pyros.gccui.initAll', 'pyros.gccui.initAll', (['(600, 600)', '(True)'], {}), '((600, 600), True)\n', (765, 783), False, 'import pyros\n'), ((6403, 6458), 'pyros.subscribe', 'pyros.subscribe', (['"""maze/data/distances"""', 'handleDistances'], {}), "('maze/data/distances', handleDistances)\n", (6418, 6458), False, 'import pyros\n'), ((6459, 6504), 'pyros.subscribe', 'pyros.subscribe', (['"""maze/data/gyro"""', 'handleGyro'], {}), "('maze/data/gyro', handleGyro)\n", (6474, 6504), False, 'import pyros\n'), ((6505, 6562), 'pyros.subscribe', 'pyros.subscribe', (['"""maze/data/corridor"""', 'handleDataCorridor'], {}), "('maze/data/corridor', handleDataCorridor)\n", (6520, 6562), False, 'import pyros\n'), ((6563, 6630), 'pyros.subscribe', 'pyros.subscribe', (['"""maze/data/idealDistance"""', 'handleDataIdealDistance'], {}), "('maze/data/idealDistance', handleDataIdealDistance)\n", (6578, 6630), False, 'import pyros\n'), ((861, 908), 'pyros.agent.init', 'pyros.agent.init', (['pyros.client', '"""maze-agent.py"""'], {}), "(pyros.client, 'maze-agent.py')\n", (877, 908), False, 'import pyros\n'), ((913, 943), 'pyros.publish', 'pyros.publish', (['"""maze/ping"""', '""""""'], {}), "('maze/ping', '')\n", (926, 943), False, 'import pyros\n'), ((2318, 2355), 'pyros.publish', 'pyros.publish', (['"""maze/command"""', '"""stop"""'], {}), "('maze/command', 'stop')\n", (2331, 2355), False, 'import pyros\n'), ((2411, 2454), 'pyros.publish', 'pyros.publish', (['"""maze/command"""', '"""quickstart"""'], {}), "('maze/command', 'quickstart')\n", (2424, 2454), False, 'import pyros\n'), ((2504, 2542), 'pyros.publish', 'pyros.publish', (['"""maze/command"""', '"""start"""'], {}), "('maze/command', 'start')\n", (2517, 2542), False, 'import pyros\n'), ((2581, 2618), 'pyros.publish', 'pyros.publish', (['"""maze/command"""', '"""scan"""'], {}), "('maze/command', 'scan')\n", (2594, 2618), False, 'import pyros\n'), ((2738, 2773), 'pyros.gcc.handleConnectKeyDown', 
'pyros.gcc.handleConnectKeyDown', (['key'], {}), '(key)\n', (2768, 2773), False, 'import pyros\n'), ((6353, 6386), 'pyros.gcc.handleConnectKeyUp', 'pyros.gcc.handleConnectKeyUp', (['key'], {}), '(key)\n', (6381, 6386), False, 'import pyros\n'), ((6800, 6818), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (6816, 6818), False, 'import pygame\n'), ((6912, 6962), 'pyros.pygamehelper.processKeys', 'pyros.pygamehelper.processKeys', (['onKeyDown', 'onKeyUp'], {}), '(onKeyDown, onKeyUp)\n', (6942, 6962), False, 'import pyros\n'), ((6968, 6984), 'pyros.loop', 'pyros.loop', (['(0.03)'], {}), '(0.03)\n', (6978, 6984), False, 'import pyros\n'), ((6989, 7017), 'pyros.gccui.background', 'pyros.gccui.background', (['(True)'], {}), '(True)\n', (7011, 7017), False, 'import pyros\n'), ((7022, 7046), 'pyros.agent.keepAgents', 'pyros.agent.keepAgents', ([], {}), '()\n', (7044, 7046), False, 'import pyros\n'), ((8542, 8568), 'pyros.gcc.drawConnection', 'pyros.gcc.drawConnection', ([], {}), '()\n', (8566, 8568), False, 'import pyros\n'), ((8573, 8595), 'pyros.gccui.frameEnd', 'pyros.gccui.frameEnd', ([], {}), '()\n', (8593, 8595), False, 'import pyros\n'), ((6678, 6697), 'pyros.gcc.getHost', 'pyros.gcc.getHost', ([], {}), '()\n', (6695, 6697), False, 'import pyros\n'), ((6704, 6723), 'pyros.gcc.getPort', 'pyros.gcc.getPort', ([], {}), '()\n', (6721, 6723), False, 'import pyros\n'), ((2844, 2859), 'pyros.loop', 'pyros.loop', (['(0.7)'], {}), '(0.7)\n', (2854, 2859), False, 'import pyros\n'), ((6870, 6883), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (6881, 6883), False, 'import pygame\n'), ((6896, 6906), 'sys.exit', 'sys.exit', ([], {}), '()\n', (6904, 6906), False, 'import sys\n'), ((8500, 8534), 'pyros.gccui.smallFont.get_height', 'pyros.gccui.smallFont.get_height', ([], {}), '()\n', (8532, 8534), False, 'import pyros\n'), ((6248, 6303), 'pyros.publish', 'pyros.publish', (['"""sensor/gyro/continuous"""', '"""calibrate,50"""'], {}), "('sensor/gyro/continuous', 
'calibrate,50')\n", (6261, 6303), False, 'import pyros\n')]
|
import requests
import time
from decimal import Decimal
# API key for CoinMarketCap; must be set before get_cmc_rates() is called.
CMC_API_KEY = None
class ExchangeRates():
    """Fetch crypto -> USD exchange rates from the CoinMarketCap API."""
    def __init__(self):
        self.limit = 200
        self.cmc_url = 'https://pro-api.coinmarketcap.com/'
        self.cmc_endpoint = 'v1/cryptocurrency/listings/latest?sort=market_cap&start=1&limit={}'.format(self.limit)
        self.cmc_header = {'X-CMC_PRO_API_KEY': CMC_API_KEY}
    def run(self):
        """Return a dict mapping 'SYMBOL_USD' -> Decimal price."""
        rates = self.get_cmc_rates()
        rates = self.add_manual_rates(rates)
        return rates
    def decode(self, arg):
        """Normalise a rates key to a native str.

        Bug fix: the original tested ``isinstance(arg, unicode)``, a
        Python 2-only name that raises NameError under Python 3. Decode
        bytes to str instead and pass str through unchanged.
        """
        if isinstance(arg, bytes):
            return arg.decode()
        return arg
    def get_cmc_rates(self):
        """Query CoinMarketCap and return {symbol_USD: Decimal price}."""
        url = self.cmc_url + self.cmc_endpoint
        response = requests.get(url, headers=self.cmc_header)
        r = response.json()
        self.check_api_status(r)
        rates = {}
        for _ in r['data']:
            rates['{}_USD'.format(_['symbol'])] = Decimal(_['quote']['USD']['price'])
        # Normalise key types so lookups behave the same everywhere.
        rates = {self.decode(k): v for k,v in rates.items()}
        return rates
    def add_manual_rates(self, rates):
        """Add USD identity and aliases (IOTA for MIOTA, XRB for NANO)."""
        rates['USD_USD'] = Decimal(1.0)
        rates['IOTA_USD'] = rates['MIOTA_USD']
        rates['XRB_USD'] = rates['NANO_USD']
        return rates
    def check_api_status(self, response):
        """Raise with a helpful message when the API key is missing."""
        if str(response['status']['error_message']) == "API key missing.":
            raise Exception("API Key Missing. Get free API key from https://coinmarketcap.com/api/ and enter on current_exchange_rates.py")
|
[
"requests.get",
"decimal.Decimal"
] |
[((722, 764), 'requests.get', 'requests.get', (['url'], {'headers': 'self.cmc_header'}), '(url, headers=self.cmc_header)\n', (734, 764), False, 'import requests\n'), ((1109, 1121), 'decimal.Decimal', 'Decimal', (['(1.0)'], {}), '(1.0)\n', (1116, 1121), False, 'from decimal import Decimal\n'), ((924, 959), 'decimal.Decimal', 'Decimal', (["_['quote']['USD']['price']"], {}), "(_['quote']['USD']['price'])\n", (931, 959), False, 'from decimal import Decimal\n')]
|
import logging
import colorlog
__version__ = "0.0.1"
def format_logger(logger: logging.Logger, debug: bool) -> None:
    """Attach a colourised stream handler to *logger* and set its level.

    Args:
        logger: the logger to configure; a StreamHandler is appended to it.
        debug: when True the level is DEBUG, otherwise INFO.
    """
    message_format = "%(log_color)s[%(asctime)s.%(msecs).03d pid#%(process)d# %(levelname).1s] %(message)s"
    date_format = "%Y%m%dT%H:%M:%S"
    log_colors = {
        "DEBUG": "cyan",
        "INFO": "green",
        "WARNING": "yellow",
        "ERROR": "red",
        "CRITICAL": "white,bg_red",
    }
    # Renamed from `format`: the original local shadowed the builtin.
    formatter = colorlog.ColoredFormatter(
        message_format, datefmt=date_format, log_colors=log_colors
    )
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    logger.addHandler(stream_handler)
    logger.setLevel(logging.DEBUG if debug else logging.INFO)
|
[
"colorlog.ColoredFormatter",
"logging.StreamHandler"
] |
[((433, 523), 'colorlog.ColoredFormatter', 'colorlog.ColoredFormatter', (['message_format'], {'datefmt': 'date_format', 'log_colors': 'log_colors'}), '(message_format, datefmt=date_format, log_colors=\n log_colors)\n', (458, 523), False, 'import colorlog\n'), ((561, 584), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (582, 584), False, 'import logging\n')]
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv, fields
class res_users(osv.Model):
    # Extend users with a default sales team used to prefill CRM documents.
    _inherit = 'res.users'
    _columns = {
        'default_section_id': fields.many2one('crm.case.section', 'Default Sales Team'),
    }
    def __init__(self, pool, cr):
        # Make 'default_section_id' editable by users on their own record by
        # adding it to SELF_WRITEABLE_FIELDS.
        init_res = super(res_users, self).__init__(pool, cr)
        # duplicate list to avoid modifying the original reference
        self.SELF_WRITEABLE_FIELDS = list(self.SELF_WRITEABLE_FIELDS)
        self.SELF_WRITEABLE_FIELDS.extend(['default_section_id'])
        # NOTE(review): returning from __init__ is unusual; kept because the
        # original follows the legacy OpenERP osv pattern.
        return init_res
class sale_order(osv.osv):
    # Link sale orders to a CRM sales team and lead categories.
    _inherit = 'sale.order'
    _columns = {
        'section_id': fields.many2one('crm.case.section', 'Sales Team'),
        'categ_ids': fields.many2many('crm.case.categ', 'sale_order_category_rel', 'order_id', 'category_id', 'Categories', \
            domain="['|',('section_id','=',section_id),('section_id','=',False), ('object_id.model', '=', 'crm.lead')]", context="{'object_name': 'crm.lead'}")
    }
    def _get_default_section_id(self, cr, uid, context=None):
        """ Gives default section by checking if present in the context,
        falling back on the current user's default sales team. """
        section_id = self.pool.get('crm.lead')._resolve_section_id_from_context(cr, uid, context=context) or False
        if not section_id:
            section_id = self.pool.get('res.users').browse(cr, uid, uid, context).default_section_id.id or False
        return section_id
    _defaults = {
        'section_id': lambda s, cr, uid, c: s._get_default_section_id(cr, uid, c),
    }
    def _prepare_invoice(self, cr, uid, order, lines, context=None):
        # Propagate the order's sales team onto the generated invoice values.
        invoice_vals = super(sale_order, self)._prepare_invoice(cr, uid, order, lines, context=context)
        if order.section_id and order.section_id.id:
            invoice_vals['section_id'] = order.section_id.id
        return invoice_vals
class sale_crm_lead(osv.Model):
    # Extend leads so a salesperson change also updates the sales team.
    _inherit = 'crm.lead'
    def on_change_user(self, cr, uid, ids, user_id, context=None):
        """ Override of on change user_id on lead/opportunity; when having sale
        the new logic is :
        - use user.default_section_id
        - or fallback on previous behavior """
        if user_id:
            user = self.pool.get('res.users').browse(cr, uid, user_id, context=context)
            if user.default_section_id and user.default_section_id.id:
                return {'value': {'section_id': user.default_section_id.id}}
        return super(sale_crm_lead, self).on_change_user(cr, uid, ids, user_id, context=context)
class account_invoice(osv.osv):
    # Tag invoices with a sales team, defaulting to the user's own team.
    _inherit = 'account.invoice'
    _columns = {
        'section_id': fields.many2one('crm.case.section', 'Sales Team'),
    }
    _defaults = {
        'section_id': lambda self,cr,uid,c: self.pool.get('res.users').browse(cr, uid, uid, c).default_section_id.id or False,
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
[
"openerp.osv.fields.many2one",
"openerp.osv.fields.many2many"
] |
[((1118, 1175), 'openerp.osv.fields.many2one', 'fields.many2one', (['"""crm.case.section"""', '"""Default Sales Team"""'], {}), "('crm.case.section', 'Default Sales Team')\n", (1133, 1175), False, 'from openerp.osv import osv, fields\n'), ((1601, 1650), 'openerp.osv.fields.many2one', 'fields.many2one', (['"""crm.case.section"""', '"""Sales Team"""'], {}), "('crm.case.section', 'Sales Team')\n", (1616, 1650), False, 'from openerp.osv import osv, fields\n'), ((1673, 1937), 'openerp.osv.fields.many2many', 'fields.many2many', (['"""crm.case.categ"""', '"""sale_order_category_rel"""', '"""order_id"""', '"""category_id"""', '"""Categories"""'], {'domain': '"""[\'|\',(\'section_id\',\'=\',section_id),(\'section_id\',\'=\',False), (\'object_id.model\', \'=\', \'crm.lead\')]"""', 'context': '"""{\'object_name\': \'crm.lead\'}"""'}), '(\'crm.case.categ\', \'sale_order_category_rel\', \'order_id\',\n \'category_id\', \'Categories\', domain=\n "[\'|\',(\'section_id\',\'=\',section_id),(\'section_id\',\'=\',False), (\'object_id.model\', \'=\', \'crm.lead\')]"\n , context="{\'object_name\': \'crm.lead\'}")\n', (1689, 1937), False, 'from openerp.osv import osv, fields\n'), ((3579, 3628), 'openerp.osv.fields.many2one', 'fields.many2one', (['"""crm.case.section"""', '"""Sales Team"""'], {}), "('crm.case.section', 'Sales Team')\n", (3594, 3628), False, 'from openerp.osv import osv, fields\n')]
|
import datetime
import json
from http.client import CannotSendRequest
from json import JSONEncoder
from uuid import UUID
from xmlrpc.client import ServerProxy
from arcsecond import ArcsecondAPI
from playhouse.shortcuts import model_to_dict
from oort.shared.config import (
get_oort_config_folder_section,
get_oort_config_upload_folder_sections,
get_oort_config_value
)
from oort.shared.models import (Status, Upload)
class BoostedJSONEncoder(JSONEncoder):
    """JSON encoder that also serialises datetimes and UUIDs.

    Datetimes become "YYYY-MM-DD HH:MM:SS[+tz]" strings; UUIDs become
    their canonical string form. Anything else defers to JSONEncoder.
    """
    def default(self, o):
        if isinstance(o, UUID):
            return str(o)
        if isinstance(o, datetime.datetime):
            return o.strftime('%Y-%m-%d %H:%M:%S%z')
        return JSONEncoder.default(self, o)
class Context:
    """Context class used only once, and associated with the currently
    logged in user, not a potential custom astronomer.

    Aggregates Arcsecond account state, the supervisord-managed uploader
    process, and upload statistics from the local peewee database, and
    renders everything as server-sent-event payloads.
    """
    def __init__(self, config):
        # NOTE(review): bool('False') is True, so the string default makes
        # debug truthy whenever the key is absent — confirm config stores
        # real booleans, not strings.
        self.debug = bool(config.get('debug', 'False'))
        self.start_time = datetime.datetime.utcnow()
        self.login_error = config.get('login_error')
        self.username = ArcsecondAPI.username(debug=self.debug)
        self.is_authenticated = ArcsecondAPI.is_logged_in(debug=self.debug)
        self._memberships = ArcsecondAPI.memberships(debug=self.debug)
        # XML-RPC proxy to the local supervisord instance that manages the
        # batch uploader process.
        self._uploader = ServerProxy('http://localhost:9001/RPC2')
    def _get_uploader_state(self):
        # Return supervisord's state name for the uploader process, or a
        # truncated error string when supervisord is unreachable.
        try:
            _info = self._uploader.supervisor.getProcessInfo('oort-batch-uploader')
        except (ConnectionRefusedError, CannotSendRequest) as e:
            return str(e)[:30] + '...'
        else:
            return _info.get('statename')
    def updateUploader(self, action):
        """Start or stop the uploader process via supervisord.

        Only acts when the requested transition is valid ('stop' while
        RUNNING, 'start' while STOPPED); otherwise does nothing.
        """
        _info = self._uploader.supervisor.getProcessInfo('oort-batch-uploader')
        if action == 'stop' and _info.get('statename') == 'RUNNING':
            self._uploader.supervisor.stopProcess('oort-batch-uploader')
        elif action == 'start' and _info.get('statename') == 'STOPPED':
            self._uploader.supervisor.startProcess('oort-batch-uploader')
    def to_dict(self):
        # Snapshot of the client state sent to the web UI.
        return {
            'username': self.username,
            'isAuthenticated': self.is_authenticated,
            'loginError': self.login_error,
            'debug': self.debug,
            'startTime': self.start_time.isoformat(),
            'folders': get_oort_config_upload_folder_sections(),
            'uploaderState': self._get_uploader_state()
        }
    def _get_queries_dicts(self, selected_path: str):
        # Build per-status upload queries scoped to the selected folder and
        # return serialised rows plus summary counts.
        pending_query = Upload.select() \
            .where(Upload.file_path.startswith(selected_path)) \
            .where(Upload.status == Status.NEW.value)
        current_query = Upload.select() \
            .where(Upload.file_path.startswith(selected_path)) \
            .where((Upload.status == Status.PREPARING.value) | (Upload.status == Status.UPLOADING.value))
        error_query = Upload.select() \
            .where(Upload.file_path.startswith(selected_path)) \
            .where(Upload.status == Status.ERROR.value)
        # NOTE(review): name says one day but the window is 7 days.
        one_day_back = datetime.datetime.now() - datetime.timedelta(days=7)
        finished_query = Upload.select() \
            .where(Upload.file_path.startswith(selected_path)) \
            .where(Upload.status == Status.OK.value) \
            .where(Upload.ended >= one_day_back)
        # 'hidden' = successfully finished uploads older than the window.
        hidden_query = Upload.select() \
            .where(Upload.file_path.startswith(selected_path)) \
            .where(Upload.status == Status.OK.value) \
            .where(Upload.ended < one_day_back)
        hidden_count = hidden_query.count()
        pending_count = pending_query.count()
        current_count = current_query.count()
        error_count = error_query.count()
        finished_count = finished_query.count()
        skipped_count = finished_query.where(Upload.substatus.startswith('skipped')).count()
        # def _ff(u):
        # fill and flatten
        # u['night_log'] = {}
        # if u.get('dataset', None) is not None:
        # ds = Dataset.get(Dataset.uuid == u['dataset']['uuid'])
        # if ds.observation is not None:
        # u['observation'] = model_to_dict(ds.observation, recurse=False)
        # if ds.calibration is not None:
        # u['calibration'] = model_to_dict(ds.calibration, recurse=False)
        # obs_or_calib = ds.observation or ds.calibration
        # if obs_or_calib:
        # u['night_log'] = model_to_dict(obs_or_calib.night_log, recurse=False)
        # return u
        # Row lists are capped at 1000 entries to bound payload size.
        return {
            'counts': {
                'hidden': hidden_count,
                'pending': pending_count,
                'current': current_count,
                'error': error_count,
                'finished': finished_count,
                'skipped': skipped_count
            },
            'pending': [model_to_dict(u, max_depth=1) for u in
                        pending_query.limit(1000).order_by(Upload.created).iterator()],
            'current': [model_to_dict(u, max_depth=1) for u in
                        current_query.order_by(Upload.created).iterator()],
            'finished': [model_to_dict(u, max_depth=1) for u in
                         finished_query.limit(1000).order_by(-Upload.ended).iterator()],
            'errors': [model_to_dict(u, max_depth=1) for u in
                       error_query.limit(1000).order_by(Upload.created).iterator()]
        }
    def get_yield_string(self) -> str:
        """Render the full state as one server-sent-event data packet."""
        data = {'state': self.to_dict()}
        selected_folder = get_oort_config_value('server', 'selected_folder')
        selected_section = get_oort_config_folder_section(selected_folder)
        if selected_section:
            data.update(**self._get_queries_dicts(selected_section.get('path')))
            subdomain = selected_section.get('subdomain')
            if subdomain:
                role = self._memberships.get(subdomain)
                if role:
                    data['state'].update(membership=(subdomain, role))
        json_data = json.dumps(data, cls=BoostedJSONEncoder)
        # Having 2-lines return (that is, one empty line) is key to distinguish response streams packets!
        return f"data:{json_data}\n\n"
|
[
"oort.shared.config.get_oort_config_upload_folder_sections",
"arcsecond.ArcsecondAPI.is_logged_in",
"oort.shared.config.get_oort_config_folder_section",
"oort.shared.models.Upload.select",
"oort.shared.config.get_oort_config_value",
"json.dumps",
"datetime.datetime.utcnow",
"arcsecond.ArcsecondAPI.username",
"playhouse.shortcuts.model_to_dict",
"datetime.timedelta",
"oort.shared.models.Upload.substatus.startswith",
"datetime.datetime.now",
"xmlrpc.client.ServerProxy",
"arcsecond.ArcsecondAPI.memberships",
"oort.shared.models.Upload.file_path.startswith"
] |
[((942, 968), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (966, 968), False, 'import datetime\n'), ((1046, 1085), 'arcsecond.ArcsecondAPI.username', 'ArcsecondAPI.username', ([], {'debug': 'self.debug'}), '(debug=self.debug)\n', (1067, 1085), False, 'from arcsecond import ArcsecondAPI\n'), ((1118, 1161), 'arcsecond.ArcsecondAPI.is_logged_in', 'ArcsecondAPI.is_logged_in', ([], {'debug': 'self.debug'}), '(debug=self.debug)\n', (1143, 1161), False, 'from arcsecond import ArcsecondAPI\n'), ((1190, 1232), 'arcsecond.ArcsecondAPI.memberships', 'ArcsecondAPI.memberships', ([], {'debug': 'self.debug'}), '(debug=self.debug)\n', (1214, 1232), False, 'from arcsecond import ArcsecondAPI\n'), ((1258, 1299), 'xmlrpc.client.ServerProxy', 'ServerProxy', (['"""http://localhost:9001/RPC2"""'], {}), "('http://localhost:9001/RPC2')\n", (1269, 1299), False, 'from xmlrpc.client import ServerProxy\n'), ((5446, 5496), 'oort.shared.config.get_oort_config_value', 'get_oort_config_value', (['"""server"""', '"""selected_folder"""'], {}), "('server', 'selected_folder')\n", (5467, 5496), False, 'from oort.shared.config import get_oort_config_folder_section, get_oort_config_upload_folder_sections, get_oort_config_value\n'), ((5524, 5571), 'oort.shared.config.get_oort_config_folder_section', 'get_oort_config_folder_section', (['selected_folder'], {}), '(selected_folder)\n', (5554, 5571), False, 'from oort.shared.config import get_oort_config_folder_section, get_oort_config_upload_folder_sections, get_oort_config_value\n'), ((5939, 5979), 'json.dumps', 'json.dumps', (['data'], {'cls': 'BoostedJSONEncoder'}), '(data, cls=BoostedJSONEncoder)\n', (5949, 5979), False, 'import json\n'), ((2288, 2328), 'oort.shared.config.get_oort_config_upload_folder_sections', 'get_oort_config_upload_folder_sections', ([], {}), '()\n', (2326, 2328), False, 'from oort.shared.config import get_oort_config_folder_section, get_oort_config_upload_folder_sections, get_oort_config_value\n'), 
((3012, 3035), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3033, 3035), False, 'import datetime\n'), ((3038, 3064), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(7)'}), '(days=7)\n', (3056, 3064), False, 'import datetime\n'), ((4763, 4792), 'playhouse.shortcuts.model_to_dict', 'model_to_dict', (['u'], {'max_depth': '(1)'}), '(u, max_depth=1)\n', (4776, 4792), False, 'from playhouse.shortcuts import model_to_dict\n'), ((4914, 4943), 'playhouse.shortcuts.model_to_dict', 'model_to_dict', (['u'], {'max_depth': '(1)'}), '(u, max_depth=1)\n', (4927, 4943), False, 'from playhouse.shortcuts import model_to_dict\n'), ((5054, 5083), 'playhouse.shortcuts.model_to_dict', 'model_to_dict', (['u'], {'max_depth': '(1)'}), '(u, max_depth=1)\n', (5067, 5083), False, 'from playhouse.shortcuts import model_to_dict\n'), ((5205, 5234), 'playhouse.shortcuts.model_to_dict', 'model_to_dict', (['u'], {'max_depth': '(1)'}), '(u, max_depth=1)\n', (5218, 5234), False, 'from playhouse.shortcuts import model_to_dict\n'), ((2512, 2554), 'oort.shared.models.Upload.file_path.startswith', 'Upload.file_path.startswith', (['selected_path'], {}), '(selected_path)\n', (2539, 2554), False, 'from oort.shared.models import Status, Upload\n'), ((2674, 2716), 'oort.shared.models.Upload.file_path.startswith', 'Upload.file_path.startswith', (['selected_path'], {}), '(selected_path)\n', (2701, 2716), False, 'from oort.shared.models import Status, Upload\n'), ((2886, 2928), 'oort.shared.models.Upload.file_path.startswith', 'Upload.file_path.startswith', (['selected_path'], {}), '(selected_path)\n', (2913, 2928), False, 'from oort.shared.models import Status, Upload\n'), ((3759, 3797), 'oort.shared.models.Upload.substatus.startswith', 'Upload.substatus.startswith', (['"""skipped"""'], {}), "('skipped')\n", (3786, 3797), False, 'from oort.shared.models import Status, Upload\n'), ((2475, 2490), 'oort.shared.models.Upload.select', 'Upload.select', ([], {}), '()\n', (2488, 
2490), False, 'from oort.shared.models import Status, Upload\n'), ((2637, 2652), 'oort.shared.models.Upload.select', 'Upload.select', ([], {}), '()\n', (2650, 2652), False, 'from oort.shared.models import Status, Upload\n'), ((2849, 2864), 'oort.shared.models.Upload.select', 'Upload.select', ([], {}), '()\n', (2862, 2864), False, 'from oort.shared.models import Status, Upload\n'), ((3127, 3169), 'oort.shared.models.Upload.file_path.startswith', 'Upload.file_path.startswith', (['selected_path'], {}), '(selected_path)\n', (3154, 3169), False, 'from oort.shared.models import Status, Upload\n'), ((3338, 3380), 'oort.shared.models.Upload.file_path.startswith', 'Upload.file_path.startswith', (['selected_path'], {}), '(selected_path)\n', (3365, 3380), False, 'from oort.shared.models import Status, Upload\n'), ((3090, 3105), 'oort.shared.models.Upload.select', 'Upload.select', ([], {}), '()\n', (3103, 3105), False, 'from oort.shared.models import Status, Upload\n'), ((3301, 3316), 'oort.shared.models.Upload.select', 'Upload.select', ([], {}), '()\n', (3314, 3316), False, 'from oort.shared.models import Status, Upload\n')]
|
#!/usr/bin/env python3
import os
def main(args):
    """Emit each input file as a Java ExampleProgram registration snippet.

    Every line of each file is escaped (backslash, double quote, newline)
    and printed as a '+'-joined chain of Java string literals.

    Args:
        args: paths of the files to convert.
    """
    # The translation table is loop-invariant: build it once up front.
    table = str.maketrans({'"': '\\"', '\\': '\\\\', '\n': '\\n'})
    for arg in args:
        with open(arg, 'r') as f:
            quoted_lines = ['"{}"'.format(line.translate(table)) for line in f]
        print(' examples.add(new ExampleProgram(')
        print(' "{}",'.format(os.path.basename(arg)))
        for quoted_line in quoted_lines[:-1]:
            print(" {} +".format(quoted_line))
        # Bug fix: guard against empty input files — the original indexed
        # quoted_lines[-1] unconditionally and raised IndexError.
        if quoted_lines:
            print(" {},".format(quoted_lines[-1]))
        print(' cpProperties')
        print(' ));')
if __name__ == '__main__':
    # CLI entry point: each command-line argument is a file to convert.
    import sys
    main(sys.argv[1:])
|
[
"os.path.basename"
] |
[((486, 507), 'os.path.basename', 'os.path.basename', (['arg'], {}), '(arg)\n', (502, 507), False, 'import os\n')]
|
import FWCore.ParameterSet.Config as cms
from PhysicsTools.PatAlgos.recoLayer0.jetCorrFactors_cfi import *
from JetMETCorrections.Configuration.JetCorrectionServicesAllAlgos_cff import *
## for scheduled mode
# Task wrapping the pat jet correction factors producer, plus a Sequence
# so the corrections can be scheduled in a CMSSW path.
patJetCorrectionsTask = cms.Task(patJetCorrFactors)
patJetCorrections = cms.Sequence(patJetCorrectionsTask)
|
[
"FWCore.ParameterSet.Config.Sequence",
"FWCore.ParameterSet.Config.Task"
] |
[((235, 262), 'FWCore.ParameterSet.Config.Task', 'cms.Task', (['patJetCorrFactors'], {}), '(patJetCorrFactors)\n', (243, 262), True, 'import FWCore.ParameterSet.Config as cms\n'), ((283, 318), 'FWCore.ParameterSet.Config.Sequence', 'cms.Sequence', (['patJetCorrectionsTask'], {}), '(patJetCorrectionsTask)\n', (295, 318), True, 'import FWCore.ParameterSet.Config as cms\n')]
|
# this file uses 'mbox.txt' and gets the mail address from that file
import re
# Prompt for the mailbox file to scan (e.g. 'mbox.txt').
# Bug fix: the prompt misspelled "name" as "nane"; the bare except is
# narrowed to OSError so programming errors are not swallowed.
file_name = input("Please enter file name: ")
try:
    file_handle = open(file_name)
except OSError:
    # NOTE: execution deliberately continues as in the original, so the
    # later call will raise NameError because file_handle is unbound.
    print("No such file found")
def example1(fh):
    """Print every whitespace-delimited token containing '@' from each line.

    Args:
        fh: an iterable of text lines (e.g. an open file handle).
    """
    # Raw string avoids the invalid-escape DeprecationWarning of "\S";
    # compiling once hoists the pattern out of the loop.
    pattern = re.compile(r"\S+@\S+")
    for line in fh:
        matches = pattern.findall(line.rstrip())
        if matches:
            print(matches)
example1(file_handle)
|
[
"re.findall"
] |
[((287, 316), 're.findall', 're.findall', (['"""\\\\S+@\\\\S+"""', 'line'], {}), "('\\\\S+@\\\\S+', line)\n", (297, 316), False, 'import re\n')]
|
import tensorflow as tf
from tensorflow.keras import backend as K
from tensorflow.keras.losses import categorical_crossentropy
from config import epsilon, lambda_rpn_regr, lambda_rpn_class, lambda_cls_regr, lambda_cls_class
__all__ = [
'rpn_loss_regr',
'rpn_loss_cls',
'class_loss_regr',
'class_loss_cls',
]
def rpn_loss_regr(num_anchors):
"""Loss function for rpn regression
Args:
num_anchors: number of anchors (9 in here)
Returns:
Smooth L1 loss function
0.5 * x * x (if x_abs < 1)
x_abx - 0.5 (otherwise)
"""
def rpn_loss_regr_fixed_num(y_true, y_pred):
# x is the difference between true value and predicted value
x = y_true[:, :, :, 4 * num_anchors:] - y_pred
# absolute value of x
x_abs = K.abs(x)
# If x_abs <= 1.0, x_bool = 1
x_bool = K.cast(K.less_equal(x_abs, 1.0), tf.float32)
return lambda_rpn_regr * K.sum(y_true[:, :, :, : 4 * num_anchors] * (x_bool * (0.5 * x * x) + (1 - x_bool) * (x_abs - 0.5))) / K.sum(epsilon + y_true[:, :, :, :4 * num_anchors])
return rpn_loss_regr_fixed_num
def rpn_loss_cls(num_anchors):
"""Loss function for rpn classification
Args:
num_anchors: number of anchors (9 in here)
y_true[:, :, :, :9]: [0,1,0,0,0,0,0,1,0] means only the second and the eighth box is valid which contains pos or neg anchor => isValid
y_true[:, :, :, 9:]: [0,1,0,0,0,0,0,0,0] means the second box is pos and eighth box is negative
Returns:
lambda * sum((binary_crossentropy(isValid*y_pred,y_true))) / N
"""
def rpn_loss_cls_fixed_num(y_true, y_pred):
return lambda_rpn_class *\
K.sum(y_true[:, :, :, :num_anchors] * K.binary_crossentropy(y_pred[:, :, :, :], y_true[:, :, :, num_anchors:])) /\
K.sum(epsilon + y_true[:, :, :, :num_anchors])
return rpn_loss_cls_fixed_num
def class_loss_regr(num_classes):
"""Loss function for rpn regression
Args:
num_classes: number of classes
Returns:
Smooth L1 loss function
0.5*x*x (if x_abs < 1)
x_abx - 0.5 (otherwise)
"""
def class_loss_regr_fixed_num(y_true, y_pred):
x = y_true[:, :, 4 * num_classes:] - y_pred
x_abs = K.abs(x)
x_bool = K.cast(K.less_equal(x_abs, 1.0), 'float32')
return lambda_cls_regr *\
K.sum(y_true[:, :, : 4 * num_classes] * (x_bool * (0.5 * x * x) + (1 - x_bool) * (x_abs - 0.5))) /\
K.sum(epsilon + y_true[:, :, :4*num_classes])
return class_loss_regr_fixed_num
def class_loss_cls(y_true, y_pred):
return lambda_cls_class * K.mean(categorical_crossentropy(y_true[0, :, :], y_pred[0, :, :]))
|
[
"tensorflow.keras.backend.sum",
"tensorflow.keras.backend.binary_crossentropy",
"tensorflow.keras.backend.abs",
"tensorflow.keras.losses.categorical_crossentropy",
"tensorflow.keras.backend.less_equal"
] |
[((838, 846), 'tensorflow.keras.backend.abs', 'K.abs', (['x'], {}), '(x)\n', (843, 846), True, 'from tensorflow.keras import backend as K\n'), ((2352, 2360), 'tensorflow.keras.backend.abs', 'K.abs', (['x'], {}), '(x)\n', (2357, 2360), True, 'from tensorflow.keras import backend as K\n'), ((909, 933), 'tensorflow.keras.backend.less_equal', 'K.less_equal', (['x_abs', '(1.0)'], {}), '(x_abs, 1.0)\n', (921, 933), True, 'from tensorflow.keras import backend as K\n'), ((1082, 1132), 'tensorflow.keras.backend.sum', 'K.sum', (['(epsilon + y_true[:, :, :, :4 * num_anchors])'], {}), '(epsilon + y_true[:, :, :, :4 * num_anchors])\n', (1087, 1132), True, 'from tensorflow.keras import backend as K\n'), ((1873, 1919), 'tensorflow.keras.backend.sum', 'K.sum', (['(epsilon + y_true[:, :, :, :num_anchors])'], {}), '(epsilon + y_true[:, :, :, :num_anchors])\n', (1878, 1919), True, 'from tensorflow.keras import backend as K\n'), ((2385, 2409), 'tensorflow.keras.backend.less_equal', 'K.less_equal', (['x_abs', '(1.0)'], {}), '(x_abs, 1.0)\n', (2397, 2409), True, 'from tensorflow.keras import backend as K\n'), ((2586, 2633), 'tensorflow.keras.backend.sum', 'K.sum', (['(epsilon + y_true[:, :, :4 * num_classes])'], {}), '(epsilon + y_true[:, :, :4 * num_classes])\n', (2591, 2633), True, 'from tensorflow.keras import backend as K\n'), ((2744, 2802), 'tensorflow.keras.losses.categorical_crossentropy', 'categorical_crossentropy', (['y_true[0, :, :]', 'y_pred[0, :, :]'], {}), '(y_true[0, :, :], y_pred[0, :, :])\n', (2768, 2802), False, 'from tensorflow.keras.losses import categorical_crossentropy\n'), ((980, 1082), 'tensorflow.keras.backend.sum', 'K.sum', (['(y_true[:, :, :, :4 * num_anchors] * (x_bool * (0.5 * x * x) + (1 - x_bool) *\n (x_abs - 0.5)))'], {}), '(y_true[:, :, :, :4 * num_anchors] * (x_bool * (0.5 * x * x) + (1 -\n x_bool) * (x_abs - 0.5)))\n', (985, 1082), True, 'from tensorflow.keras import backend as K\n'), ((2471, 2570), 'tensorflow.keras.backend.sum', 'K.sum', (['(y_true[:, 
:, :4 * num_classes] * (x_bool * (0.5 * x * x) + (1 - x_bool) *\n (x_abs - 0.5)))'], {}), '(y_true[:, :, :4 * num_classes] * (x_bool * (0.5 * x * x) + (1 -\n x_bool) * (x_abs - 0.5)))\n', (2476, 2570), True, 'from tensorflow.keras import backend as K\n'), ((1781, 1853), 'tensorflow.keras.backend.binary_crossentropy', 'K.binary_crossentropy', (['y_pred[:, :, :, :]', 'y_true[:, :, :, num_anchors:]'], {}), '(y_pred[:, :, :, :], y_true[:, :, :, num_anchors:])\n', (1802, 1853), True, 'from tensorflow.keras import backend as K\n')]
|
"""
This script is used to process the discharge-concentration data.
"""
# import packages
import numpy as np
import pandas as pd
import os
import datetime
# define the repository path
from common_settings import fpath
import matplotlib.pyplot as plt
import seaborn as sns
# Refer to rainfall data to split the data into different time periods: washoff, dilution and recession
# read rainfall data
filepath = '../../../model_ies/rainfall_sandy_creek/'; filename = 'rainfall_sandy_creek.csv'
rain = pd.read_csv(filepath + filename)
time_period = [[2018, 5, 1], [2020, 8, 1]]
bool1 = (rain.Year == time_period[0][0]) & (rain.Month == time_period[0][1]) & (rain.Day == time_period[0][2])
bool2 = (rain.Year == time_period[1][0]) & (rain.Month == time_period[1][1]) & (rain.Day == time_period[1][2])
index_slice = [*rain[bool1].index.tolist(), *rain[bool2].index.tolist()]
rain = rain.iloc[index_slice[0]:index_slice[1]]
rain.reset_index(inplace=True)
for i in range(rain.shape[0]):
rain.loc[i, 'Time'] = datetime.date(rain.Year[i], rain.Month[i], rain.Day[i]).strftime("%Y-%m-%d")
rain = rain.filter(items=['Time', 'Rainfall amount (millimetres)'])
rain.to_csv(filepath + 'rain_sliced.csv')
rain.plot(x = 'Time', y = 'Rainfall amount (millimetres)')
|
[
"pandas.read_csv",
"datetime.date"
] |
[((501, 533), 'pandas.read_csv', 'pd.read_csv', (['(filepath + filename)'], {}), '(filepath + filename)\n', (512, 533), True, 'import pandas as pd\n'), ((1009, 1064), 'datetime.date', 'datetime.date', (['rain.Year[i]', 'rain.Month[i]', 'rain.Day[i]'], {}), '(rain.Year[i], rain.Month[i], rain.Day[i])\n', (1022, 1064), False, 'import datetime\n')]
|
import FWCore.ParameterSet.Config as cms
mergedtruth = cms.EDProducer("TrackingTruthProducer",
mixLabel = cms.string('mix'),
simHitLabel = cms.string('g4SimHits'),
volumeRadius = cms.double(1200.0),
vertexDistanceCut = cms.double(0.003),
volumeZ = cms.double(3000.0),
mergedBremsstrahlung = cms.bool(True),
removeDeadModules = cms.bool(False),
HepMCDataLabels = cms.vstring('generatorSmeared',
'generator',
'PythiaSource',
'source'
),
useMultipleHepMCLabels = cms.bool(False),
simHitCollections = cms.PSet(
pixel = cms.vstring (
'g4SimHitsTrackerHitsPixelBarrelLowTof',
'g4SimHitsTrackerHitsPixelBarrelHighTof',
'g4SimHitsTrackerHitsPixelEndcapLowTof',
'g4SimHitsTrackerHitsPixelEndcapHighTof'
),
tracker = cms.vstring (
'g4SimHitsTrackerHitsTIBLowTof',
'g4SimHitsTrackerHitsTIBHighTof',
'g4SimHitsTrackerHitsTIDLowTof',
'g4SimHitsTrackerHitsTIDHighTof',
'g4SimHitsTrackerHitsTOBLowTof',
'g4SimHitsTrackerHitsTOBHighTof',
'g4SimHitsTrackerHitsTECLowTof',
'g4SimHitsTrackerHitsTECHighTof'
),
muon = cms.vstring (
'g4SimHitsMuonDTHits',
'g4SimHitsMuonCSCHits',
'g4SimHitsMuonRPCHits'
)
)
)
trackingParticles = cms.Sequence(mergedtruth)
|
[
"FWCore.ParameterSet.Config.string",
"FWCore.ParameterSet.Config.double",
"FWCore.ParameterSet.Config.bool",
"FWCore.ParameterSet.Config.vstring",
"FWCore.ParameterSet.Config.Sequence"
] |
[((1429, 1454), 'FWCore.ParameterSet.Config.Sequence', 'cms.Sequence', (['mergedtruth'], {}), '(mergedtruth)\n', (1441, 1454), True, 'import FWCore.ParameterSet.Config as cms\n'), ((112, 129), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""mix"""'], {}), "('mix')\n", (122, 129), True, 'import FWCore.ParameterSet.Config as cms\n'), ((149, 172), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""g4SimHits"""'], {}), "('g4SimHits')\n", (159, 172), True, 'import FWCore.ParameterSet.Config as cms\n'), ((193, 211), 'FWCore.ParameterSet.Config.double', 'cms.double', (['(1200.0)'], {}), '(1200.0)\n', (203, 211), True, 'import FWCore.ParameterSet.Config as cms\n'), ((237, 254), 'FWCore.ParameterSet.Config.double', 'cms.double', (['(0.003)'], {}), '(0.003)\n', (247, 254), True, 'import FWCore.ParameterSet.Config as cms\n'), ((270, 288), 'FWCore.ParameterSet.Config.double', 'cms.double', (['(3000.0)'], {}), '(3000.0)\n', (280, 288), True, 'import FWCore.ParameterSet.Config as cms\n'), ((317, 331), 'FWCore.ParameterSet.Config.bool', 'cms.bool', (['(True)'], {}), '(True)\n', (325, 331), True, 'import FWCore.ParameterSet.Config as cms\n'), ((357, 372), 'FWCore.ParameterSet.Config.bool', 'cms.bool', (['(False)'], {}), '(False)\n', (365, 372), True, 'import FWCore.ParameterSet.Config as cms\n'), ((397, 467), 'FWCore.ParameterSet.Config.vstring', 'cms.vstring', (['"""generatorSmeared"""', '"""generator"""', '"""PythiaSource"""', '"""source"""'], {}), "('generatorSmeared', 'generator', 'PythiaSource', 'source')\n", (408, 467), True, 'import FWCore.ParameterSet.Config as cms\n'), ((531, 546), 'FWCore.ParameterSet.Config.bool', 'cms.bool', (['(False)'], {}), '(False)\n', (539, 546), True, 'import FWCore.ParameterSet.Config as cms\n'), ((599, 788), 'FWCore.ParameterSet.Config.vstring', 'cms.vstring', (['"""g4SimHitsTrackerHitsPixelBarrelLowTof"""', '"""g4SimHitsTrackerHitsPixelBarrelHighTof"""', '"""g4SimHitsTrackerHitsPixelEndcapLowTof"""', 
'"""g4SimHitsTrackerHitsPixelEndcapHighTof"""'], {}), "('g4SimHitsTrackerHitsPixelBarrelLowTof',\n 'g4SimHitsTrackerHitsPixelBarrelHighTof',\n 'g4SimHitsTrackerHitsPixelEndcapLowTof',\n 'g4SimHitsTrackerHitsPixelEndcapHighTof')\n", (610, 788), True, 'import FWCore.ParameterSet.Config as cms\n'), ((855, 1150), 'FWCore.ParameterSet.Config.vstring', 'cms.vstring', (['"""g4SimHitsTrackerHitsTIBLowTof"""', '"""g4SimHitsTrackerHitsTIBHighTof"""', '"""g4SimHitsTrackerHitsTIDLowTof"""', '"""g4SimHitsTrackerHitsTIDHighTof"""', '"""g4SimHitsTrackerHitsTOBLowTof"""', '"""g4SimHitsTrackerHitsTOBHighTof"""', '"""g4SimHitsTrackerHitsTECLowTof"""', '"""g4SimHitsTrackerHitsTECHighTof"""'], {}), "('g4SimHitsTrackerHitsTIBLowTof',\n 'g4SimHitsTrackerHitsTIBHighTof', 'g4SimHitsTrackerHitsTIDLowTof',\n 'g4SimHitsTrackerHitsTIDHighTof', 'g4SimHitsTrackerHitsTOBLowTof',\n 'g4SimHitsTrackerHitsTOBHighTof', 'g4SimHitsTrackerHitsTECLowTof',\n 'g4SimHitsTrackerHitsTECHighTof')\n", (866, 1150), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1258, 1344), 'FWCore.ParameterSet.Config.vstring', 'cms.vstring', (['"""g4SimHitsMuonDTHits"""', '"""g4SimHitsMuonCSCHits"""', '"""g4SimHitsMuonRPCHits"""'], {}), "('g4SimHitsMuonDTHits', 'g4SimHitsMuonCSCHits',\n 'g4SimHitsMuonRPCHits')\n", (1269, 1344), True, 'import FWCore.ParameterSet.Config as cms\n')]
|
'''
Created on 02/10/2013
@author: david
'''
from easycanvas import EasyCanvas
from algoritmia.datastructures.digraphs import UndirectedGraph
from math import pi
colors = [
"#000000", "#FFFF00", "#1CE6FF", "#FF34FF", "#FF4A46", "#008941", "#006FA6", "#A30059",
"#FFDBE5", "#7A4900", "#0000A6", "#63FFAC", "#B79762", "#004D43", "#8FB0FF", "#997D87",
"#5A0007", "#809693", "#FEFFE6", "#1B4400", "#4FC601", "#3B5DFF", "#4A3B53", "#FF2F80",
"#61615A", "#BA0900", "#6B7900", "#00C2A0", "#FFAA92", "#FF90C9", "#B903AA", "#D16100",
"#DDEFFF", "#000035", "#7B4F4B", "#A1C299", "#300018", "#0AA6D8", "#013349", "#00846F",
"#372101", "#FFB500", "#C2FFED", "#A079BF", "#CC0744", "#C0B9B2", "#C2FF99", "#001E09",
"#00489C", "#6F0062", "#0CBD66", "#EEC3FF", "#456D75", "#B77B68", "#7A87A1", "#788D66",
"#885578", "#FAD09F", "#FF8A9A", "#D157A0", "#BEC459", "#456648", "#0086ED", "#886F4C",
"#34362D", "#B4A8BD", "#00A6AA", "#452C2C", "#636375", "#A3C8C9", "#FF913F", "#938A81",
"#575329", "#00FECF", "#B05B6F", "#8CD0FF", "#3B9700", "#04F757", "#C8A1A1", "#1E6E00",
"#7900D7", "#A77500", "#6367A9", "#A05837", "#6B002C", "#772600", "#D790FF", "#9B9700",
"#549E79", "#FFF69F", "#201625", "#72418F", "#BC23FF", "#99ADC0", "#3A2465", "#922329",
"#5B4534", "#FDE8DC", "#404E55", "#0089A3", "#CB7E98", "#A4E804", "#324E72", "#6A3A4C",
"#83AB58", "#001C1E", "#D1F7CE", "#004B28", "#C8D0F6", "#A3A489", "#806C66", "#222800",
"#BF5650", "#E83000", "#66796D", "#DA007C", "#FF1A59", "#8ADBB4", "#1E0200", "#5B4E51",
"#C895C5", "#320033", "#FF6832", "#66E1D3", "#CFCDAC", "#D0AC94", "#7ED379", "#012C58"
]
class GraphColoring2DViewer(EasyCanvas):
def __init__(self, g, colors=None, window_size=(400, 400)):
EasyCanvas.__init__(self)
self.colors = colors
# check 'g' type
if not isinstance(g, UndirectedGraph) or \
any([type(p) != type((1, 1)) and type(p) != type((1.0, 1.0)) or len(p) != 2 for p in g.V]):
raise TypeError("The graph must be an UnirectedGraph of two integer tuples")
self.g = g
self.right = max(p[0] for p in self.g.V)
self.bottom = min(p[1] for p in self.g.V)
self.left = min(p[0] for p in self.g.V)
self.top = max(p[1] for p in self.g.V)
self.window_size = window_size
self.height = self.top - self.bottom
self.width = self.right - self.left
self.ar = self.width / self.height
ar = self.window_size[0] / self.window_size[1]
if ar < self.ar:
self.window_size = (self.window_size[0], self.window_size[0] / self.ar)
else:
self.window_size = (self.window_size[1] * self.ar, self.window_size[1])
def main(self):
rad = (self.window_size[0] * self.window_size[1] / (pi * 7000)) ** 0.5 * self.width / self.window_size[0]
margin = rad * 2
self.easycanvas_configure(title='Graph Coloring 2D Viewer',
background='white',
size=self.window_size,
coordinates=(
self.left - margin, self.bottom - margin, self.right + margin, self.top + margin))
for u, v in self.g.E:
self.create_line(u[0], u[1], v[0], v[1], 'gray')
for u in self.g.V:
color_relleno = 'red' if colors is None else colors[self.colors[u] % len(colors)]
self.create_filled_circle(u[0], u[1], rad, color='black', relleno=color_relleno)
# Wait for a key
self.readkey(True)
if __name__ == '__main__':
g = UndirectedGraph(E=[((-3, -2), (0, 0)), ((0, 0), (1, 1))])
color_dic = {(-3, -2): 0, (0, 0): 1, (1, 1): 2}
viewer = GraphColoring2DViewer(g, color_dic, window_size=(800, 800))
viewer.run()
|
[
"easycanvas.EasyCanvas.__init__",
"algoritmia.datastructures.digraphs.UndirectedGraph"
] |
[((3641, 3698), 'algoritmia.datastructures.digraphs.UndirectedGraph', 'UndirectedGraph', ([], {'E': '[((-3, -2), (0, 0)), ((0, 0), (1, 1))]'}), '(E=[((-3, -2), (0, 0)), ((0, 0), (1, 1))])\n', (3656, 3698), False, 'from algoritmia.datastructures.digraphs import UndirectedGraph\n'), ((1764, 1789), 'easycanvas.EasyCanvas.__init__', 'EasyCanvas.__init__', (['self'], {}), '(self)\n', (1783, 1789), False, 'from easycanvas import EasyCanvas\n')]
|
import numpy as np
from tensorflow.contrib.keras.api.keras.models import Sequential,load_model
from tensorflow.contrib.keras.api.keras.layers import Conv2D, MaxPooling2D
from tensorflow.contrib.keras.api.keras.layers import Dropout, Flatten, Dense
import cv2
class Classifier():
def __init__(self,img_shape):
self.img_shape=img_shape
def inference(self):
model = Sequential()
model.add(Conv2D(32, (3, 3), padding='same', activation='relu', input_shape=self.img_shape))
model.add(Conv2D(32, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Conv2D(64, (3, 3), padding='same', activation='relu'))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Conv2D(64, (3, 3), padding='same', activation='relu'))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(25, activation='softmax'))
model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])
return model
def save(self,model,model_path):
model.save(model_path)
def load(self,model_path):
model=load_model(model_path)
return model
def train(self,train_images,train_labels_oh,val_images,val_labels_oh,batch_size=64,get_saved=False,save=False,model_path="model.h5",epochs=10,num_classes=25):
if get_saved:
model=self.load(model_path)
else:
model=self.inference()
model.fit(train_images, train_labels_oh, batch_size=batch_size, epochs=epochs, verbose=1,
validation_data=(val_images, val_labels_oh))
if save:
self.save(model,model_path)
return model
def evalaute(self,model,test_images,test_labels_oh):
model.evaluate(test_images,test_labels_oh)
def predict(self,model,image):
image=np.array(image)
norm_image = np.zeros_like(image,dtype=np.float)
norm_image=cv2.normalize(image, norm_image, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_32F)
norm_image=norm_image.reshape((1,self.img_shape[0],self.img_shape[1],self.img_shape[2]))
label_oh=model.predict(norm_image)
return label_oh
|
[
"numpy.zeros_like",
"tensorflow.contrib.keras.api.keras.layers.Conv2D",
"tensorflow.contrib.keras.api.keras.models.Sequential",
"tensorflow.contrib.keras.api.keras.layers.MaxPooling2D",
"tensorflow.contrib.keras.api.keras.layers.Dense",
"tensorflow.contrib.keras.api.keras.layers.Flatten",
"numpy.array",
"cv2.normalize",
"tensorflow.contrib.keras.api.keras.models.load_model",
"tensorflow.contrib.keras.api.keras.layers.Dropout"
] |
[((408, 420), 'tensorflow.contrib.keras.api.keras.models.Sequential', 'Sequential', ([], {}), '()\n', (418, 420), False, 'from tensorflow.contrib.keras.api.keras.models import Sequential, load_model\n'), ((1527, 1549), 'tensorflow.contrib.keras.api.keras.models.load_model', 'load_model', (['model_path'], {}), '(model_path)\n', (1537, 1549), False, 'from tensorflow.contrib.keras.api.keras.models import Sequential, load_model\n'), ((2257, 2272), 'numpy.array', 'np.array', (['image'], {}), '(image)\n', (2265, 2272), True, 'import numpy as np\n'), ((2294, 2330), 'numpy.zeros_like', 'np.zeros_like', (['image'], {'dtype': 'np.float'}), '(image, dtype=np.float)\n', (2307, 2330), True, 'import numpy as np\n'), ((2349, 2447), 'cv2.normalize', 'cv2.normalize', (['image', 'norm_image'], {'alpha': '(0)', 'beta': '(1)', 'norm_type': 'cv2.NORM_MINMAX', 'dtype': 'cv2.CV_32F'}), '(image, norm_image, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX,\n dtype=cv2.CV_32F)\n', (2362, 2447), False, 'import cv2\n'), ((439, 525), 'tensorflow.contrib.keras.api.keras.layers.Conv2D', 'Conv2D', (['(32)', '(3, 3)'], {'padding': '"""same"""', 'activation': '"""relu"""', 'input_shape': 'self.img_shape'}), "(32, (3, 3), padding='same', activation='relu', input_shape=self.\n img_shape)\n", (445, 525), False, 'from tensorflow.contrib.keras.api.keras.layers import Conv2D, MaxPooling2D\n'), ((540, 577), 'tensorflow.contrib.keras.api.keras.layers.Conv2D', 'Conv2D', (['(32)', '(3, 3)'], {'activation': '"""relu"""'}), "(32, (3, 3), activation='relu')\n", (546, 577), False, 'from tensorflow.contrib.keras.api.keras.layers import Conv2D, MaxPooling2D\n'), ((597, 627), 'tensorflow.contrib.keras.api.keras.layers.MaxPooling2D', 'MaxPooling2D', ([], {'pool_size': '(2, 2)'}), '(pool_size=(2, 2))\n', (609, 627), False, 'from tensorflow.contrib.keras.api.keras.layers import Conv2D, MaxPooling2D\n'), ((647, 660), 'tensorflow.contrib.keras.api.keras.layers.Dropout', 'Dropout', (['(0.25)'], {}), '(0.25)\n', (654, 660), 
False, 'from tensorflow.contrib.keras.api.keras.layers import Dropout, Flatten, Dense\n'), ((691, 744), 'tensorflow.contrib.keras.api.keras.layers.Conv2D', 'Conv2D', (['(64)', '(3, 3)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(64, (3, 3), padding='same', activation='relu')\n", (697, 744), False, 'from tensorflow.contrib.keras.api.keras.layers import Conv2D, MaxPooling2D\n'), ((764, 801), 'tensorflow.contrib.keras.api.keras.layers.Conv2D', 'Conv2D', (['(64)', '(3, 3)'], {'activation': '"""relu"""'}), "(64, (3, 3), activation='relu')\n", (770, 801), False, 'from tensorflow.contrib.keras.api.keras.layers import Conv2D, MaxPooling2D\n'), ((821, 851), 'tensorflow.contrib.keras.api.keras.layers.MaxPooling2D', 'MaxPooling2D', ([], {'pool_size': '(2, 2)'}), '(pool_size=(2, 2))\n', (833, 851), False, 'from tensorflow.contrib.keras.api.keras.layers import Conv2D, MaxPooling2D\n'), ((871, 884), 'tensorflow.contrib.keras.api.keras.layers.Dropout', 'Dropout', (['(0.25)'], {}), '(0.25)\n', (878, 884), False, 'from tensorflow.contrib.keras.api.keras.layers import Dropout, Flatten, Dense\n'), ((915, 968), 'tensorflow.contrib.keras.api.keras.layers.Conv2D', 'Conv2D', (['(64)', '(3, 3)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(64, (3, 3), padding='same', activation='relu')\n", (921, 968), False, 'from tensorflow.contrib.keras.api.keras.layers import Conv2D, MaxPooling2D\n'), ((988, 1025), 'tensorflow.contrib.keras.api.keras.layers.Conv2D', 'Conv2D', (['(64)', '(3, 3)'], {'activation': '"""relu"""'}), "(64, (3, 3), activation='relu')\n", (994, 1025), False, 'from tensorflow.contrib.keras.api.keras.layers import Conv2D, MaxPooling2D\n'), ((1045, 1075), 'tensorflow.contrib.keras.api.keras.layers.MaxPooling2D', 'MaxPooling2D', ([], {'pool_size': '(2, 2)'}), '(pool_size=(2, 2))\n', (1057, 1075), False, 'from tensorflow.contrib.keras.api.keras.layers import Conv2D, MaxPooling2D\n'), ((1095, 1108), 'tensorflow.contrib.keras.api.keras.layers.Dropout', 
'Dropout', (['(0.25)'], {}), '(0.25)\n', (1102, 1108), False, 'from tensorflow.contrib.keras.api.keras.layers import Dropout, Flatten, Dense\n'), ((1139, 1148), 'tensorflow.contrib.keras.api.keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (1146, 1148), False, 'from tensorflow.contrib.keras.api.keras.layers import Dropout, Flatten, Dense\n'), ((1168, 1197), 'tensorflow.contrib.keras.api.keras.layers.Dense', 'Dense', (['(128)'], {'activation': '"""relu"""'}), "(128, activation='relu')\n", (1173, 1197), False, 'from tensorflow.contrib.keras.api.keras.layers import Dropout, Flatten, Dense\n'), ((1217, 1229), 'tensorflow.contrib.keras.api.keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (1224, 1229), False, 'from tensorflow.contrib.keras.api.keras.layers import Dropout, Flatten, Dense\n'), ((1249, 1280), 'tensorflow.contrib.keras.api.keras.layers.Dense', 'Dense', (['(25)'], {'activation': '"""softmax"""'}), "(25, activation='softmax')\n", (1254, 1280), False, 'from tensorflow.contrib.keras.api.keras.layers import Dropout, Flatten, Dense\n')]
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
import oslotest.base as base
import six
from osc_placement import version
class TestVersion(base.BaseTestCase):
def test_compare(self):
self.assertTrue(version._compare('1.0', version.gt('0.9')))
self.assertTrue(version._compare('1.0', version.ge('0.9')))
self.assertTrue(version._compare('1.0', version.ge('1.0')))
self.assertTrue(version._compare('1.0', version.eq('1.0')))
self.assertTrue(version._compare('1.0', version.le('1.0')))
self.assertTrue(version._compare('1.0', version.le('1.1')))
self.assertTrue(version._compare('1.0', version.lt('1.1')))
self.assertTrue(
version._compare('1.1', version.gt('1.0'), version.lt('1.2')))
self.assertTrue(
version._compare(
'0.3', version.eq('0.2'), version.eq('0.3'), op=any))
# Test error message
msg = 'Operation or argument is not supported with version 1.0; '
self.assertEqual((msg + 'requires version greater than 1.0'),
version._compare('1.0', version.gt('1.0')))
self.assertEqual((msg + 'requires at least version 1.1'),
version._compare('1.0', version.ge('1.1')))
self.assertEqual((msg + 'requires version 1.1'),
version._compare('1.0', version.eq('1.1')))
self.assertEqual((msg + 'requires at most version 0.9'),
version._compare('1.0', version.le('0.9')))
self.assertEqual((msg + 'requires version less than 0.9'),
version._compare('1.0', version.lt('0.9')))
self.assertRaises(
ValueError, version._compare, 'abc', version.le('1.1'))
self.assertRaises(
ValueError, version._compare, '1.0', version.le('.0'))
self.assertRaises(
ValueError, version._compare, '1', version.le('2'))
ex = self.assertRaises(
ValueError, version.compare, '1.0', version.ge('1.1'))
self.assertEqual(
'Operation or argument is not supported with version 1.0; '
'requires at least version 1.1', six.text_type(ex))
ex = self.assertRaises(
ValueError, version.compare, '1.0',
version.eq('1.1'), version.eq('1.5'), op=any)
self.assertEqual(
'Operation or argument is not supported with version 1.0; '
'requires version 1.1, or requires version 1.5', six.text_type(ex))
def test_compare_with_exc(self):
self.assertTrue(version.compare('1.05', version.gt('1.4')))
self.assertFalse(version.compare('1.3', version.gt('1.4'), exc=False))
self.assertRaisesRegex(
ValueError,
'Operation or argument is not supported',
version.compare, '3.1.2', version.gt('3.1.3'))
def test_check_decorator(self):
fake_api = mock.Mock()
fake_api_dec = version.check(version.gt('2.11'))(fake_api)
obj = mock.Mock()
obj.app.client_manager.placement.api_version = '2.12'
fake_api_dec(obj, 1, 2, 3)
fake_api.assert_called_once_with(obj, 1, 2, 3)
fake_api.reset_mock()
obj.app.client_manager.placement.api_version = '2.10'
self.assertRaisesRegex(
ValueError,
'Operation or argument is not supported',
fake_api_dec,
obj, 1, 2, 3)
fake_api.assert_not_called()
def test_check_mixin(self):
class Test(version.CheckerMixin):
app = mock.Mock()
app.client_manager.placement.api_version = '1.2'
t = Test()
self.assertTrue(t.compare_version(version.le('1.3')))
self.assertTrue(t.check_version(version.ge('1.0')))
self.assertRaisesRegex(
ValueError,
'Operation or argument is not supported',
t.check_version, version.lt('1.2'))
def test_max_version_consistency(self):
def _convert_to_tuple(str):
return tuple(map(int, str.split(".")))
versions = [
_convert_to_tuple(ver) for ver in version.SUPPORTED_MICROVERSIONS]
max_ver = _convert_to_tuple(version.MAX_VERSION_NO_GAP)
there_is_gap = False
for i in range(len(versions) - 1):
j = i + 1
if versions[j][1] - versions[i][1] != 1:
there_is_gap = True
self.assertEqual(max_ver, versions[i])
break
if not there_is_gap:
self.assertEqual(max_ver, versions[-1])
def test_get_version_returns_max_no_gap_when_no_session(self):
obj = mock.Mock()
obj.app.client_manager.session = None
ret = version.get_version(obj)
self.assertEqual(version.MAX_VERSION_NO_GAP, ret)
obj.app.client_manager.placement.api_version.assert_not_called()
|
[
"osc_placement.version.lt",
"unittest.mock.Mock",
"six.text_type",
"osc_placement.version.eq",
"osc_placement.version.get_version",
"osc_placement.version.gt",
"osc_placement.version.le",
"osc_placement.version.ge"
] |
[((3460, 3471), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (3469, 3471), False, 'from unittest import mock\n'), ((3553, 3564), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (3562, 3564), False, 'from unittest import mock\n'), ((5196, 5207), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (5205, 5207), False, 'from unittest import mock\n'), ((5268, 5292), 'osc_placement.version.get_version', 'version.get_version', (['obj'], {}), '(obj)\n', (5287, 5292), False, 'from osc_placement import version\n'), ((2268, 2285), 'osc_placement.version.le', 'version.le', (['"""1.1"""'], {}), "('1.1')\n", (2278, 2285), False, 'from osc_placement import version\n'), ((2363, 2379), 'osc_placement.version.le', 'version.le', (['""".0"""'], {}), "('.0')\n", (2373, 2379), False, 'from osc_placement import version\n'), ((2455, 2470), 'osc_placement.version.le', 'version.le', (['"""2"""'], {}), "('2')\n", (2465, 2470), False, 'from osc_placement import version\n'), ((2553, 2570), 'osc_placement.version.ge', 'version.ge', (['"""1.1"""'], {}), "('1.1')\n", (2563, 2570), False, 'from osc_placement import version\n'), ((2715, 2732), 'six.text_type', 'six.text_type', (['ex'], {}), '(ex)\n', (2728, 2732), False, 'import six\n'), ((2826, 2843), 'osc_placement.version.eq', 'version.eq', (['"""1.1"""'], {}), "('1.1')\n", (2836, 2843), False, 'from osc_placement import version\n'), ((2845, 2862), 'osc_placement.version.eq', 'version.eq', (['"""1.5"""'], {}), "('1.5')\n", (2855, 2862), False, 'from osc_placement import version\n'), ((3031, 3048), 'six.text_type', 'six.text_type', (['ex'], {}), '(ex)\n', (3044, 3048), False, 'import six\n'), ((3383, 3402), 'osc_placement.version.gt', 'version.gt', (['"""3.1.3"""'], {}), "('3.1.3')\n", (3393, 3402), False, 'from osc_placement import version\n'), ((4102, 4113), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (4111, 4113), False, 'from unittest import mock\n'), ((4456, 4473), 'osc_placement.version.lt', 'version.lt', 
(['"""1.2"""'], {}), "('1.2')\n", (4466, 4473), False, 'from osc_placement import version\n'), ((764, 781), 'osc_placement.version.gt', 'version.gt', (['"""0.9"""'], {}), "('0.9')\n", (774, 781), False, 'from osc_placement import version\n'), ((832, 849), 'osc_placement.version.ge', 'version.ge', (['"""0.9"""'], {}), "('0.9')\n", (842, 849), False, 'from osc_placement import version\n'), ((900, 917), 'osc_placement.version.ge', 'version.ge', (['"""1.0"""'], {}), "('1.0')\n", (910, 917), False, 'from osc_placement import version\n'), ((968, 985), 'osc_placement.version.eq', 'version.eq', (['"""1.0"""'], {}), "('1.0')\n", (978, 985), False, 'from osc_placement import version\n'), ((1036, 1053), 'osc_placement.version.le', 'version.le', (['"""1.0"""'], {}), "('1.0')\n", (1046, 1053), False, 'from osc_placement import version\n'), ((1104, 1121), 'osc_placement.version.le', 'version.le', (['"""1.1"""'], {}), "('1.1')\n", (1114, 1121), False, 'from osc_placement import version\n'), ((1172, 1189), 'osc_placement.version.lt', 'version.lt', (['"""1.1"""'], {}), "('1.1')\n", (1182, 1189), False, 'from osc_placement import version\n'), ((1253, 1270), 'osc_placement.version.gt', 'version.gt', (['"""1.0"""'], {}), "('1.0')\n", (1263, 1270), False, 'from osc_placement import version\n'), ((1272, 1289), 'osc_placement.version.lt', 'version.lt', (['"""1.2"""'], {}), "('1.2')\n", (1282, 1289), False, 'from osc_placement import version\n'), ((1370, 1387), 'osc_placement.version.eq', 'version.eq', (['"""0.2"""'], {}), "('0.2')\n", (1380, 1387), False, 'from osc_placement import version\n'), ((1389, 1406), 'osc_placement.version.eq', 'version.eq', (['"""0.3"""'], {}), "('0.3')\n", (1399, 1406), False, 'from osc_placement import version\n'), ((1640, 1657), 'osc_placement.version.gt', 'version.gt', (['"""1.0"""'], {}), "('1.0')\n", (1650, 1657), False, 'from osc_placement import version\n'), ((1775, 1792), 'osc_placement.version.ge', 'version.ge', (['"""1.1"""'], {}), "('1.1')\n", 
(1785, 1792), False, 'from osc_placement import version\n'), ((1901, 1918), 'osc_placement.version.eq', 'version.eq', (['"""1.1"""'], {}), "('1.1')\n", (1911, 1918), False, 'from osc_placement import version\n'), ((2035, 2052), 'osc_placement.version.le', 'version.le', (['"""0.9"""'], {}), "('0.9')\n", (2045, 2052), False, 'from osc_placement import version\n'), ((2171, 2188), 'osc_placement.version.lt', 'version.lt', (['"""0.9"""'], {}), "('0.9')\n", (2181, 2188), False, 'from osc_placement import version\n'), ((3136, 3153), 'osc_placement.version.gt', 'version.gt', (['"""1.4"""'], {}), "('1.4')\n", (3146, 3153), False, 'from osc_placement import version\n'), ((3204, 3221), 'osc_placement.version.gt', 'version.gt', (['"""1.4"""'], {}), "('1.4')\n", (3214, 3221), False, 'from osc_placement import version\n'), ((3509, 3527), 'osc_placement.version.gt', 'version.gt', (['"""2.11"""'], {}), "('2.11')\n", (3519, 3527), False, 'from osc_placement import version\n'), ((4237, 4254), 'osc_placement.version.le', 'version.le', (['"""1.3"""'], {}), "('1.3')\n", (4247, 4254), False, 'from osc_placement import version\n'), ((4297, 4314), 'osc_placement.version.ge', 'version.ge', (['"""1.0"""'], {}), "('1.0')\n", (4307, 4314), False, 'from osc_placement import version\n')]
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField
from wtforms import TextAreaField, SelectField, FieldList, FormField, DateField
from wtforms.validators import ValidationError, DataRequired, Email, EqualTo
from wtforms.validators import Length
from app_source.models import User
class LoginForm(FlaskForm):
    """Form object to enable user login."""
    username = StringField("Adresse électronique", validators=[DataRequired()])
    # Label restored: the source carried the redaction placeholder '<PASSWORD>'.
    password = PasswordField('Mot de passe', validators=[DataRequired()])
    submit = SubmitField('Connexion')
class RegistrationForm(FlaskForm):
    """Form object to enable user registration."""
    username = StringField('Adresse électronique', validators=[DataRequired(), Email()])
    # Labels restored: the source carried the redaction placeholder '<PASSWORD>'
    # for both password fields.
    password = PasswordField('Mot de passe', validators=[DataRequired()])
    password2 = PasswordField(
        'Répétez le mot de passe', validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField("Valider")

    def validate_username(self, username):
        """Reject registration if the e-mail address is already registered."""
        user = User.query.filter_by(username=username.data).first()
        if user is not None:
            raise ValidationError('Cette adresse électronique a déjà été utilisée.')
class SpokenLanguagesSubform(FlaskForm):
    """Nested subform: one (language, level) pair, embedded via FormField in GeneralInfoForm."""
    class Meta:
        # No per-subform CSRF token; the enclosing form provides one.
        csrf = False
    # Read at import time; [:-1] drops the last entry (presumably a trailing
    # empty line — TODO confirm against data/languages_list.txt).
    with open('data/languages_list.txt', 'r') as f:
        languages = f.read().splitlines()[:-1]
    languages = sorted(languages)
    language_choices = list(zip(languages, languages))
    language = SelectField('Langue', choices=language_choices)
    levels = ['Débutant', 'Intermédiaire', 'Avancé', 'Langue maternelle']
    level_choices = list(zip(levels, levels))
    level = SelectField('Niveau', choices=level_choices)
class GeneralInfoForm(FlaskForm):
    """Form object to store general information about the user."""
    first_name = StringField('Prénom', validators=[DataRequired()])
    last_name = StringField('Nom', validators=[DataRequired()])
    phone_number = StringField('Numéro de téléphone', validators=[DataRequired()])
    # French postal codes are exactly five characters long.
    postal_code = StringField('Code postal', validators=[DataRequired(),
                                                           Length(min=5, max=5)])
    city = StringField('Ville de résidence', validators=[DataRequired()])
    mobility_choices = ['Ville', 'Département', 'Région', 'France entière']
    mobility = SelectField('Mobilité',
                          choices=list(zip(mobility_choices, mobility_choices)))
    # Between 1 and 5 repeated (language, level) subforms; the add/remove
    # submit buttons below are presumably handled by the view (not shown here).
    languages = FieldList(FormField(SpokenLanguagesSubform),
                          min_entries=1, max_entries=5)
    add_language = SubmitField('Ajouter une langue')
    remove_language = SubmitField('Retirer une langue')
    submit = SubmitField('Valider et continuer')
class DriverLicensesSubform(FlaskForm):
    """Nested subform: one driver-license choice, embedded via FormField in CertificationsForm."""
    class Meta:
        # No per-subform CSRF token; the enclosing form provides one.
        csrf = False
    # Read at import time; [:-1] drops the last entry (presumably a trailing
    # empty line — TODO confirm against data/french_driver_licenses.txt).
    with open('data/french_driver_licenses.txt', 'r') as f:
        licenses = f.read().splitlines()[:-1]
    licenses = sorted(licenses)
    license_choices = list(zip(licenses, licenses))
    driver_license = SelectField('Permis', choices=license_choices)
class OtherCertificationsSubform(FlaskForm):
    """Nested subform: one free-text certification, embedded via FormField in CertificationsForm."""
    class Meta:
        # No per-subform CSRF token; the enclosing form provides one.
        csrf = False
    other_certif = StringField('Autre certification')
class CertificationsForm(FlaskForm):
    """Form object to store driver licenses and other certifications (0-10 each)."""
    driver_licenses = FieldList(FormField(DriverLicensesSubform),
                          min_entries=0, max_entries=10)
    add_license = SubmitField('Ajouter un permis')
    remove_license = SubmitField('Retirer un permis')
    other_certifications = FieldList(FormField(OtherCertificationsSubform),
                          min_entries=0, max_entries=10)
    add_other_certif = SubmitField('Ajouter une certification')
    remove_other_certif = SubmitField('Retirer une certification')
    submit = SubmitField('Valider et continuer')
class FormationExpererienceSubform(FlaskForm):
    """Nested subform: one education/work entry, shared by FormationForm and ExperienceForm.

    NOTE(review): the class name misspells "Experience"; it is referenced by
    two sibling forms, so renaming it is out of scope for this block.
    """
    class Meta:
        # No per-subform CSRF token; the enclosing form provides one.
        csrf = False
    # Dates are captured at month/year granularity only ('%m/%Y').
    date_start = DateField('Date de début', format='%m/%Y',
                           validators=[DataRequired()])
    date_end = DateField('Date de fin', format='%m/%Y',
                           validators=[DataRequired()])
    title = StringField('Titre', validators=[DataRequired()])
    institution = StringField('Établissement', validators=[DataRequired()])
    desc = TextAreaField('Description', render_kw={"rows": 5, "cols": 50})
    is_relevant = BooleanField("Cette expérience est pertinente pour ma recherche d'emploi actuelle")
class FormationForm(FlaskForm):
    """Form object to store the user's education history (0-10 entries)."""
    formation_entries = FieldList(FormField(FormationExpererienceSubform),
                          min_entries=0, max_entries=10)
    add_formation = SubmitField('Ajouter une formation')
    remove_formation = SubmitField('Retirer une formation')
    submit = SubmitField('Valider et continuer')
class ExperienceForm(FlaskForm):
    """Form object to store the user's work experience (0-10 entries)."""
    experience_entries = FieldList(FormField(FormationExpererienceSubform),
                          min_entries=0, max_entries=10)
    add_experience = SubmitField('Ajouter une expérience')
    remove_experience = SubmitField('Retirer une expérience')
    submit = SubmitField('Valider et continuer')
class ComputerSkillsSubform(FlaskForm):
    """Nested subform: one free-text computer-skill entry, embedded via FormField in SkillsForm."""
    class Meta:
        # No per-subform CSRF token; the enclosing form provides one.
        csrf = False
    computer_skill = StringField('Outil informatique')
class OtherSkillsSubform(FlaskForm):
    """Nested subform: one free-text "other skill" entry, embedded via FormField in SkillsForm."""
    class Meta:
        # No per-subform CSRF token; the enclosing form provides one.
        csrf = False
    # Label fixed: the original said 'Outil informatique' (computer tool),
    # copy-pasted from ComputerSkillsSubform, which mislabelled this field.
    other_skill = StringField('Autre compétence')
class SkillsForm(FlaskForm):
    """Form object to store skills of the user (computer + other, 0-10 each)."""
    computer_skills = FieldList(FormField(ComputerSkillsSubform),
                          min_entries=0, max_entries=10)
    add_computer_skill = SubmitField('Ajouter une compétence')
    remove_computer_skill = SubmitField('Retirer une compétence')
    other_skills = FieldList(FormField(OtherSkillsSubform),
                          min_entries=0, max_entries=10)
    # NOTE: the add/remove buttons below intentionally reuse the same labels
    # as the computer-skill buttons above; the field names disambiguate them.
    add_other_skill = SubmitField('Ajouter une compétence')
    remove_other_skill = SubmitField('Retirer une compétence')
    submit = SubmitField('Valider et continuer')
class PresentationForm(FlaskForm):
    """Form object to store the user self presentation (free text, required)."""
    presentation = TextAreaField('Présentation', render_kw={"rows": 5, "cols": 50},
                          validators=[DataRequired()])
    submit = SubmitField('Valider et terminer')
|
[
"wtforms.SelectField",
"wtforms.validators.Email",
"wtforms.validators.Length",
"wtforms.BooleanField",
"app_source.models.User.query.filter_by",
"wtforms.TextAreaField",
"wtforms.SubmitField",
"wtforms.validators.EqualTo",
"wtforms.StringField",
"wtforms.validators.DataRequired",
"wtforms.validators.ValidationError",
"wtforms.FormField"
] |
[((589, 613), 'wtforms.SubmitField', 'SubmitField', (['"""Connexion"""'], {}), "('Connexion')\n", (600, 613), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((979, 1001), 'wtforms.SubmitField', 'SubmitField', (['"""Valider"""'], {}), "('Valider')\n", (990, 1001), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((1582, 1629), 'wtforms.SelectField', 'SelectField', (['"""Langue"""'], {'choices': 'language_choices'}), "('Langue', choices=language_choices)\n", (1593, 1629), False, 'from wtforms import TextAreaField, SelectField, FieldList, FormField, DateField\n'), ((1762, 1806), 'wtforms.SelectField', 'SelectField', (['"""Niveau"""'], {'choices': 'level_choices'}), "('Niveau', choices=level_choices)\n", (1773, 1806), False, 'from wtforms import TextAreaField, SelectField, FieldList, FormField, DateField\n'), ((2664, 2697), 'wtforms.SubmitField', 'SubmitField', (['"""Ajouter une langue"""'], {}), "('Ajouter une langue')\n", (2675, 2697), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((2720, 2753), 'wtforms.SubmitField', 'SubmitField', (['"""Retirer une langue"""'], {}), "('Retirer une langue')\n", (2731, 2753), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((2767, 2802), 'wtforms.SubmitField', 'SubmitField', (['"""Valider et continuer"""'], {}), "('Valider et continuer')\n", (2778, 2802), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((3095, 3141), 'wtforms.SelectField', 'SelectField', (['"""Permis"""'], {'choices': 'license_choices'}), "('Permis', choices=license_choices)\n", (3106, 3141), False, 'from wtforms import TextAreaField, SelectField, FieldList, FormField, DateField\n'), ((3247, 3281), 'wtforms.StringField', 'StringField', (['"""Autre certification"""'], {}), "('Autre certification')\n", 
(3258, 3281), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((3469, 3501), 'wtforms.SubmitField', 'SubmitField', (['"""Ajouter un permis"""'], {}), "('Ajouter un permis')\n", (3480, 3501), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((3523, 3555), 'wtforms.SubmitField', 'SubmitField', (['"""Retirer un permis"""'], {}), "('Retirer un permis')\n", (3534, 3555), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((3723, 3763), 'wtforms.SubmitField', 'SubmitField', (['"""Ajouter une certification"""'], {}), "('Ajouter une certification')\n", (3734, 3763), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((3790, 3830), 'wtforms.SubmitField', 'SubmitField', (['"""Retirer une certification"""'], {}), "('Retirer une certification')\n", (3801, 3830), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((3844, 3879), 'wtforms.SubmitField', 'SubmitField', (['"""Valider et continuer"""'], {}), "('Valider et continuer')\n", (3855, 3879), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((4343, 4406), 'wtforms.TextAreaField', 'TextAreaField', (['"""Description"""'], {'render_kw': "{'rows': 5, 'cols': 50}"}), "('Description', render_kw={'rows': 5, 'cols': 50})\n", (4356, 4406), False, 'from wtforms import TextAreaField, SelectField, FieldList, FormField, DateField\n'), ((4425, 4513), 'wtforms.BooleanField', 'BooleanField', (['"""Cette expérience est pertinente pour ma recherche d\'emploi actuelle"""'], {}), '(\n "Cette expérience est pertinente pour ma recherche d\'emploi actuelle")\n', (4437, 4513), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((4704, 4740), 'wtforms.SubmitField', 'SubmitField', (['"""Ajouter une 
formation"""'], {}), "('Ajouter une formation')\n", (4715, 4740), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((4764, 4800), 'wtforms.SubmitField', 'SubmitField', (['"""Retirer une formation"""'], {}), "('Retirer une formation')\n", (4775, 4800), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((4814, 4849), 'wtforms.SubmitField', 'SubmitField', (['"""Valider et continuer"""'], {}), "('Valider et continuer')\n", (4825, 4849), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((5049, 5086), 'wtforms.SubmitField', 'SubmitField', (['"""Ajouter une expérience"""'], {}), "('Ajouter une expérience')\n", (5060, 5086), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((5111, 5148), 'wtforms.SubmitField', 'SubmitField', (['"""Retirer une expérience"""'], {}), "('Retirer une expérience')\n", (5122, 5148), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((5162, 5197), 'wtforms.SubmitField', 'SubmitField', (['"""Valider et continuer"""'], {}), "('Valider et continuer')\n", (5173, 5197), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((5300, 5333), 'wtforms.StringField', 'StringField', (['"""Outil informatique"""'], {}), "('Outil informatique')\n", (5311, 5333), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((5430, 5463), 'wtforms.StringField', 'StringField', (['"""Outil informatique"""'], {}), "('Outil informatique')\n", (5441, 5463), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((5700, 5737), 'wtforms.SubmitField', 'SubmitField', (['"""Ajouter une compétence"""'], {}), "('Ajouter une compétence')\n", (5711, 5737), False, 'from wtforms import 
StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((5766, 5803), 'wtforms.SubmitField', 'SubmitField', (['"""Retirer une compétence"""'], {}), "('Retirer une compétence')\n", (5777, 5803), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((5946, 5983), 'wtforms.SubmitField', 'SubmitField', (['"""Ajouter une compétence"""'], {}), "('Ajouter une compétence')\n", (5957, 5983), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((6009, 6046), 'wtforms.SubmitField', 'SubmitField', (['"""Retirer une compétence"""'], {}), "('Retirer une compétence')\n", (6020, 6046), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((6060, 6095), 'wtforms.SubmitField', 'SubmitField', (['"""Valider et continuer"""'], {}), "('Valider et continuer')\n", (6071, 6095), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((6351, 6385), 'wtforms.SubmitField', 'SubmitField', (['"""Valider et terminer"""'], {}), "('Valider et terminer')\n", (6362, 6385), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, IntegerField\n'), ((2554, 2587), 'wtforms.FormField', 'FormField', (['SpokenLanguagesSubform'], {}), '(SpokenLanguagesSubform)\n', (2563, 2587), False, 'from wtforms import TextAreaField, SelectField, FieldList, FormField, DateField\n'), ((3354, 3386), 'wtforms.FormField', 'FormField', (['DriverLicensesSubform'], {}), '(DriverLicensesSubform)\n', (3363, 3386), False, 'from wtforms import TextAreaField, SelectField, FieldList, FormField, DateField\n'), ((3593, 3630), 'wtforms.FormField', 'FormField', (['OtherCertificationsSubform'], {}), '(OtherCertificationsSubform)\n', (3602, 3630), False, 'from wtforms import TextAreaField, SelectField, FieldList, FormField, DateField\n'), ((4578, 4617), 'wtforms.FormField', 'FormField', 
(['FormationExpererienceSubform'], {}), '(FormationExpererienceSubform)\n', (4587, 4617), False, 'from wtforms import TextAreaField, SelectField, FieldList, FormField, DateField\n'), ((4921, 4960), 'wtforms.FormField', 'FormField', (['FormationExpererienceSubform'], {}), '(FormationExpererienceSubform)\n', (4930, 4960), False, 'from wtforms import TextAreaField, SelectField, FieldList, FormField, DateField\n'), ((5578, 5610), 'wtforms.FormField', 'FormField', (['ComputerSkillsSubform'], {}), '(ComputerSkillsSubform)\n', (5587, 5610), False, 'from wtforms import TextAreaField, SelectField, FieldList, FormField, DateField\n'), ((5833, 5862), 'wtforms.FormField', 'FormField', (['OtherSkillsSubform'], {}), '(OtherSkillsSubform)\n', (5842, 5862), False, 'from wtforms import TextAreaField, SelectField, FieldList, FormField, DateField\n'), ((1230, 1296), 'wtforms.validators.ValidationError', 'ValidationError', (['"""Cette adresse électronique a déjà été utilisée."""'], {}), "('Cette adresse électronique a déjà été utilisée.')\n", (1245, 1296), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((488, 502), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (500, 502), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((559, 573), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (571, 573), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((766, 780), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (778, 780), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((782, 789), 'wtforms.validators.Email', 'Email', ([], {}), '()\n', (787, 789), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((846, 860), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (858, 860), False, 'from wtforms.validators import 
ValidationError, DataRequired, Email, EqualTo\n'), ((928, 942), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (940, 942), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((944, 963), 'wtforms.validators.EqualTo', 'EqualTo', (['"""password"""'], {}), "('password')\n", (951, 963), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((1130, 1174), 'app_source.models.User.query.filter_by', 'User.query.filter_by', ([], {'username': 'username.data'}), '(username=username.data)\n', (1150, 1174), False, 'from app_source.models import User\n'), ((1963, 1977), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (1975, 1977), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((2026, 2040), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (2038, 2040), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((2112, 2126), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (2124, 2126), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((2183, 2197), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (2195, 2197), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((2256, 2276), 'wtforms.validators.Length', 'Length', ([], {'min': '(5)', 'max': '(5)'}), '(min=5, max=5)\n', (2262, 2276), False, 'from wtforms.validators import Length\n'), ((2337, 2351), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (2349, 2351), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((4067, 4081), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (4079, 4081), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((4177, 4191), 'wtforms.validators.DataRequired', 
'DataRequired', ([], {}), '()\n', (4189, 4191), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((4239, 4253), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (4251, 4253), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((4316, 4330), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (4328, 4330), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((6321, 6335), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (6333, 6335), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n')]
|
from rivescript import RiveScript
# Debug-tracing flag passed positionally to the RiveScript constructor.
DEBUG_RIVESCRIPT=False
# Directory containing the .rive brain files.
BRAIN='brain'
# Global interpreter instance; populated by rs_init().
rs = None
def rs_init():
    """Build the global RiveScript interpreter from the BRAIN directory."""
    global rs
    print("Loading rs brain")
    # Assemble the interpreter locally, then publish it to the module global.
    bot = RiveScript(DEBUG_RIVESCRIPT, utf8=True)
    bot.load_directory(BRAIN)
    bot.sort_replies()
    rs = bot
    print("rs brain loaded")
def reply(user, msg):
    """Return the brain's reply to *msg* for the given *user*."""
    # Reading a module-level name needs no ``global`` declaration.
    return rs.reply(user, msg)
|
[
"rivescript.RiveScript"
] |
[((152, 191), 'rivescript.RiveScript', 'RiveScript', (['DEBUG_RIVESCRIPT'], {'utf8': '(True)'}), '(DEBUG_RIVESCRIPT, utf8=True)\n', (162, 191), False, 'from rivescript import RiveScript\n')]
|
import logistic
import pandas
import neural
from sqlalchemy import create_engine
from sys import argv, exit
# Select only articles whose four text-statistics columns are all populated.
READ_NON_NULL_ENTRIES = """
SELECT * FROM articles
WHERE mean_word_length IS NOT NULL
AND mean_sentence_length IS NOT NULL
AND stddev_word_length IS NOT NULL
AND stddev_sentence_length IS NOT NULL;
"""
def main():
    """Load non-null article rows from Postgres and report logistic-model accuracy.

    Usage: main.py db_hostname db_username db_password
    """
    try:
        # argv[1]=host, argv[2]=user, argv[3]=password (see usage line above).
        connect_string = f'postgresql+psycopg2://{argv[2]}:{argv[3]}@{argv[1]}/nlp_experiment'
    except IndexError:
        # Fewer than three CLI arguments were supplied; the bound exception
        # object was unused in the original, so it is no longer captured.
        print('Use the format: main.py db_hostname db_username db_password')
        exit(1)
    engine = create_engine(connect_string)
    # The context manager returns the connection to the pool on exit.
    with engine.connect() as connection:
        articles = pandas.read_sql_query(
            READ_NON_NULL_ENTRIES,
            connection
        )
    logistic_accuracy_and_coeffs = logistic.find_accuracy(articles)
    print("Logistic accuracy was", logistic_accuracy_and_coeffs[0])
    print("Logistic coefficients were\n", logistic_accuracy_and_coeffs[1])
if __name__ == '__main__':
    main()
|
[
"sqlalchemy.create_engine",
"sys.exit",
"logistic.find_accuracy",
"pandas.read_sql_query"
] |
[((627, 656), 'sqlalchemy.create_engine', 'create_engine', (['connect_string'], {}), '(connect_string)\n', (640, 656), False, 'from sqlalchemy import create_engine\n'), ((717, 773), 'pandas.read_sql_query', 'pandas.read_sql_query', (['READ_NON_NULL_ENTRIES', 'connection'], {}), '(READ_NON_NULL_ENTRIES, connection)\n', (738, 773), False, 'import pandas\n'), ((847, 879), 'logistic.find_accuracy', 'logistic.find_accuracy', (['articles'], {}), '(articles)\n', (869, 879), False, 'import logistic\n'), ((605, 612), 'sys.exit', 'exit', (['(1)'], {}), '(1)\n', (609, 612), False, 'from sys import argv, exit\n')]
|
# Copyright (c) 2012 - 2015 <NAME>, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from __future__ import print_function
import click
from .utils import base_url_and_api
@click.command()
@click.option('--result', help="The result to set. Should probably be 'unstable'", default='unstable')
@click.option('--java', help="Alternative 'java' executable", default='java')
@click.option('--direct-url', help="Jenkins URL. Default is JENKINS_URL/HUDSON_URL env var value. Use this argument if JENKINS_URL is a proxy [default: None]")
@click.option('--username', help="Name of jenkins user with access to the job")
@click.option('--password', help="Password of jenkins user with access to the job.")
def set_build_result(username, password, result, direct_url=None, java='java'):
    """Change the result of a Jenkins job.
    Note: set_build_result can only be done from within the job, not after the job has finished.
    Note: Only available if URL is set in `Jenkins <http://jenkins-ci.org/>`_ system configuration.
    This command uses the Jenkins `cli` to change the result. It requires a java executable to run the Jenkins `cli`.
    Please note that in some versions of jenkins the cli is broken, it has no manifest file!
    """
    # Legacy usage sketch kept for reference:
    # %(file)s [--result <result>] [--java <java>] [--direct-url <direct_url>] [(--username <user_name> --password <password>)]
    # Resolve the Jenkins base URL (direct_url overrides JENKINS_URL/HUDSON_URL)
    # and get the matching API wrapper module.
    base_url, api = base_url_and_api(direct_url)
    jenkins = api.Jenkins(base_url, username=username, password=password)
    # cli_call=True: the result is set through the Jenkins CLI, which needs the
    # `java` executable (see the docstring above).
    jenkins.set_build_result(result, java, cli_call=True)
|
[
"click.option",
"click.command"
] |
[((215, 230), 'click.command', 'click.command', ([], {}), '()\n', (228, 230), False, 'import click\n'), ((232, 338), 'click.option', 'click.option', (['"""--result"""'], {'help': '"""The result to set. Should probably be \'unstable\'"""', 'default': '"""unstable"""'}), '(\'--result\', help=\n "The result to set. Should probably be \'unstable\'", default=\'unstable\')\n', (244, 338), False, 'import click\n'), ((335, 411), 'click.option', 'click.option', (['"""--java"""'], {'help': '"""Alternative \'java\' executable"""', 'default': '"""java"""'}), '(\'--java\', help="Alternative \'java\' executable", default=\'java\')\n', (347, 411), False, 'import click\n'), ((413, 581), 'click.option', 'click.option', (['"""--direct-url"""'], {'help': '"""Jenkins URL. Default is JENKINS_URL/HUDSON_URL env var value. Use this argument if JENKINS_URL is a proxy [default: None]"""'}), "('--direct-url', help=\n 'Jenkins URL. Default is JENKINS_URL/HUDSON_URL env var value. Use this argument if JENKINS_URL is a proxy [default: None]'\n )\n", (425, 581), False, 'import click\n'), ((573, 651), 'click.option', 'click.option', (['"""--username"""'], {'help': '"""Name of jenkins user with access to the job"""'}), "('--username', help='Name of jenkins user with access to the job')\n", (585, 651), False, 'import click\n'), ((653, 741), 'click.option', 'click.option', (['"""--password"""'], {'help': '"""Password of jenkins user with access to the job."""'}), "('--password', help=\n 'Password of jenkins user with access to the job.')\n", (665, 741), False, 'import click\n')]
|
## THIS FUNCTION IS UNUSED - THE ACTIVE VERSION LIES IN hs.py
from numba import double, jit, njit, vectorize
from numba import int32, float32, uint8, float64, int64, boolean
import numpy as np
import time
# Apply a line and step function
from numpy import cos, sin, radians
@njit  # @vectorize(["boolean (float32, float32, float32, float32)"])
def line_test(x,y, r, theta):
    # Decide on which side of a discontinuity line the pixel (x, y) lies.
    # The line is specified by its normal from the subset centre: radius r
    # and angle theta in degrees. Returns True for the "kept" side.
    # Interrogation point (the pixel being classified).
    q1x = x
    q1y = y
    # Normalise the angle into [0, 360) and avoid a degenerate zero radius.
    theta = theta % 360
    if r == 0:
        r = 1e-8
    # Axis-aligned special cases first (vertical/horizontal lines).
    if theta == 0. or theta == 360.: # vertical line to the right (theta == 360. is unreachable after % 360)
        x1 = r
        x2 = q1x
        if x2 > x1:
            return False
        else:
            return True
    elif theta == 90.: # horizontal line above
        y1 = r
        y2 = q1y
        if y2>y1:
            return False
        else:
            return True
    elif theta == 180.: # vertical line to the left
        x1 = -r
        x2 = q1x
        if x2 > x1:
            return True
        else:
            return False
    elif theta == 270.: # horizontal line below
        y1 = -r
        y2 = q1y
        if y2 < y1:
            return False
        else:
            return True
    elif theta>0 and theta<180:
        theta = radians(theta)
        # Tangent point: where the normal meets the line.
        t1x = r*cos(theta)
        t1y = r*sin(theta)
        # Line through the tangent point, perpendicular to the normal.
        m = -1*(cos(theta)/sin(theta))
        c = t1y - m*t1x
        y1 = q1y
        y2 = m*q1x + c
        if y1>y2:
            return False
        else:
            return True
    elif theta>180 and theta<360:
        theta = radians(theta)
        # Tangent point: where the normal meets the line.
        t1x = r*cos(theta)
        t1y = r*sin(theta)
        m = -1*cos(theta)/sin(theta)
        c = t1y - m*t1x
        y1 = q1y
        y2 = m*q1x + c
        # Mirror of the previous branch: the inequality flips below the axis.
        if y1<y2:
            return False
        else:
            return True
def gen_hsfilter(a,b, r, theta):
    """Build a boolean Heaviside filter mask for a pair of subsets.

    a, b  -- two equally sized 2-D subsets; only their (square) shape is used.
    r     -- radial offset of the discontinuity line from the subset centre.
    theta -- line orientation in degrees.
    Returns the boolean mask produced by filt_loop (one side of the line True).
    """
    subsets = np.stack((a,b),axis=2)
    # NOTE(review): assumes square subsets -- both mask axes use shape[0].
    # dtype fix: the np.bool alias was removed in NumPy 1.24; plain bool is
    # what it always meant.
    filter_arr = np.zeros((subsets.shape[0],subsets.shape[0]), dtype=bool)
    xc = filter_arr.shape[0]/2
    yc = filter_arr.shape[1]/2
    # Centred pixel coordinate grids; float32 keeps the numba kernel's
    # dtypes consistent with its float32() casts.
    x_length = np.linspace(-xc, xc,subsets.shape[0], dtype=np.float32)
    y_length = np.linspace(-yc,yc,subsets.shape[0], dtype=np.float32)
    xs,ys = np.meshgrid(x_length,y_length)
    # Scalar float32 casts replace the original 1-element array wrap/unwrap
    # (np.array([r])[0]), which allocated arrays for no benefit.
    hsfilter = filt_loop(subsets, np.float32(r), np.float32(theta), xs, ys, filter_arr)
    return hsfilter
@njit
def filt_loop(subsets, r, t, xs, ys, filter_arr):
    """Fill filter_arr pixel by pixel with the result of line_test.

    xs/ys are the centred pixel coordinate grids; r (radius) and t (angle in
    degrees) define the discontinuity line.  filter_arr is mutated in place
    and also returned.  (The original's unused ``flag = np.bool`` was removed:
    the np.bool alias is gone since NumPy 1.24 and would raise AttributeError.)
    """
    xs = xs.astype(np.float32)
    ys = ys.astype(np.float32)
    # Raster through every (row, col) pixel coordinate.
    for col in range(subsets.shape[0]):
        for row in range(subsets.shape[1]):
            x = xs[row, col]
            # The y axis is mirrored so image row order matches Cartesian y.
            y = np.multiply(ys[row, col],-1)
            # True where the pixel lies on the kept side of the line.
            filter_arr[row,col] = line_test(float32(x), float32(y), float32(r), float32(t))
    return filter_arr
|
[
"numpy.stack",
"numpy.radians",
"numpy.meshgrid",
"numpy.multiply",
"numba.float32",
"numpy.zeros",
"numpy.sin",
"numpy.array",
"numpy.linspace",
"numpy.cos"
] |
[((2085, 2109), 'numpy.stack', 'np.stack', (['(a, b)'], {'axis': '(2)'}), '((a, b), axis=2)\n', (2093, 2109), True, 'import numpy as np\n'), ((2125, 2186), 'numpy.zeros', 'np.zeros', (['(subsets.shape[0], subsets.shape[0])'], {'dtype': 'np.bool'}), '((subsets.shape[0], subsets.shape[0]), dtype=np.bool)\n', (2133, 2186), True, 'import numpy as np\n'), ((2280, 2311), 'numpy.array', 'np.array', (['[r]'], {'dtype': 'np.float32'}), '([r], dtype=np.float32)\n', (2288, 2311), True, 'import numpy as np\n'), ((2323, 2358), 'numpy.array', 'np.array', (['[theta]'], {'dtype': 'np.float32'}), '([theta], dtype=np.float32)\n', (2331, 2358), True, 'import numpy as np\n'), ((2424, 2480), 'numpy.linspace', 'np.linspace', (['(-xc)', 'xc', 'subsets.shape[0]'], {'dtype': 'np.float32'}), '(-xc, xc, subsets.shape[0], dtype=np.float32)\n', (2435, 2480), True, 'import numpy as np\n'), ((2495, 2551), 'numpy.linspace', 'np.linspace', (['(-yc)', 'yc', 'subsets.shape[0]'], {'dtype': 'np.float32'}), '(-yc, yc, subsets.shape[0], dtype=np.float32)\n', (2506, 2551), True, 'import numpy as np\n'), ((2562, 2593), 'numpy.meshgrid', 'np.meshgrid', (['x_length', 'y_length'], {}), '(x_length, y_length)\n', (2573, 2593), True, 'import numpy as np\n'), ((3280, 3309), 'numpy.multiply', 'np.multiply', (['ys[row, col]', '(-1)'], {}), '(ys[row, col], -1)\n', (3291, 3309), True, 'import numpy as np\n'), ((3415, 3425), 'numba.float32', 'float32', (['x'], {}), '(x)\n', (3422, 3425), False, 'from numba import int32, float32, uint8, float64, int64, boolean\n'), ((3427, 3437), 'numba.float32', 'float32', (['y'], {}), '(y)\n', (3434, 3437), False, 'from numba import int32, float32, uint8, float64, int64, boolean\n'), ((3439, 3449), 'numba.float32', 'float32', (['r'], {}), '(r)\n', (3446, 3449), False, 'from numba import int32, float32, uint8, float64, int64, boolean\n'), ((3451, 3461), 'numba.float32', 'float32', (['t'], {}), '(t)\n', (3458, 3461), False, 'from numba import int32, float32, uint8, float64, int64, 
boolean\n'), ((1396, 1410), 'numpy.radians', 'radians', (['theta'], {}), '(theta)\n', (1403, 1410), False, 'from numpy import cos, sin, radians\n'), ((1458, 1468), 'numpy.cos', 'cos', (['theta'], {}), '(theta)\n', (1461, 1468), False, 'from numpy import cos, sin, radians\n'), ((1485, 1495), 'numpy.sin', 'sin', (['theta'], {}), '(theta)\n', (1488, 1495), False, 'from numpy import cos, sin, radians\n'), ((1730, 1744), 'numpy.radians', 'radians', (['theta'], {}), '(theta)\n', (1737, 1744), False, 'from numpy import cos, sin, radians\n'), ((1512, 1522), 'numpy.cos', 'cos', (['theta'], {}), '(theta)\n', (1515, 1522), False, 'from numpy import cos, sin, radians\n'), ((1523, 1533), 'numpy.sin', 'sin', (['theta'], {}), '(theta)\n', (1526, 1533), False, 'from numpy import cos, sin, radians\n'), ((1792, 1802), 'numpy.cos', 'cos', (['theta'], {}), '(theta)\n', (1795, 1802), False, 'from numpy import cos, sin, radians\n'), ((1819, 1829), 'numpy.sin', 'sin', (['theta'], {}), '(theta)\n', (1822, 1829), False, 'from numpy import cos, sin, radians\n'), ((1856, 1866), 'numpy.sin', 'sin', (['theta'], {}), '(theta)\n', (1859, 1866), False, 'from numpy import cos, sin, radians\n'), ((1845, 1855), 'numpy.cos', 'cos', (['theta'], {}), '(theta)\n', (1848, 1855), False, 'from numpy import cos, sin, radians\n')]
|
# Environment Setup
import os
import shutil
from settings import settings
def main():
    """Copy filtered review/product image pairs into the train-data folder.

    For every '<asin>_<review_i>.jpg' entry in the filtered-pairs folder,
    copy the product image and the matching review image into
    <train_data_folder>/{products,reviews} and record the pairing in
    train_pairs.csv and train_pairs.txt.
    """
    os.makedirs(settings['train_data_folder'], exist_ok=True)
    products_folder = os.path.join(settings['train_data_folder'], 'products')
    reviews_folder = os.path.join(settings['train_data_folder'], 'reviews')
    os.makedirs(products_folder, exist_ok=True)
    os.makedirs(reviews_folder, exist_ok=True)
    csv_path = os.path.join(settings['train_data_folder'], 'train_pairs.csv')
    txt_path = os.path.join(settings['train_data_folder'], 'train_pairs.txt')
    pairs_list = os.listdir(settings['filtered_pairs_folder'])
    # Context managers guarantee both index files are closed even on error
    # (the original leaked the handles on any exception before close()).
    with open(csv_path, "w") as myfile, open(txt_path, "w") as txtfile:
        myfile.write(','.join(['review', 'product', 'asin', 'review_i'])+'\n')
        for i, pair in enumerate(pairs_list):
            print('\r%i/%i Preparing pair %s ' % (i+1, len(pairs_list), pair), end='')
            # Pair filenames look like '<asin>_<review_i>.jpg'; strip the
            # extension once instead of twice as in the original.
            stem = os.path.splitext(pair)[0]
            asin = stem.split('_')[0]
            review_i = stem.split('_')[1]
            review_path = os.path.join(settings['data_folder'], asin)
            # Output names are zero-padded pair indices; _1 = product, _0 = review.
            product_filename = '%s_1.jpg' % str(i).zfill(5)
            review_filename = '%s_0.jpg' % str(i).zfill(5)
            if os.path.exists(review_path):
                shutil.copy2(os.path.join(review_path, 'product.jpg'),
                             os.path.join(products_folder, product_filename))
                shutil.copy2(os.path.join(review_path, '_%s.jpg' % review_i),
                             os.path.join(reviews_folder, review_filename))
                myfile.write(','.join([review_filename, product_filename, asin, str(review_i)])+'\n')
                txtfile.write('%s %s\n' % (review_filename, product_filename))
            else:
                print('ERROR: Cannot find %s' % review_path)
    print('\nDone.')


if __name__ == '__main__':
    main()
|
[
"os.makedirs",
"os.path.exists",
"os.path.splitext",
"os.path.join",
"os.listdir"
] |
[((93, 150), 'os.makedirs', 'os.makedirs', (["settings['train_data_folder']"], {'exist_ok': '(True)'}), "(settings['train_data_folder'], exist_ok=True)\n", (104, 150), False, 'import os\n'), ((173, 228), 'os.path.join', 'os.path.join', (["settings['train_data_folder']", '"""products"""'], {}), "(settings['train_data_folder'], 'products')\n", (185, 228), False, 'import os\n'), ((250, 304), 'os.path.join', 'os.path.join', (["settings['train_data_folder']", '"""reviews"""'], {}), "(settings['train_data_folder'], 'reviews')\n", (262, 304), False, 'import os\n'), ((309, 352), 'os.makedirs', 'os.makedirs', (['products_folder'], {'exist_ok': '(True)'}), '(products_folder, exist_ok=True)\n', (320, 352), False, 'import os\n'), ((357, 399), 'os.makedirs', 'os.makedirs', (['reviews_folder'], {'exist_ok': '(True)'}), '(reviews_folder, exist_ok=True)\n', (368, 399), False, 'import os\n'), ((416, 478), 'os.path.join', 'os.path.join', (["settings['train_data_folder']", '"""train_pairs.csv"""'], {}), "(settings['train_data_folder'], 'train_pairs.csv')\n", (428, 478), False, 'import os\n'), ((603, 665), 'os.path.join', 'os.path.join', (["settings['train_data_folder']", '"""train_pairs.txt"""'], {}), "(settings['train_data_folder'], 'train_pairs.txt')\n", (615, 665), False, 'import os\n'), ((718, 763), 'os.listdir', 'os.listdir', (["settings['filtered_pairs_folder']"], {}), "(settings['filtered_pairs_folder'])\n", (728, 763), False, 'import os\n'), ((1033, 1076), 'os.path.join', 'os.path.join', (["settings['data_folder']", 'asin'], {}), "(settings['data_folder'], asin)\n", (1045, 1076), False, 'import os\n'), ((1200, 1227), 'os.path.exists', 'os.path.exists', (['review_path'], {}), '(review_path)\n', (1214, 1227), False, 'import os\n'), ((1254, 1294), 'os.path.join', 'os.path.join', (['review_path', '"""product.jpg"""'], {}), "(review_path, 'product.jpg')\n", (1266, 1294), False, 'import os\n'), ((1321, 1368), 'os.path.join', 'os.path.join', (['products_folder', 'product_filename'], 
{}), '(products_folder, product_filename)\n', (1333, 1368), False, 'import os\n'), ((1395, 1442), 'os.path.join', 'os.path.join', (['review_path', "('_%s.jpg' % review_i)"], {}), "(review_path, '_%s.jpg' % review_i)\n", (1407, 1442), False, 'import os\n'), ((1469, 1514), 'os.path.join', 'os.path.join', (['reviews_folder', 'review_filename'], {}), '(reviews_folder, review_filename)\n', (1481, 1514), False, 'import os\n'), ((912, 934), 'os.path.splitext', 'os.path.splitext', (['pair'], {}), '(pair)\n', (928, 934), False, 'import os\n'), ((971, 993), 'os.path.splitext', 'os.path.splitext', (['pair'], {}), '(pair)\n', (987, 993), False, 'import os\n')]
|
import unittest
import main
class TestResult(unittest.TestCase):
    """Smoke test for the main module's greeting helper."""

    def test_res(self):
        """main.hello() must return the exact Jenkins greeting."""
        expected = "Hello, Jenkins"
        self.assertEqual(main.hello(), expected)
if __name__ == "__main__":
    # Run the suite when this file is executed directly (e.g. by a CI job).
    unittest.main()
|
[
"unittest.main",
"main.hello"
] |
[((200, 215), 'unittest.main', 'unittest.main', ([], {}), '()\n', (213, 215), False, 'import unittest\n'), ((119, 131), 'main.hello', 'main.hello', ([], {}), '()\n', (129, 131), False, 'import main\n')]
|
import time
import os
import struct
import array
from fcntl import ioctl
import signal
import sys
from spotmicro.utilities.log import Logger
log = Logger().setup_logger('Remote controller')
class RemoteControllerController:
    """Reads a Linux joystick device and forwards its state to worker queues.

    The controller polls ``/dev/input`` for a ``js*`` device, decodes raw
    joystick events via the kernel joystick ioctl API, and publishes merged
    button/axis state dictionaries on the motion queue.  Connection status is
    mirrored to the LCD-screen and abort queues.
    """

    def __init__(self, communication_queues):
        """Wire up signal handlers, default state and the shared queues.

        communication_queues: dict with 'abort_controller',
        'motion_controller' and 'lcd_screen_controller' queue entries.
        Exits the process with status 1 if setup fails.
        """
        try:
            log.debug('Starting controller...')
            signal.signal(signal.SIGINT, self.exit_gracefully)
            signal.signal(signal.SIGTERM, self.exit_gracefully)
            # We'll store the states here.
            self.connected_device = False
            self.axis_states = {}
            self.button_states = {}
            self.button_map = []
            self.axis_map = []
            self.jsdev = None
            self.previous_fvalue = 0
            self._abort_queue = communication_queues['abort_controller']
            self._motion_queue = communication_queues['motion_controller']
            self._lcd_screen_queue = communication_queues['lcd_screen_controller']
            self._lcd_screen_queue.put('remote_controller_connected SEARCHING')
            log.info('Controller started')
        except Exception as e:
            # NOTE(review): if the failure happens before _lcd_screen_queue is
            # assigned (e.g. a missing queue key), this handler itself raises
            # AttributeError — confirm the intended failure mode.
            log.error('No Remote Controller detected')
            self._lcd_screen_queue.put('remote_controller_connected NOK')
            sys.exit(1)

    def exit_gracefully(self, signum, frame):
        """Signal handler: log and terminate the process cleanly."""
        log.info('Terminated')
        sys.exit(0)

    def do_process_events_from_queues(self):
        """Main loop: wait for a joystick, then stream decoded events.

        While no device is connected the loop keeps sending 'abort', rescans
        for devices and sleeps.  Once connected it reads 8-byte joystick
        event records until the read fails, then falls back to searching.
        """
        remote_controller_connected_already = False
        while True:
            if self.connected_device and not remote_controller_connected_already:
                self._abort_queue.put('activate_servos')
                self._lcd_screen_queue.put('remote_controller_connected OK')
                remote_controller_connected_already = True
            else:
                self._abort_queue.put('abort')
                self._lcd_screen_queue.put('remote_controller_connected SEARCHING')
                remote_controller_connected_already = False
                self.check_for_connected_devices()
                time.sleep(3)
                continue
            # Main event loop
            while True:
                try:
                    # Each joystick event is a fixed 8-byte record:
                    # u32 time, s16 value, u8 type, u8 number.
                    evbuf = self.jsdev.read(8)
                    if evbuf:
                        buftime, value, type, number = struct.unpack('IhBB', evbuf)
                        if type & 0x80:
                            # JS_EVENT_INIT: synthetic initial-state event, skip.
                            continue
                        if type & 0x01:
                            # JS_EVENT_BUTTON
                            button = self.button_map[number]
                            if button:
                                self.button_states[button] = value
                        if type & 0x02:
                            # JS_EVENT_AXIS: normalize to [-1.0, 1.0], one decimal.
                            axis = self.axis_map[number]
                            if axis:
                                fvalue = round(value / 32767.0, 1)
                                # Drop duplicates so the motion queue only sees changes.
                                if self.previous_fvalue == fvalue:
                                    continue
                                self.axis_states[axis] = fvalue
                                self.previous_fvalue = fvalue
                                states = {}
                                states.update(self.button_states)
                                states.update(self.axis_states)
                                # log.debug(self.states)
                                self._motion_queue.put(states)
                except Exception as e:
                    log.error('Problem with the remote controller, seems we lost connection with it')
                    # self._lcd_screen_queue.put('Line2 No controller')
                    self._abort_queue.put('abort')
                    remote_controller_connected_already = False
                    self.check_for_connected_devices()
                    break

    def check_for_connected_devices(self):
        """Scan /dev/input for a js* device and (re)open /dev/input/js0.

        On success this populates axis_map/button_map and their state dicts
        from the kernel's axis and button tables, and sets
        self.connected_device.  Only the first js* entry is processed.
        """
        log.info('Looking for connected devices')
        self.connected_device = False
        for fn in os.listdir('/dev/input'):
            if fn.startswith('js'):
                self.connected_device = True
                # These constants were borrowed from linux/input.h
                axis_names = {
                    0x00: 'lx',
                    0x01: 'ly',
                    0x02: 'lz',
                    0x03: 'rx',
                    0x04: 'ry',
                    0x05: 'rz',
                    0x06: 'trottle',
                    0x07: 'rudder',
                    0x08: 'wheel',
                    0x09: 'gas',
                    0x0a: 'brake',
                    0x10: 'hat0x',
                    0x11: 'hat0y',
                    0x12: 'hat1x',
                    0x13: 'hat1y',
                    0x14: 'hat2x',
                    0x15: 'hat2y',
                    0x16: 'hat3x',
                    0x17: 'hat3y',
                    0x18: 'pressure',
                    0x19: 'distance',
                    0x1a: 'tilt_x',
                    0x1b: 'tilt_y',
                    0x1c: 'tool_width',
                    0x20: 'volume',
                    0x28: 'misc',
                }
                button_names = {
                    0x120: 'trigger',
                    0x121: 'thumb',
                    0x122: 'thumb2',
                    0x123: 'top',
                    0x124: 'top2',
                    0x125: 'pinkie',
                    0x126: 'base',
                    0x127: 'base2',
                    0x128: 'base3',
                    0x129: 'base4',
                    0x12a: 'base5',
                    0x12b: 'base6',
                    0x12f: 'dead',
                    0x130: 'a',
                    0x131: 'b',
                    0x132: 'c',
                    0x133: 'x',
                    0x134: 'y',
                    0x135: 'z',
                    0x136: 'tl',
                    0x137: 'tr',
                    0x138: 'tl2',
                    0x139: 'tr2',
                    0x13a: 'select',
                    0x13b: 'start',
                    0x13c: 'mode',
                    0x13d: 'thumbl',
                    0x13e: 'thumbr',
                    0x220: 'dpad_up',
                    0x221: 'dpad_down',
                    0x222: 'dpad_left',
                    0x223: 'dpad_right',
                    # XBox 360 controller uses these codes.
                    0x2c0: 'dpad_left',
                    0x2c1: 'dpad_right',
                    0x2c2: 'dpad_up',
                    0x2c3: 'dpad_down',
                }
                # Open the joystick device.
                fn = '/dev/input/js0'
                log.debug(('Opening %s...' % fn))
                self.jsdev = open(fn, 'rb')
                # Get the device name.
                # buf = bytearray(63)
                buf = array.array('B', [0] * 64)
                ioctl(self.jsdev, 0x80006a13 + (0x10000 * len(buf)), buf)  # JSIOCGNAME(len)
                # FIX: array.tostring() was removed in Python 3.9;
                # tobytes() is the byte-identical replacement.
                js_name = buf.tobytes().rstrip(b'\x00').decode('utf-8')
                log.info(('Connected to device: %s' % js_name))
                # Get number of axes and buttons.
                buf = array.array('B', [0])
                ioctl(self.jsdev, 0x80016a11, buf)  # JSIOCGAXES
                num_axes = buf[0]
                buf = array.array('B', [0])
                ioctl(self.jsdev, 0x80016a12, buf)  # JSIOCGBUTTONS
                num_buttons = buf[0]
                # Get the axis map.
                buf = array.array('B', [0] * 0x40)
                ioctl(self.jsdev, 0x80406a32, buf)  # JSIOCGAXMAP
                for axis in buf[:num_axes]:
                    axis_name = axis_names.get(axis, 'unknown(0x%02x)' % axis)
                    self.axis_map.append(axis_name)
                    self.axis_states[axis_name] = 0.0
                # Get the button map.
                buf = array.array('H', [0] * 200)
                ioctl(self.jsdev, 0x80406a34, buf)  # JSIOCGBTNMAP
                for btn in buf[:num_buttons]:
                    btn_name = button_names.get(btn, 'unknown(0x%03x)' % btn)
                    self.button_map.append(btn_name)
                    self.button_states[btn_name] = 0
                log.info(('%d axes found: %s' % (num_axes, ', '.join(self.axis_map))))
                log.info(('%d buttons found: %s' % (num_buttons, ', '.join(self.button_map))))
                break
|
[
"fcntl.ioctl",
"struct.unpack",
"time.sleep",
"array.array",
"signal.signal",
"spotmicro.utilities.log.Logger",
"os.listdir",
"sys.exit"
] |
[((148, 156), 'spotmicro.utilities.log.Logger', 'Logger', ([], {}), '()\n', (154, 156), False, 'from spotmicro.utilities.log import Logger\n'), ((1381, 1392), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1389, 1392), False, 'import sys\n'), ((3865, 3889), 'os.listdir', 'os.listdir', (['"""/dev/input"""'], {}), "('/dev/input')\n", (3875, 3889), False, 'import os\n'), ((350, 400), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'self.exit_gracefully'], {}), '(signal.SIGINT, self.exit_gracefully)\n', (363, 400), False, 'import signal\n'), ((413, 464), 'signal.signal', 'signal.signal', (['signal.SIGTERM', 'self.exit_gracefully'], {}), '(signal.SIGTERM, self.exit_gracefully)\n', (426, 464), False, 'import signal\n'), ((1283, 1294), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1291, 1294), False, 'import sys\n'), ((2065, 2078), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (2075, 2078), False, 'import time\n'), ((6679, 6705), 'array.array', 'array.array', (['"""B"""', '([0] * 64)'], {}), "('B', [0] * 64)\n", (6690, 6705), False, 'import array\n'), ((7009, 7030), 'array.array', 'array.array', (['"""B"""', '[0]'], {}), "('B', [0])\n", (7020, 7030), False, 'import array\n'), ((7047, 7081), 'fcntl.ioctl', 'ioctl', (['self.jsdev', '(2147576337)', 'buf'], {}), '(self.jsdev, 2147576337, buf)\n', (7052, 7081), False, 'from fcntl import ioctl\n'), ((7153, 7174), 'array.array', 'array.array', (['"""B"""', '[0]'], {}), "('B', [0])\n", (7164, 7174), False, 'import array\n'), ((7191, 7225), 'fcntl.ioctl', 'ioctl', (['self.jsdev', '(2147576338)', 'buf'], {}), '(self.jsdev, 2147576338, buf)\n', (7196, 7225), False, 'from fcntl import ioctl\n'), ((7339, 7365), 'array.array', 'array.array', (['"""B"""', '([0] * 64)'], {}), "('B', [0] * 64)\n", (7350, 7365), False, 'import array\n'), ((7384, 7418), 'fcntl.ioctl', 'ioctl', (['self.jsdev', '(2151705138)', 'buf'], {}), '(self.jsdev, 2151705138, buf)\n', (7389, 7418), False, 'from fcntl import ioctl\n'), ((7725, 7752), 
'array.array', 'array.array', (['"""H"""', '([0] * 200)'], {}), "('H', [0] * 200)\n", (7736, 7752), False, 'import array\n'), ((7769, 7803), 'fcntl.ioctl', 'ioctl', (['self.jsdev', '(2151705140)', 'buf'], {}), '(self.jsdev, 2151705140, buf)\n', (7774, 7803), False, 'from fcntl import ioctl\n'), ((2313, 2341), 'struct.unpack', 'struct.unpack', (['"""IhBB"""', 'evbuf'], {}), "('IhBB', evbuf)\n", (2326, 2341), False, 'import struct\n')]
|
#!/usr/bin/python3
""" objects that handle all default RestFul API actions for Place - Amenity """
from models.place import Place
from models.amenity import Amenity
from models import storage
from api.v1.views import app_views
from os import environ
from flask import abort, jsonify, make_response, request
from flasgger.utils import swag_from
@app_views.route('/places/<place_id>/amenities', methods=['GET'],
                 strict_slashes=False)
@swag_from('documentation/place_amenity/get_places_amenities.yml',
           methods=['GET'])
def get_place_amenities(place_id):
    """
    Retrieves the list of all Amenity objects of a Place

    404 if the place_id is unknown; otherwise a JSON list of amenity dicts.
    """
    # FIX: the route rule was 'places/...' (no leading slash); Werkzeug
    # requires URL rules to start with '/', as the sibling routes do.
    place = storage.get(Place, place_id)
    if not place:
        abort(404)
    # DB storage links Amenity objects directly; file storage keeps only ids.
    if environ.get('HBNB_TYPE_STORAGE') == "db":
        amenities = [amenity.to_dict() for amenity in place.amenities]
    else:
        amenities = [storage.get(Amenity, amenity_id).to_dict()
                     for amenity_id in place.amenity_ids]
    return jsonify(amenities)
@app_views.route('/places/<place_id>/amenities/<amenity_id>',
                 methods=['DELETE'], strict_slashes=False)
@swag_from('documentation/place_amenity/delete_place_amenities.yml',
           methods=['DELETE'])
def delete_place_amenity(place_id, amenity_id):
    """
    Deletes a Amenity object of a Place

    404 when the place, the amenity, or the link between them is missing;
    otherwise unlinks the amenity, persists, and returns an empty JSON body.
    """
    place = storage.get(Place, place_id)
    if not place:
        abort(404)
    amenity = storage.get(Amenity, amenity_id)
    if not amenity:
        abort(404)
    # DB storage stores Amenity objects on the place; file storage stores ids.
    uses_db = environ.get('HBNB_TYPE_STORAGE') == "db"
    if uses_db:
        if amenity not in place.amenities:
            abort(404)
        place.amenities.remove(amenity)
    else:
        if amenity_id not in place.amenity_ids:
            abort(404)
        place.amenity_ids.remove(amenity_id)
    storage.save()
    return make_response(jsonify({}), 200)
@app_views.route('/places/<place_id>/amenities/<amenity_id>', methods=['POST'],
                 strict_slashes=False)
@swag_from('documentation/place_amenity/post_place_amenities.yml',
           methods=['POST'])
def post_place_amenity(place_id, amenity_id):
    """
    Link a Amenity object to a Place

    404 when place or amenity is unknown; 200 with the amenity dict when the
    link already exists (nothing saved); 201 after creating a new link.
    """
    place = storage.get(Place, place_id)
    if not place:
        abort(404)
    amenity = storage.get(Amenity, amenity_id)
    if not amenity:
        abort(404)
    # DB storage links objects; file storage links ids.
    if environ.get('HBNB_TYPE_STORAGE') == "db":
        already_linked = amenity in place.amenities
        if not already_linked:
            place.amenities.append(amenity)
    else:
        already_linked = amenity_id in place.amenity_ids
        if not already_linked:
            place.amenity_ids.append(amenity_id)
    if already_linked:
        # Existing link: report success without touching storage.
        return make_response(jsonify(amenity.to_dict()), 200)
    storage.save()
    return make_response(jsonify(amenity.to_dict()), 201)
|
[
"models.storage.get",
"flasgger.utils.swag_from",
"flask.abort",
"os.environ.get",
"flask.jsonify",
"models.storage.save",
"api.v1.views.app_views.route"
] |
[((347, 436), 'api.v1.views.app_views.route', 'app_views.route', (['"""places/<place_id>/amenities"""'], {'methods': "['GET']", 'strict_slashes': '(False)'}), "('places/<place_id>/amenities', methods=['GET'],\n strict_slashes=False)\n", (362, 436), False, 'from api.v1.views import app_views\n'), ((451, 538), 'flasgger.utils.swag_from', 'swag_from', (['"""documentation/place_amenity/get_places_amenities.yml"""'], {'methods': "['GET']"}), "('documentation/place_amenity/get_places_amenities.yml', methods=[\n 'GET'])\n", (460, 538), False, 'from flasgger.utils import swag_from\n'), ((1019, 1126), 'api.v1.views.app_views.route', 'app_views.route', (['"""/places/<place_id>/amenities/<amenity_id>"""'], {'methods': "['DELETE']", 'strict_slashes': '(False)'}), "('/places/<place_id>/amenities/<amenity_id>', methods=[\n 'DELETE'], strict_slashes=False)\n", (1034, 1126), False, 'from api.v1.views import app_views\n'), ((1140, 1232), 'flasgger.utils.swag_from', 'swag_from', (['"""documentation/place_amenity/delete_place_amenities.yml"""'], {'methods': "['DELETE']"}), "('documentation/place_amenity/delete_place_amenities.yml', methods\n =['DELETE'])\n", (1149, 1232), False, 'from flasgger.utils import swag_from\n'), ((1858, 1963), 'api.v1.views.app_views.route', 'app_views.route', (['"""/places/<place_id>/amenities/<amenity_id>"""'], {'methods': "['POST']", 'strict_slashes': '(False)'}), "('/places/<place_id>/amenities/<amenity_id>', methods=[\n 'POST'], strict_slashes=False)\n", (1873, 1963), False, 'from api.v1.views import app_views\n'), ((1977, 2065), 'flasgger.utils.swag_from', 'swag_from', (['"""documentation/place_amenity/post_place_amenities.yml"""'], {'methods': "['POST']"}), "('documentation/place_amenity/post_place_amenities.yml', methods=[\n 'POST'])\n", (1986, 2065), False, 'from flasgger.utils import swag_from\n'), ((665, 693), 'models.storage.get', 'storage.get', (['Place', 'place_id'], {}), '(Place, place_id)\n', (676, 693), False, 'from models import 
storage\n'), ((997, 1015), 'flask.jsonify', 'jsonify', (['amenities'], {}), '(amenities)\n', (1004, 1015), False, 'from flask import abort, jsonify, make_response, request\n'), ((1355, 1383), 'models.storage.get', 'storage.get', (['Place', 'place_id'], {}), '(Place, place_id)\n', (1366, 1383), False, 'from models import storage\n'), ((1437, 1469), 'models.storage.get', 'storage.get', (['Amenity', 'amenity_id'], {}), '(Amenity, amenity_id)\n', (1448, 1469), False, 'from models import storage\n'), ((1797, 1811), 'models.storage.save', 'storage.save', ([], {}), '()\n', (1809, 1811), False, 'from models import storage\n'), ((2183, 2211), 'models.storage.get', 'storage.get', (['Place', 'place_id'], {}), '(Place, place_id)\n', (2194, 2211), False, 'from models import storage\n'), ((2265, 2297), 'models.storage.get', 'storage.get', (['Amenity', 'amenity_id'], {}), '(Amenity, amenity_id)\n', (2276, 2297), False, 'from models import storage\n'), ((2739, 2753), 'models.storage.save', 'storage.save', ([], {}), '()\n', (2751, 2753), False, 'from models import storage\n'), ((721, 731), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (726, 731), False, 'from flask import abort, jsonify, make_response, request\n'), ((740, 772), 'os.environ.get', 'environ.get', (['"""HBNB_TYPE_STORAGE"""'], {}), "('HBNB_TYPE_STORAGE')\n", (751, 772), False, 'from os import environ\n'), ((1411, 1421), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (1416, 1421), False, 'from flask import abort, jsonify, make_response, request\n'), ((1499, 1509), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (1504, 1509), False, 'from flask import abort, jsonify, make_response, request\n'), ((1518, 1550), 'os.environ.get', 'environ.get', (['"""HBNB_TYPE_STORAGE"""'], {}), "('HBNB_TYPE_STORAGE')\n", (1529, 1550), False, 'from os import environ\n'), ((1837, 1848), 'flask.jsonify', 'jsonify', (['{}'], {}), '({})\n', (1844, 1848), False, 'from flask import abort, jsonify, make_response, request\n'), 
((2239, 2249), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (2244, 2249), False, 'from flask import abort, jsonify, make_response, request\n'), ((2327, 2337), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (2332, 2337), False, 'from flask import abort, jsonify, make_response, request\n'), ((2346, 2378), 'os.environ.get', 'environ.get', (['"""HBNB_TYPE_STORAGE"""'], {}), "('HBNB_TYPE_STORAGE')\n", (2357, 2378), False, 'from os import environ\n'), ((1615, 1625), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (1620, 1625), False, 'from flask import abort, jsonify, make_response, request\n'), ((1736, 1746), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (1741, 1746), False, 'from flask import abort, jsonify, make_response, request\n'), ((884, 916), 'models.storage.get', 'storage.get', (['Amenity', 'amenity_id'], {}), '(Amenity, amenity_id)\n', (895, 916), False, 'from models import storage\n')]
|
from __future__ import absolute_import
import numpy as np
import os
import unittest
from numpy.testing import assert_array_almost_equal
from .. import parse_spectrum
# Directory containing the fixture spectrum files used by the tests below.
FIXTURE_PATH = os.path.dirname(__file__)
# Every fixture encodes the same three (x, y) samples; parsing any format
# must reproduce this array.
FIXTURE_DATA = np.array([[0.4,3.2],[1.2,2.7],[2.0,5.4]])
class TextFormatTests(unittest.TestCase):
    """parse_spectrum must read every supported text layout identically."""

    def _assert_fixture_parses(self, filename):
        # Shared check: parse the named fixture and compare to FIXTURE_DATA.
        spectrum = parse_spectrum(os.path.join(FIXTURE_PATH, filename))
        assert_array_almost_equal(spectrum, FIXTURE_DATA)

    def test_tsv_data(self):
        self._assert_fixture_parses('fixture.tsv')

    def test_csv_data(self):
        self._assert_fixture_parses('fixture.csv')

    def test_loose_data(self):
        self._assert_fixture_parses('fixture.txt')
if __name__ == '__main__':
    # Allow running this test module directly instead of via a test runner.
    unittest.main()
|
[
"unittest.main",
"os.path.dirname",
"numpy.array",
"numpy.testing.assert_array_almost_equal",
"os.path.join"
] |
[((183, 208), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (198, 208), False, 'import os\n'), ((224, 270), 'numpy.array', 'np.array', (['[[0.4, 3.2], [1.2, 2.7], [2.0, 5.4]]'], {}), '([[0.4, 3.2], [1.2, 2.7], [2.0, 5.4]])\n', (232, 270), True, 'import numpy as np\n'), ((764, 779), 'unittest.main', 'unittest.main', ([], {}), '()\n', (777, 779), False, 'import unittest\n'), ((407, 449), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['x', 'FIXTURE_DATA'], {}), '(x, FIXTURE_DATA)\n', (432, 449), False, 'from numpy.testing import assert_array_almost_equal\n'), ((548, 590), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['x', 'FIXTURE_DATA'], {}), '(x, FIXTURE_DATA)\n', (573, 590), False, 'from numpy.testing import assert_array_almost_equal\n'), ((691, 733), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['x', 'FIXTURE_DATA'], {}), '(x, FIXTURE_DATA)\n', (716, 733), False, 'from numpy.testing import assert_array_almost_equal\n'), ((360, 401), 'os.path.join', 'os.path.join', (['FIXTURE_PATH', '"""fixture.tsv"""'], {}), "(FIXTURE_PATH, 'fixture.tsv')\n", (372, 401), False, 'import os\n'), ((501, 542), 'os.path.join', 'os.path.join', (['FIXTURE_PATH', '"""fixture.csv"""'], {}), "(FIXTURE_PATH, 'fixture.csv')\n", (513, 542), False, 'import os\n'), ((644, 685), 'os.path.join', 'os.path.join', (['FIXTURE_PATH', '"""fixture.txt"""'], {}), "(FIXTURE_PATH, 'fixture.txt')\n", (656, 685), False, 'import os\n')]
|
"""
The ``ui.NamedFrame`` class is a variation of the ``ui.Frame`` which lets you
assign a name to the frame. Naming a frame allows you to refer to that frame
by name in Javascript code, and as the target for a hyperlink.
"""
from pyjamas.ui.SimplePanel import SimplePanel
from pyjamas.ui.VerticalPanel import VerticalPanel
from pyjamas.ui.NamedFrame import NamedFrame
from pyjamas.ui.HTML import HTML
class NamedFrameDemo(SimplePanel):
    """Demo: a named iframe targeted by three external hyperlinks."""

    def __init__(self):
        SimplePanel.__init__(self)
        panel = VerticalPanel(Spacing=5)
        # The frame is added first so it sits above the links, and its name
        # ("myFrame") is what the anchors below target.
        panel.add(NamedFrame("myFrame", Width="100%", Height="200px"))
        link_markup = (
            '<a href="http://google.com" target="myFrame">Google</a>',
            '<a href="http://yahoo.com" target="myFrame">Yahoo</a>',
            '<a href="http://pyjs.org" target="myFrame">Pyjamas</a>',
        )
        for markup in link_markup:
            panel.add(HTML(markup))
        self.add(panel)
|
[
"pyjamas.ui.VerticalPanel.VerticalPanel",
"pyjamas.ui.NamedFrame.NamedFrame",
"pyjamas.ui.HTML.HTML",
"pyjamas.ui.SimplePanel.SimplePanel.__init__"
] |
[((471, 497), 'pyjamas.ui.SimplePanel.SimplePanel.__init__', 'SimplePanel.__init__', (['self'], {}), '(self)\n', (491, 497), False, 'from pyjamas.ui.SimplePanel import SimplePanel\n'), ((516, 540), 'pyjamas.ui.VerticalPanel.VerticalPanel', 'VerticalPanel', ([], {'Spacing': '(5)'}), '(Spacing=5)\n', (529, 540), False, 'from pyjamas.ui.VerticalPanel import VerticalPanel\n'), ((558, 609), 'pyjamas.ui.NamedFrame.NamedFrame', 'NamedFrame', (['"""myFrame"""'], {'Width': '"""100%"""', 'Height': '"""200px"""'}), "('myFrame', Width='100%', Height='200px')\n", (568, 609), False, 'from pyjamas.ui.NamedFrame import NamedFrame\n'), ((712, 775), 'pyjamas.ui.HTML.HTML', 'HTML', (['"""<a href="http://google.com" target="myFrame">Google</a>"""'], {}), '(\'<a href="http://google.com" target="myFrame">Google</a>\')\n', (716, 775), False, 'from pyjamas.ui.HTML import HTML\n'), ((796, 857), 'pyjamas.ui.HTML.HTML', 'HTML', (['"""<a href="http://yahoo.com" target="myFrame">Yahoo</a>"""'], {}), '(\'<a href="http://yahoo.com" target="myFrame">Yahoo</a>\')\n', (800, 857), False, 'from pyjamas.ui.HTML import HTML\n'), ((878, 940), 'pyjamas.ui.HTML.HTML', 'HTML', (['"""<a href="http://pyjs.org" target="myFrame">Pyjamas</a>"""'], {}), '(\'<a href="http://pyjs.org" target="myFrame">Pyjamas</a>\')\n', (882, 940), False, 'from pyjamas.ui.HTML import HTML\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __init__ import *
from GUI_BASE import GUIbase
#print base.messenger.toggleVerbose()
# Template for the on-screen combo popup: the literal word "COMBO" above a
# slot for the current combo count (filled in via str.format).
COMBO_TEXT = \
"""\
COMBO
{0}\
"""
class Play(GUIbase):
    """In-game HUD: combo popup plus highest-combo, miss and score labels."""
    def __init__(self):
        GUIbase.__init__(self)
        #self.clear_status_bar['value'] = 0
        #self.setup_play_GUI()
        #taskMgr.doMethodLater(.1, self.setup_play_GUI, "setup_play_GUI")
    def setup_play_GUI(self, task=None):
        """Build the play-mode HUD and reset all counters.

        `task` is accepted so the method can be scheduled through taskMgr
        (see the commented-out call in __init__); it is otherwise unused.
        """
        # Parent node for the HUD widgets; removed again in unload_play_GUI.
        self.GUI_OBJ = render2d.attachNewNode("GUI_OBJ_PLAY")
        self.highest_combo_count = 0
        self.combo_count = 0
        self.miss_count = 0
        self.hit_count = 0
        self.score_val = 0
        # Combo popup lives in the 3D scene (parent=render), not the 2D HUD.
        self.combo_Lbl = OnscreenText(text='',
                                mayChange=1,
                                parent=render,
                                font=self.font_koverwatch,
                                fg=(0, 0, 0, 1),
                                pos=(0, 35, 20),
                                scale=(6, 8, 0)
                                )
        # Flip the text to face the camera and render both sides.
        self.combo_Lbl.setHpr(0, 182.5, 0)
        self.combo_Lbl.setTwoSided(True)
        self.h_com_Lbl = OnscreenText(text='COMBO: 000',
                                mayChange=1,
                                parent=self.GUI_OBJ,
                                font=self.font_koverwatch,
                                fg=(1, 1, 1, 1),
                                pos=(-.75, .75, 0),
                                scale=(.1))
        self.miss_Lbl = OnscreenText(text='MISS : 000',
                                mayChange=1,
                                parent=self.GUI_OBJ,
                                font=self.font_koverwatch,
                                fg=(1, 1, 1, 1),
                                pos=(-.75, .6, 0),
                                scale=(.1))
        self.score_Lbl = OnscreenText(text='SCORE: %06d'%self.score_val,
                                mayChange=1,
                                parent=self.GUI_OBJ,
                                font=self.font_koverwatch,
                                fg=(1, 1, 1, 1),
                                pos=(.75, .75, 0),
                                scale=(.1))
    def set_combo_text(self, text):
        """Show `text` in the combo popup with a fade-out, and refresh the
        highest-combo label when the current combo sets a new record."""
        self.combo_Lbl.setText(text)
        # Fade the popup from opaque to transparent over 1.5 s.
        LerpColorScaleInterval(self.combo_Lbl, 1.5, (0, 0, 0, 0), (0, 0, 0, 1), name='combo_txt_effect').start()
        if self.combo_count > self.highest_combo_count:
            self.highest_combo_count = self.combo_count
            self.h_com_Lbl.setText('COMBO : %03d'%self.highest_combo_count)
    def return_combo_count(self):
        """Return the current combo count."""
        return self.combo_count
    def miss_updater(self, val):
        """Add `val` to the miss counter and refresh its label."""
        self.miss_count += val
        self.miss_Lbl.setText('MISS : %03d'%self.miss_count)
    def score_updater(self, val):
        """Add `val` to the score and refresh its label."""
        self.score_val += val
        self.score_Lbl.setText('SCORE: %06d'%self.score_val)
    #Remove play GUI resources
    def unload_play_GUI(self, task=None):
        """Fade out every HUD node and remove it one second later."""
        target_node = render2d.findAllMatches("GUI_OBJ_PLAY")
        for i in target_node:
            LerpColorScaleInterval(i, .5, (0, 0, 0, 0)).start()
        def remove_task(task):
            # Deferred cleanup: actually detach the nodes after the fade.
            for i in target_node:
                i.remove_node()
        taskMgr.doMethodLater(1, remove_task, "unload_play_GUI")
|
[
"GUI_BASE.GUIbase.__init__"
] |
[((217, 239), 'GUI_BASE.GUIbase.__init__', 'GUIbase.__init__', (['self'], {}), '(self)\n', (233, 239), False, 'from GUI_BASE import GUIbase\n')]
|
from typing import Dict, List, Union, Any
import numpy as np
import numpy.linalg as la
from graphik.robots import RobotPlanar
from graphik.graphs.graph_base import ProblemGraph
from graphik.utils import *
from liegroups.numpy import SE2, SO2
import networkx as nx
from numpy import cos, pi
from math import sqrt
class ProblemGraphPlanar(ProblemGraph):
    """Distance-geometry problem graph for a planar robot.

    Composes a fixed base frame subgraph with the robot's kinematic
    structure subgraph, then installs distance limits induced by link
    lengths and joint limits.
    """
    # NOTE(review): mutable default `params: Dict = {}` is shared across
    # calls if ever mutated — confirm the base class treats it read-only.
    def __init__(self, robot: RobotPlanar, params: Dict = {}):
        super(ProblemGraphPlanar, self).__init__(robot, params)
        #
        base = self.base_subgraph()
        structure = self.structure_subgraph()
        composition = nx.compose(base, structure)
        self.add_nodes_from(composition.nodes(data=True))
        self.add_edges_from(composition.edges(data=True))
        self.set_limits()
        self.root_angle_limits()
    def base_subgraph(self) -> nx.DiGraph:
        """Build the fixed base triangle: root 'p0' plus axis anchors 'x','y'.

        All three edges get exact (equal lower/upper) distance bounds taken
        from the anchor positions.
        """
        base = nx.DiGraph([("p0", "x"), ("p0", "y"), ("x", "y")])
        # Invert x axis because of the way joint limits are set up, makes no difference
        base.add_nodes_from(
            [
                ("p0", {POS: np.array([0, 0]), TYPE: [BASE, ROBOT]}),
                ("x", {POS: np.array([-1, 0]), TYPE: [BASE]}),
                ("y", {POS: np.array([0, 1]), TYPE: [BASE]}),
            ]
        )
        for u, v in base.edges():
            # Anchors are rigid: lower bound == upper bound == actual distance.
            base[u][v][DIST] = la.norm(base.nodes[u][POS] - base.nodes[v][POS])
            base[u][v][LOWER] = base[u][v][DIST]
            base[u][v][UPPER] = base[u][v][DIST]
            base[u][v][BOUNDED] = []
        return base
    def structure_subgraph(self) -> nx.DiGraph:
        """Build the robot's joint-chain subgraph from root to each end effector.

        Consecutive joints are connected with exact distance bounds; node
        positions are used only to compute those distances and then deleted.
        """
        robot = self.robot
        end_effectors = self.robot.end_effectors
        kinematic_map = self.robot.kinematic_map
        structure = nx.empty_graph(create_using=nx.DiGraph)
        for ee in end_effectors:
            k_map = kinematic_map[ROOT][ee]
            for idx in range(len(k_map)):
                cur = k_map[idx]
                cur_pos = robot.nodes[cur]["T0"].trans
                # Add nodes for joint and edge between them
                structure.add_nodes_from([(cur, {POS: cur_pos, TYPE: [ROBOT]})])
                if cur == ROOT:
                    structure.nodes[cur][TYPE] += [BASE]
                # If there exists a preceeding joint, connect it to new
                if idx != 0:
                    pred = k_map[idx - 1]
                    dist = la.norm(
                        structure.nodes[cur][POS] - structure.nodes[pred][POS]
                    )
                    structure.add_edge(
                        pred,
                        cur,
                        **{DIST: dist, LOWER: dist, UPPER: dist, BOUNDED: []},
                    )
                    if cur in self.robot.end_effectors:
                        structure.nodes[cur][TYPE] += [END_EFFECTOR]
                        structure.nodes[pred][TYPE] += [END_EFFECTOR]
        # Delete positions used for weights
        for u in structure.nodes:
            del structure.nodes[u][POS]
        return structure
    def root_angle_limits(self):
        """Bound distances from the 'x' anchor to the root's successors.

        The first joint's angular limit translates, via the law of cosines,
        into a lower distance bound between the anchor and that joint.
        """
        ax = "x"
        S = self.structure
        l1 = la.norm(self.nodes[ax][POS])
        for node in S.successors(ROOT):
            if DIST in S[ROOT][node]:
                l2 = S[ROOT][node][DIST]
                lb = self.robot.lb[node]
                ub = self.robot.ub[node]
                lim = max(abs(ub), abs(lb))
                # Assumes bounds are less than pi in magnitude
                self.add_edge(ax, node)
                # Law of cosines: max separation is l1+l2 (straight), min at
                # the extreme joint angle `lim`.
                self[ax][node][UPPER] = l1 + l2
                self[ax][node][LOWER] = sqrt(
                    l1 ** 2 + l2 ** 2 - 2 * l1 * l2 * cos(pi - lim)
                )
                self[ax][node][BOUNDED] = BELOW
    def set_limits(self):
        """
        Sets known bounds on the distances between joints.
        This is induced by link length and joint limits.
        """
        S = self.structure
        for u in S:
            # direct successors are fully known
            for v in (suc for suc in S.successors(u) if suc):
                self[u][v][UPPER] = S[u][v][DIST]
                self[u][v][LOWER] = S[u][v][DIST]
            # Grand-children: distance depends on the intermediate joint
            # angle, so only an interval (law of cosines) is known.
            for v in (des for des in level2_descendants(S, u) if des):
                ids = self.robot.kinematic_map[u][v]  # TODO generate this at init
                l1 = self.robot.l[ids[1]]
                l2 = self.robot.l[ids[2]]
                lb = self.robot.lb[ids[2]]  # symmetric limit
                ub = self.robot.ub[ids[2]]  # symmetric limit
                lim = max(abs(ub), abs(lb))
                self.add_edge(u, v)
                self[u][v][UPPER] = l1 + l2
                self[u][v][LOWER] = sqrt(
                    l1 ** 2 + l2 ** 2 - 2 * l1 * l2 * cos(pi - lim)
                )
                self[u][v][BOUNDED] = BELOW
    def _pose_goal(self, T_goal: Dict[str, SE2]) -> Dict[str, ArrayLike]:
        """Convert goal poses into anchor positions for the graph.

        For each goal frame, places the target node at the goal translation
        and its predecessor behind it along the goal x-axis at the known
        link distance.
        """
        pos = {}
        for u, T_goal_u in T_goal.items():
            for v in self.structure.predecessors(u):
                if DIST in self[v][u]:
                    d = self[v][u][DIST]
                    # First column of the rotation = goal frame's x-axis.
                    z = T_goal_u.rot.as_matrix()[0:2, 0]
                    pos[u] = T_goal_u.trans
                    pos[v] = T_goal_u.trans - z * d
        return pos
    def joint_variables(self, G: nx.Graph) -> Dict[str, float]:
        """
        Finds the set of decision variables corresponding to the
        graph realization G.
        :param G: networkx.DiGraph with known vertex positions
        :returns: array of joint variables t
        :rtype: np.ndarray
        """
        joint_variables = {}
        # resolve rotation of entire point set
        R_, t_ = best_fit_transform(np.vstack((G.nodes[ROOT][POS],
                                              G.nodes["x"][POS],
                                              G.nodes["y"][POS])) ,
                                   np.vstack(([0,0], [-1,0], [0,1])))
        R = {ROOT: SO2.identity()}
        for u, v, dat in self.structure.edges(data=DIST):
            if dat:
                # Edge direction in the aligned frame, normalized, then
                # expressed in the parent joint's frame to get the angle.
                diff_uv = R_.dot(G.nodes[v][POS] - G.nodes[u][POS])
                len_uv = np.linalg.norm(diff_uv)
                sol = R[u].as_matrix().T.dot(diff_uv/len_uv)
                theta_idx = np.math.atan2(sol[1], sol[0])
                joint_variables[v] = wraptopi(theta_idx)
                # Accumulate absolute orientation for the next joint.
                Rz = SO2.from_angle(theta_idx)
                R[v] = R[u].dot(Rz)
        return joint_variables
    def get_pose(
        self, joint_angles: Dict[str, float], query_node: Union[List[str], str]
    ) -> Union[Dict[str, SE2], SE2]:
        """Forward kinematics: pose(s) of `query_node` at `joint_angles`."""
        return self.robot.pose(joint_angles, query_node)
|
[
"numpy.math.atan2",
"liegroups.numpy.SO2.identity",
"numpy.linalg.norm",
"liegroups.numpy.SO2.from_angle",
"networkx.compose",
"networkx.empty_graph",
"numpy.array",
"numpy.cos",
"networkx.DiGraph",
"numpy.vstack"
] |
[((597, 624), 'networkx.compose', 'nx.compose', (['base', 'structure'], {}), '(base, structure)\n', (607, 624), True, 'import networkx as nx\n'), ((860, 910), 'networkx.DiGraph', 'nx.DiGraph', (["[('p0', 'x'), ('p0', 'y'), ('x', 'y')]"], {}), "([('p0', 'x'), ('p0', 'y'), ('x', 'y')])\n", (870, 910), True, 'import networkx as nx\n'), ((1728, 1767), 'networkx.empty_graph', 'nx.empty_graph', ([], {'create_using': 'nx.DiGraph'}), '(create_using=nx.DiGraph)\n', (1742, 1767), True, 'import networkx as nx\n'), ((3121, 3149), 'numpy.linalg.norm', 'la.norm', (['self.nodes[ax][POS]'], {}), '(self.nodes[ax][POS])\n', (3128, 3149), True, 'import numpy.linalg as la\n'), ((1328, 1376), 'numpy.linalg.norm', 'la.norm', (['(base.nodes[u][POS] - base.nodes[v][POS])'], {}), '(base.nodes[u][POS] - base.nodes[v][POS])\n', (1335, 1376), True, 'import numpy.linalg as la\n'), ((5681, 5750), 'numpy.vstack', 'np.vstack', (["(G.nodes[ROOT][POS], G.nodes['x'][POS], G.nodes['y'][POS])"], {}), "((G.nodes[ROOT][POS], G.nodes['x'][POS], G.nodes['y'][POS]))\n", (5690, 5750), True, 'import numpy as np\n'), ((5883, 5919), 'numpy.vstack', 'np.vstack', (['([0, 0], [-1, 0], [0, 1])'], {}), '(([0, 0], [-1, 0], [0, 1]))\n', (5892, 5919), True, 'import numpy as np\n'), ((5938, 5952), 'liegroups.numpy.SO2.identity', 'SO2.identity', ([], {}), '()\n', (5950, 5952), False, 'from liegroups.numpy import SE2, SO2\n'), ((6126, 6149), 'numpy.linalg.norm', 'np.linalg.norm', (['diff_uv'], {}), '(diff_uv)\n', (6140, 6149), True, 'import numpy as np\n'), ((6239, 6268), 'numpy.math.atan2', 'np.math.atan2', (['sol[1]', 'sol[0]'], {}), '(sol[1], sol[0])\n', (6252, 6268), True, 'import numpy as np\n'), ((6347, 6372), 'liegroups.numpy.SO2.from_angle', 'SO2.from_angle', (['theta_idx'], {}), '(theta_idx)\n', (6361, 6372), False, 'from liegroups.numpy import SE2, SO2\n'), ((2378, 2441), 'numpy.linalg.norm', 'la.norm', (['(structure.nodes[cur][POS] - structure.nodes[pred][POS])'], {}), '(structure.nodes[cur][POS] - 
structure.nodes[pred][POS])\n', (2385, 2441), True, 'import numpy.linalg as la\n'), ((1072, 1088), 'numpy.array', 'np.array', (['[0, 0]'], {}), '([0, 0])\n', (1080, 1088), True, 'import numpy as np\n'), ((1141, 1158), 'numpy.array', 'np.array', (['[-1, 0]'], {}), '([-1, 0])\n', (1149, 1158), True, 'import numpy as np\n'), ((1204, 1220), 'numpy.array', 'np.array', (['[0, 1]'], {}), '([0, 1])\n', (1212, 1220), True, 'import numpy as np\n'), ((3647, 3660), 'numpy.cos', 'cos', (['(pi - lim)'], {}), '(pi - lim)\n', (3650, 3660), False, 'from numpy import cos, pi\n'), ((4733, 4746), 'numpy.cos', 'cos', (['(pi - lim)'], {}), '(pi - lim)\n', (4736, 4746), False, 'from numpy import cos, pi\n')]
|
# -*- coding: utf-8 -*-
"""
link_fastq_juno
~~~~~~~~~~~~~~~
:Description: console script for running process_fastq on manifest level on juno
"""
"""
Created on August 05, 2019
Description: console script for running process_fastq on manifest level on juno
@author: <NAME>
"""
import os
import sys
import logging
import time
import subprocess
import shlex
import re
# Import each third-party dependency individually so a missing package
# yields a clear, actionable message instead of a bare traceback.
try:
    import click
except ImportError as e:
    # FIX: the message previously told the user to install pandas.
    print(
        "cli: click is not installed, please install click as it is one of \
        the requirements."
    )
    exit(1)
try:
    import click_log
except ImportError as e:
    # FIX: the message previously told the user to install pandas.
    print(
        "cli: click-log is not installed, please install click-log as it is one \
        of the requirements."
    )
    exit(1)
try:
    import pandas as pd
except ImportError as e:
    print(
        "helper: pandas is not installed, please install pandas as it is one of the requirements"
    )
    exit(1)
# Module metadata for the link_fastq console script.
__all__ = []
__version_info__ = ("1", "0", "0")
__version__ = ".".join(__version_info__)
__date__ = "2019-08-05"
__updated__ = "2019-08-05"
# Making logging possible
logger = logging.getLogger("link_fastq")
click_log.basic_config(logger)
# Render INFO-level lines in green in the console output.
click_log.ColorFormatter.colors["info"] = dict(fg="green")
@click.command()
@click.option(
    "--manifest-file",
    "-m",
    required=True,
    type=click.Path(exists=True),
    help="Manifest file having information about run id and sample id to get the fastq files (eg: -m Project_05500_GB_manifest.xslx)",
)
@click.option(
    "--request-id",
    "-p",
    required=True,
    default=None,
    type=click.STRING,
    help="IGO request id to get the fastq files. (eg:-p Project_05500_GB or -p 05500_GB)",
)
@click.option(
    "--fastq-path",
    "-fp",
    required=True,
    type=click.Path(),
    help="Full path to fastq files",
)
@click.option(
    "--output-path",
    "-op",
    required=True,
    type=click.Path(),
    help="Full path to where we link the output files",
)
@click.option(
    "--cutadapt-path",
    "-cp",
    required=True,
    type=click.Path(exists=True),
    help="Full path to location of cutadapt executable",
)
@click.option(
    "--process-fastq-path",
    "-pfp",
    required=True,
    type=click.Path(exists=True),
    # Fixed copy-pasted help text: this option points at process_fastq,
    # not cutadapt.
    help="Full path to location of process_fastq executable",
)
@click.option(
    "--expected-read-length",
    "-l",
    required=False,
    default=101,
    type=click.INT,
    help="Expected read length from the fastq file",
)
@click.version_option(version=__version__, prog_name="link_fastq_juno")
@click_log.simple_verbosity_option(logger)
def main(
    manifest_file,
    fastq_path,
    expected_read_length,
    output_path,
    cutadapt_path,
    process_fastq_path,
    request_id,
):
    """Submit one process_fastq LSF job per sample listed in the manifest.

    For each manifest row the run id(s) are resolved (the column name
    varies between manifest versions), a process_fastq command line is
    built and submitted via ``bsub``.  Returns 0 when all rows have been
    submitted.
    """
    # Mirror all log output into a file alongside the linked outputs.
    logger_output = os.path.join(output_path, "link_fastq.log")
    fh = logging.FileHandler(logger_output)
    formatter = logging.Formatter(
        fmt="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
        datefmt="%m/%d/%Y %I:%M:%S %p",
    )
    fh.setFormatter(formatter)
    logger.addHandler(fh)
    logger.info("==================================================")
    logger.info(">>> Running link_fastq for: %s <<<", request_id)
    logger.info("==================================================")
    t1_start = time.perf_counter()
    t2_start = time.process_time()
    # Code to traverse all samples
    p_dataframe = read_excel(manifest_file)
    for index, row in p_dataframe.iterrows():
        sample_id = row["INVESTIGATOR_SAMPLE_ID"]
        # The run-id column name differs between manifest versions.
        if hasattr(row, "Include_run_ID"):
            run_field = row["Include_run_ID"]
        else:
            run_field = row["INCLUDE_RUN_ID"]
        # Normalise to a list of non-empty run ids (";"-separated cell).
        # Joining the list with " -r " below produces the same command line
        # as the former separate single-run/multi-run branches.
        run_ids = [r for r in run_field.split(";") if r]
        logger.info(
            "link_fastq_juno: run: processing %s, on follwoing runs: %s",
            sample_id,
            run_ids,
        )
        process_fastq_cmd = (
            process_fastq_path
            + " -l "
            + str(expected_read_length)
            + " -s "
            + sample_id
            + " -r "
            + " -r ".join(run_ids)
            + " -p "
            + request_id
            + " -fp "
            + fastq_path
            + " -op "
            + output_path
            + " -cp "
            + cutadapt_path
            + " --verbosity DEBUG"
        )
        # Wrap the command for LSF: 12 GB, 1 core, CentOS7, 6 h wall time.
        bsub_cmd = (
            "bsub -cwd . "
            + "-J "
            + "link_fastq_"
            + sample_id
            + " -app anyOS"
            + " -R 'select[mem > 12]'"
            + " -R 'rusage[mem=12]'"
            + " -R 'select[type==CentOS7]'"
            + " -M 12"
            + " -n 1"
            + ' -W 360 "'
            + process_fastq_cmd
            + '"'
        )
        logger.debug(
            "link_fastq: run: the commandline is %s",
            bsub_cmd.encode("unicode_escape").decode("utf-8"),
        )
        lsf_job_id = bsub(bsub_cmd)
        logger.info(
            "Job submitted to lsf for sample %s, job id:%s", sample_id, lsf_job_id
        )
    # Code done
    t1_stop = time.perf_counter()
    t2_stop = time.process_time()
    logger.info("--------------------------------------------------")
    logger.info("Elapsed time: %.1f [min]" % ((t1_stop - t1_start) / 60))
    logger.info("CPU process time: %.1f [min]" % ((t2_stop - t2_start) / 60))
    logger.info("--------------------------------------------------")
    return 0
def read_excel(file):
    """Load the first worksheet of the manifest *file* into a DataFrame.

    The first column becomes the index and default NA handling is kept.
    """
    logger.info("link_fastq: read_excel: Reading the excel file: %s", file)
    frame = pd.read_excel(file,
                          sheet_name=0,
                          keep_default_na="True",
                          index_col=0)
    logger.info("link_fastq: read_excel: Finished reading excel file: %s", file)
    return frame
def bsub(bsub_cmd):
    """
    Execute lsf bsub

    :param bsub_cmd: fully formed ``bsub ...`` command line string
    :return: list of digit strings parsed from bsub's first stdout line
        (the LSF job id), or None when the return code was negative
    """
    # Initialise so a negative return code no longer triggers an
    # UnboundLocalError at the final return.
    lsf_job_id = None
    args = shlex.split(bsub_cmd)
    try:
        proc = subprocess.Popen(
            args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        proc.wait()
        retcode = proc.returncode
        if retcode >= 0:
            output = proc.stdout.readline().decode("utf-8")
            logger.info("link_fastq_juno: bsub: %s", output)
            # bsub prints e.g. "Job <12345> is submitted to queue ..."
            # -- raw string avoids the invalid-escape DeprecationWarning.
            lsf_job_id = re.findall(r"\d+", output)
    except IOError as e:
        e = sys.exc_info()[0]
        logging.info(
            "Running of bsub command: %s \n has failed. The exception produced is %s Thus we will exit",
            bsub_cmd,
            e,
        )
        sys.exit(1)
    return lsf_job_id
# Script entry point: propagate main()'s return code as the exit status.
if __name__ == "__main__":
    sys.exit(main())
|
[
"click.version_option",
"click.option",
"logging.Formatter",
"sys.exc_info",
"click.Path",
"os.path.join",
"click_log.simple_verbosity_option",
"logging.FileHandler",
"time.process_time",
"shlex.split",
"click.command",
"re.findall",
"subprocess.Popen",
"time.perf_counter",
"pandas.read_excel",
"click_log.basic_config",
"sys.exit",
"logging.info",
"logging.getLogger"
] |
[((1106, 1137), 'logging.getLogger', 'logging.getLogger', (['"""link_fastq"""'], {}), "('link_fastq')\n", (1123, 1137), False, 'import logging\n'), ((1138, 1168), 'click_log.basic_config', 'click_log.basic_config', (['logger'], {}), '(logger)\n', (1160, 1168), False, 'import click_log\n'), ((1231, 1246), 'click.command', 'click.command', ([], {}), '()\n', (1244, 1246), False, 'import click\n'), ((1486, 1670), 'click.option', 'click.option', (['"""--request-id"""', '"""-p"""'], {'required': '(True)', 'default': 'None', 'type': 'click.STRING', 'help': '"""IGO request id to get the fastq files. (eg:-p Project_05500_GB or -p 05500_GB)"""'}), "('--request-id', '-p', required=True, default=None, type=click.\n STRING, help=\n 'IGO request id to get the fastq files. (eg:-p Project_05500_GB or -p 05500_GB)'\n )\n", (1498, 1670), False, 'import click\n'), ((2286, 2428), 'click.option', 'click.option', (['"""--expected-read-length"""', '"""-l"""'], {'required': '(False)', 'default': '(101)', 'type': 'click.INT', 'help': '"""Expected read length from the fastq file"""'}), "('--expected-read-length', '-l', required=False, default=101,\n type=click.INT, help='Expected read length from the fastq file')\n", (2298, 2428), False, 'import click\n'), ((2453, 2523), 'click.version_option', 'click.version_option', ([], {'version': '__version__', 'prog_name': '"""link_fastq_juno"""'}), "(version=__version__, prog_name='link_fastq_juno')\n", (2473, 2523), False, 'import click\n'), ((2525, 2566), 'click_log.simple_verbosity_option', 'click_log.simple_verbosity_option', (['logger'], {}), '(logger)\n', (2558, 2566), False, 'import click_log\n'), ((2737, 2780), 'os.path.join', 'os.path.join', (['output_path', '"""link_fastq.log"""'], {}), "(output_path, 'link_fastq.log')\n", (2749, 2780), False, 'import os\n'), ((2790, 2824), 'logging.FileHandler', 'logging.FileHandler', (['logger_output'], {}), '(logger_output)\n', (2809, 2824), False, 'import logging\n'), ((2841, 2960), 'logging.Formatter', 
'logging.Formatter', ([], {'fmt': '"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""', 'datefmt': '"""%m/%d/%Y %I:%M:%S %p"""'}), "(fmt=\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s', datefmt=\n '%m/%d/%Y %I:%M:%S %p')\n", (2858, 2960), False, 'import logging\n'), ((3252, 3271), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (3269, 3271), False, 'import time\n'), ((3287, 3306), 'time.process_time', 'time.process_time', ([], {}), '()\n', (3304, 3306), False, 'import time\n'), ((6062, 6081), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (6079, 6081), False, 'import time\n'), ((6096, 6115), 'time.process_time', 'time.process_time', ([], {}), '()\n', (6113, 6115), False, 'import time\n'), ((6538, 6608), 'pandas.read_excel', 'pd.read_excel', (['file'], {'sheet_name': '(0)', 'keep_default_na': '"""True"""', 'index_col': '(0)'}), "(file, sheet_name=0, keep_default_na='True', index_col=0)\n", (6551, 6608), True, 'import pandas as pd\n'), ((6825, 6846), 'shlex.split', 'shlex.split', (['bsub_cmd'], {}), '(bsub_cmd)\n', (6836, 6846), False, 'import shlex\n'), ((1323, 1346), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (1333, 1346), False, 'import click\n'), ((1757, 1769), 'click.Path', 'click.Path', ([], {}), '()\n', (1767, 1769), False, 'import click\n'), ((1885, 1897), 'click.Path', 'click.Path', ([], {}), '()\n', (1895, 1897), False, 'import click\n'), ((2034, 2057), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (2044, 2057), False, 'import click\n'), ((2201, 2224), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (2211, 2224), False, 'import click\n'), ((6871, 6943), 'subprocess.Popen', 'subprocess.Popen', (['args'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT'}), '(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n', (6887, 6943), False, 'import subprocess\n'), ((7182, 7208), 're.findall', 're.findall', 
(['"""\\\\d+"""', 'output'], {}), "('\\\\d+', output)\n", (7192, 7208), False, 'import re\n'), ((7271, 7402), 'logging.info', 'logging.info', (['"""Running of bsub command: %s \n has failed. The exception produced is %s Thus we will exit"""', 'bsub_cmd', 'e'], {}), '(\n """Running of bsub command: %s \n has failed. The exception produced is %s Thus we will exit"""\n , bsub_cmd, e)\n', (7283, 7402), False, 'import logging\n'), ((7445, 7456), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (7453, 7456), False, 'import sys\n'), ((7245, 7259), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (7257, 7259), False, 'import sys\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun May 2 19:07:01 2021
@author: wyattpetryshen
"""
# Code templates for Ornstein-Uhlenbeck process and Brownian motion are from IPython Interactive Computing and Visualization Cookbook, Second Edition (2018), by <NAME>.
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from scipy import stats
import time as timetime
import random
import itertools
#Calculate angle between vectors
def unit_vector(vector):
    """Scale *vector* to unit length by dividing by its Euclidean norm."""
    length = np.linalg.norm(vector)
    return vector / length
def vectorTransform(end_Point, old_origin):
    """Translate *end_Point* so that *old_origin* maps onto the origin."""
    shifted = np.subtract(end_Point, old_origin)
    return shifted
def angle_between(v1, v2):
    """Return the angle in degrees between vectors *v1* and *v2*."""
    u1, u2 = unit_vector(v1), unit_vector(v2)
    # Clip guards against tiny float overshoot outside [-1, 1].
    cos_theta = np.clip(np.dot(u1, u2), -1.0, 1.0)
    return np.degrees(np.arccos(cos_theta))
#Equation for Ornstein-Uhlenbeck process
def binVals(array, step):
    """Mean of *array* over consecutive bins of *step* elements.

    Any trailing partial bin is dropped, matching int(len/step) bins.
    """
    n_bins = int(len(array) / step)
    return [np.mean(array[b * step:(b + 1) * step]) for b in range(n_bins)]
#Code for figure 1 in supplementary information
#Change the parameters accordingly
#Sine wave
# Sine wave that acts as the (moving) mean of the OU process below.
sample_rate = 1000
time = np.arange(0, 10, 1/sample_rate)
frequency = 0.1
amplitude = 4
theta = 0
sinewave = amplitude * np.sin(2 * np.pi * frequency * time + theta)
##Model parameters
sigma = 1 #standard deviation
mu = 0 #mean (defined but unused below; the sine wave supplies the mean)
tau = 0.05 #time constant
##simulation parameters
dt = 0.0001 #Time step
T = 1 #Total time
n = int(T/dt) #Number of steps
t = np.linspace(0., T, n) #Vector of times
##Calculated randomized variables
sigma_bis = sigma * np.sqrt(2. / tau)
sqrtdt = np.sqrt(dt)
#Plot of Sine wave
plt.plot(time,sinewave)
plt.title(r'SineWave with amplitude = {}, frequency = {}'.format(amplitude,frequency))
plt.axis([0, 10, -4, 4])
#Random Drift: 100 Euler-Maruyama realisations of the OU process,
#relaxing towards the sine wave with time constant tau.
for iters in range(100):
    ##Store results
    x = np.zeros(n)
    #Euler-Maruyama method
    for i in range(n - 1):
        x[i + 1] = x[i] + dt * (-(x[i] - sinewave[i]) / tau) + sigma_bis * sqrtdt * np.random.randn()
    array = x
    plt.plot(time,array,linewidth=0.1)
    plt.title(r'OH Drift with amplitude = {}, frequency = {}'.format(amplitude,frequency))
#Time-averaged drift: same simulation, but binned means (one value per
#sample_rate steps) are plotted at the bin mid-points.
for iters in range(100):
    ##Store results
    x = np.zeros(n)
    #Euler-Maruyama method
    for i in range(n - 1):
        x[i + 1] = x[i] + dt * (-(x[i] - sinewave[i]) / tau) + sigma_bis * sqrtdt * np.random.randn()
    array = x
    meanarray = binVals(array,int(sample_rate))
    plt.plot(time[int(sample_rate/2):-1:int(sample_rate)],meanarray,linewidth=0.1)
    plt.scatter(time[int(sample_rate/2):-1:int(sample_rate)],meanarray,linewidth=0.1)
    plt.title(r'OH Drift time-averaged with amplitude = {}, frequency = {}'.format(amplitude,frequency))
plt.axis([0, 10, -6, 6])
#plt.plot(time,x)
#Iterate OH means and calculate the angle between vectors: for each pair
#of independent OU realisations, the angle between successive bin-to-bin
#displacement vectors is collected into a histogram.
start_time = timetime.time()
angle_list = []
for iters in range(100):
    x1 = np.zeros(n)
    x2 = np.zeros(n)
    for i in range(n - 1):
        x1[i + 1] = x1[i] + dt * (-(x1[i] - sinewave[i]) / tau) + sigma_bis * sqrtdt * np.random.randn()
        x2[i + 1] = x2[i] + dt * (-(x2[i] - sinewave[i]) / tau) + sigma_bis * sqrtdt * np.random.randn()
    meanarray1, meanarray2 = binVals(x1,int(sample_rate)),binVals(x2,int(sample_rate))
    for j in np.arange(1,len(meanarray1)):
        # Skip the final index: no following bin to form a displacement with.
        if j != len(meanarray1)-1:
            Idx_O = j
            Idx_E = j+1
            v1 = vectorTransform((time[int(sample_rate/2):-1:int(sample_rate)][Idx_O],meanarray1[Idx_O]),(time[int(sample_rate/2):-1:int(sample_rate)][Idx_E],meanarray1[Idx_E]))
            v2 = vectorTransform((time[int(sample_rate/2):-1:int(sample_rate)][Idx_O],meanarray2[Idx_O]),(time[int(sample_rate/2):-1:int(sample_rate)][Idx_E],meanarray2[Idx_E]))
            vector_angle = angle_between(v1,v2)
            angle_list.append(vector_angle)
        else:
            pass
plt.hist(angle_list, bins = np.arange(0,180,5))
plt.xlabel('Angle')
plt.ylabel('Probability')
plt.title(r'Histogram of OH Trait Drift a=4, frequency=0.1')
print("--- %s seconds ---" % (timetime.time() - start_time))
###Brownian motion: 2-D random walks built from cumulative sums of
###independent standard-normal increments.
#simulation parameters
n = 100000 #time steps
#Two one dimensional cases that can be combined into two dimensional case
x = np.cumsum(np.random.randn(n))
y = np.cumsum(np.random.randn(n))
xP = np.cumsum(np.random.randn(n))
yP = np.cumsum(np.random.randn(n))
# We add k intermediary points between two
# successive points. We interpolate x and y.
k = 50
x2 = np.interp(np.arange(n * k), np.arange(n) * k, x)
y2 = np.interp(np.arange(n * k), np.arange(n) * k, y)
xP2 = np.interp(np.arange(n * k), np.arange(n) * k, xP)
yP2 = np.interp(np.arange(n * k), np.arange(n) * k, yP)
# Now, we draw our points with a gradient of colors.
fig, ax = plt.subplots(1, 1, figsize=(8, 8))
plt.scatter(x2, y2, c=range(n * k), linewidths=0,
            marker='o', s=3, cmap=plt.cm.jet,)
plt.axis('equal')
plt.scatter(xP2, yP2, c = range(n*k), linewidths=0,
            marker='o', s=3, cmap=plt.cm.jet,)
plt.plot(xP, yP)
plt.plot(x2, y2)
#Brownian Time-averaged drift for single lineages: plot binned means of
#1000 independent 2-D walks.
for iters in range(1000):
    ##Store results
    n = 1000 #time steps
    #Two one dimensional cases that can be combined into two dimensional case
    x = np.cumsum(np.random.randn(n))
    y = np.cumsum(np.random.randn(n))
    #Find mean values
    meanx = binVals(x,int(100))
    meany = binVals(y,int(100))
    #plot
    plt.plot(meanx,meany,linewidth=0.5)
    plt.scatter(meanx,meany,linewidth=0.1, s = 4)
    plt.title(r'Brownian Drift time-averaged with equal rates')
#plt.axis([0, 10, -6, 6])
#Iterate BM means and calculate the angle between vectors: compare the
#direction of successive bin-to-bin displacements between a slow walk
#(rate_One) and a fast walk (rate_Two); both are binned so that each
#mean covers the same Tavg span of "years".
start_time = timetime.time()
BW_angle_list = []
for iters in range(10000):
    runs = 1000 #time steps
    Tavg = 100 #Average years; for random number use random.randrange(i,j)
    rate_One = 1 #rate change
    rate_Two = 10 #rate change
    x, y = np.cumsum(np.random.randn(runs * rate_One)), np.cumsum(np.random.randn(runs * rate_One))
    x2, y2 = np.cumsum(np.random.randn(runs * rate_Two)), np.cumsum(np.random.randn(runs * rate_Two))
    meanx,meany = binVals(x,int(Tavg)*rate_One),binVals(y,int(Tavg)*rate_One)
    meanx2,meany2 = binVals(x2,int(Tavg)*rate_Two),binVals(y2,int(Tavg)*rate_Two)
    for j in np.arange(1,len(meanx)):
        # Skip the final index: no following bin to form a displacement with.
        if j != len(meanx)-1:
            Idx_O = j
            Idx_E = j+1
            v1 = vectorTransform((meanx[Idx_O],meany[Idx_O]),(meanx[Idx_E],meany[Idx_E]))
            v2 = vectorTransform((meanx2[Idx_O],meany2[Idx_O]),(meanx2[Idx_E],meany2[Idx_E]))
            vector_angle = angle_between(v1,v2)
            BW_angle_list.append(vector_angle)
        else:
            pass
plt.hist(BW_angle_list, bins = np.arange(0,180,1))
plt.xlabel('Angle')
plt.ylabel('Probability')
plt.title(r'Histogram of BW Parallelism')
print("--- %s seconds ---" % (timetime.time() - start_time))
|
[
"matplotlib.pyplot.title",
"numpy.subtract",
"matplotlib.pyplot.plot",
"numpy.random.randn",
"matplotlib.pyplot.scatter",
"numpy.zeros",
"matplotlib.pyplot.axis",
"time.time",
"numpy.sin",
"numpy.arange",
"numpy.linalg.norm",
"numpy.linspace",
"numpy.mean",
"numpy.dot",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.subplots",
"numpy.sqrt"
] |
[((1568, 1601), 'numpy.arange', 'np.arange', (['(0)', '(10)', '(1 / sample_rate)'], {}), '(0, 10, 1 / sample_rate)\n', (1577, 1601), True, 'import numpy as np\n'), ((1904, 1926), 'numpy.linspace', 'np.linspace', (['(0.0)', 'T', 'n'], {}), '(0.0, T, n)\n', (1915, 1926), True, 'import numpy as np\n'), ((2026, 2037), 'numpy.sqrt', 'np.sqrt', (['dt'], {}), '(dt)\n', (2033, 2037), True, 'import numpy as np\n'), ((2058, 2082), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'sinewave'], {}), '(time, sinewave)\n', (2066, 2082), True, 'import matplotlib.pyplot as plt\n'), ((2169, 2193), 'matplotlib.pyplot.axis', 'plt.axis', (['[0, 10, -4, 4]'], {}), '([0, 10, -4, 4])\n', (2177, 2193), True, 'import matplotlib.pyplot as plt\n'), ((3277, 3292), 'time.time', 'timetime.time', ([], {}), '()\n', (3290, 3292), True, 'import time as timetime\n'), ((4351, 4370), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Angle"""'], {}), "('Angle')\n", (4361, 4370), True, 'import matplotlib.pyplot as plt\n'), ((4371, 4396), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Probability"""'], {}), "('Probability')\n", (4381, 4396), True, 'import matplotlib.pyplot as plt\n'), ((4397, 4456), 'matplotlib.pyplot.title', 'plt.title', (['"""Histogram of OH Trait Drift a=4, frequency=0.1"""'], {}), "('Histogram of OH Trait Drift a=4, frequency=0.1')\n", (4406, 4456), True, 'import matplotlib.pyplot as plt\n'), ((5181, 5215), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(8, 8)'}), '(1, 1, figsize=(8, 8))\n', (5193, 5215), True, 'import matplotlib.pyplot as plt\n'), ((5312, 5329), 'matplotlib.pyplot.axis', 'plt.axis', (['"""equal"""'], {}), "('equal')\n", (5320, 5329), True, 'import matplotlib.pyplot as plt\n'), ((5430, 5446), 'matplotlib.pyplot.plot', 'plt.plot', (['xP', 'yP'], {}), '(xP, yP)\n', (5438, 5446), True, 'import matplotlib.pyplot as plt\n'), ((5447, 5463), 'matplotlib.pyplot.plot', 'plt.plot', (['x2', 'y2'], {}), '(x2, y2)\n', (5455, 5463), True, 'import 
matplotlib.pyplot as plt\n'), ((6094, 6109), 'time.time', 'timetime.time', ([], {}), '()\n', (6107, 6109), True, 'import time as timetime\n'), ((7157, 7176), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Angle"""'], {}), "('Angle')\n", (7167, 7176), True, 'import matplotlib.pyplot as plt\n'), ((7177, 7202), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Probability"""'], {}), "('Probability')\n", (7187, 7202), True, 'import matplotlib.pyplot as plt\n'), ((7203, 7243), 'matplotlib.pyplot.title', 'plt.title', (['"""Histogram of BW Parallelism"""'], {}), "('Histogram of BW Parallelism')\n", (7212, 7243), True, 'import matplotlib.pyplot as plt\n'), ((716, 750), 'numpy.subtract', 'np.subtract', (['end_Point', 'old_origin'], {}), '(end_Point, old_origin)\n', (727, 750), True, 'import numpy as np\n'), ((1240, 1264), 'numpy.arange', 'np.arange', (['(0)', 'iterations'], {}), '(0, iterations)\n', (1249, 1264), True, 'import numpy as np\n'), ((1663, 1707), 'numpy.sin', 'np.sin', (['(2 * np.pi * frequency * time + theta)'], {}), '(2 * np.pi * frequency * time + theta)\n', (1669, 1707), True, 'import numpy as np\n'), ((1999, 2017), 'numpy.sqrt', 'np.sqrt', (['(2.0 / tau)'], {}), '(2.0 / tau)\n', (2006, 2017), True, 'import numpy as np\n'), ((2262, 2273), 'numpy.zeros', 'np.zeros', (['n'], {}), '(n)\n', (2270, 2273), True, 'import numpy as np\n'), ((2448, 2484), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'array'], {'linewidth': '(0.1)'}), '(time, array, linewidth=0.1)\n', (2456, 2484), True, 'import matplotlib.pyplot as plt\n'), ((2649, 2660), 'numpy.zeros', 'np.zeros', (['n'], {}), '(n)\n', (2657, 2660), True, 'import numpy as np\n'), ((3157, 3181), 'matplotlib.pyplot.axis', 'plt.axis', (['[0, 10, -6, 6]'], {}), '([0, 10, -6, 6])\n', (3165, 3181), True, 'import matplotlib.pyplot as plt\n'), ((3343, 3354), 'numpy.zeros', 'np.zeros', (['n'], {}), '(n)\n', (3351, 3354), True, 'import numpy as np\n'), ((3364, 3375), 'numpy.zeros', 'np.zeros', (['n'], {}), '(n)\n', (3372, 
3375), True, 'import numpy as np\n'), ((4674, 4692), 'numpy.random.randn', 'np.random.randn', (['n'], {}), '(n)\n', (4689, 4692), True, 'import numpy as np\n'), ((4708, 4726), 'numpy.random.randn', 'np.random.randn', (['n'], {}), '(n)\n', (4723, 4726), True, 'import numpy as np\n'), ((4744, 4762), 'numpy.random.randn', 'np.random.randn', (['n'], {}), '(n)\n', (4759, 4762), True, 'import numpy as np\n'), ((4779, 4797), 'numpy.random.randn', 'np.random.randn', (['n'], {}), '(n)\n', (4794, 4797), True, 'import numpy as np\n'), ((4912, 4928), 'numpy.arange', 'np.arange', (['(n * k)'], {}), '(n * k)\n', (4921, 4928), True, 'import numpy as np\n'), ((4966, 4982), 'numpy.arange', 'np.arange', (['(n * k)'], {}), '(n * k)\n', (4975, 4982), True, 'import numpy as np\n'), ((5021, 5037), 'numpy.arange', 'np.arange', (['(n * k)'], {}), '(n * k)\n', (5030, 5037), True, 'import numpy as np\n'), ((5077, 5093), 'numpy.arange', 'np.arange', (['(n * k)'], {}), '(n * k)\n', (5086, 5093), True, 'import numpy as np\n'), ((5841, 5878), 'matplotlib.pyplot.plot', 'plt.plot', (['meanx', 'meany'], {'linewidth': '(0.5)'}), '(meanx, meany, linewidth=0.5)\n', (5849, 5878), True, 'import matplotlib.pyplot as plt\n'), ((5881, 5926), 'matplotlib.pyplot.scatter', 'plt.scatter', (['meanx', 'meany'], {'linewidth': '(0.1)', 's': '(4)'}), '(meanx, meany, linewidth=0.1, s=4)\n', (5892, 5926), True, 'import matplotlib.pyplot as plt\n'), ((5931, 5989), 'matplotlib.pyplot.title', 'plt.title', (['"""Brownian Drift time-averaged with equal rates"""'], {}), "('Brownian Drift time-averaged with equal rates')\n", (5940, 5989), True, 'import matplotlib.pyplot as plt\n'), ((590, 612), 'numpy.linalg.norm', 'np.linalg.norm', (['vector'], {}), '(vector)\n', (604, 612), True, 'import numpy as np\n'), ((1319, 1335), 'numpy.mean', 'np.mean', (['tempbin'], {}), '(tempbin)\n', (1326, 1335), True, 'import numpy as np\n'), ((4331, 4351), 'numpy.arange', 'np.arange', (['(0)', '(180)', '(5)'], {}), '(0, 180, 5)\n', (4340, 
4351), True, 'import numpy as np\n'), ((4930, 4942), 'numpy.arange', 'np.arange', (['n'], {}), '(n)\n', (4939, 4942), True, 'import numpy as np\n'), ((4984, 4996), 'numpy.arange', 'np.arange', (['n'], {}), '(n)\n', (4993, 4996), True, 'import numpy as np\n'), ((5039, 5051), 'numpy.arange', 'np.arange', (['n'], {}), '(n)\n', (5048, 5051), True, 'import numpy as np\n'), ((5095, 5107), 'numpy.arange', 'np.arange', (['n'], {}), '(n)\n', (5104, 5107), True, 'import numpy as np\n'), ((5683, 5701), 'numpy.random.randn', 'np.random.randn', (['n'], {}), '(n)\n', (5698, 5701), True, 'import numpy as np\n'), ((5721, 5739), 'numpy.random.randn', 'np.random.randn', (['n'], {}), '(n)\n', (5736, 5739), True, 'import numpy as np\n'), ((7137, 7157), 'numpy.arange', 'np.arange', (['(0)', '(180)', '(1)'], {}), '(0, 180, 1)\n', (7146, 7157), True, 'import numpy as np\n'), ((4488, 4503), 'time.time', 'timetime.time', ([], {}), '()\n', (4501, 4503), True, 'import time as timetime\n'), ((6341, 6373), 'numpy.random.randn', 'np.random.randn', (['(runs * rate_One)'], {}), '(runs * rate_One)\n', (6356, 6373), True, 'import numpy as np\n'), ((6386, 6418), 'numpy.random.randn', 'np.random.randn', (['(runs * rate_One)'], {}), '(runs * rate_One)\n', (6401, 6418), True, 'import numpy as np\n'), ((6443, 6475), 'numpy.random.randn', 'np.random.randn', (['(runs * rate_Two)'], {}), '(runs * rate_Two)\n', (6458, 6475), True, 'import numpy as np\n'), ((6488, 6520), 'numpy.random.randn', 'np.random.randn', (['(runs * rate_Two)'], {}), '(runs * rate_Two)\n', (6503, 6520), True, 'import numpy as np\n'), ((7275, 7290), 'time.time', 'timetime.time', ([], {}), '()\n', (7288, 7290), True, 'import time as timetime\n'), ((959, 977), 'numpy.dot', 'np.dot', (['v1_u', 'v2_u'], {}), '(v1_u, v2_u)\n', (965, 977), True, 'import numpy as np\n'), ((2412, 2429), 'numpy.random.randn', 'np.random.randn', ([], {}), '()\n', (2427, 2429), True, 'import numpy as np\n'), ((2799, 2816), 'numpy.random.randn', 'np.random.randn', 
([], {}), '()\n', (2814, 2816), True, 'import numpy as np\n'), ((3490, 3507), 'numpy.random.randn', 'np.random.randn', ([], {}), '()\n', (3505, 3507), True, 'import numpy as np\n'), ((3595, 3612), 'numpy.random.randn', 'np.random.randn', ([], {}), '()\n', (3610, 3612), True, 'import numpy as np\n')]
|
from setuptools import setup, find_packages
try:
    from pypandoc import convert

    def read_markdown(file: str) -> str:
        """Convert a Markdown file to reStructuredText for PyPI."""
        return convert(file, "rst")
except ImportError:
    def read_markdown(file: str) -> str:
        """Fallback when pypandoc is absent: return the raw Markdown text."""
        # Context manager closes the file handle (the old open(...).read()
        # leaked it until garbage collection).
        with open(file, "r") as handle:
            return handle.read()
# Read the pinned dependencies up front with a context manager so the
# file handle is closed (setup previously left open(...) dangling).
with open("requirements.txt", "r") as _requirements_file:
    _requirements = _requirements_file.readlines()

setup(
    name="gitsubrepo",
    version="1.1.0",
    # Ship everything except the test suite.
    packages=find_packages(exclude=["tests"]),
    install_requires=_requirements,
    url="https://github.com/wtsi-hgi/python-git-subrepo",
    license="MIT",
    description="Provides access to `git subrepo` in Python",
    # Rendered README (rst when pypandoc is available, raw otherwise).
    long_description=read_markdown("README.md")
)
|
[
"pypandoc.convert",
"setuptools.find_packages"
] |
[((139, 159), 'pypandoc.convert', 'convert', (['file', '"""rst"""'], {}), "(file, 'rst')\n", (146, 159), False, 'from pypandoc import convert\n'), ((324, 356), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests']"}), "(exclude=['tests'])\n", (337, 356), False, 'from setuptools import setup, find_packages\n')]
|
import logging
import time
from django.conf import settings
from django.core.cache import cache
log = logging.getLogger(__name__)
def get_cached_with_mtime(cache_key, getter, max_mtime=60, default=None, expiry=86400):
    """
    Get something with a maximum modification time.

    I.e. if the data stored in the cache is older than max_mtime seconds (or does not
    exist), it attempts to call getter() for a new value.

    However, if the getter fails outside DEBUG, any previously cached (stale)
    value keeps being served; when nothing was cached at all, `default` is
    cached for max_mtime seconds so the failing getter is not hammered.

    :param cache_key: Cache key string
    :type cache_key: str
    :param getter: Getter function
    :type getter: function
    :param max_mtime: Maximum modification time, in seconds
    :type max_mtime: int
    :param default: Default value, if nothing is in the cache
    :type default: object
    :param expiry: Maximum expiry for the cache entity, in seconds
    :type expiry: int
    :return: data, from the getter or the cache
    :rtype: object
    """
    cached_data = cache.get(cache_key)
    # Refresh when missing or older than max_mtime seconds.
    if cached_data is None or (time.time() - cached_data['mtime']) > max_mtime:
        try:
            cached_data = {
                'mtime': time.time(),
                'data': getter(),
            }
            cache.set(cache_key, cached_data, expiry)
        except Exception:
            if settings.DEBUG:
                raise
            # Deliberate best-effort: log and keep serving stale data.
            # (`warning`, not the deprecated `warn` alias.)
            log.warning('error fetching in get_cached_with_mtime(%s)', cache_key, exc_info=True)
            if cached_data is None:
                # If we didn't have anything cached to begin with,
                # at least cache something for a while to avoid hammering the original `getter()`
                cached_data = {'mtime': time.time(), 'data': default}
                cache.set(cache_key, cached_data, max_mtime)
    return cached_data['data']
|
[
"django.core.cache.cache.set",
"django.core.cache.cache.get",
"logging.getLogger",
"time.time"
] |
[((104, 131), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (121, 131), False, 'import logging\n'), ((930, 950), 'django.core.cache.cache.get', 'cache.get', (['cache_key'], {}), '(cache_key)\n', (939, 950), False, 'from django.core.cache import cache\n'), ((1170, 1211), 'django.core.cache.cache.set', 'cache.set', (['cache_key', 'cached_data', 'expiry'], {}), '(cache_key, cached_data, expiry)\n', (1179, 1211), False, 'from django.core.cache import cache\n'), ((982, 993), 'time.time', 'time.time', ([], {}), '()\n', (991, 993), False, 'import time\n'), ((1097, 1108), 'time.time', 'time.time', ([], {}), '()\n', (1106, 1108), False, 'import time\n'), ((1672, 1716), 'django.core.cache.cache.set', 'cache.set', (['cache_key', 'cached_data', 'max_mtime'], {}), '(cache_key, cached_data, max_mtime)\n', (1681, 1716), False, 'from django.core.cache import cache\n'), ((1626, 1637), 'time.time', 'time.time', ([], {}), '()\n', (1635, 1637), False, 'import time\n')]
|
import pygame
from pygame.font import Font, SysFont
from pygame.locals import *
from ui.utils.sound import Sound
from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget
from ui import colours
class ModernButton(LcarsWidget):
    """Click-able button drawn from the modern button artwork (or a flat
    coloured rectangle when an explicit rectSize is given)."""

    def __init__(self, colour, pos, text, handler=None, rectSize=None, icon=None):
        if rectSize == None:
            image = pygame.image.load("assets/button_modern.png").convert_alpha()
            size = (image.get_rect().width, image.get_rect().height)
        else:
            size = rectSize
            image = pygame.Surface(rectSize)
            image.fill(colour)
        self.colour = colours.TRANSPARENT
        self.image = image
        self.font = Font("assets/YukonTech.ttf", 20)
        # Render the label bottom-right onto the button face.
        textImage = self.font.render(text, False, colours.BLUEDARK)
        image = image.blit(textImage,
                (image.get_rect().width - textImage.get_rect().width - 4,
                    image.get_rect().height - textImage.get_rect().height - 5))
        # Cache the up/down artwork once; the old code re-loaded both PNGs
        # from disk on EVERY event passed to handleEvent.
        self._up_image = pygame.image.load("assets/button_modern.png")
        self._down_image = pygame.image.load("assets/button_modern_down.png")
        LcarsWidget.__init__(self, colour, pos, size, handler)
        self.applyColour(colour)
        self.highlighted = False
        self.beep = Sound("assets/audio/panel/202.wav")

    def handleEvent(self, event, clock):
        # Press: swap to the depressed artwork and play the click sound.
        if (event.type == MOUSEBUTTONDOWN and self.rect.collidepoint(event.pos) and self.visible == True):
            self.highlighted = True
            self.beep.play()
            self.image = self._down_image.convert_alpha()
        # Release: restore the normal artwork.
        if (event.type == MOUSEBUTTONUP and self.highlighted and self.visible == True):
            self.image = self._up_image.convert_alpha()
        return LcarsWidget.handleEvent(self, event, clock)
class ModernElbowTop(GeneralWidget):
    """Top elbow panel widget; shows pressed artwork while clicked."""

    def __init__(self, colour, pos, text, handler=None, rectSize=None, icon=None):
        if rectSize == None:
            image = pygame.image.load("assets/elbow_top.png").convert_alpha()
            size = (image.get_rect().width, image.get_rect().height)
        else:
            size = rectSize
            image = pygame.Surface(rectSize).convert_alpha()
        self.colour = colours.TRANSPARENT
        self.image = image
        self.font = Font("assets/YukonTech.ttf", 20)
        # Render the label onto the elbow face.
        textImage = self.font.render(text, False, colours.BLUEDARK)
        image = image.blit(textImage,
                (image.get_rect().width - textImage.get_rect().width - 170,
                    image.get_rect().height - textImage.get_rect().height - 10))
        # Cache the pressed artwork once; the old code re-loaded the PNG
        # from disk on EVERY event passed to handleEvent.
        self._pressed_image = pygame.image.load("assets/elbow_top_up.png")
        GeneralWidget.__init__(self, colour, pos, size, handler)
        self.highlighted = False
        self.beep = Sound("assets/audio/panel/202.wav")

    def handleEvent(self, event, clock):
        # Press: swap to the pressed artwork and play the click sound.
        if (event.type == MOUSEBUTTONDOWN and self.rect.collidepoint(event.pos) and self.visible == True):
            self.image = self._pressed_image.convert_alpha()
            self.highlighted = True
            self.beep.play()
        # Release: re-apply the widget colour to restore the normal look.
        if (event.type == MOUSEBUTTONUP and self.highlighted and self.visible == True):
            self.applyColour(self.colour)
        return GeneralWidget.handleEvent(self, event, clock)
class ModernElbowBottom(GeneralWidget):
    """Bottom elbow panel widget; shows pressed artwork while clicked."""

    def __init__(self, colour, pos, text, handler=None, rectSize=None, icon=None):
        if rectSize == None:
            image = pygame.image.load("assets/elbow_bottom.png").convert_alpha()
            size = (image.get_rect().width, image.get_rect().height)
        else:
            size = rectSize
            image = pygame.Surface(rectSize).convert_alpha()
        self.colour = colours.TRANSPARENT
        self.image = image
        self.font = Font("assets/YukonTech.ttf", 20)
        # Render the label onto the elbow face.
        textImage = self.font.render(text, False, colours.BLUEDARK)
        image = image.blit(textImage,
                (image.get_rect().width - textImage.get_rect().width - 185,
                    image.get_rect().height - textImage.get_rect().height - 30))
        # Cache the pressed artwork once; the old code re-loaded the PNG
        # from disk on EVERY event passed to handleEvent.
        self._pressed_image = pygame.image.load("assets/elbow_bottom_down.png")
        GeneralWidget.__init__(self, colour, pos, size, handler)
        self.highlighted = False
        self.beep = Sound("assets/audio/panel/202.wav")

    def handleEvent(self, event, clock):
        # Press: swap to the pressed artwork and play the click sound.
        if (event.type == MOUSEBUTTONDOWN and self.rect.collidepoint(event.pos) and self.visible == True):
            self.image = self._pressed_image.convert_alpha()
            self.highlighted = True
            self.beep.play()
        # Release: re-apply the widget colour to restore the normal look.
        if (event.type == MOUSEBUTTONUP and self.highlighted and self.visible == True):
            self.applyColour(self.colour)
        return GeneralWidget.handleEvent(self, event, clock)
class UltimateButton(LcarsWidget):
    """
    THE ULTIMATE BUTTON DOES EVERYTHING YOU NEED
    ARGUMENTS:
    *Required
    -position | (x coord, y coord)
    *Optional
    -size | (width, height) default is (80, 40) or size of image_set[0]
    -text | "string" default is None
    -colour_set | [colour, colour_highlighted, colour_pressed] default is [white, grey/blue, blue]
    -image_set | [image, image_highlighted, image_pressed] default is [None, None, None]
    -text_colour| (r, g, b) or from colours file default is black
    -font | pygame.Font default is MicroTech
    -handler | event handler function default is None
    BUGS:
    -- 3 images (image set) - text disappears when clicking
    """
    # DEFAULT: instances without a full image set render via colour fills.
    imageonly=False

    def __init__(self, pos, text=None, colour_set=None, image_set=None, text_colour=colours.BLACK, font=None, size=(80,40), handler=None):
        # Fix: the defaults were shared mutable lists (the classic mutable
        # default argument pitfall). None sentinels reproduce the same
        # effective defaults with a fresh list per call.
        if colour_set is None:
            colour_set = [colours.WHITE, colours.GREY_BLUE, colours.BLUE]
        if image_set is None:
            image_set = [None, None, None]
        # Set button attributes common to all buttons.
        self.colour=colour_set[0]
        self.colour_highlighted=colour_set[1]
        self.colour_pressed=colour_set[2]
        self.size = size
        self.text_colour = text_colour
        # Image buttons only: adopt the artwork and its size.
        if not image_set[0] == None:
            self.image_normal=image_set[0].convert_alpha()
            image=self.image_normal
            self.image=image
            self.size = (image.get_rect().width, image.get_rect().height)
        if not image_set[1] == None:
            self.image_highlighted=image_set[1].convert_alpha()
        if not image_set[2] == None:
            self.image_pressed=image_set[2].convert_alpha()
            self.imageonly=True
        # Create a surface for a non-image button.
        if not self.imageonly:
            if image_set[0]==None:
                image = pygame.Surface(self.size)
                image.fill(self.colour)
                self.image=image
            else:
                # Apply colour to the (white) artwork instead.
                self.image=image
                self.applyColour(self.colour)
        # Render the label onto the surface, if text was supplied.
        if not text==None:
            if font == None:
                self.font=Font("assets/MicroTech.ttf", 18)
            else:
                self.font = font
            textImage = self.font.render(text, False, self.text_colour)
            image = image.blit(textImage,
                (image.get_rect().width - textImage.get_rect().width - 52,
                image.get_rect().height - textImage.get_rect().height - 10))
        # Make widget.
        LcarsWidget.__init__(self, self.colour, pos, self.size, handler)
        if image_set[2] == None:
            self.applyColour(self.colour)
        else:
            self.imageonly=True
        self.highlighted = False
        self.beep = Sound("assets/audio/panel/202.wav")

    def handleEvent(self, event, clock):
        """Show pressed state on click, restore normal state on release."""
        if (event.type == MOUSEBUTTONDOWN and self.rect.collidepoint(event.pos) and self.visible == True):
            if self.imageonly:
                self.image = self.image_pressed
            else:
                self.applyColour(self.colour_pressed)
            self.highlighted = True
            self.beep.play()
        # TODO: add mouseover highlight functionality here.
        if (event.type == MOUSEBUTTONUP and self.highlighted and self.visible == True):
            if self.imageonly:
                self.image = self.image_normal
            else:
                self.applyColour(self.colour)
        return LcarsWidget.handleEvent(self, event, clock)
class SideButton(GeneralWidget):
    """Side-panel button: rounded artwork by default, rectangular when a
    ``rectSize`` is given. The label is drawn in the Yukon font."""

    def __init__(self, colour, pos, text, handler=None, rectSize=None, icon=None):
        if rectSize == None:
            surface = pygame.image.load("assets/button_modern.png").convert_alpha()
            size = (surface.get_rect().width, surface.get_rect().height)
        else:
            size = rectSize
            surface = pygame.Surface(rectSize).convert_alpha()
        # NOTE(review): this fill runs for BOTH branches (it is outside the
        # else), blanking the loaded artwork too -- confirm this is intended.
        surface.fill(colours.TRANSPARENT)
        self.colour = colours.TRANSPARENT
        self.image = surface
        self.font = Font("assets/YukonTech.ttf", 20)
        label = self.font.render(text, False, colours.BLUEDARK)
        surface.blit(
            label,
            (surface.get_rect().width - label.get_rect().width - 4,
             surface.get_rect().height - label.get_rect().height - 5),
        )
        GeneralWidget.__init__(self, colour, pos, size, handler)
        self.highlighted = False
        self.beep = Sound("assets/audio/panel/202.wav")

    def handleEvent(self, event, clock):
        """Flash white while pressed; restore colour on release."""
        if (event.type == MOUSEBUTTONDOWN
                and self.rect.collidepoint(event.pos)
                and self.visible == True):
            self.applyColour(colours.WHITE)
            self.highlighted = True
            self.beep.play()
        if event.type == MOUSEBUTTONUP and self.highlighted and self.visible == True:
            self.applyColour(self.colour)
        return GeneralWidget.handleEvent(self, event, clock)
class LogoutButton(GeneralWidget):
    """Logout button: rounded artwork by default, rectangular when a
    ``rectSize`` is given."""

    def __init__(self, colour, pos, text, handler=None, rectSize=None, icon=None):
        if rectSize == None:
            surface = pygame.image.load("assets/button_modern.png").convert_alpha()
            size = (surface.get_rect().width, surface.get_rect().height)
        else:
            size = rectSize
            surface = pygame.Surface(rectSize).convert_alpha()
        # NOTE(review): this fill runs for BOTH branches (outside the else),
        # painting over the loaded artwork as well -- confirm intended.
        surface.fill(colour)
        self.image = surface
        self.font = Font("assets/YukonTech.ttf", 19)
        label = self.font.render(text, False, colours.BLUEDARK)
        surface.blit(
            label,
            (surface.get_rect().width - label.get_rect().width - 5,
             surface.get_rect().height - label.get_rect().height),
        )
        GeneralWidget.__init__(self, colour, pos, size, handler)
        self.highlighted = False
        self.beep = Sound("assets/audio/panel/202.wav")

    def handleEvent(self, event, clock):
        """Flash white while pressed; go transparent on release."""
        if (event.type == MOUSEBUTTONDOWN
                and self.rect.collidepoint(event.pos)
                and self.visible == True):
            self.applyColour(colours.WHITE)
            self.highlighted = True
            self.beep.play()
        if event.type == MOUSEBUTTONUP and self.highlighted and self.visible == True:
            self.applyColour(colours.TRANSPARENT)
        return GeneralWidget.handleEvent(self, event, clock)
class LcarsElbow(LcarsWidget):
    """The LCARS corner elbow - not currently used.

    ``style`` selects the corner orientation via the STYLE_* constants; the
    base artwork is the top-left elbow, flipped/rotated for the others.
    """
    STYLE_BOTTOM_LEFT = 0
    STYLE_TOP_LEFT = 1
    STYLE_BOTTOM_RIGHT = 2
    STYLE_TOP_RIGHT = 3

    def __init__(self, colour, style, pos, text, group_number, handler):
        image = pygame.image.load("assets/elbow_top.png").convert_alpha()
        if (style == LcarsElbow.STYLE_BOTTOM_LEFT):
            image = pygame.transform.flip(image, False, True)
        elif (style == LcarsElbow.STYLE_BOTTOM_RIGHT):
            image = pygame.transform.rotate(image, 180)
        elif (style == LcarsElbow.STYLE_TOP_RIGHT):
            image = pygame.transform.flip(image, True, False)
        self.image = image
        size = (image.get_rect().width, image.get_rect().height)
        LcarsWidget.__init__(self, colour, pos, size, handler)
        self.applyColour(colours.WHITE)
        # Fix: handleEvent reads self.highlighted on MOUSEBUTTONUP; without
        # this initialisation a button-up arriving before any button-down
        # raised AttributeError (sibling widgets all initialise it).
        self.highlighted = False

    def handleEvent(self, event, clock):
        """Flash white while pressed; go transparent on release."""
        if (event.type == MOUSEBUTTONDOWN and self.rect.collidepoint(event.pos) and self.visible == True):
            self.applyColour(colours.WHITE)
            self.highlighted = True
        if (event.type == MOUSEBUTTONUP and self.highlighted and self.visible == True):
            self.applyColour(colours.TRANSPARENT)
        # NOTE(review): this class subclasses LcarsWidget but delegates to
        # GeneralWidget.handleEvent, unlike its siblings -- confirm intended.
        return GeneralWidget.handleEvent(self, event, clock)
class LcarsTab(LcarsWidget):
    """Tab widget (like a radio button) - not currently used nor implemented."""
    STYLE_LEFT = 1
    STYLE_RIGHT = 2

    def __init__(self, colour, style, pos, handler=None):
        artwork = pygame.image.load("assets/tab.png").convert()
        if style == LcarsTab.STYLE_RIGHT:
            # Mirror the left-tab artwork vertically for the right style.
            artwork = pygame.transform.flip(artwork, False, True)
        dims = (artwork.get_rect().width, artwork.get_rect().height)
        LcarsWidget.__init__(self, colour, pos, dims, handler)
        self.image = artwork
        self.applyColour(colour)
class LcarsButton(LcarsWidget):
    """Classic LCARS button - rounded artwork, or rectangular if a
    ``rectSize`` is specified."""

    def __init__(self, colour, pos, text, handler=None, rectSize=None, icon=None):
        if rectSize == None:
            surface = pygame.image.load("assets/button.png").convert_alpha()
            size = (surface.get_rect().width, surface.get_rect().height)
        else:
            size = rectSize
            surface = pygame.Surface(rectSize)
            surface.fill(colour)
        self.colour = colour
        self.image = surface
        self.font = Font("assets/YukonTech.ttf", 18)
        label = self.font.render(text, False, colours.BLACK)
        surface.blit(
            label,
            (surface.get_rect().width - label.get_rect().width - 10,
             surface.get_rect().height - label.get_rect().height - 5),
        )
        LcarsWidget.__init__(self, colour, pos, size, handler)
        self.applyColour(colour)
        self.highlighted = False
        self.beep = Sound("assets/audio/panel/202.wav")

    def handleEvent(self, event, clock):
        """Flash white while pressed; restore colour on release."""
        if (event.type == MOUSEBUTTONDOWN
                and self.rect.collidepoint(event.pos)
                and self.visible == True):
            self.applyColour(colours.WHITE)
            self.highlighted = True
            self.beep.play()
        if event.type == MOUSEBUTTONUP and self.highlighted and self.visible == True:
            self.applyColour(self.colour)
        return LcarsWidget.handleEvent(self, event, clock)
class PowerButton(PowerWidget):
    """Power button - shows the cyan artwork when idle and the plain artwork
    while pressed. Rectangular when a ``rectSize`` is given."""

    def __init__(self, colour, pos, text, handler=None, rectSize=None, icon=None):
        if rectSize == None:
            surface = pygame.image.load("assets/power_small_cyantest.png")
            size = (surface.get_rect().width, surface.get_rect().height)
        else:
            size = rectSize
            surface = pygame.Surface(rectSize).convert_alpha()
            surface.fill(colour)
        self.colour = colour
        self.image = surface.convert_alpha()
        PowerWidget.__init__(self, colour, pos, size, handler)
        self.highlighted = False
        self.beep = Sound("assets/audio/panel/202.wav")

    def handleEvent(self, event, clock):
        """Swap artwork on press/release, then delegate to PowerWidget."""
        # NOTE(review): both artworks are reloaded from disk on every event;
        # caching them in __init__ would avoid the repeated I/O.
        pressed_art = pygame.image.load("assets/power_small.png")
        if (event.type == MOUSEBUTTONDOWN
                and self.rect.collidepoint(event.pos)
                and self.visible == True):
            self.highlighted = True
            self.beep.play()
            self.image = pressed_art.convert_alpha()
        idle_art = pygame.image.load("assets/power_small_cyantest.png")
        if event.type == MOUSEBUTTONUP and self.highlighted and self.visible == True:
            self.image = idle_art.convert_alpha()
        return PowerWidget.handleEvent(self, event, clock)
class ResetButton(ResetWidget):
    """Reset button - shows the cyan artwork when idle and the plain artwork
    while pressed. Either rounded (default artwork) or rectangular if
    ``rectSize`` is specified.
    """
    def __init__(self, colour, pos, text, handler=None, rectSize=None, icon=None):
        if rectSize == None:
            image = pygame.image.load("assets/reset_small_cyantest.png").convert_alpha()
            size = (image.get_rect().width, image.get_rect().height)
        else:
            size = rectSize
            image = pygame.Surface(rectSize).convert_alpha()
            image.fill(colour)
        self.colour = colour
        self.image = image
        # NOTE(review): this class subclasses ResetWidget but initialises (and
        # below dispatches events to) PowerWidget -- confirm this is intended.
        PowerWidget.__init__(self, colour, pos, size, handler)
        self.highlighted = False
        self.beep = Sound("assets/audio/panel/202.wav")
    def handleEvent(self, event, clock):
        # NOTE(review): both artworks are reloaded from disk on every event;
        # caching them in __init__ would avoid the repeated I/O.
        image2 = pygame.image.load("assets/reset_small.png")
        if (event.type == MOUSEBUTTONDOWN and self.rect.collidepoint(event.pos) and self.visible == True):
            self.highlighted = True
            self.beep.play()
            self.image = image2.convert_alpha()
        image = pygame.image.load("assets/reset_small_cyantest.png")
        if (event.type == MOUSEBUTTONUP and self.highlighted and self.visible == True):
            self.image = image.convert_alpha()
        return PowerWidget.handleEvent(self, event, clock)
class LcarsText(LcarsWidget):
    """Text widget that can be placed anywhere.

    A negative y value in ``pos`` requests horizontal centring on screen.
    """

    def __init__(self, colour, pos, message, size=1.0, background=None, handler=None):
        self.colour = colour
        self.background = background
        self.font = Font("assets/MicroTech.ttf", int(15 * size))
        self.renderText(message)
        if pos[1] < 0:
            # Centre the text horizontally around x = 400.
            pos = (pos[0], 400 - self.image.get_rect().width / 2)
        LcarsWidget.__init__(self, colour, pos, None, handler)

    def renderText(self, message):
        """Re-render ``message`` into self.image (antialiased)."""
        if self.background == None:
            self.image = self.font.render(message, True, self.colour)
        else:
            self.image = self.font.render(message, True, self.colour, self.background)

    def setText(self, newText):
        self.renderText(newText)
class DescText(LcarsWidget):
    """Description text widget (Doboto font) that can be placed anywhere.

    A negative y value in ``pos`` requests horizontal centring on screen.
    """

    def __init__(self, colour, pos, message, size=1.6, background=None, handler=None):
        self.colour = colour
        self.background = background
        self.font = Font("assets/Doboto.ttf", int(15 * size))
        self.renderText(message)
        if pos[1] < 0:
            # Centre the text horizontally around x = 400.
            pos = (pos[0], 400 - self.image.get_rect().width / 2)
        LcarsWidget.__init__(self, colour, pos, None, handler)

    def renderText(self, message):
        """Re-render ``message`` into self.image (antialiased)."""
        if self.background == None:
            self.image = self.font.render(message, True, self.colour)
        else:
            self.image = self.font.render(message, True, self.colour, self.background)

    def setText(self, newText):
        self.renderText(newText)
class YukonText(LcarsWidget):
    """Text widget in the Yukon font that can be placed anywhere.

    A negative y value in ``pos`` requests horizontal centring on screen.
    """

    def __init__(self, colour, pos, message, size=1.0, background=None, handler=None):
        self.colour = colour
        self.background = background
        self.font = Font("assets/YukonTech.ttf", int(20 * size))
        self.renderText(message)
        if pos[1] < 0:
            # Centre the text horizontally around x = 400.
            pos = (pos[0], 400 - self.image.get_rect().width / 2)
        GeneralWidget.__init__(self, colour, pos, None, handler)

    def renderText(self, message):
        """Re-render ``message`` into self.image (antialiased)."""
        if self.background == None:
            self.image = self.font.render(message, True, self.colour)
        else:
            self.image = self.font.render(message, True, self.colour, self.background)

    def setText(self, newText):
        self.renderText(newText)
class LcarsBlockLarge(SideButton):
    """Left navigation block - large variant (98x147)."""
    def __init__(self, colour, pos, text, handler=None):
        SideButton.__init__(self, colour, pos, text, handler, (98, 147))
class LcarsBlockMedium(SideButton):
    """Left navigation block - medium variant (98x62)."""
    def __init__(self, colour, pos, text, handler=None):
        SideButton.__init__(self, colour, pos, text, handler, (98, 62))
class LcarsBlockSmall(SideButton):
    """Left navigation block - small variant (98x34)."""
    def __init__(self, colour, pos, text, handler=None):
        SideButton.__init__(self, colour, pos, text, handler, (98, 34))
class SideBlockSmall(LogoutButton):
    """Left navigation block - small logout-styled variant (92x15)."""
    def __init__(self, colour, pos, text, handler=None):
        LogoutButton.__init__(self, colour, pos, text, handler, (92, 15))
class RelayPowerButton(PowerButton):
    """PowerButton bound to a relay controller.

    Fix: ``rectSize`` was accepted but silently dropped; it is now forwarded
    to PowerButton so rectangular relay power buttons work.
    """
    def __init__(self, colour, pos, text, relayController, handler=None, rectSize=None, icon=None):
        self.relay = relayController
        PowerButton.__init__(self, colour, pos, text, handler, rectSize=rectSize, icon=icon)
class RelayResetButton(ResetButton):
    """ResetButton bound to a relay controller.

    Fix: ``rectSize`` was accepted but silently dropped; it is now forwarded
    to ResetButton so rectangular relay reset buttons work.
    """
    def __init__(self, colour, pos, text, relayController, handler=None, rectSize=None, icon=None):
        self.relay = relayController
        ResetButton.__init__(self, colour, pos, text, handler, rectSize=rectSize, icon=icon)
class ClusterButton(UltimateButton):
    """UltimateButton that belongs to a numbered cluster/group."""
    # NOTE: this list is also the signature default below; it is never mutated
    # here, but treat it as read-only.
    colour_set = [colours.WHITE, colours.GREY_BLUE, colours.BLUE]

    def __init__(self, pos, text, group_number, handler=None, colour_set=colour_set, image_set=None):
        # Fix: image_set previously defaulted to a shared mutable list.
        if image_set is None:
            image_set = [None, None, None]
        self.group_number = group_number
        UltimateButton.__init__(self, pos, text, colour_set=colour_set, image_set=image_set, handler=handler)
|
[
"pygame.transform.flip",
"ui.widgets.sprite.PowerWidget.__init__",
"pygame.transform.rotate",
"pygame.Surface",
"ui.widgets.sprite.PowerWidget.handleEvent",
"ui.widgets.sprite.LcarsWidget.__init__",
"ui.widgets.sprite.LcarsWidget.handleEvent",
"pygame.font.Font",
"pygame.image.load",
"ui.utils.sound.Sound",
"ui.widgets.sprite.GeneralWidget.__init__",
"ui.widgets.sprite.GeneralWidget.handleEvent"
] |
[((766, 798), 'pygame.font.Font', 'Font', (['"""assets/YukonTech.ttf"""', '(20)'], {}), "('assets/YukonTech.ttf', 20)\n", (770, 798), False, 'from pygame.font import Font, SysFont\n'), ((1077, 1131), 'ui.widgets.sprite.LcarsWidget.__init__', 'LcarsWidget.__init__', (['self', 'colour', 'pos', 'size', 'handler'], {}), '(self, colour, pos, size, handler)\n', (1097, 1131), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((1218, 1253), 'ui.utils.sound.Sound', 'Sound', (['"""assets/audio/panel/202.wav"""'], {}), "('assets/audio/panel/202.wav')\n", (1223, 1253), False, 'from ui.utils.sound import Sound\n'), ((1313, 1363), 'pygame.image.load', 'pygame.image.load', (['"""assets/button_modern_down.png"""'], {}), "('assets/button_modern_down.png')\n", (1330, 1363), False, 'import pygame\n'), ((1601, 1646), 'pygame.image.load', 'pygame.image.load', (['"""assets/button_modern.png"""'], {}), "('assets/button_modern.png')\n", (1618, 1646), False, 'import pygame\n'), ((1809, 1852), 'ui.widgets.sprite.LcarsWidget.handleEvent', 'LcarsWidget.handleEvent', (['self', 'event', 'clock'], {}), '(self, event, clock)\n', (1832, 1852), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((2429, 2461), 'pygame.font.Font', 'Font', (['"""assets/YukonTech.ttf"""', '(20)'], {}), "('assets/YukonTech.ttf', 20)\n", (2433, 2461), False, 'from pygame.font import Font, SysFont\n'), ((2739, 2795), 'ui.widgets.sprite.GeneralWidget.__init__', 'GeneralWidget.__init__', (['self', 'colour', 'pos', 'size', 'handler'], {}), '(self, colour, pos, size, handler)\n', (2761, 2795), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((2849, 2884), 'ui.utils.sound.Sound', 'Sound', (['"""assets/audio/panel/202.wav"""'], {}), "('assets/audio/panel/202.wav')\n", (2854, 2884), False, 'from ui.utils.sound import Sound\n'), ((2946, 2990), 'pygame.image.load', 'pygame.image.load', 
(['"""assets/elbow_top_up.png"""'], {}), "('assets/elbow_top_up.png')\n", (2963, 2990), False, 'import pygame\n'), ((3377, 3422), 'ui.widgets.sprite.GeneralWidget.handleEvent', 'GeneralWidget.handleEvent', (['self', 'event', 'clock'], {}), '(self, event, clock)\n', (3402, 3422), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((4013, 4045), 'pygame.font.Font', 'Font', (['"""assets/YukonTech.ttf"""', '(20)'], {}), "('assets/YukonTech.ttf', 20)\n", (4017, 4045), False, 'from pygame.font import Font, SysFont\n'), ((4347, 4403), 'ui.widgets.sprite.GeneralWidget.__init__', 'GeneralWidget.__init__', (['self', 'colour', 'pos', 'size', 'handler'], {}), '(self, colour, pos, size, handler)\n', (4369, 4403), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((4457, 4492), 'ui.utils.sound.Sound', 'Sound', (['"""assets/audio/panel/202.wav"""'], {}), "('assets/audio/panel/202.wav')\n", (4462, 4492), False, 'from ui.utils.sound import Sound\n'), ((4554, 4603), 'pygame.image.load', 'pygame.image.load', (['"""assets/elbow_bottom_down.png"""'], {}), "('assets/elbow_bottom_down.png')\n", (4571, 4603), False, 'import pygame\n'), ((5028, 5073), 'ui.widgets.sprite.GeneralWidget.handleEvent', 'GeneralWidget.handleEvent', (['self', 'event', 'clock'], {}), '(self, event, clock)\n', (5053, 5073), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((8043, 8107), 'ui.widgets.sprite.LcarsWidget.__init__', 'LcarsWidget.__init__', (['self', 'self.colour', 'pos', 'self.size', 'handler'], {}), '(self, self.colour, pos, self.size, handler)\n', (8063, 8107), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((8282, 8317), 'ui.utils.sound.Sound', 'Sound', (['"""assets/audio/panel/202.wav"""'], {}), "('assets/audio/panel/202.wav')\n", (8287, 8317), False, 'from ui.utils.sound import Sound\n'), ((9055, 9098), 
'ui.widgets.sprite.LcarsWidget.handleEvent', 'LcarsWidget.handleEvent', (['self', 'event', 'clock'], {}), '(self, event, clock)\n', (9078, 9098), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((10095, 10127), 'pygame.font.Font', 'Font', (['"""assets/YukonTech.ttf"""', '(20)'], {}), "('assets/YukonTech.ttf', 20)\n", (10099, 10127), False, 'from pygame.font import Font, SysFont\n'), ((10402, 10458), 'ui.widgets.sprite.GeneralWidget.__init__', 'GeneralWidget.__init__', (['self', 'colour', 'pos', 'size', 'handler'], {}), '(self, colour, pos, size, handler)\n', (10424, 10458), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((10512, 10547), 'ui.utils.sound.Sound', 'Sound', (['"""assets/audio/panel/202.wav"""'], {}), "('assets/audio/panel/202.wav')\n", (10517, 10547), False, 'from ui.utils.sound import Sound\n'), ((11047, 11092), 'ui.widgets.sprite.GeneralWidget.handleEvent', 'GeneralWidget.handleEvent', (['self', 'event', 'clock'], {}), '(self, event, clock)\n', (11072, 11092), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((11669, 11701), 'pygame.font.Font', 'Font', (['"""assets/YukonTech.ttf"""', '(19)'], {}), "('assets/YukonTech.ttf', 19)\n", (11673, 11701), False, 'from pygame.font import Font, SysFont\n'), ((11972, 12028), 'ui.widgets.sprite.GeneralWidget.__init__', 'GeneralWidget.__init__', (['self', 'colour', 'pos', 'size', 'handler'], {}), '(self, colour, pos, size, handler)\n', (11994, 12028), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((12082, 12117), 'ui.utils.sound.Sound', 'Sound', (['"""assets/audio/panel/202.wav"""'], {}), "('assets/audio/panel/202.wav')\n", (12087, 12117), False, 'from ui.utils.sound import Sound\n'), ((12612, 12657), 'ui.widgets.sprite.GeneralWidget.handleEvent', 'GeneralWidget.handleEvent', (['self', 'event', 'clock'], {}), '(self, event, 
clock)\n', (12637, 12657), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((13545, 13599), 'ui.widgets.sprite.LcarsWidget.__init__', 'LcarsWidget.__init__', (['self', 'colour', 'pos', 'size', 'handler'], {}), '(self, colour, pos, size, handler)\n', (13565, 13599), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((14143, 14188), 'ui.widgets.sprite.GeneralWidget.handleEvent', 'GeneralWidget.handleEvent', (['self', 'event', 'clock'], {}), '(self, event, clock)\n', (14168, 14188), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((14650, 14704), 'ui.widgets.sprite.LcarsWidget.__init__', 'LcarsWidget.__init__', (['self', 'colour', 'pos', 'size', 'handler'], {}), '(self, colour, pos, size, handler)\n', (14670, 14704), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((15364, 15396), 'pygame.font.Font', 'Font', (['"""assets/YukonTech.ttf"""', '(18)'], {}), "('assets/YukonTech.ttf', 18)\n", (15368, 15396), False, 'from pygame.font import Font, SysFont\n'), ((15673, 15727), 'ui.widgets.sprite.LcarsWidget.__init__', 'LcarsWidget.__init__', (['self', 'colour', 'pos', 'size', 'handler'], {}), '(self, colour, pos, size, handler)\n', (15693, 15727), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((15814, 15849), 'ui.utils.sound.Sound', 'Sound', (['"""assets/audio/panel/202.wav"""'], {}), "('assets/audio/panel/202.wav')\n", (15819, 15849), False, 'from ui.utils.sound import Sound\n'), ((16266, 16309), 'ui.widgets.sprite.LcarsWidget.handleEvent', 'LcarsWidget.handleEvent', (['self', 'event', 'clock'], {}), '(self, event, clock)\n', (16289, 16309), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((17264, 17318), 'ui.widgets.sprite.PowerWidget.__init__', 'PowerWidget.__init__', (['self', 'colour', 
'pos', 'size', 'handler'], {}), '(self, colour, pos, size, handler)\n', (17284, 17318), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((17406, 17441), 'ui.utils.sound.Sound', 'Sound', (['"""assets/audio/panel/202.wav"""'], {}), "('assets/audio/panel/202.wav')\n", (17411, 17441), False, 'from ui.utils.sound import Sound\n'), ((17501, 17544), 'pygame.image.load', 'pygame.image.load', (['"""assets/power_small.png"""'], {}), "('assets/power_small.png')\n", (17518, 17544), False, 'import pygame\n'), ((17937, 17989), 'pygame.image.load', 'pygame.image.load', (['"""assets/power_small_cyantest.png"""'], {}), "('assets/power_small_cyantest.png')\n", (17954, 17989), False, 'import pygame\n'), ((18152, 18195), 'ui.widgets.sprite.PowerWidget.handleEvent', 'PowerWidget.handleEvent', (['self', 'event', 'clock'], {}), '(self, event, clock)\n', (18175, 18195), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((18785, 18839), 'ui.widgets.sprite.PowerWidget.__init__', 'PowerWidget.__init__', (['self', 'colour', 'pos', 'size', 'handler'], {}), '(self, colour, pos, size, handler)\n', (18805, 18839), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((18893, 18928), 'ui.utils.sound.Sound', 'Sound', (['"""assets/audio/panel/202.wav"""'], {}), "('assets/audio/panel/202.wav')\n", (18898, 18928), False, 'from ui.utils.sound import Sound\n'), ((18988, 19031), 'pygame.image.load', 'pygame.image.load', (['"""assets/reset_small.png"""'], {}), "('assets/reset_small.png')\n", (19005, 19031), False, 'import pygame\n'), ((19269, 19321), 'pygame.image.load', 'pygame.image.load', (['"""assets/reset_small_cyantest.png"""'], {}), "('assets/reset_small_cyantest.png')\n", (19286, 19321), False, 'import pygame\n'), ((19484, 19527), 'ui.widgets.sprite.PowerWidget.handleEvent', 'PowerWidget.handleEvent', (['self', 'event', 'clock'], {}), '(self, event, clock)\n', 
(19507, 19527), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((20028, 20082), 'ui.widgets.sprite.LcarsWidget.__init__', 'LcarsWidget.__init__', (['self', 'colour', 'pos', 'None', 'handler'], {}), '(self, colour, pos, None, handler)\n', (20048, 20082), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((20890, 20944), 'ui.widgets.sprite.LcarsWidget.__init__', 'LcarsWidget.__init__', (['self', 'colour', 'pos', 'None', 'handler'], {}), '(self, colour, pos, None, handler)\n', (20910, 20944), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((21756, 21812), 'ui.widgets.sprite.GeneralWidget.__init__', 'GeneralWidget.__init__', (['self', 'colour', 'pos', 'None', 'handler'], {}), '(self, colour, pos, None, handler)\n', (21778, 21812), False, 'from ui.widgets.sprite import LcarsWidget, PowerWidget, ResetWidget, GeneralWidget\n'), ((579, 603), 'pygame.Surface', 'pygame.Surface', (['rectSize'], {}), '(rectSize)\n', (593, 603), False, 'import pygame\n'), ((13169, 13210), 'pygame.transform.flip', 'pygame.transform.flip', (['image', '(False)', '(True)'], {}), '(image, False, True)\n', (13190, 13210), False, 'import pygame\n'), ((14526, 14567), 'pygame.transform.flip', 'pygame.transform.flip', (['image', '(False)', '(True)'], {}), '(image, False, True)\n', (14547, 14567), False, 'import pygame\n'), ((15190, 15214), 'pygame.Surface', 'pygame.Surface', (['rectSize'], {}), '(rectSize)\n', (15204, 15214), False, 'import pygame\n'), ((16558, 16610), 'pygame.image.load', 'pygame.image.load', (['"""assets/power_small_cyantest.png"""'], {}), "('assets/power_small_cyantest.png')\n", (16575, 16610), False, 'import pygame\n'), ((7202, 7227), 'pygame.Surface', 'pygame.Surface', (['self.size'], {}), '(self.size)\n', (7216, 7227), False, 'import pygame\n'), ((7589, 7621), 'pygame.font.Font', 'Font', (['"""assets/MicroTech.ttf"""', '(18)'], {}), 
"('assets/MicroTech.ttf', 18)\n", (7593, 7621), False, 'from pygame.font import Font, SysFont\n'), ((12944, 12985), 'pygame.image.load', 'pygame.image.load', (['"""assets/elbow_top.png"""'], {}), "('assets/elbow_top.png')\n", (12961, 12985), False, 'import pygame\n'), ((13286, 13321), 'pygame.transform.rotate', 'pygame.transform.rotate', (['image', '(180)'], {}), '(image, 180)\n', (13309, 13321), False, 'import pygame\n'), ((14416, 14451), 'pygame.image.load', 'pygame.image.load', (['"""assets/tab.png"""'], {}), "('assets/tab.png')\n", (14433, 14451), False, 'import pygame\n'), ((386, 431), 'pygame.image.load', 'pygame.image.load', (['"""assets/button_modern.png"""'], {}), "('assets/button_modern.png')\n", (403, 431), False, 'import pygame\n'), ((2023, 2064), 'pygame.image.load', 'pygame.image.load', (['"""assets/elbow_top.png"""'], {}), "('assets/elbow_top.png')\n", (2040, 2064), False, 'import pygame\n'), ((2212, 2236), 'pygame.Surface', 'pygame.Surface', (['rectSize'], {}), '(rectSize)\n', (2226, 2236), False, 'import pygame\n'), ((3604, 3648), 'pygame.image.load', 'pygame.image.load', (['"""assets/elbow_bottom.png"""'], {}), "('assets/elbow_bottom.png')\n", (3621, 3648), False, 'import pygame\n'), ((3796, 3820), 'pygame.Surface', 'pygame.Surface', (['rectSize'], {}), '(rectSize)\n', (3810, 3820), False, 'import pygame\n'), ((9340, 9385), 'pygame.image.load', 'pygame.image.load', (['"""assets/button_modern.png"""'], {}), "('assets/button_modern.png')\n", (9357, 9385), False, 'import pygame\n'), ((9533, 9557), 'pygame.Surface', 'pygame.Surface', (['rectSize'], {}), '(rectSize)\n', (9547, 9557), False, 'import pygame\n'), ((11351, 11396), 'pygame.image.load', 'pygame.image.load', (['"""assets/button_modern.png"""'], {}), "('assets/button_modern.png')\n", (11368, 11396), False, 'import pygame\n'), ((11544, 11568), 'pygame.Surface', 'pygame.Surface', (['rectSize'], {}), '(rectSize)\n', (11558, 11568), False, 'import pygame\n'), ((13394, 13435), 
'pygame.transform.flip', 'pygame.transform.flip', (['image', '(True)', '(False)'], {}), '(image, True, False)\n', (13415, 13435), False, 'import pygame\n'), ((15004, 15042), 'pygame.image.load', 'pygame.image.load', (['"""assets/button.png"""'], {}), "('assets/button.png')\n", (15021, 15042), False, 'import pygame\n'), ((16742, 16766), 'pygame.Surface', 'pygame.Surface', (['rectSize'], {}), '(rectSize)\n', (16756, 16766), False, 'import pygame\n'), ((18443, 18495), 'pygame.image.load', 'pygame.image.load', (['"""assets/reset_small_cyantest.png"""'], {}), "('assets/reset_small_cyantest.png')\n", (18460, 18495), False, 'import pygame\n'), ((18643, 18667), 'pygame.Surface', 'pygame.Surface', (['rectSize'], {}), '(rectSize)\n', (18657, 18667), False, 'import pygame\n')]
|
from ev3sim.visual.manager import ScreenObjectManager
from ev3sim.validation.bot_files import BotValidator
import os
import pygame
import pygame_gui
import yaml
from ev3sim.file_helper import find_abs, find_abs_directory
from ev3sim.validation.batch_files import BatchValidator
from ev3sim.visual.menus.base_menu import BaseMenu
from ev3sim.visual.settings.main_settings import main_settings
from ev3sim.search_locations import (
asset_locations,
batch_locations,
bot_locations,
code_locations,
config_locations,
preset_locations,
)
class MainMenu(BaseMenu):
SLIDE_NUMS = 4
SLIDE_TIME = 5
def buttonPos(self, i):
return (
(self._size[0] - self.button_size[0]) / 2,
(self._size[1] - self.button_size[1]) / 2
+ self.button_size[1] * (1.5 * i - (2.5 if self.show_custom else 2))
+ 50,
)
def playSim(self, preset):
abs_path = find_abs(preset, allowed_areas=preset_locations())
with open(abs_path, "r") as f:
preset_config = yaml.safe_load(f)
sim_path = find_abs(preset_config["sim_location"], allowed_areas=batch_locations())
with open(sim_path, "r") as f:
sim_config = yaml.safe_load(f)
to_remove = []
for index in range(len(sim_config["bots"])):
# Try loading this bot.
try:
with open(os.path.join(find_abs(sim_config["bots"][index], bot_locations()), "config.bot"), "r") as f:
bot_config = yaml.safe_load(f)
if not BotValidator.validate_json(bot_config):
to_remove.append(index)
if bot_config.get("type", "python") == "python":
fname = bot_config.get("script", "code.py")
else:
fname = bot_config.get("script", "program.ev3")
if not os.path.exists(os.path.join(find_abs(sim_config["bots"][index], bot_locations()), fname)):
def action():
with open(os.path.join(find_abs(sim_config["bots"][index], bot_locations()), fname), "w") as f:
f.write("# Put your code here!\n")
ScreenObjectManager.instance.forceCloseError(
f"Your bot {sim_config['bots'][index]} does not contain the file {fname}. You may have renamed or deleted it by accident. In order to use this bot, you need to add this file back. Click \"Add {fname}\" to create this file, or do it manually.",
(f"Add {fname}", action),
)
return
except:
to_remove.append(index)
if to_remove:
for index in to_remove[::-1]:
del sim_config["bots"][index]
with open(sim_path, "w") as f:
f.write(yaml.dump(sim_config))
if not sim_config["bots"]:
# We cannot play, there no are valid bots.
return ScreenObjectManager.instance.pushScreen(
ScreenObjectManager.SCREEN_BOTS,
batch_file=sim_path,
next=ScreenObjectManager.instance.SCREEN_SIM,
next_kwargs={"batch": sim_path},
)
return ScreenObjectManager.instance.pushScreen(
ScreenObjectManager.instance.SCREEN_SIM,
batch=sim_path,
)
def iconPos(self, buttonPos, buttonSize, iconSize):
return (
buttonPos[0] + buttonSize[0] / 2 - iconSize[0] / 2,
buttonPos[1] + buttonSize[1] * 0.2,
)
def generateObjects(self):
from ev3sim.visual.manager import ScreenObjectManager
self.show_custom = False
# First, check if there are any valid batches in the custom folder.
for rel_dir in batch_locations():
# Only consider custom sims.
if not rel_dir.startswith("workspace/custom/"):
continue
try:
actual_dir = find_abs_directory(rel_dir)
except:
continue
for _ in BatchValidator.all_valid_in_dir(actual_dir):
self.show_custom = True
break
# In order to respect theme changes, objects must be built in initWithKwargs
self.bg = pygame_gui.elements.UIPanel(
relative_rect=pygame.Rect(0, 0, *self._size),
starting_layer_height=-1,
manager=self,
object_id=pygame_gui.core.ObjectID("background"),
)
self._all_objs.append(self.bg)
self.title = pygame_gui.elements.UITextBox(
relative_rect=pygame.Rect(0, 0, -1, -1),
html_text="EV3<i>Sim</i>",
manager=self,
object_id=pygame_gui.core.ObjectID("title"),
)
self.title.set_position(((self._size[0] - self.title.rect.width) / 2, 50))
self._all_objs.append(self.title)
self.button_size = (
(self._size[0] / 4, self._size[1] / 10) if self.show_custom else (self._size[0] / 4, self._size[1] / 8)
)
settings_size = self.button_size[0] * 0.3, self.button_size[1]
bot_size = settings_size
settings_icon_size = settings_size[1] * 0.6, settings_size[1] * 0.6
bot_icon_size = bot_size[1] * 0.6, bot_size[1] * 0.6
settings_icon_path = find_abs("ui/settings.png", allowed_areas=asset_locations())
bot_icon_path = find_abs("ui/bot.png", allowed_areas=asset_locations())
self.soccer_button = pygame_gui.elements.UIButton(
relative_rect=pygame.Rect(*self.buttonPos(0), *self.button_size),
text="Soccer",
manager=self,
object_id=pygame_gui.core.ObjectID("soccer_button", "menu_button"),
)
self.addButtonEvent("soccer_button", lambda: self.playSim("soccer.yaml"))
self._all_objs.append(self.soccer_button)
soccer_settings_button_pos = [self.buttonPos(0)[0] + self.button_size[0] + 20, self.buttonPos(0)[1]]
self.soccer_settings_button = pygame_gui.elements.UIButton(
relative_rect=pygame.Rect(*soccer_settings_button_pos, *settings_size),
text="",
manager=self,
object_id=pygame_gui.core.ObjectID("soccer-settings", "settings_buttons"),
)
self.addButtonEvent("soccer-settings", self.clickSimSettings, "soccer.yaml")
self.soccer_settings_icon = pygame_gui.elements.UIImage(
relative_rect=pygame.Rect(
*self.iconPos(soccer_settings_button_pos, settings_size, settings_icon_size), *settings_icon_size
),
image_surface=pygame.image.load(settings_icon_path),
manager=self,
object_id=pygame_gui.core.ObjectID("soccer-settings-icon"),
)
self._all_objs.append(self.soccer_settings_button)
self._all_objs.append(self.soccer_settings_icon)
soccer_bot_button_pos = [
self.buttonPos(0)[0] + self.button_size[0] + settings_size[0] + 40,
self.buttonPos(0)[1],
]
self.soccer_bot_button = pygame_gui.elements.UIButton(
relative_rect=pygame.Rect(*soccer_bot_button_pos, *bot_size),
text="",
manager=self,
object_id=pygame_gui.core.ObjectID("soccer-bot", "settings_buttons"),
)
self.addButtonEvent("soccer-bot", self.clickSimBots, "soccer.yaml")
self.soccer_bot_icon = pygame_gui.elements.UIImage(
relative_rect=pygame.Rect(*self.iconPos(soccer_bot_button_pos, bot_size, bot_icon_size), *bot_icon_size),
image_surface=pygame.image.load(bot_icon_path),
manager=self,
object_id=pygame_gui.core.ObjectID("soccer-bot-icon"),
)
self._all_objs.append(self.soccer_bot_button)
self._all_objs.append(self.soccer_bot_icon)
self.rescue_button = pygame_gui.elements.UIButton(
relative_rect=pygame.Rect(*self.buttonPos(1), *self.button_size),
text="Rescue",
manager=self,
object_id=pygame_gui.core.ObjectID("rescue_button", "menu_button"),
)
self.addButtonEvent("rescue_button", lambda: self.playSim("rescue.yaml"))
self._all_objs.append(self.rescue_button)
rescue_settings_button_pos = [self.buttonPos(1)[0] + self.button_size[0] + 20, self.buttonPos(1)[1]]
self.rescue_settings_button = pygame_gui.elements.UIButton(
relative_rect=pygame.Rect(*rescue_settings_button_pos, *settings_size),
text="",
manager=self,
object_id=pygame_gui.core.ObjectID("rescue-settings", "settings_buttons"),
)
self.addButtonEvent("rescue-settings", self.clickSimSettings, "rescue.yaml")
self.rescue_settings_icon = pygame_gui.elements.UIImage(
relative_rect=pygame.Rect(
*self.iconPos(rescue_settings_button_pos, settings_size, settings_icon_size), *settings_icon_size
),
image_surface=pygame.image.load(settings_icon_path),
manager=self,
object_id=pygame_gui.core.ObjectID("rescue-settings-icon"),
)
self._all_objs.append(self.rescue_settings_button)
self._all_objs.append(self.rescue_settings_icon)
rescue_bot_button_pos = [
self.buttonPos(1)[0] + self.button_size[0] + settings_size[0] + 40,
self.buttonPos(1)[1],
]
self.rescue_bot_button = pygame_gui.elements.UIButton(
relative_rect=pygame.Rect(*rescue_bot_button_pos, *bot_size),
text="",
manager=self,
object_id=pygame_gui.core.ObjectID("rescue-bot", "settings_buttons"),
)
self.addButtonEvent("rescue-bot", self.clickSimBots, "rescue.yaml")
self.rescue_bot_icon = pygame_gui.elements.UIImage(
relative_rect=pygame.Rect(*self.iconPos(rescue_bot_button_pos, bot_size, bot_icon_size), *bot_icon_size),
image_surface=pygame.image.load(bot_icon_path),
manager=self,
object_id=pygame_gui.core.ObjectID("rescue-bot-icon"),
)
self._all_objs.append(self.rescue_bot_button)
self._all_objs.append(self.rescue_bot_icon)
if self.show_custom:
self.custom_button = pygame_gui.elements.UIButton(
relative_rect=pygame.Rect(*self.buttonPos(2), *self.button_size),
text="Custom",
manager=self,
object_id=pygame_gui.core.ObjectID("custom_button", "menu_button"),
)
self.addButtonEvent("custom_button", self.clickCustom)
self._all_objs.append(self.custom_button)
self.bot_button = pygame_gui.elements.UIButton(
relative_rect=pygame.Rect(*self.buttonPos(3 if self.show_custom else 2), *self.button_size),
text="Bots",
manager=self,
object_id=pygame_gui.core.ObjectID("bots_button", "menu_button"),
)
self.addButtonEvent(
"bots_button",
lambda: ScreenObjectManager.instance.pushScreen(ScreenObjectManager.SCREEN_BOTS),
)
self._all_objs.append(self.bot_button)
self.settings_button = pygame_gui.elements.UIButton(
relative_rect=pygame.Rect(*self.buttonPos(4 if self.show_custom else 3), *self.button_size),
text="Settings",
manager=self,
object_id=pygame_gui.core.ObjectID("main_settings_button", "menu_button"),
)
def clickSettings():
ScreenObjectManager.instance.pushScreen(
ScreenObjectManager.SCREEN_SETTINGS,
file=find_abs("user_config.yaml", config_locations()),
settings=main_settings,
)
ScreenObjectManager.instance.screens[ScreenObjectManager.SCREEN_SETTINGS].clearEvents()
self.addButtonEvent("main_settings_button", clickSettings)
self._all_objs.append(self.settings_button)
super().generateObjects()
def swapSlides(self):
self.remaining = 0
self.slide_index += 1
self.slide_index %= self.SLIDE_NUMS
self.slide_surface_prev = pygame.image.load(
find_abs(f"bg_slide{(self.slide_index - 1) % self.SLIDE_NUMS}.png", asset_locations())
)
self.slide_surface_next = pygame.image.load(find_abs(f"bg_slide{self.slide_index}.png", asset_locations()))
MAX_ALPHA = 0.4
FADE_PCT = 0.55
def update(self, time_delta: float):
super().update(time_delta)
self.remaining += time_delta
if self.remaining >= self.SLIDE_TIME:
self.swapSlides()
bg_image = pygame.Surface(self._size, depth=32)
bg_image.fill(pygame.Color(16, 16, 16))
prop_time = self.remaining / self.SLIDE_TIME
alpha_prev = int(
(self.FADE_PCT - prop_time) / self.FADE_PCT * 255 * self.MAX_ALPHA if prop_time < self.FADE_PCT else 0
)
alpha_next = int(
(prop_time - (1 - self.FADE_PCT)) / self.FADE_PCT * 255 * self.MAX_ALPHA if prop_time > 0.25 else 0
)
self.slide_surface_prev = pygame.transform.smoothscale(self.slide_surface_prev, self._size)
self.slide_surface_next = pygame.transform.smoothscale(self.slide_surface_next, self._size)
self.slide_surface_prev.set_alpha(alpha_prev)
self.slide_surface_next.set_alpha(alpha_next)
bg_image.blit(self.slide_surface_prev, pygame.Rect(0, 0, *self._size))
bg_image.blit(self.slide_surface_next, pygame.Rect(0, 0, *self._size))
self.bg.set_image(bg_image)
def clickSimSettings(self, preset):
import importlib
abs_path = find_abs(preset, allowed_areas=preset_locations())
with open(abs_path, "r") as f:
preset_config = yaml.safe_load(f)
sim_path = find_abs(preset_config["sim_location"], allowed_areas=batch_locations())
mname, cname = preset_config["visual_settings"].rsplit(".", 1)
klass = getattr(importlib.import_module(mname), cname)
ScreenObjectManager.instance.pushScreen(
ScreenObjectManager.SCREEN_SETTINGS,
file=sim_path,
settings=klass,
allows_filename_change=False,
extension="sim",
)
def clickSimBots(self, preset):
abs_path = find_abs(preset, allowed_areas=preset_locations())
with open(abs_path, "r") as f:
preset_config = yaml.safe_load(f)
sim_path = find_abs(preset_config["sim_location"], allowed_areas=batch_locations())
ScreenObjectManager.instance.pushScreen(
ScreenObjectManager.SCREEN_BOTS,
batch_file=sim_path,
)
def clickCustom(self):
ScreenObjectManager.instance.pushScreen(ScreenObjectManager.SCREEN_BATCH)
def draw_ui(self, window_surface: pygame.surface.Surface):
super().draw_ui(window_surface)
def onPop(self):
pass
def initWithKwargs(self, **kwargs):
super().initWithKwargs(**kwargs)
self.slide_index = 0
self.swapSlides()
|
[
"pygame.Rect",
"yaml.dump",
"yaml.safe_load",
"pygame_gui.core.ObjectID",
"ev3sim.search_locations.batch_locations",
"ev3sim.search_locations.preset_locations",
"ev3sim.validation.bot_files.BotValidator.validate_json",
"ev3sim.validation.batch_files.BatchValidator.all_valid_in_dir",
"ev3sim.file_helper.find_abs_directory",
"ev3sim.search_locations.config_locations",
"pygame.Surface",
"pygame.transform.smoothscale",
"importlib.import_module",
"ev3sim.visual.manager.ScreenObjectManager.instance.forceCloseError",
"pygame.image.load",
"ev3sim.search_locations.bot_locations",
"pygame.Color",
"ev3sim.search_locations.asset_locations",
"ev3sim.visual.manager.ScreenObjectManager.instance.pushScreen"
] |
[((3276, 3377), 'ev3sim.visual.manager.ScreenObjectManager.instance.pushScreen', 'ScreenObjectManager.instance.pushScreen', (['ScreenObjectManager.instance.SCREEN_SIM'], {'batch': 'sim_path'}), '(ScreenObjectManager.instance.\n SCREEN_SIM, batch=sim_path)\n', (3315, 3377), False, 'from ev3sim.visual.manager import ScreenObjectManager\n'), ((3831, 3848), 'ev3sim.search_locations.batch_locations', 'batch_locations', ([], {}), '()\n', (3846, 3848), False, 'from ev3sim.search_locations import asset_locations, batch_locations, bot_locations, code_locations, config_locations, preset_locations\n'), ((12738, 12774), 'pygame.Surface', 'pygame.Surface', (['self._size'], {'depth': '(32)'}), '(self._size, depth=32)\n', (12752, 12774), False, 'import pygame\n'), ((13209, 13274), 'pygame.transform.smoothscale', 'pygame.transform.smoothscale', (['self.slide_surface_prev', 'self._size'], {}), '(self.slide_surface_prev, self._size)\n', (13237, 13274), False, 'import pygame\n'), ((13309, 13374), 'pygame.transform.smoothscale', 'pygame.transform.smoothscale', (['self.slide_surface_next', 'self._size'], {}), '(self.slide_surface_next, self._size)\n', (13337, 13374), False, 'import pygame\n'), ((14133, 14296), 'ev3sim.visual.manager.ScreenObjectManager.instance.pushScreen', 'ScreenObjectManager.instance.pushScreen', (['ScreenObjectManager.SCREEN_SETTINGS'], {'file': 'sim_path', 'settings': 'klass', 'allows_filename_change': '(False)', 'extension': '"""sim"""'}), "(ScreenObjectManager.SCREEN_SETTINGS,\n file=sim_path, settings=klass, allows_filename_change=False, extension=\n 'sim')\n", (14172, 14296), False, 'from ev3sim.visual.manager import ScreenObjectManager\n'), ((14651, 14748), 'ev3sim.visual.manager.ScreenObjectManager.instance.pushScreen', 'ScreenObjectManager.instance.pushScreen', (['ScreenObjectManager.SCREEN_BOTS'], {'batch_file': 'sim_path'}), '(ScreenObjectManager.SCREEN_BOTS,\n batch_file=sim_path)\n', (14690, 14748), False, 'from ev3sim.visual.manager import 
ScreenObjectManager\n'), ((14816, 14889), 'ev3sim.visual.manager.ScreenObjectManager.instance.pushScreen', 'ScreenObjectManager.instance.pushScreen', (['ScreenObjectManager.SCREEN_BATCH'], {}), '(ScreenObjectManager.SCREEN_BATCH)\n', (14855, 14889), False, 'from ev3sim.visual.manager import ScreenObjectManager\n'), ((1057, 1074), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (1071, 1074), False, 'import yaml\n'), ((1231, 1248), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (1245, 1248), False, 'import yaml\n'), ((3009, 3189), 'ev3sim.visual.manager.ScreenObjectManager.instance.pushScreen', 'ScreenObjectManager.instance.pushScreen', (['ScreenObjectManager.SCREEN_BOTS'], {'batch_file': 'sim_path', 'next': 'ScreenObjectManager.instance.SCREEN_SIM', 'next_kwargs': "{'batch': sim_path}"}), "(ScreenObjectManager.SCREEN_BOTS,\n batch_file=sim_path, next=ScreenObjectManager.instance.SCREEN_SIM,\n next_kwargs={'batch': sim_path})\n", (3048, 3189), False, 'from ev3sim.visual.manager import ScreenObjectManager\n'), ((4116, 4159), 'ev3sim.validation.batch_files.BatchValidator.all_valid_in_dir', 'BatchValidator.all_valid_in_dir', (['actual_dir'], {}), '(actual_dir)\n', (4147, 4159), False, 'from ev3sim.validation.batch_files import BatchValidator\n'), ((12797, 12821), 'pygame.Color', 'pygame.Color', (['(16)', '(16)', '(16)'], {}), '(16, 16, 16)\n', (12809, 12821), False, 'import pygame\n'), ((13530, 13560), 'pygame.Rect', 'pygame.Rect', (['(0)', '(0)', '*self._size'], {}), '(0, 0, *self._size)\n', (13541, 13560), False, 'import pygame\n'), ((13609, 13639), 'pygame.Rect', 'pygame.Rect', (['(0)', '(0)', '*self._size'], {}), '(0, 0, *self._size)\n', (13620, 13639), False, 'import pygame\n'), ((13881, 13898), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (13895, 13898), False, 'import yaml\n'), ((14086, 14116), 'importlib.import_module', 'importlib.import_module', (['mname'], {}), '(mname)\n', (14109, 14116), False, 'import importlib\n'), 
((14533, 14550), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (14547, 14550), False, 'import yaml\n'), ((970, 988), 'ev3sim.search_locations.preset_locations', 'preset_locations', ([], {}), '()\n', (986, 988), False, 'from ev3sim.search_locations import asset_locations, batch_locations, bot_locations, code_locations, config_locations, preset_locations\n'), ((1148, 1165), 'ev3sim.search_locations.batch_locations', 'batch_locations', ([], {}), '()\n', (1163, 1165), False, 'from ev3sim.search_locations import asset_locations, batch_locations, bot_locations, code_locations, config_locations, preset_locations\n'), ((4022, 4049), 'ev3sim.file_helper.find_abs_directory', 'find_abs_directory', (['rel_dir'], {}), '(rel_dir)\n', (4040, 4049), False, 'from ev3sim.file_helper import find_abs, find_abs_directory\n'), ((4382, 4412), 'pygame.Rect', 'pygame.Rect', (['(0)', '(0)', '*self._size'], {}), '(0, 0, *self._size)\n', (4393, 4412), False, 'import pygame\n'), ((4500, 4538), 'pygame_gui.core.ObjectID', 'pygame_gui.core.ObjectID', (['"""background"""'], {}), "('background')\n", (4524, 4538), False, 'import pygame_gui\n'), ((4668, 4693), 'pygame.Rect', 'pygame.Rect', (['(0)', '(0)', '(-1)', '(-1)'], {}), '(0, 0, -1, -1)\n', (4679, 4693), False, 'import pygame\n'), ((4782, 4815), 'pygame_gui.core.ObjectID', 'pygame_gui.core.ObjectID', (['"""title"""'], {}), "('title')\n", (4806, 4815), False, 'import pygame_gui\n'), ((5420, 5437), 'ev3sim.search_locations.asset_locations', 'asset_locations', ([], {}), '()\n', (5435, 5437), False, 'from ev3sim.search_locations import asset_locations, batch_locations, bot_locations, code_locations, config_locations, preset_locations\n'), ((5500, 5517), 'ev3sim.search_locations.asset_locations', 'asset_locations', ([], {}), '()\n', (5515, 5517), False, 'from ev3sim.search_locations import asset_locations, batch_locations, bot_locations, code_locations, config_locations, preset_locations\n'), ((5732, 5788), 'pygame_gui.core.ObjectID', 
'pygame_gui.core.ObjectID', (['"""soccer_button"""', '"""menu_button"""'], {}), "('soccer_button', 'menu_button')\n", (5756, 5788), False, 'import pygame_gui\n'), ((6136, 6192), 'pygame.Rect', 'pygame.Rect', (['*soccer_settings_button_pos', '*settings_size'], {}), '(*soccer_settings_button_pos, *settings_size)\n', (6147, 6192), False, 'import pygame\n'), ((6263, 6326), 'pygame_gui.core.ObjectID', 'pygame_gui.core.ObjectID', (['"""soccer-settings"""', '"""settings_buttons"""'], {}), "('soccer-settings', 'settings_buttons')\n", (6287, 6326), False, 'import pygame_gui\n'), ((6682, 6719), 'pygame.image.load', 'pygame.image.load', (['settings_icon_path'], {}), '(settings_icon_path)\n', (6699, 6719), False, 'import pygame\n'), ((6769, 6817), 'pygame_gui.core.ObjectID', 'pygame_gui.core.ObjectID', (['"""soccer-settings-icon"""'], {}), "('soccer-settings-icon')\n", (6793, 6817), False, 'import pygame_gui\n'), ((7192, 7238), 'pygame.Rect', 'pygame.Rect', (['*soccer_bot_button_pos', '*bot_size'], {}), '(*soccer_bot_button_pos, *bot_size)\n', (7203, 7238), False, 'import pygame\n'), ((7309, 7367), 'pygame_gui.core.ObjectID', 'pygame_gui.core.ObjectID', (['"""soccer-bot"""', '"""settings_buttons"""'], {}), "('soccer-bot', 'settings_buttons')\n", (7333, 7367), False, 'import pygame_gui\n'), ((7659, 7691), 'pygame.image.load', 'pygame.image.load', (['bot_icon_path'], {}), '(bot_icon_path)\n', (7676, 7691), False, 'import pygame\n'), ((7741, 7784), 'pygame_gui.core.ObjectID', 'pygame_gui.core.ObjectID', (['"""soccer-bot-icon"""'], {}), "('soccer-bot-icon')\n", (7765, 7784), False, 'import pygame_gui\n'), ((8115, 8171), 'pygame_gui.core.ObjectID', 'pygame_gui.core.ObjectID', (['"""rescue_button"""', '"""menu_button"""'], {}), "('rescue_button', 'menu_button')\n", (8139, 8171), False, 'import pygame_gui\n'), ((8519, 8575), 'pygame.Rect', 'pygame.Rect', (['*rescue_settings_button_pos', '*settings_size'], {}), '(*rescue_settings_button_pos, *settings_size)\n', (8530, 8575), False, 
'import pygame\n'), ((8646, 8709), 'pygame_gui.core.ObjectID', 'pygame_gui.core.ObjectID', (['"""rescue-settings"""', '"""settings_buttons"""'], {}), "('rescue-settings', 'settings_buttons')\n", (8670, 8709), False, 'import pygame_gui\n'), ((9065, 9102), 'pygame.image.load', 'pygame.image.load', (['settings_icon_path'], {}), '(settings_icon_path)\n', (9082, 9102), False, 'import pygame\n'), ((9152, 9200), 'pygame_gui.core.ObjectID', 'pygame_gui.core.ObjectID', (['"""rescue-settings-icon"""'], {}), "('rescue-settings-icon')\n", (9176, 9200), False, 'import pygame_gui\n'), ((9575, 9621), 'pygame.Rect', 'pygame.Rect', (['*rescue_bot_button_pos', '*bot_size'], {}), '(*rescue_bot_button_pos, *bot_size)\n', (9586, 9621), False, 'import pygame\n'), ((9692, 9750), 'pygame_gui.core.ObjectID', 'pygame_gui.core.ObjectID', (['"""rescue-bot"""', '"""settings_buttons"""'], {}), "('rescue-bot', 'settings_buttons')\n", (9716, 9750), False, 'import pygame_gui\n'), ((10042, 10074), 'pygame.image.load', 'pygame.image.load', (['bot_icon_path'], {}), '(bot_icon_path)\n', (10059, 10074), False, 'import pygame\n'), ((10124, 10167), 'pygame_gui.core.ObjectID', 'pygame_gui.core.ObjectID', (['"""rescue-bot-icon"""'], {}), "('rescue-bot-icon')\n", (10148, 10167), False, 'import pygame_gui\n'), ((10975, 11029), 'pygame_gui.core.ObjectID', 'pygame_gui.core.ObjectID', (['"""bots_button"""', '"""menu_button"""'], {}), "('bots_button', 'menu_button')\n", (10999, 11029), False, 'import pygame_gui\n'), ((11117, 11189), 'ev3sim.visual.manager.ScreenObjectManager.instance.pushScreen', 'ScreenObjectManager.instance.pushScreen', (['ScreenObjectManager.SCREEN_BOTS'], {}), '(ScreenObjectManager.SCREEN_BOTS)\n', (11156, 11189), False, 'from ev3sim.visual.manager import ScreenObjectManager\n'), ((11492, 11555), 'pygame_gui.core.ObjectID', 'pygame_gui.core.ObjectID', (['"""main_settings_button"""', '"""menu_button"""'], {}), "('main_settings_button', 'menu_button')\n", (11516, 11555), False, 'import 
pygame_gui\n'), ((12343, 12360), 'ev3sim.search_locations.asset_locations', 'asset_locations', ([], {}), '()\n', (12358, 12360), False, 'from ev3sim.search_locations import asset_locations, batch_locations, bot_locations, code_locations, config_locations, preset_locations\n'), ((12468, 12485), 'ev3sim.search_locations.asset_locations', 'asset_locations', ([], {}), '()\n', (12483, 12485), False, 'from ev3sim.search_locations import asset_locations, batch_locations, bot_locations, code_locations, config_locations, preset_locations\n'), ((13794, 13812), 'ev3sim.search_locations.preset_locations', 'preset_locations', ([], {}), '()\n', (13810, 13812), False, 'from ev3sim.search_locations import asset_locations, batch_locations, bot_locations, code_locations, config_locations, preset_locations\n'), ((13972, 13989), 'ev3sim.search_locations.batch_locations', 'batch_locations', ([], {}), '()\n', (13987, 13989), False, 'from ev3sim.search_locations import asset_locations, batch_locations, bot_locations, code_locations, config_locations, preset_locations\n'), ((14446, 14464), 'ev3sim.search_locations.preset_locations', 'preset_locations', ([], {}), '()\n', (14462, 14464), False, 'from ev3sim.search_locations import asset_locations, batch_locations, bot_locations, code_locations, config_locations, preset_locations\n'), ((14624, 14641), 'ev3sim.search_locations.batch_locations', 'batch_locations', ([], {}), '()\n', (14639, 14641), False, 'from ev3sim.search_locations import asset_locations, batch_locations, bot_locations, code_locations, config_locations, preset_locations\n'), ((1530, 1547), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (1544, 1547), False, 'import yaml\n'), ((1571, 1609), 'ev3sim.validation.bot_files.BotValidator.validate_json', 'BotValidator.validate_json', (['bot_config'], {}), '(bot_config)\n', (1597, 1609), False, 'from ev3sim.validation.bot_files import BotValidator\n'), ((2227, 2551), 
'ev3sim.visual.manager.ScreenObjectManager.instance.forceCloseError', 'ScreenObjectManager.instance.forceCloseError', (['f"""Your bot {sim_config[\'bots\'][index]} does not contain the file {fname}. You may have renamed or deleted it by accident. In order to use this bot, you need to add this file back. Click "Add {fname}" to create this file, or do it manually."""', "(f'Add {fname}', action)"], {}), '(\n f\'Your bot {sim_config[\\\'bots\\\'][index]} does not contain the file {fname}. You may have renamed or deleted it by accident. In order to use this bot, you need to add this file back. Click "Add {fname}" to create this file, or do it manually.\'\n , (f\'Add {fname}\', action))\n', (2271, 2551), False, 'from ev3sim.visual.manager import ScreenObjectManager\n'), ((2877, 2898), 'yaml.dump', 'yaml.dump', (['sim_config'], {}), '(sim_config)\n', (2886, 2898), False, 'import yaml\n'), ((10547, 10603), 'pygame_gui.core.ObjectID', 'pygame_gui.core.ObjectID', (['"""custom_button"""', '"""menu_button"""'], {}), "('custom_button', 'menu_button')\n", (10571, 10603), False, 'import pygame_gui\n'), ((11753, 11771), 'ev3sim.search_locations.config_locations', 'config_locations', ([], {}), '()\n', (11769, 11771), False, 'from ev3sim.search_locations import asset_locations, batch_locations, bot_locations, code_locations, config_locations, preset_locations\n'), ((1453, 1468), 'ev3sim.search_locations.bot_locations', 'bot_locations', ([], {}), '()\n', (1466, 1468), False, 'from ev3sim.search_locations import asset_locations, batch_locations, bot_locations, code_locations, config_locations, preset_locations\n'), ((1961, 1976), 'ev3sim.search_locations.bot_locations', 'bot_locations', ([], {}), '()\n', (1974, 1976), False, 'from ev3sim.search_locations import asset_locations, batch_locations, bot_locations, code_locations, config_locations, preset_locations\n'), ((2106, 2121), 'ev3sim.search_locations.bot_locations', 'bot_locations', ([], {}), '()\n', (2119, 2121), False, 'from 
ev3sim.search_locations import asset_locations, batch_locations, bot_locations, code_locations, config_locations, preset_locations\n')]
|
#!/usr/bin/env python
import pickle
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
fast_file_name = 'photobleaching_mixture00_grid.csv'
slow_file_name = 'photobleaching_mixture01_grid.csv'
data_fast = np.genfromtxt(fast_file_name, delimiter = ',', skip_header = True)
data_slow = np.genfromtxt(slow_file_name, delimiter = ',', skip_header = True)
# features are both the same in each case
features = data_fast[:, :-1]
fast_targets = data_fast[:, -1]
slow_targets = data_fast[:, -1]
data = {'features': features, 'fast_targets': fast_targets, 'slow_targets': slow_targets }
print(data)
print(data['features'].shape, data['fast_targets'].shape, data['slow_targets'].shape)
with open('dataset.pkl', 'wb') as content:
pickle.dump(data, content)
|
[
"pickle.dump",
"numpy.genfromtxt"
] |
[((231, 293), 'numpy.genfromtxt', 'np.genfromtxt', (['fast_file_name'], {'delimiter': '""","""', 'skip_header': '(True)'}), "(fast_file_name, delimiter=',', skip_header=True)\n", (244, 293), True, 'import numpy as np\n'), ((310, 372), 'numpy.genfromtxt', 'np.genfromtxt', (['slow_file_name'], {'delimiter': '""","""', 'skip_header': '(True)'}), "(slow_file_name, delimiter=',', skip_header=True)\n", (323, 372), True, 'import numpy as np\n'), ((752, 778), 'pickle.dump', 'pickle.dump', (['data', 'content'], {}), '(data, content)\n', (763, 778), False, 'import pickle\n')]
|
"""
do gradients flow into vqvae codebook?
"""
import torch
from torch import nn, optim, autograd
import numpy as np
import math, time
def run():
num_codes = 5
N = 7
K = 3
np.random.seed(123)
torch.manual_seed(123)
Z = torch.from_numpy(np.random.choice(num_codes, N, replace=True))
print('Z', Z)
codebook = nn.Parameter(torch.rand(num_codes, 3))
# inputs = nn.Parametertorch.rand
codebook_out = codebook[Z]
print('codebook_out.requires_grad', codebook_out.requires_grad)
target_out = torch.rand(N, K)
loss = (codebook_out - target_out).pow(2).mean()
loss.backward()
print('codebook_out.grad', codebook_out.grad)
print('codebook.grad', codebook.grad)
if __name__ == '__main__':
run()
|
[
"torch.manual_seed",
"numpy.random.choice",
"numpy.random.seed",
"torch.rand"
] |
[((189, 208), 'numpy.random.seed', 'np.random.seed', (['(123)'], {}), '(123)\n', (203, 208), True, 'import numpy as np\n'), ((213, 235), 'torch.manual_seed', 'torch.manual_seed', (['(123)'], {}), '(123)\n', (230, 235), False, 'import torch\n'), ((534, 550), 'torch.rand', 'torch.rand', (['N', 'K'], {}), '(N, K)\n', (544, 550), False, 'import torch\n'), ((261, 305), 'numpy.random.choice', 'np.random.choice', (['num_codes', 'N'], {'replace': '(True)'}), '(num_codes, N, replace=True)\n', (277, 305), True, 'import numpy as np\n'), ((353, 377), 'torch.rand', 'torch.rand', (['num_codes', '(3)'], {}), '(num_codes, 3)\n', (363, 377), False, 'import torch\n')]
|
import pytest
from auth_api.db import db
from auth_api.models import DbCompany
from auth_api.queries import CompanyQuery
from tests.auth_api.queries.query_base import (
COMPANY_LIST,
TestQueryBase,
)
class TestCompanyQueries(TestQueryBase):
"""Test user queries."""
@pytest.mark.parametrize('company', COMPANY_LIST)
def test__has_id__id_exists__return_correct_company(
self,
seeded_session: db.Session,
company: dict,
):
"""
If company with id exists return correct company.
:param seeded_session: Mocked database session
:param company: Current company inserted into the test
"""
# -- Act -------------------------------------------------------------
fetched_company: DbCompany = CompanyQuery(seeded_session) \
.has_id(company['id']) \
.one_or_none()
# -- Assert ----------------------------------------------------------
assert fetched_company is not None
assert fetched_company.id == company['id']
def test__has_id__id_does_not_exists__return_none(
self,
seeded_session: db.Session,
):
"""
If company with id does not exist return none.
:param seeded_session: Mocked database session
"""
# -- Act -------------------------------------------------------------
fetched_company: DbCompany = CompanyQuery(seeded_session) \
.has_id("THIS_ID_DOES_NOT_EXIST") \
.one_or_none()
# -- Assert ----------------------------------------------------------
assert fetched_company is None
@pytest.mark.parametrize('company', COMPANY_LIST)
def test__has_tin__tin_exists__return_correct_company(
self,
seeded_session: db.Session,
company: dict,
):
"""
If company with tin exists return correct company.
:param seeded_session: Mocked database session
:param company: Current company inserted into the test
"""
# -- Act -------------------------------------------------------------
fetched_company: DbCompany = CompanyQuery(seeded_session) \
.has_tin(company['tin']) \
.one_or_none()
# -- Assert ----------------------------------------------------------
assert fetched_company is not None
assert fetched_company.tin == company['tin']
assert fetched_company.id == company['id']
def test__has_tin__tin_not_exists__return_none(
self,
seeded_session: db.Session,
):
"""
If company with tin that does not exists return none.
:param seeded_session: Mocked database session
"""
# -- Act -------------------------------------------------------------
fetched_company: DbCompany = CompanyQuery(seeded_session) \
.has_tin("THIS_TIN_DOES_NOT_EXISTS") \
.one_or_none()
# -- Assert ----------------------------------------------------------
assert fetched_company is None
|
[
"pytest.mark.parametrize",
"auth_api.queries.CompanyQuery"
] |
[((287, 335), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""company"""', 'COMPANY_LIST'], {}), "('company', COMPANY_LIST)\n", (310, 335), False, 'import pytest\n'), ((1660, 1708), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""company"""', 'COMPANY_LIST'], {}), "('company', COMPANY_LIST)\n", (1683, 1708), False, 'import pytest\n'), ((792, 820), 'auth_api.queries.CompanyQuery', 'CompanyQuery', (['seeded_session'], {}), '(seeded_session)\n', (804, 820), False, 'from auth_api.queries import CompanyQuery\n'), ((1428, 1456), 'auth_api.queries.CompanyQuery', 'CompanyQuery', (['seeded_session'], {}), '(seeded_session)\n', (1440, 1456), False, 'from auth_api.queries import CompanyQuery\n'), ((2168, 2196), 'auth_api.queries.CompanyQuery', 'CompanyQuery', (['seeded_session'], {}), '(seeded_session)\n', (2180, 2196), False, 'from auth_api.queries import CompanyQuery\n'), ((2863, 2891), 'auth_api.queries.CompanyQuery', 'CompanyQuery', (['seeded_session'], {}), '(seeded_session)\n', (2875, 2891), False, 'from auth_api.queries import CompanyQuery\n')]
|
import codecs
import json
from sklearn.linear_model import LogisticRegression
def build_feature_RUSSE(row):
sentence1 = row["sentence1"].strip()
sentence2 = row["sentence2"].strip()
word = row["word"].strip()
label = row.get("label")
res = f"{sentence1} {sentence2} {word}"
return res, label
def build_features_RUSSE(path, vect):
    """Load a RUSSE JSON-lines file and vectorise its texts.

    :param path: path to a UTF-8 (optionally BOM-prefixed) file with one
        JSON object per line; blank lines are ignored
    :param vect: fitted vectoriser exposing ``transform(texts)``
    :returns: ``((features, labels), ids)`` where ``features`` is the
        transformed text matrix, ``labels`` the per-row labels (``None``
        for unlabelled rows) and ``ids`` the rows' original ``"idx"`` values
    """
    # The builtin open() handles encodings since Python 3; codecs.open()
    # is legacy. 'utf-8-sig' transparently strips a leading BOM if present.
    with open(path, encoding='utf-8-sig') as reader:
        rows = [json.loads(line) for line in reader.read().split("\n") if line]
    pairs = [build_feature_RUSSE(row) for row in rows]
    texts = [text for text, _ in pairs]
    labels = [label for _, label in pairs]
    ids = [row["idx"] for row in rows]
    return (vect.transform(texts), labels), ids
def fit_RUSSE(train, labels):
    """Fit a logistic-regression classifier on the given features/labels."""
    # LogisticRegression.fit returns the fitted estimator itself.
    return LogisticRegression().fit(train, labels)
def eval_RUSSE(train_path, val_path, test_path, vect):
    """Train on the RUSSE train split and score all three splits.

    Returns ``(clf, metrics)`` where ``metrics`` contains accuracies for
    the train/val/test splits (``None`` for test when it cannot be
    scored, e.g. no gold labels) plus per-example test predictions keyed
    by the rows' original ``idx``.
    """
    train_split, _ = build_features_RUSSE(train_path, vect)
    val_split, _ = build_features_RUSSE(val_path, vect)
    test_split, test_ids = build_features_RUSSE(test_path, vect)
    clf = fit_RUSSE(*train_split)
    # The test split is typically unlabelled, in which case scoring raises.
    try:
        test_score = clf.score(*test_split)
    except ValueError:
        test_score = None
    predictions = clf.predict(test_split[0])
    metrics = {
        "train": clf.score(*train_split),
        "val": clf.score(*val_split),
        "test": test_score,
        "test_pred": [
            {"idx": idx, "label": bool(pred)}
            for idx, pred in zip(test_ids, predictions)
        ],
    }
    return clf, metrics
|
[
"sklearn.linear_model.LogisticRegression",
"codecs.open"
] |
[((780, 800), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {}), '()\n', (798, 800), False, 'from sklearn.linear_model import LogisticRegression\n'), ((367, 406), 'codecs.open', 'codecs.open', (['path'], {'encoding': '"""utf-8-sig"""'}), "(path, encoding='utf-8-sig')\n", (378, 406), False, 'import codecs\n')]
|
import os
import sys
import re
import argparse
import neuralRisk
def init(argv=None):
    """Parse command-line arguments for the risk-prediction loader.

    :param argv: argument list to parse; defaults to ``sys.argv[1:]``
    :returns: the parsed :class:`argparse.Namespace`
    """
    # add_help=False because an explicit --help flag is registered below;
    # with argparse's default help action, add_argument('--help', '-h')
    # raises ArgumentError ("conflicting option strings"), so the original
    # code crashed before ever parsing anything.
    parser = argparse.ArgumentParser(description='Loader for Risk Prediction',
                                     add_help=False)
    parser.add_argument('commands', nargs='*')
    parser.add_argument('--help', '-h', action='store_true')
    parser.add_argument('--version', '-v', action='store_true')
    parser.add_argument('--trainingset', dest='load_training_set',
                        help='trainingset to use')
    # The original option string was '--testingset ' (trailing space),
    # which made the flag impossible to pass on the command line.
    parser.add_argument('--testingset', dest='load_testing_set')
    parser.add_argument('--playdemo', dest='load_demo_version')
    parser.add_argument('--predictionset', dest='load_prediction_set')
    args = parser.parse_args(argv)
    return args
|
[
"argparse.ArgumentParser"
] |
[((180, 245), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Loader for Risk Prediction"""'}), "(description='Loader for Risk Prediction')\n", (203, 245), False, 'import argparse\n')]
|
from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 463878308
"""
"""
random actions, total chaos
"""
board = gamma_new(6, 6, 6, 3)
assert board is not None
assert gamma_move(board, 1, 1, 2) == 1
assert gamma_move(board, 2, 3, 2) == 1
assert gamma_move(board, 3, 2, 5) == 1
assert gamma_move(board, 4, 0, 1) == 1
assert gamma_move(board, 5, 2, 5) == 0
assert gamma_move(board, 5, 2, 0) == 1
assert gamma_move(board, 6, 3, 2) == 0
assert gamma_move(board, 1, 5, 1) == 1
assert gamma_move(board, 1, 4, 5) == 1
assert gamma_golden_possible(board, 2) == 1
assert gamma_free_fields(board, 3) == 29
assert gamma_move(board, 4, 1, 4) == 1
assert gamma_free_fields(board, 4) == 28
assert gamma_move(board, 5, 4, 1) == 1
assert gamma_move(board, 5, 2, 0) == 0
assert gamma_move(board, 6, 5, 5) == 1
assert gamma_move(board, 1, 5, 1) == 0
assert gamma_move(board, 1, 1, 0) == 0
assert gamma_move(board, 2, 4, 4) == 1
assert gamma_move(board, 3, 1, 3) == 1
assert gamma_busy_fields(board, 3) == 2
assert gamma_move(board, 5, 3, 0) == 1
assert gamma_move(board, 5, 4, 2) == 1
assert gamma_busy_fields(board, 5) == 4
assert gamma_move(board, 6, 3, 3) == 1
assert gamma_move(board, 6, 2, 4) == 1
assert gamma_move(board, 1, 2, 5) == 0
assert gamma_move(board, 2, 1, 1) == 1
assert gamma_move(board, 2, 3, 4) == 1
assert gamma_move(board, 3, 5, 0) == 1
assert gamma_move(board, 4, 3, 0) == 0
assert gamma_move(board, 5, 4, 1) == 0
assert gamma_move(board, 5, 1, 3) == 0
assert gamma_free_fields(board, 5) == 17
assert gamma_move(board, 6, 3, 0) == 0
assert gamma_move(board, 1, 2, 0) == 0
assert gamma_move(board, 2, 0, 4) == 0
assert gamma_move(board, 2, 2, 5) == 0
board964821874 = gamma_board(board)
assert board964821874 is not None
assert board964821874 == ("..3.16\n"
".4622.\n"
".3.6..\n"
".1.25.\n"
"42..51\n"
"..55.3\n")
del board964821874
board964821874 = None
assert gamma_move(board, 3, 5, 1) == 0
assert gamma_move(board, 4, 4, 5) == 0
assert gamma_move(board, 5, 2, 0) == 0
assert gamma_move(board, 6, 3, 2) == 0
assert gamma_busy_fields(board, 6) == 3
assert gamma_move(board, 1, 3, 5) == 1
assert gamma_golden_move(board, 2, 5, 3) == 0
assert gamma_move(board, 3, 4, 3) == 0
assert gamma_move(board, 4, 3, 2) == 0
assert gamma_move(board, 4, 4, 2) == 0
assert gamma_move(board, 5, 2, 0) == 0
assert gamma_move(board, 5, 0, 0) == 1
assert gamma_move(board, 6, 1, 2) == 0
assert gamma_move(board, 1, 3, 0) == 0
assert gamma_move(board, 2, 3, 1) == 1
assert gamma_move(board, 2, 3, 3) == 0
assert gamma_move(board, 3, 5, 5) == 0
assert gamma_busy_fields(board, 3) == 3
assert gamma_move(board, 4, 0, 4) == 1
assert gamma_move(board, 4, 3, 4) == 0
assert gamma_move(board, 5, 2, 3) == 0
assert gamma_move(board, 6, 2, 2) == 0
assert gamma_move(board, 6, 2, 3) == 1
assert gamma_move(board, 1, 2, 5) == 0
assert gamma_move(board, 2, 5, 1) == 0
assert gamma_move(board, 2, 4, 0) == 0
board455698278 = gamma_board(board)
assert board455698278 is not None
assert board455698278 == ("..3116\n"
"44622.\n"
".366..\n"
".1.25.\n"
"42.251\n"
"5.55.3\n")
del board455698278
board455698278 = None
assert gamma_move(board, 3, 2, 0) == 0
assert gamma_move(board, 3, 0, 2) == 0
board995577683 = gamma_board(board)
assert board995577683 is not None
assert board995577683 == ("..3116\n"
"44622.\n"
".366..\n"
".1.25.\n"
"42.251\n"
"5.55.3\n")
del board995577683
board995577683 = None
assert gamma_move(board, 4, 2, 5) == 0
assert gamma_move(board, 4, 5, 3) == 1
assert gamma_golden_possible(board, 4) == 1
assert gamma_move(board, 5, 0, 1) == 0
assert gamma_move(board, 5, 1, 5) == 0
assert gamma_move(board, 6, 1, 4) == 0
assert gamma_move(board, 1, 2, 0) == 0
assert gamma_move(board, 1, 1, 3) == 0
assert gamma_move(board, 2, 3, 0) == 0
assert gamma_busy_fields(board, 2) == 5
assert gamma_move(board, 3, 2, 2) == 0
assert gamma_move(board, 3, 3, 5) == 0
assert gamma_move(board, 4, 2, 2) == 0
assert gamma_move(board, 5, 3, 4) == 0
assert gamma_free_fields(board, 5) == 5
board670804334 = gamma_board(board)
assert board670804334 is not None
assert board670804334 == ("..3116\n"
"44622.\n"
".366.4\n"
".1.25.\n"
"42.251\n"
"5.55.3\n")
del board670804334
board670804334 = None
assert gamma_move(board, 6, 2, 0) == 0
assert gamma_move(board, 1, 1, 2) == 0
assert gamma_move(board, 1, 3, 4) == 0
assert gamma_busy_fields(board, 1) == 4
assert gamma_move(board, 3, 2, 5) == 0
assert gamma_move(board, 3, 4, 4) == 0
assert gamma_move(board, 4, 1, 2) == 0
assert gamma_move(board, 4, 4, 3) == 1
board397825260 = gamma_board(board)
assert board397825260 is not None
assert board397825260 == ("..3116\n"
"44622.\n"
".36644\n"
".1.25.\n"
"42.251\n"
"5.55.3\n")
del board397825260
board397825260 = None
assert gamma_move(board, 5, 4, 5) == 0
assert gamma_golden_possible(board, 5) == 1
assert gamma_move(board, 6, 5, 0) == 0
assert gamma_move(board, 6, 3, 4) == 0
assert gamma_move(board, 1, 2, 0) == 0
assert gamma_free_fields(board, 1) == 3
assert gamma_move(board, 2, 2, 2) == 1
assert gamma_move(board, 2, 3, 4) == 0
assert gamma_free_fields(board, 2) == 3
assert gamma_move(board, 3, 1, 2) == 0
assert gamma_golden_move(board, 3, 5, 5) == 0
assert gamma_move(board, 4, 2, 5) == 0
assert gamma_move(board, 5, 3, 2) == 0
assert gamma_move(board, 5, 2, 1) == 1
assert gamma_free_fields(board, 5) == 3
assert gamma_move(board, 6, 5, 5) == 0
assert gamma_move(board, 6, 3, 4) == 0
assert gamma_move(board, 1, 0, 0) == 0
assert gamma_move(board, 1, 5, 0) == 0
assert gamma_move(board, 2, 0, 2) == 0
assert gamma_move(board, 2, 5, 0) == 0
assert gamma_busy_fields(board, 2) == 6
assert gamma_move(board, 3, 0, 1) == 0
assert gamma_move(board, 3, 1, 0) == 0
assert gamma_move(board, 4, 2, 5) == 0
assert gamma_move(board, 4, 4, 5) == 0
assert gamma_move(board, 5, 0, 5) == 0
board495675822 = gamma_board(board)
assert board495675822 is not None
assert board495675822 == ("..3116\n"
"44622.\n"
".36644\n"
".1225.\n"
"425251\n"
"5.55.3\n")
del board495675822
board495675822 = None
assert gamma_move(board, 6, 1, 0) == 1
assert gamma_move(board, 6, 4, 5) == 0
gamma_delete(board)
|
[
"part1.gamma_new",
"part1.gamma_busy_fields",
"part1.gamma_golden_move",
"part1.gamma_golden_possible",
"part1.gamma_move",
"part1.gamma_board",
"part1.gamma_free_fields",
"part1.gamma_delete"
] |
[((283, 304), 'part1.gamma_new', 'gamma_new', (['(6)', '(6)', '(6)', '(3)'], {}), '(6, 6, 6, 3)\n', (292, 304), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1884, 1902), 'part1.gamma_board', 'gamma_board', (['board'], {}), '(board)\n', (1895, 1902), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((3139, 3157), 'part1.gamma_board', 'gamma_board', (['board'], {}), '(board)\n', (3150, 3157), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((3425, 3443), 'part1.gamma_board', 'gamma_board', (['board'], {}), '(board)\n', (3436, 3443), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((4238, 4256), 'part1.gamma_board', 'gamma_board', (['board'], {}), '(board)\n', (4249, 4256), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((4765, 4783), 'part1.gamma_board', 'gamma_board', (['board'], {}), '(board)\n', (4776, 4783), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((6067, 6085), 'part1.gamma_board', 'gamma_board', (['board'], {}), '(board)\n', (6078, 6085), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((6336, 6355), 'part1.gamma_delete', 'gamma_delete', (['board'], {}), '(board)\n', (6348, 6355), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, 
gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((339, 365), 'part1.gamma_move', 'gamma_move', (['board', '(1)', '(1)', '(2)'], {}), '(board, 1, 1, 2)\n', (349, 365), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((379, 405), 'part1.gamma_move', 'gamma_move', (['board', '(2)', '(3)', '(2)'], {}), '(board, 2, 3, 2)\n', (389, 405), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((419, 445), 'part1.gamma_move', 'gamma_move', (['board', '(3)', '(2)', '(5)'], {}), '(board, 3, 2, 5)\n', (429, 445), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((459, 485), 'part1.gamma_move', 'gamma_move', (['board', '(4)', '(0)', '(1)'], {}), '(board, 4, 0, 1)\n', (469, 485), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((499, 525), 'part1.gamma_move', 'gamma_move', (['board', '(5)', '(2)', '(5)'], {}), '(board, 5, 2, 5)\n', (509, 525), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((539, 565), 'part1.gamma_move', 'gamma_move', (['board', '(5)', '(2)', '(0)'], {}), '(board, 5, 2, 0)\n', (549, 565), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((579, 605), 'part1.gamma_move', 'gamma_move', (['board', '(6)', '(3)', '(2)'], {}), '(board, 6, 3, 2)\n', (589, 605), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, 
gamma_move, gamma_new\n'), ((619, 645), 'part1.gamma_move', 'gamma_move', (['board', '(1)', '(5)', '(1)'], {}), '(board, 1, 5, 1)\n', (629, 645), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((659, 685), 'part1.gamma_move', 'gamma_move', (['board', '(1)', '(4)', '(5)'], {}), '(board, 1, 4, 5)\n', (669, 685), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((699, 730), 'part1.gamma_golden_possible', 'gamma_golden_possible', (['board', '(2)'], {}), '(board, 2)\n', (720, 730), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((744, 771), 'part1.gamma_free_fields', 'gamma_free_fields', (['board', '(3)'], {}), '(board, 3)\n', (761, 771), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((786, 812), 'part1.gamma_move', 'gamma_move', (['board', '(4)', '(1)', '(4)'], {}), '(board, 4, 1, 4)\n', (796, 812), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((826, 853), 'part1.gamma_free_fields', 'gamma_free_fields', (['board', '(4)'], {}), '(board, 4)\n', (843, 853), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((868, 894), 'part1.gamma_move', 'gamma_move', (['board', '(5)', '(4)', '(1)'], {}), '(board, 5, 4, 1)\n', (878, 894), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((908, 934), 
'part1.gamma_move', 'gamma_move', (['board', '(5)', '(2)', '(0)'], {}), '(board, 5, 2, 0)\n', (918, 934), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((948, 974), 'part1.gamma_move', 'gamma_move', (['board', '(6)', '(5)', '(5)'], {}), '(board, 6, 5, 5)\n', (958, 974), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((988, 1014), 'part1.gamma_move', 'gamma_move', (['board', '(1)', '(5)', '(1)'], {}), '(board, 1, 5, 1)\n', (998, 1014), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1028, 1054), 'part1.gamma_move', 'gamma_move', (['board', '(1)', '(1)', '(0)'], {}), '(board, 1, 1, 0)\n', (1038, 1054), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1068, 1094), 'part1.gamma_move', 'gamma_move', (['board', '(2)', '(4)', '(4)'], {}), '(board, 2, 4, 4)\n', (1078, 1094), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1108, 1134), 'part1.gamma_move', 'gamma_move', (['board', '(3)', '(1)', '(3)'], {}), '(board, 3, 1, 3)\n', (1118, 1134), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1148, 1175), 'part1.gamma_busy_fields', 'gamma_busy_fields', (['board', '(3)'], {}), '(board, 3)\n', (1165, 1175), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1189, 1215), 'part1.gamma_move', 
'gamma_move', (['board', '(5)', '(3)', '(0)'], {}), '(board, 5, 3, 0)\n', (1199, 1215), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1229, 1255), 'part1.gamma_move', 'gamma_move', (['board', '(5)', '(4)', '(2)'], {}), '(board, 5, 4, 2)\n', (1239, 1255), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1269, 1296), 'part1.gamma_busy_fields', 'gamma_busy_fields', (['board', '(5)'], {}), '(board, 5)\n', (1286, 1296), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1310, 1336), 'part1.gamma_move', 'gamma_move', (['board', '(6)', '(3)', '(3)'], {}), '(board, 6, 3, 3)\n', (1320, 1336), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1350, 1376), 'part1.gamma_move', 'gamma_move', (['board', '(6)', '(2)', '(4)'], {}), '(board, 6, 2, 4)\n', (1360, 1376), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1390, 1416), 'part1.gamma_move', 'gamma_move', (['board', '(1)', '(2)', '(5)'], {}), '(board, 1, 2, 5)\n', (1400, 1416), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1430, 1456), 'part1.gamma_move', 'gamma_move', (['board', '(2)', '(1)', '(1)'], {}), '(board, 2, 1, 1)\n', (1440, 1456), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1470, 1496), 'part1.gamma_move', 'gamma_move', (['board', 
'(2)', '(3)', '(4)'], {}), '(board, 2, 3, 4)\n', (1480, 1496), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1510, 1536), 'part1.gamma_move', 'gamma_move', (['board', '(3)', '(5)', '(0)'], {}), '(board, 3, 5, 0)\n', (1520, 1536), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1550, 1576), 'part1.gamma_move', 'gamma_move', (['board', '(4)', '(3)', '(0)'], {}), '(board, 4, 3, 0)\n', (1560, 1576), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1590, 1616), 'part1.gamma_move', 'gamma_move', (['board', '(5)', '(4)', '(1)'], {}), '(board, 5, 4, 1)\n', (1600, 1616), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1630, 1656), 'part1.gamma_move', 'gamma_move', (['board', '(5)', '(1)', '(3)'], {}), '(board, 5, 1, 3)\n', (1640, 1656), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1670, 1697), 'part1.gamma_free_fields', 'gamma_free_fields', (['board', '(5)'], {}), '(board, 5)\n', (1687, 1697), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1712, 1738), 'part1.gamma_move', 'gamma_move', (['board', '(6)', '(3)', '(0)'], {}), '(board, 6, 3, 0)\n', (1722, 1738), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1752, 1778), 'part1.gamma_move', 'gamma_move', (['board', '(1)', '(2)', '(0)'], 
{}), '(board, 1, 2, 0)\n', (1762, 1778), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1792, 1818), 'part1.gamma_move', 'gamma_move', (['board', '(2)', '(0)', '(4)'], {}), '(board, 2, 0, 4)\n', (1802, 1818), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((1832, 1858), 'part1.gamma_move', 'gamma_move', (['board', '(2)', '(2)', '(5)'], {}), '(board, 2, 2, 5)\n', (1842, 1858), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2078, 2104), 'part1.gamma_move', 'gamma_move', (['board', '(3)', '(5)', '(1)'], {}), '(board, 3, 5, 1)\n', (2088, 2104), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2118, 2144), 'part1.gamma_move', 'gamma_move', (['board', '(4)', '(4)', '(5)'], {}), '(board, 4, 4, 5)\n', (2128, 2144), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2158, 2184), 'part1.gamma_move', 'gamma_move', (['board', '(5)', '(2)', '(0)'], {}), '(board, 5, 2, 0)\n', (2168, 2184), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2198, 2224), 'part1.gamma_move', 'gamma_move', (['board', '(6)', '(3)', '(2)'], {}), '(board, 6, 3, 2)\n', (2208, 2224), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2238, 2265), 'part1.gamma_busy_fields', 'gamma_busy_fields', (['board', '(6)'], {}), '(board, 
6)\n', (2255, 2265), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2279, 2305), 'part1.gamma_move', 'gamma_move', (['board', '(1)', '(3)', '(5)'], {}), '(board, 1, 3, 5)\n', (2289, 2305), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2319, 2352), 'part1.gamma_golden_move', 'gamma_golden_move', (['board', '(2)', '(5)', '(3)'], {}), '(board, 2, 5, 3)\n', (2336, 2352), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2366, 2392), 'part1.gamma_move', 'gamma_move', (['board', '(3)', '(4)', '(3)'], {}), '(board, 3, 4, 3)\n', (2376, 2392), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2406, 2432), 'part1.gamma_move', 'gamma_move', (['board', '(4)', '(3)', '(2)'], {}), '(board, 4, 3, 2)\n', (2416, 2432), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2446, 2472), 'part1.gamma_move', 'gamma_move', (['board', '(4)', '(4)', '(2)'], {}), '(board, 4, 4, 2)\n', (2456, 2472), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2486, 2512), 'part1.gamma_move', 'gamma_move', (['board', '(5)', '(2)', '(0)'], {}), '(board, 5, 2, 0)\n', (2496, 2512), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2526, 2552), 'part1.gamma_move', 'gamma_move', (['board', '(5)', '(0)', '(0)'], {}), '(board, 5, 0, 
0)\n', (2536, 2552), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2566, 2592), 'part1.gamma_move', 'gamma_move', (['board', '(6)', '(1)', '(2)'], {}), '(board, 6, 1, 2)\n', (2576, 2592), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2606, 2632), 'part1.gamma_move', 'gamma_move', (['board', '(1)', '(3)', '(0)'], {}), '(board, 1, 3, 0)\n', (2616, 2632), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2646, 2672), 'part1.gamma_move', 'gamma_move', (['board', '(2)', '(3)', '(1)'], {}), '(board, 2, 3, 1)\n', (2656, 2672), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2686, 2712), 'part1.gamma_move', 'gamma_move', (['board', '(2)', '(3)', '(3)'], {}), '(board, 2, 3, 3)\n', (2696, 2712), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2726, 2752), 'part1.gamma_move', 'gamma_move', (['board', '(3)', '(5)', '(5)'], {}), '(board, 3, 5, 5)\n', (2736, 2752), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2766, 2793), 'part1.gamma_busy_fields', 'gamma_busy_fields', (['board', '(3)'], {}), '(board, 3)\n', (2783, 2793), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2807, 2833), 'part1.gamma_move', 'gamma_move', (['board', '(4)', '(0)', '(4)'], {}), '(board, 4, 0, 4)\n', (2817, 2833), 
False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2847, 2873), 'part1.gamma_move', 'gamma_move', (['board', '(4)', '(3)', '(4)'], {}), '(board, 4, 3, 4)\n', (2857, 2873), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2887, 2913), 'part1.gamma_move', 'gamma_move', (['board', '(5)', '(2)', '(3)'], {}), '(board, 5, 2, 3)\n', (2897, 2913), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2927, 2953), 'part1.gamma_move', 'gamma_move', (['board', '(6)', '(2)', '(2)'], {}), '(board, 6, 2, 2)\n', (2937, 2953), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((2967, 2993), 'part1.gamma_move', 'gamma_move', (['board', '(6)', '(2)', '(3)'], {}), '(board, 6, 2, 3)\n', (2977, 2993), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((3007, 3033), 'part1.gamma_move', 'gamma_move', (['board', '(1)', '(2)', '(5)'], {}), '(board, 1, 2, 5)\n', (3017, 3033), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((3047, 3073), 'part1.gamma_move', 'gamma_move', (['board', '(2)', '(5)', '(1)'], {}), '(board, 2, 5, 1)\n', (3057, 3073), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((3087, 3113), 'part1.gamma_move', 'gamma_move', (['board', '(2)', '(4)', '(0)'], {}), '(board, 2, 4, 0)\n', (3097, 3113), False, 'from part1 
import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((3333, 3359), 'part1.gamma_move', 'gamma_move', (['board', '(3)', '(2)', '(0)'], {}), '(board, 3, 2, 0)\n', (3343, 3359), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((3373, 3399), 'part1.gamma_move', 'gamma_move', (['board', '(3)', '(0)', '(2)'], {}), '(board, 3, 0, 2)\n', (3383, 3399), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((3619, 3645), 'part1.gamma_move', 'gamma_move', (['board', '(4)', '(2)', '(5)'], {}), '(board, 4, 2, 5)\n', (3629, 3645), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((3659, 3685), 'part1.gamma_move', 'gamma_move', (['board', '(4)', '(5)', '(3)'], {}), '(board, 4, 5, 3)\n', (3669, 3685), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((3699, 3730), 'part1.gamma_golden_possible', 'gamma_golden_possible', (['board', '(4)'], {}), '(board, 4)\n', (3720, 3730), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((3744, 3770), 'part1.gamma_move', 'gamma_move', (['board', '(5)', '(0)', '(1)'], {}), '(board, 5, 0, 1)\n', (3754, 3770), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((3784, 3810), 'part1.gamma_move', 'gamma_move', (['board', '(5)', '(1)', '(5)'], {}), '(board, 5, 1, 5)\n', (3794, 3810), False, 'from part1 import 
gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((3824, 3850), 'part1.gamma_move', 'gamma_move', (['board', '(6)', '(1)', '(4)'], {}), '(board, 6, 1, 4)\n', (3834, 3850), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((3864, 3890), 'part1.gamma_move', 'gamma_move', (['board', '(1)', '(2)', '(0)'], {}), '(board, 1, 2, 0)\n', (3874, 3890), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((3904, 3930), 'part1.gamma_move', 'gamma_move', (['board', '(1)', '(1)', '(3)'], {}), '(board, 1, 1, 3)\n', (3914, 3930), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((3944, 3970), 'part1.gamma_move', 'gamma_move', (['board', '(2)', '(3)', '(0)'], {}), '(board, 2, 3, 0)\n', (3954, 3970), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((3984, 4011), 'part1.gamma_busy_fields', 'gamma_busy_fields', (['board', '(2)'], {}), '(board, 2)\n', (4001, 4011), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((4025, 4051), 'part1.gamma_move', 'gamma_move', (['board', '(3)', '(2)', '(2)'], {}), '(board, 3, 2, 2)\n', (4035, 4051), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((4065, 4091), 'part1.gamma_move', 'gamma_move', (['board', '(3)', '(3)', '(5)'], {}), '(board, 3, 3, 5)\n', (4075, 4091), False, 'from part1 import gamma_board, 
gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((4105, 4131), 'part1.gamma_move', 'gamma_move', (['board', '(4)', '(2)', '(2)'], {}), '(board, 4, 2, 2)\n', (4115, 4131), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((4145, 4171), 'part1.gamma_move', 'gamma_move', (['board', '(5)', '(3)', '(4)'], {}), '(board, 5, 3, 4)\n', (4155, 4171), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((4185, 4212), 'part1.gamma_free_fields', 'gamma_free_fields', (['board', '(5)'], {}), '(board, 5)\n', (4202, 4212), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((4432, 4458), 'part1.gamma_move', 'gamma_move', (['board', '(6)', '(2)', '(0)'], {}), '(board, 6, 2, 0)\n', (4442, 4458), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((4472, 4498), 'part1.gamma_move', 'gamma_move', (['board', '(1)', '(1)', '(2)'], {}), '(board, 1, 1, 2)\n', (4482, 4498), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((4512, 4538), 'part1.gamma_move', 'gamma_move', (['board', '(1)', '(3)', '(4)'], {}), '(board, 1, 3, 4)\n', (4522, 4538), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((4552, 4579), 'part1.gamma_busy_fields', 'gamma_busy_fields', (['board', '(1)'], {}), '(board, 1)\n', (4569, 4579), False, 'from part1 import gamma_board, gamma_busy_fields, 
gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((4593, 4619), 'part1.gamma_move', 'gamma_move', (['board', '(3)', '(2)', '(5)'], {}), '(board, 3, 2, 5)\n', (4603, 4619), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((4633, 4659), 'part1.gamma_move', 'gamma_move', (['board', '(3)', '(4)', '(4)'], {}), '(board, 3, 4, 4)\n', (4643, 4659), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((4673, 4699), 'part1.gamma_move', 'gamma_move', (['board', '(4)', '(1)', '(2)'], {}), '(board, 4, 1, 2)\n', (4683, 4699), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((4713, 4739), 'part1.gamma_move', 'gamma_move', (['board', '(4)', '(4)', '(3)'], {}), '(board, 4, 4, 3)\n', (4723, 4739), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((4959, 4985), 'part1.gamma_move', 'gamma_move', (['board', '(5)', '(4)', '(5)'], {}), '(board, 5, 4, 5)\n', (4969, 4985), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((4999, 5030), 'part1.gamma_golden_possible', 'gamma_golden_possible', (['board', '(5)'], {}), '(board, 5)\n', (5020, 5030), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5044, 5070), 'part1.gamma_move', 'gamma_move', (['board', '(6)', '(5)', '(0)'], {}), '(board, 6, 5, 0)\n', (5054, 5070), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, 
gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5084, 5110), 'part1.gamma_move', 'gamma_move', (['board', '(6)', '(3)', '(4)'], {}), '(board, 6, 3, 4)\n', (5094, 5110), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5124, 5150), 'part1.gamma_move', 'gamma_move', (['board', '(1)', '(2)', '(0)'], {}), '(board, 1, 2, 0)\n', (5134, 5150), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5164, 5191), 'part1.gamma_free_fields', 'gamma_free_fields', (['board', '(1)'], {}), '(board, 1)\n', (5181, 5191), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5205, 5231), 'part1.gamma_move', 'gamma_move', (['board', '(2)', '(2)', '(2)'], {}), '(board, 2, 2, 2)\n', (5215, 5231), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5245, 5271), 'part1.gamma_move', 'gamma_move', (['board', '(2)', '(3)', '(4)'], {}), '(board, 2, 3, 4)\n', (5255, 5271), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5285, 5312), 'part1.gamma_free_fields', 'gamma_free_fields', (['board', '(2)'], {}), '(board, 2)\n', (5302, 5312), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5326, 5352), 'part1.gamma_move', 'gamma_move', (['board', '(3)', '(1)', '(2)'], {}), '(board, 3, 1, 2)\n', (5336, 5352), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, 
gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5366, 5399), 'part1.gamma_golden_move', 'gamma_golden_move', (['board', '(3)', '(5)', '(5)'], {}), '(board, 3, 5, 5)\n', (5383, 5399), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5413, 5439), 'part1.gamma_move', 'gamma_move', (['board', '(4)', '(2)', '(5)'], {}), '(board, 4, 2, 5)\n', (5423, 5439), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5453, 5479), 'part1.gamma_move', 'gamma_move', (['board', '(5)', '(3)', '(2)'], {}), '(board, 5, 3, 2)\n', (5463, 5479), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5493, 5519), 'part1.gamma_move', 'gamma_move', (['board', '(5)', '(2)', '(1)'], {}), '(board, 5, 2, 1)\n', (5503, 5519), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5533, 5560), 'part1.gamma_free_fields', 'gamma_free_fields', (['board', '(5)'], {}), '(board, 5)\n', (5550, 5560), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5574, 5600), 'part1.gamma_move', 'gamma_move', (['board', '(6)', '(5)', '(5)'], {}), '(board, 6, 5, 5)\n', (5584, 5600), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5614, 5640), 'part1.gamma_move', 'gamma_move', (['board', '(6)', '(3)', '(4)'], {}), '(board, 6, 3, 4)\n', (5624, 5640), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, 
gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5654, 5680), 'part1.gamma_move', 'gamma_move', (['board', '(1)', '(0)', '(0)'], {}), '(board, 1, 0, 0)\n', (5664, 5680), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5694, 5720), 'part1.gamma_move', 'gamma_move', (['board', '(1)', '(5)', '(0)'], {}), '(board, 1, 5, 0)\n', (5704, 5720), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5734, 5760), 'part1.gamma_move', 'gamma_move', (['board', '(2)', '(0)', '(2)'], {}), '(board, 2, 0, 2)\n', (5744, 5760), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5774, 5800), 'part1.gamma_move', 'gamma_move', (['board', '(2)', '(5)', '(0)'], {}), '(board, 2, 5, 0)\n', (5784, 5800), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5814, 5841), 'part1.gamma_busy_fields', 'gamma_busy_fields', (['board', '(2)'], {}), '(board, 2)\n', (5831, 5841), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5855, 5881), 'part1.gamma_move', 'gamma_move', (['board', '(3)', '(0)', '(1)'], {}), '(board, 3, 0, 1)\n', (5865, 5881), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5895, 5921), 'part1.gamma_move', 'gamma_move', (['board', '(3)', '(1)', '(0)'], {}), '(board, 3, 1, 0)\n', (5905, 5921), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, 
gamma_golden_possible, gamma_move, gamma_new\n'), ((5935, 5961), 'part1.gamma_move', 'gamma_move', (['board', '(4)', '(2)', '(5)'], {}), '(board, 4, 2, 5)\n', (5945, 5961), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((5975, 6001), 'part1.gamma_move', 'gamma_move', (['board', '(4)', '(4)', '(5)'], {}), '(board, 4, 4, 5)\n', (5985, 6001), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((6015, 6041), 'part1.gamma_move', 'gamma_move', (['board', '(5)', '(0)', '(5)'], {}), '(board, 5, 0, 5)\n', (6025, 6041), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((6261, 6287), 'part1.gamma_move', 'gamma_move', (['board', '(6)', '(1)', '(0)'], {}), '(board, 6, 1, 0)\n', (6271, 6287), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n'), ((6301, 6327), 'part1.gamma_move', 'gamma_move', (['board', '(6)', '(4)', '(5)'], {}), '(board, 6, 4, 5)\n', (6311, 6327), False, 'from part1 import gamma_board, gamma_busy_fields, gamma_delete, gamma_free_fields, gamma_golden_move, gamma_golden_possible, gamma_move, gamma_new\n')]
|
from fastapi import HTTPException, status, Query
from tortoise.exceptions import DoesNotExist
from app.data.models.book import BookDB
from app.data.repository.book import BookRepository
from app.routers.library.models import BookIn, BookOut, BookUpdate
class BookController:
# region Utility
@classmethod
async def _get_book_or_404(cls,
book_id: int
) -> BookDB:
try:
return await BookRepository.get_by_id_or_exc(book_id)
except DoesNotExist:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
detail="Book not found.")
# endregion Utility
# region CRUD
@classmethod
async def create(cls, book: BookIn) -> BookOut:
book_db = await BookRepository.create(**book.dict())
return BookOut.from_orm(book_db)
@classmethod
async def get_all(cls,
offset: int = 0,
limit: int = Query(default=100, lte=100)
) -> list[BookOut]:
books_db = await BookRepository.get_all(offset, limit)
return [BookOut.from_orm(book) for book in books_db]
@classmethod
async def get_all_ilike_by(cls,
*,
query: str = Query(default=""),
offset: int = 0,
limit: int = Query(default=100, lte=100)
) -> list[BookOut]:
books_db = await BookRepository.get_all_ilike_by(query, offset, limit)
return [BookOut.from_orm(book) for book in books_db]
@classmethod
async def get_by_id(cls,
book_id: int
) -> BookOut:
book_db = await cls._get_book_or_404(book_id)
return BookOut.from_orm(book_db)
@classmethod
async def update_book(cls,
book_id: int,
book_update: BookUpdate
) -> BookOut:
book_db = await BookController._get_book_or_404(book_id)
updated_book_db = await BookRepository.update_book(
book_db, book_update.dict(exclude_unset=True))
return BookOut.from_orm(updated_book_db)
@classmethod
async def delete_book(cls,
book_id: int
) -> None:
book_db = await BookController._get_book_or_404(book_id)
await BookRepository.delete_book(book_db)
# endregion CRUD
|
[
"app.routers.library.models.BookOut.from_orm",
"app.data.repository.book.BookRepository.get_by_id_or_exc",
"app.data.repository.book.BookRepository.get_all_ilike_by",
"app.data.repository.book.BookRepository.get_all",
"fastapi.HTTPException",
"fastapi.Query",
"app.data.repository.book.BookRepository.delete_book"
] |
[((868, 893), 'app.routers.library.models.BookOut.from_orm', 'BookOut.from_orm', (['book_db'], {}), '(book_db)\n', (884, 893), False, 'from app.routers.library.models import BookIn, BookOut, BookUpdate\n'), ((1013, 1040), 'fastapi.Query', 'Query', ([], {'default': '(100)', 'lte': '(100)'}), '(default=100, lte=100)\n', (1018, 1040), False, 'from fastapi import HTTPException, status, Query\n'), ((1339, 1356), 'fastapi.Query', 'Query', ([], {'default': '""""""'}), "(default='')\n", (1344, 1356), False, 'from fastapi import HTTPException, status, Query\n'), ((1450, 1477), 'fastapi.Query', 'Query', ([], {'default': '(100)', 'lte': '(100)'}), '(default=100, lte=100)\n', (1455, 1477), False, 'from fastapi import HTTPException, status, Query\n'), ((1860, 1885), 'app.routers.library.models.BookOut.from_orm', 'BookOut.from_orm', (['book_db'], {}), '(book_db)\n', (1876, 1885), False, 'from app.routers.library.models import BookIn, BookOut, BookUpdate\n'), ((2264, 2297), 'app.routers.library.models.BookOut.from_orm', 'BookOut.from_orm', (['updated_book_db'], {}), '(updated_book_db)\n', (2280, 2297), False, 'from app.routers.library.models import BookIn, BookOut, BookUpdate\n'), ((1108, 1145), 'app.data.repository.book.BookRepository.get_all', 'BookRepository.get_all', (['offset', 'limit'], {}), '(offset, limit)\n', (1130, 1145), False, 'from app.data.repository.book import BookRepository\n'), ((1162, 1184), 'app.routers.library.models.BookOut.from_orm', 'BookOut.from_orm', (['book'], {}), '(book)\n', (1178, 1184), False, 'from app.routers.library.models import BookIn, BookOut, BookUpdate\n'), ((1554, 1607), 'app.data.repository.book.BookRepository.get_all_ilike_by', 'BookRepository.get_all_ilike_by', (['query', 'offset', 'limit'], {}), '(query, offset, limit)\n', (1585, 1607), False, 'from app.data.repository.book import BookRepository\n'), ((1624, 1646), 'app.routers.library.models.BookOut.from_orm', 'BookOut.from_orm', (['book'], {}), '(book)\n', (1640, 1646), False, 'from 
app.routers.library.models import BookIn, BookOut, BookUpdate\n'), ((2502, 2537), 'app.data.repository.book.BookRepository.delete_book', 'BookRepository.delete_book', (['book_db'], {}), '(book_db)\n', (2528, 2537), False, 'from app.data.repository.book import BookRepository\n'), ((479, 519), 'app.data.repository.book.BookRepository.get_by_id_or_exc', 'BookRepository.get_by_id_or_exc', (['book_id'], {}), '(book_id)\n', (510, 519), False, 'from app.data.repository.book import BookRepository\n'), ((567, 645), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': 'status.HTTP_404_NOT_FOUND', 'detail': '"""Book not found."""'}), "(status_code=status.HTTP_404_NOT_FOUND, detail='Book not found.')\n", (580, 645), False, 'from fastapi import HTTPException, status, Query\n')]
|
from setuptools import setup, find_packages
with open("README.md", "r") as f:
LONG_DESCRIPTION = f.read()
setup(
name="bigcode-embeddings",
version="0.1.2",
description="Tool generate and visualize embeddings from bigcode",
long_description=LONG_DESCRIPTION,
author="<NAME>",
author_email="<EMAIL>",
url="https://github.com/tuvistavie/bigcode-tools/tree/master/bigcode-embeddings",
download_url="https://github.com/tuvistavie/bigcode-tools/archive/master.zip",
include_package_data=True,
zip_safe=True,
packages=find_packages(),
install_requires=["pandas", "scipy", "numpy", "scikit-learn", "matplotlib", "plotly"],
scripts=["bin/bigcode-embeddings"],
extras_require={},
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3"
],
)
|
[
"setuptools.find_packages"
] |
[((563, 578), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (576, 578), False, 'from setuptools import setup, find_packages\n')]
|
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings as django_settings
from silviacontrol.utils import debug_log
class Command(BaseCommand):
help = 'Registers functions for GPIO interrupts on pi'
def handle(self, *args, **options):
if django_settings.SIMULATE_MACHINE == False:
from silviacontrol.display_cp import SilviaDisplay
display = SilviaDisplay(0x3C)
display.showWelcome()
display.showTemperature(20, 100)
else:
debug_log("Nothing to display in simulation mode")
|
[
"silviacontrol.utils.debug_log",
"silviacontrol.display_cp.SilviaDisplay"
] |
[((429, 446), 'silviacontrol.display_cp.SilviaDisplay', 'SilviaDisplay', (['(60)'], {}), '(60)\n', (442, 446), False, 'from silviacontrol.display_cp import SilviaDisplay\n'), ((554, 604), 'silviacontrol.utils.debug_log', 'debug_log', (['"""Nothing to display in simulation mode"""'], {}), "('Nothing to display in simulation mode')\n", (563, 604), False, 'from silviacontrol.utils import debug_log\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 27 20:42:07 2018
@author: allen
"""
import datetime
import pandas as pd
from functools import lru_cache
def _to_hours_mins_secs(time_taken):
"""Convert seconds to hours, mins, and seconds."""
mins, secs = divmod(time_taken, 60)
hours, mins = divmod(mins, 60)
return hours, mins, secs
def convert_str_to_timestamp(str_dt):
return pd.Timestamp(str_dt)
def convert_timestamp_to_str(timestamp, format='%Y-%m-%d'):
return convert_timestamp_to_dt(timestamp).strftime(format)
def convert_timestamp_to_dt(timestamp):
return timestamp.to_pydatetime()
def convert_str_to_dt(str_dt, format_="%Y-%m-%d %H:%M:%S"):
"""convert str tpye to datetime"""
# "%Y-%m-%d %H:%M:%S.%f"
# "%m/%d/%Y %H:%M:%S.%f"
dt = datetime.datetime.strptime(str_dt, format_)
return dt
def convert_date_to_date_int(dt):
t = dt.year * 10000 + dt.month * 100 + dt.day
return t
def convert_date_to_int(dt):
t = dt.year * 10000 + dt.month * 100 + dt.day
t *= 1000000
return t
def convert_dt_to_int(dt):
t = convert_date_to_int(dt)
t += dt.hour * 10000 + dt.minute * 100 + dt.second
return t
def convert_int_to_date(dt_int):
dt_int = int(dt_int)
if dt_int > 100000000:
dt_int //= 1000000
return _convert_int_to_date(dt_int)
@lru_cache(None)
def _convert_int_to_date(dt_int):
year, r = divmod(dt_int, 10000)
month, day = divmod(r, 100)
return datetime.datetime(year, month, day)
@lru_cache(20480)
def convert_int_to_datetime(dt_int):
dt_int = int(dt_int)
year, r = divmod(dt_int, 10000000000)
month, r = divmod(r, 100000000)
day, r = divmod(r, 1000000)
hour, r = divmod(r, 10000)
minute, second = divmod(r, 100)
return datetime.datetime(year, month, day, hour, minute, second)
def convert_ms_int_to_datetime(ms_dt_int):
dt_int, ms_int = divmod(ms_dt_int, 1000)
dt = convert_int_to_datetime(dt_int).replace(microsecond=ms_int * 1000)
return dt
def convert_date_time_ms_int_to_datetime(date_int, time_int):
date_int, time_int = int(date_int), int(time_int)
dt = _convert_int_to_date(date_int)
hours, r = divmod(time_int, 10000000)
minutes, r = divmod(r, 100000)
seconds, millisecond = divmod(r, 1000)
return dt.replace(hour=hours, minute=minutes, second=seconds,
microsecond=millisecond * 1000)
|
[
"datetime.datetime.strptime",
"functools.lru_cache",
"pandas.Timestamp",
"datetime.datetime"
] |
[((1414, 1429), 'functools.lru_cache', 'lru_cache', (['None'], {}), '(None)\n', (1423, 1429), False, 'from functools import lru_cache\n'), ((1582, 1598), 'functools.lru_cache', 'lru_cache', (['(20480)'], {}), '(20480)\n', (1591, 1598), False, 'from functools import lru_cache\n'), ((470, 490), 'pandas.Timestamp', 'pd.Timestamp', (['str_dt'], {}), '(str_dt)\n', (482, 490), True, 'import pandas as pd\n'), ((861, 904), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['str_dt', 'format_'], {}), '(str_dt, format_)\n', (887, 904), False, 'import datetime\n'), ((1543, 1578), 'datetime.datetime', 'datetime.datetime', (['year', 'month', 'day'], {}), '(year, month, day)\n', (1560, 1578), False, 'import datetime\n'), ((1849, 1906), 'datetime.datetime', 'datetime.datetime', (['year', 'month', 'day', 'hour', 'minute', 'second'], {}), '(year, month, day, hour, minute, second)\n', (1866, 1906), False, 'import datetime\n')]
|
from abc import ABC, abstractmethod
from enum import Enum
from typing import Any, Dict, Generic, TypeVar
T = TypeVar('T')
class TelRemoteSpecProtocol(ABC):
"""
Base class defining interface between the diesel service and the auto-completion frontend.
"""
@abstractmethod
def to_remote_spec(self) -> Dict[str, Any]:
pass
class TelExpressionSpecGetter(Generic[T], ABC):
"""
Getter of any attribute from a TelFunction, from any of the arguments. Could be a phase, return type, invalid value, etc.
"""
@abstractmethod
def get(self, args, context) -> T:
"""
Get the result of the implemented operation
:param List[TelExpression] args: function arguments
:param TelRootContext context: Tel context
:return: the result of the operation
"""
pass
class ArgumentExtractorSpecKind(Enum):
SINGLE = "single"
SLICING = "slicing"
class TelArgumentExtractor(TelRemoteSpecProtocol):
"""
Extracts desired arguments from the function arguments list, depending on the implementation of the particular subclass details.
"""
@abstractmethod
def extract_arguments(self, args):
"""
Extract argument(s) from the function arguments list
:param List[TelExpression] args: function arguments
:return: List[TelExpression]: extracted function argument(s)
"""
pass
class TelSingleArgumentExtractor(TelArgumentExtractor):
def __init__(self, position: int):
self._position = position
def extract_arguments(self, args):
try:
return [args[self._position]]
except IndexError:
return []
def to_remote_spec(self) -> Dict[str, Any]:
return {"kind": ArgumentExtractorSpecKind.SINGLE, "position": self._position}
class TelSlicingArgumentExtractor(TelArgumentExtractor):
def __init__(self, start: int):
self._start = start
def extract_arguments(self, args):
try:
return args[self._start :]
except IndexError:
return []
def to_remote_spec(self) -> Dict[str, Any]:
return {"kind": ArgumentExtractorSpecKind.SLICING, "start": self._start}
EXTRACT_FIRST_ARGUMENT = TelSingleArgumentExtractor(0)
|
[
"typing.TypeVar"
] |
[((110, 122), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (117, 122), False, 'from typing import Any, Dict, Generic, TypeVar\n')]
|