code stringlengths 2k 1.04M | repo_path stringlengths 5 517 | parsed_code stringlengths 0 1.04M | quality_prob float64 0.02 0.95 | learning_prob float64 0.02 0.93 |
|---|---|---|---|---|
from typing import List
import sys
from ..day import Day
class Board:
    """A single bingo board for AoC 2021 day 4.

    Tracks which drawn numbers have been marked, reports whether a full
    row or column is marked, and computes the puzzle score of a winning
    board.
    """

    def __init__(self, data: List[List]) -> None:
        # Grid of numbers, row-major (AoC boards are 5x5 squares).
        self.__data = data
        # Numbers marked so far, in draw order; compute() needs the last one.
        self.__marked_numbers = []
        self.__won = False

    def mark(self, number):
        """Record a drawn number; ignored once the board has already won."""
        if self.__won:
            return
        self.__marked_numbers.append(number)

    def reset(self):
        """Clear all marks and the cached won state."""
        self.__marked_numbers = []
        self.__won = False

    def won(self):
        """Return True if any complete row or column is marked.

        The result is cached once the board wins, so later calls are O(1).
        """
        if self.__won:
            return True
        # Set membership is O(1); the original list scan made each cell
        # check O(len(marked)).
        marked = set(self.__marked_numbers)

        def any_line_complete(lines):
            return any(all(x in marked for x in line) for line in lines)

        # Check rows, then columns (zip(*rows) transposes the grid).
        if any_line_complete(self.__data) or any_line_complete(zip(*self.__data)):
            self.__won = True
            return True
        return False

    def compute(self):
        """Return the score: sum of unmarked numbers times the last draw.

        Raises IndexError if nothing has been marked yet.
        """
        marked = set(self.__marked_numbers)
        unmarked_sum = sum(
            x for line in self.__data for x in line if x not in marked
        )
        return self.__marked_numbers[-1] * unmarked_sum

    def __str__(self) -> str:
        """Render the grid; marked cells are shown as ' X '."""
        marked = set(self.__marked_numbers)
        out = ""
        for line in self.__data:
            for number in line:
                if number in marked:
                    out += " X "
                else:
                    out += f"{number: 3d} "
            out += "\n"
        return out
class Bingo:
    """Parses the AoC day 4 input and plays the bingo game."""

    def __init__(self, input_data: str) -> None:
        self.__input = input_data.splitlines()

    def pop_line(self):
        """Remove and return the next input line, or None when exhausted."""
        try:
            return self.__input.pop(0)
        except IndexError:
            return

    def parse(self):
        """Read the draw order and all 5-row boards from the input."""
        self.__random_numbers = [int(token) for token in self.pop_line().split(",")]
        self.__boards = []

        def to_numbers(raw):
            # Drop the empty strings produced by the aligned (double-space)
            # board formatting before converting.
            return [int(token) for token in filter(None, raw.split(" "))]

        while True:
            self.pop_line()  # skip the blank separator line
            rows = []
            for _ in range(5):
                raw = self.pop_line()
                if not raw:
                    return
                rows.append(to_numbers(raw))
            self.__boards.append(Board(rows))

    def part1(self):
        """Return the score of the first board to win."""
        for drawn in self.__random_numbers:
            for board in self.__boards:
                board.mark(drawn)
                if board.won():
                    return board.compute()

    def part2(self):
        """Return the score of the last board to win."""
        winners = []
        total = len(self.__boards)
        for drawn in self.__random_numbers:
            for board in self.__boards:
                board.mark(drawn)
                if board.won() and board not in winners:
                    winners.append(board)
                if len(winners) == total - 1:
                    # Exactly one board left: replay the full draw on it alone.
                    last = next(b for b in self.__boards if b not in winners)
                    last.reset()
                    for replay in self.__random_numbers:
                        last.mark(replay)
                        if last.won():
                            return last.compute()
class Day4(Day):
    """AoC 2021 day 4: Giant Squid (bingo)."""

    name = "Day 4"
    description = "Giant Squid"

    def __init__(self, test=False) -> None:
        self.getPaths(__file__)
        super().__init__(test)

    def _make_game(self):
        # Shared setup for both parts: build and parse the bingo game.
        game = Bingo(self.input_file_content)
        game.parse()
        return game

    def part1(self):
        return self._make_game().part1()

    def part2(self):
        return self._make_game().part2()
from typing import List
import sys
from ..day import Day
class Board:
def __init__(self, data: List[List]) -> None:
self.__data = data
self.__marked_numbers = []
self.__won = False
def mark(self, number):
if self.__won:
return
self.__marked_numbers.append(number)
def reset(self):
self.__marked_numbers = []
self.__won = False
def won(self):
if self.__won:
return True
# Check rows
def check(arr, marked_numbers):
for line in arr:
result = all(x in marked_numbers for x in line)
if result:
return True
return False
if check(self.__data, self.__marked_numbers):
self.__won = True
return True
# Check columns
size = len(self.__data[0])
columns = []
for i in range(size):
column = []
for j in range(size):
column.append(self.__data[j][i])
columns.append(column)
if check(columns, self.__marked_numbers):
self.__won = True
return True
return False
def compute(self):
s = 0
for line in self.__data:
for x in line:
if x not in self.__marked_numbers:
s += x
n = self.__marked_numbers[-1]
return n * s
def __str__(self) -> str:
out = ""
for line in self.__data:
for number in line:
if number in self.__marked_numbers:
out += " X "
else:
out += f"{number: 3d} "
out += "\n"
return out
class Bingo:
def __init__(self, input_data: str) -> None:
self.__input = input_data.splitlines()
def pop_line(self):
try:
return self.__input.pop(0)
except IndexError:
return
def parse(self):
self.__random_numbers = [int(x) for x in self.pop_line().split(",")]
self.__boards = []
while True:
lines = []
self.pop_line()
def parse_line(line):
return [int(x) for x in list(filter(None, line.split(" ")))]
for _ in range(5):
line = self.pop_line()
if not line:
return
nbs = parse_line(line)
lines.append([int(x) for x in nbs])
self.__boards.append(Board(lines))
def part1(self):
for n in self.__random_numbers:
for board in self.__boards:
board.mark(n)
if board.won():
solution = board.compute()
return solution
def part2(self):
winning = []
board_count = len(self.__boards)
for n in self.__random_numbers:
for board in self.__boards:
board.mark(n)
if board.won() and board not in winning:
winning.append(board)
if len(winning) == board_count - 1:
last_winning_board = list(
filter(lambda x: x not in winning, self.__boards)
)[0]
last_winning_board.reset()
for n in self.__random_numbers:
last_winning_board.mark(n)
if last_winning_board.won():
solution = last_winning_board.compute()
return solution
class Day4(Day):
name = "Day 4"
description = "Giant Squid"
def __init__(self, test=False) -> None:
self.getPaths(__file__)
super().__init__(test)
def part1(self):
b = Bingo(self.input_file_content)
b.parse()
return b.part1()
def part2(self):
b = Bingo(self.input_file_content)
b.parse()
return b.part2() | 0.52756 | 0.246244 |
from __future__ import print_function, division
import os
from ._node import DirNode, LinkedDir, CyclicLinkedDir
from ._path import RecursionPath, DirEntryReplacement
def assert_dir_entry_equal(de1, de2):
    """Assert that two DirEntry-like objects are equivalent.

    Compares the ``path``/``name`` attributes and the results of every
    DirEntry method, with each relevant ``follow_symlinks`` value.  Each
    method is invoked twice so that cached results are verified as well.
    """
    # TODO check has attributes
    assert de1.path == de2.path
    assert de1.name == de2.name
    calls = [
        ('is_dir', {'follow_symlinks': True}),
        ('is_dir', {'follow_symlinks': False}),
        ('is_file', {'follow_symlinks': True}),
        ('is_file', {'follow_symlinks': False}),
        ('is_symlink', {}),
        ('stat', {'follow_symlinks': True}),
        ('stat', {'follow_symlinks': False}),
        ('inode', {}),
    ]
    for method, kwargs in calls:
        # Second attempt must hit (and agree with) any caching layer.
        for attempt in (1, 2):
            res1 = getattr(de1, method)(**kwargs)
            res2 = getattr(de2, method)(**kwargs)
            if not res1 == res2:
                message = (
                    '\nde1.{method}(**{kwargs}) == {res1} != '
                    '\nde2.{method}(**{kwargs}) == {res2} '
                    '\n(attempt: {attempt})'
                    '\nde1: {de1}'
                    '\nde2: {de2}'
                ).format(
                    method=method,
                    kwargs=kwargs,
                    res1=res1,
                    res2=res2,
                    attempt=attempt,
                    de1=de1,
                    de2=de2,
                )
                raise AssertionError(message)
def assert_recursion_path_equal(p1, p2):
    """Assert that two RecursionPath-like objects are equivalent.

    Checks the path components (root, relative, real, absolute), then
    delegates the DirEntry interface comparison to
    ``assert_dir_entry_equal``.
    """
    for attr in ('root', 'relative', 'real', 'absolute'):
        assert getattr(p1, attr) == getattr(p2, attr)
    assert_dir_entry_equal(p1, p2)
def assert_dir_node_equal(dn1, dn2):
    """Assert that two DirNode trees are equivalent.

    Linked/cyclic-linked directories are compared by type (and target
    path for cyclic links); ordinary directories are compared
    recursively, file by file and subdirectory by subdirectory.
    """
    assert_recursion_path_equal(dn1.path, dn2.path)
    if isinstance(dn1, LinkedDir):
        assert isinstance(dn2, LinkedDir)
    elif isinstance(dn1, CyclicLinkedDir):
        assert isinstance(dn2, CyclicLinkedDir)
        assert_recursion_path_equal(dn1.target_path, dn2.target_path)
    else:
        # zip() alone silently ignores extra entries on either side, so a
        # node with additional files/directories would wrongly compare
        # equal; check the lengths explicitly first.
        assert len(dn1.files) == len(dn2.files)
        assert len(dn1.directories) == len(dn2.directories)
        for path1, path2 in zip(dn1.files, dn2.files):
            assert_recursion_path_equal(path1, path2)
        for sub_dn1, sub_dn2 in zip(dn1.directories, dn2.directories):
            assert_dir_node_equal(sub_dn1, sub_dn2)
def get_mock_recursion_path(relative, root=None, is_dir=False, is_symlink=False):
    """Build a RecursionPath around a stubbed DirEntry, for tests.

    The entry reports the requested ``is_dir``/``is_symlink`` flags;
    ``is_file`` is defined as the negation of ``is_dir``.  The real path
    is left as None.
    """
    entry = DirEntryReplacement(
        path=relative,
        name=os.path.basename(relative),
    )
    entry._is_dir = is_dir
    entry._is_file = not is_dir
    entry._is_symlink = is_symlink
    return RecursionPath(
        root=root,
        relative=relative,
        real=None,
        dir_entry=entry,
    )
import os
from ._node import DirNode, LinkedDir, CyclicLinkedDir
from ._path import RecursionPath, DirEntryReplacement
def assert_dir_entry_equal(de1, de2):
# TODO check has attributes
assert de1.path == de2.path
assert de1.name == de2.name
for method, kwargs in [
('is_dir', {'follow_symlinks': True}),
('is_dir', {'follow_symlinks': False}),
('is_file', {'follow_symlinks': True}),
('is_file', {'follow_symlinks': False}),
('is_symlink', {}),
('stat', {'follow_symlinks': True}),
('stat', {'follow_symlinks': False}),
('inode', {})
]:
for attempt in [1, 2]: # done two times to verify caching!
res1 = getattr(de1, method)(**kwargs)
res2 = getattr(de2, method)(**kwargs)
if not res1 == res2:
raise AssertionError(
'\nde1.{method}(**{kwargs}) == {res1} != '
'\nde2.{method}(**{kwargs}) == {res2} '
'\n(attempt: {attempt})'
'\nde1: {de1}'
'\nde2: {de2}'.format(
method=method,
kwargs=kwargs,
res1=res1,
res2=res2,
attempt=attempt,
de1=de1,
de2=de2
)
)
def assert_recursion_path_equal(p1, p2):
assert p1.root == p2.root
assert p1.relative == p2.relative
assert p1.real == p2.real
assert p1.absolute == p2.absolute
assert_dir_entry_equal(p1, p2)
def assert_dir_node_equal(dn1, dn2):
assert_recursion_path_equal(dn1.path, dn2.path)
if isinstance(dn1, LinkedDir):
assert isinstance(dn2, LinkedDir)
elif isinstance(dn1, CyclicLinkedDir):
assert isinstance(dn2, CyclicLinkedDir)
assert_recursion_path_equal(dn1.target_path, dn2.target_path)
else:
for path1, path2 in zip(dn1.files, dn2.files):
assert_recursion_path_equal(path1, path2)
for sub_dn1, sub_dn2 in zip(dn1.directories, dn2.directories):
assert_dir_node_equal(sub_dn1, sub_dn2)
def get_mock_recursion_path(relative, root=None, is_dir=False, is_symlink=False):
dir_entry = DirEntryReplacement(
path=relative,
name=os.path.basename(relative)
)
dir_entry._is_dir = is_dir
dir_entry._is_file = not is_dir
dir_entry._is_symlink = is_symlink
return RecursionPath(
root=root,
relative=relative,
real=None,
dir_entry=dir_entry
) | 0.2819 | 0.308464 |
r"""
=====================================================
Panel Connections (:mod:`compmech.panel.connections`)
=====================================================
.. currentmodule:: compmech.panel.connections
Connection between panel domains. Each panel domain has its own set of Bardell
approximation functions. Below it is shown the connections currently supported.
kCBFycte
---------
Connection of type::
_
|
|| --> Flange |
|| |-> Can be used to model a stiffener
====== --> Base |
_|
``ycte`` indicates the connection exists at a constant `y_1` for panel 1
(base) and `y_2` for panel 2 (flange).
kCSB
---------
Connection of type::
====== ==> base
------ --> skin
Takes into account the offset between the two mid-surfaces.
kCSSxcte
---------
Connection of type::
__________
| |
| | /^\ x2
| S2 | |
| | y2 |
| | <----
|________| (connection at x2=xcte2)
__________ (connection at x1=xcte1)
| |
| | /^\ x1
| S1 | |
| | y1 |
|________| <----
kCSSycte
---------
Connection of type::
/-> (connection at y1=ycte1)
/
/ /->(connection at y2=ycte2)
_________| |_________
| | | |
| | | |
| S1 | | S2 |
| | | |
|________| |________|
/^\ x1 /^\ x2
| |
y1 | y2 |
<---- <----
Calculating Penalty Constants
------------------------------
Function :func:`.calc_kt_kr` is based on Ref [castro2017AssemblyModels]_ and
uses a strain compatibility criterion to calculate penalty constants for
translation (``kt``) and rotation (``kr``). The aim is to have penalty constants
that are just high enough to produce the desired compatibility, but not too
high such that numerical stability issues start to appear.
.. autofunction:: compmech.panel.connections.calc_kt_kr
"""
from . kCBFycte import *
from . kCSB import *
from . kCSSxcte import *
from . kCSSycte import *
from . penalty_constants import calc_kt_kr | compmech/panel/connections/__init__.py | r"""
=====================================================
Panel Connections (:mod:`compmech.panel.connections`)
=====================================================
.. currentmodule:: compmech.panel.connections
Connection between panel domains. Each panel domain has its own set of Bardell
approximation functions. Below it is shown the connections currently supported.
kCBFycte
---------
Connection of type::
_
|
|| --> Flange |
|| |-> Can be used to model a stiffener
====== --> Base |
_|
``ycte`` indicates the connection exists at a constant `y_1` for panel 1
(base) and `y_2` for panel 2 (flange).
kCSB
---------
Connection of type::
====== ==> base
------ --> skin
Takes into account the offset between the two mid-surfaces.
kCSSxcte
---------
Connection of type::
__________
| |
| | /^\ x2
| S2 | |
| | y2 |
| | <----
|________| (connection at x2=xcte2)
__________ (connection at x1=xcte1)
| |
| | /^\ x1
| S1 | |
| | y1 |
|________| <----
kCSSycte
---------
Connection of type::
/-> (connection at y1=ycte1)
/
/ /->(connection at y2=ycte2)
_________| |_________
| | | |
| | | |
| S1 | | S2 |
| | | |
|________| |________|
/^\ x1 /^\ x2
| |
y1 | y2 |
<---- <----
Calculating Penalty Constants
------------------------------
Function :func:`.calc_kt_kr` is based on Ref [castro2017AssemblyModels]_ and
uses a strain compatibility criterion to calculate penalty constants for
translation (``kt``) and rotation (``kr``). The aim is to have penalty constants
that are just high enough to produce the desired compatibility, but not too
high such that numerical stability issues start to appear.
.. autofunction:: compmech.panel.connections.calc_kt_kr
"""
from . kCBFycte import *
from . kCSB import *
from . kCSSxcte import *
from . kCSSycte import *
from . penalty_constants import calc_kt_kr | 0.861115 | 0.55658 |
import matplotlib.pyplot as plt
from scipy.stats import norm
def group_res(data, group_cols, statistic):
    """Splits dataframe into dictionary based on grouping

    :param data: input data to be split
    :param group_cols: group columns for data
    :param statistic: statistic to be calculated
    :type data: pd.DataFrame
    :type group_cols: list
    :type statistic: function
    :return: results from the grouping and the grouped data
    :rtype: tuple
    """
    # Map each group key to the row labels belonging to that group.
    index_map = (data.reset_index()
                 .groupby(group_cols)["index"]
                 .apply(list).to_dict())
    grouped_data = {}
    for key, rows in index_map.items():
        subset = data.loc[rows]
        # Keep only the non-grouping columns.
        grouped_data[key] = subset[subset.columns.difference(group_cols)]
    current_res = statistic(*grouped_data.values())
    return current_res, grouped_data
def output_res(data, output_cols):
    """Splits dataframe into X and y inputs

    :param data: input data to be split
    :param output_cols: output columns for data, default is none
    :type data: pd.DataFrame
    :type output_cols: list
    :return: the dataframe split into X and y
    :rtype: tuple
    """
    # Features are every column not listed as an output.
    feature_cols = list(data.columns.difference(output_cols))
    X = data[feature_cols]
    # Single output column -> Series; several -> DataFrame.
    y = data[output_cols[0]] if len(output_cols) == 1 else data[output_cols]
    return X, y
def bca_endpoints(z_hat_nought, a_hat, percentile):
    """Calculate an endpoint for BCa

    :param z_hat_nought: bias correction
    :param a_hat: acceleration component
    :param percentile: percentile for the endpoint
    :type z_hat_nought: float
    :type a_hat: float
    :type percentile: float
    :return: the percentile value
    :rtype: float
    """
    # BCa adjustment: phi(z0 + (z0 + z_p) / (1 - a * (z0 + z_p))) * 100.
    shifted = z_hat_nought + norm.ppf(percentile)
    adjusted = z_hat_nought + shifted / (1 - a_hat * shifted)
    return 100 * norm.cdf(adjusted)
def plot_single(data, num_plots, bins, figsize, **kwargs):
    """Create set of plots

    :param data: values to plot
    :param num_plots: number of plots
    :param bins: number of bins for the histogram
    :type data: np.array
    :type num_plots: int
    :type bins: int
    """
    _, axes = plt.subplots(num_plots, figsize=figsize, sharey=True)
    # plt.subplots returns a bare Axes (not an array) when num_plots == 1,
    # which would make zip() below raise; normalize to a sequence.
    if num_plots == 1:
        axes = [axes]
    for ax, i in zip(axes, range(0, num_plots)):
        current_var = data[:, i]
        ax.hist(current_var, bins=bins, **kwargs)
from scipy.stats import norm
def group_res(data, group_cols, statistic):
"""Splits dataframe into dictionary based on grouping
:param data: input data to be split
:param group_cols: group columns for data
:param statistic: statistic to be calculated
:type data: pd.DataFrame
:type group_cols: list
:type statistic: function
:return: results from the grouping and the grouped data
:rtype: tuple
"""
indices = (data.reset_index()
.groupby(group_cols)["index"]
.apply(list).to_dict())
grouped_data = {}
for key, val in indices.items():
grouped_data[key] = (data
.loc[val][data.loc[val]
.columns.difference(group_cols)])
current_res = statistic(*list(grouped_data.values()))
return current_res, grouped_data
def output_res(data, output_cols):
"""Splits dataframe into X and y inputs
:param data: input data to be split
:param output_cols: output columns for data, default is none
:type data: pd.DataFrame
:type output_cols: list
:return: the dataframe split into X and y
:rtype: tuple
"""
X = data[list(data.columns.difference(output_cols))]
if len(output_cols) == 1:
y = data[output_cols[0]]
else:
y = data[output_cols]
return X, y
def bca_endpoints(z_hat_nought, a_hat, percentile):
"""Calculate an endpoint for BCa
:param z_hat_nought: bias correction
:param a_hat: acceleration component
:param percentile: percentile for the endpoint
:type z_hat_nought: float
:type a_hat: float
:type percentile: float
:return: the percentile value
:rtype: float
"""
num = z_hat_nought + norm.ppf(percentile)
den = 1 - a_hat * (z_hat_nought + norm.ppf(percentile))
a = 100 * norm.cdf(z_hat_nought + (num / den))
return a
def plot_single(data, num_plots, bins, figsize, **kwargs):
"""Create set of plots
:param data: values to plot
:param num_plots: number of plots
:param bins: number of bins for the histogram
:type data: np.array
:type num_plots: int
:type bins: int
"""
_, axes = plt.subplots(num_plots, figsize=figsize, sharey=True)
for ax, i in zip(axes, range(0, num_plots)):
current_var = data[:, i]
ax.hist(current_var, bins=bins, **kwargs) | 0.829388 | 0.831383 |
import argparse
from query_type import QueryType
import socket
import time
import ipaddress
from serializer import Serializer
from deserializer import Deserializer
class DNSClient:
    """Simple UDP DNS client: builds a query packet, sends it to a DNS
    server, and prints the decoded response sections."""

    def __init__(self, params):
        # `params` is an argparse-style namespace of CLI options.
        self.name = params.name            # domain name to resolve
        self.address = params.address      # DNS server address
        self.maxRetries = params.maxRetries
        self.timeout = params.timeout      # socket timeout (seconds)
        self.port = params.port
        # Query type defaults to A; the mx / ns flags override it.
        self.qtype = QueryType.A
        if(params.mx):
            self.qtype = QueryType.MX
        elif(params.ns):
            self.qtype = QueryType.NS

    def makeRequest(self):
        """Print the request summary and kick off the first attempt."""
        print(f"DnsClient sending request for {self.name}")
        print(f'Server: {self.address}')
        print(f'Request type: {str(self.qtype).split(".")[1]}')
        self.requestHelper(1)

    def requestHelper(self, retry):
        """Perform one send/receive attempt; recurses on socket timeout.

        :param retry: 1-based attempt number; gives up once it exceeds
            ``self.maxRetries``.
        """
        if retry > self.maxRetries:
            print(
                f'ERROR\tMaximum number of retries {self.maxRetries} exceeded')
            return
        try:
            # open socket
            dnsSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            dnsSocket.settimeout(self.timeout)
            dnsSocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            # NOTE(review): binds the local socket to the *server's* port
            # number — looks intentional for this assignment; confirm.
            dnsSocket.bind(('', self.port))
            # send & recv
            startTime = time.time_ns()
            dnsSocket.sendto(Serializer().build_packet(
                self.name, self.qtype), (self.address, self.port))
            recvBuff, recvAddress = dnsSocket.recvfrom(512)  # 512 bytes: max DNS-over-UDP payload
            endTime = time.time_ns()
            # close socket
            dnsSocket.close()
            # Integer-divide nanoseconds down to whole seconds.
            print(
                f'Response received after {(endTime - startTime)//1000000000} seconds ({retry -1} retries)')
            dns_response = Deserializer().build_response(recvBuff)
            # rcode 3 = name error (domain does not exist).
            if(dns_response['rcode'] == 3):
                print("NOT FOUND")
                return
            self.beautify_dns_response(dns_response)
        except socket.timeout as e:
            # Retry with the attempt counter incremented.
            print(f"ERROR\tSocket Timeout: {e}")
            print("Reattempting request...")
            self.requestHelper(retry+1)
        except socket.error as e:
            print(f'ERROR\tCould not create socket: {e}')
        except (socket.gaierror, socket.herror) as e:
            print(f"ERROR\tUnknown host: {e}")
        except Exception as e:
            # Last-resort catch so a malformed response does not crash the CLI.
            print(e)

    def beautify_dns_response(self, dns_response):
        """Pretty-print the answer, additional, and authoritative sections
        of a decoded response dict; prints NOT FOUND when all are empty."""
        ancount = dns_response['ancount']
        arcount = dns_response['arcount']
        nscount = dns_response['nscount']
        if(ancount + arcount + nscount <= 0):
            print("NOT FOUND")
            return
        if(ancount > 0):
            print(f"***Answer Section ({ancount} records)***")
            for item in dns_response['answers']:
                print(str(item))
            print()
        if(arcount > 0):
            print(f"***Additional Section ({arcount} records)***")
            for item in dns_response['additional']:
                print(str(item))
            print()
        if(nscount > 0):
            print(f"***Authoritative Section ({nscount} records)***")
            for item in dns_response['authoritative']:
                print(str(item))
import argparse
from query_type import QueryType
import socket
import time
import ipaddress
from serializer import Serializer
from deserializer import Deserializer
class DNSClient:
def __init__(self, params):
self.name = params.name
self.address = params.address
self.maxRetries = params.maxRetries
self.timeout = params.timeout
self.port = params.port
self.qtype = QueryType.A
if(params.mx):
self.qtype = QueryType.MX
elif(params.ns):
self.qtype = QueryType.NS
def makeRequest(self):
print(f"DnsClient sending request for {self.name}")
print(f'Server: {self.address}')
print(f'Request type: {str(self.qtype).split(".")[1]}')
self.requestHelper(1)
def requestHelper(self, retry):
if retry > self.maxRetries:
print(
f'ERROR\tMaximum number of retries {self.maxRetries} exceeded')
return
try:
# open socket
dnsSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
dnsSocket.settimeout(self.timeout)
dnsSocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
dnsSocket.bind(('', self.port))
# send & recv
startTime = time.time_ns()
dnsSocket.sendto(Serializer().build_packet(
self.name, self.qtype), (self.address, self.port))
recvBuff, recvAddress = dnsSocket.recvfrom(512)
endTime = time.time_ns()
# close socket
dnsSocket.close()
print(
f'Response received after {(endTime - startTime)//1000000000} seconds ({retry -1} retries)')
dns_response = Deserializer().build_response(recvBuff)
if(dns_response['rcode'] == 3):
print("NOT FOUND")
return
self.beautify_dns_response(dns_response)
except socket.timeout as e:
print(f"ERROR\tSocket Timeout: {e}")
print("Reattempting request...")
self.requestHelper(retry+1)
except socket.error as e:
print(f'ERROR\tCould not create socket: {e}')
except (socket.gaierror, socket.herror) as e:
print(f"ERROR\tUnknown host: {e}")
except Exception as e:
print(e)
def beautify_dns_response(self, dns_response):
ancount = dns_response['ancount']
arcount = dns_response['arcount']
nscount = dns_response['nscount']
if(ancount + arcount + nscount <= 0):
print("NOT FOUND")
return
if(ancount > 0):
print(f"***Answer Section ({ancount} records)***")
for item in dns_response['answers']:
print(str(item))
print()
if(arcount > 0):
print(f"***Additional Section ({arcount} records)***")
for item in dns_response['additional']:
print(str(item))
print()
if(nscount > 0):
print(f"***Authoritative Section ({nscount} records)***")
for item in dns_response['authoritative']:
print(str(item)) | 0.327023 | 0.049912 |
from typing import Callable, Optional
import gurobipy
import lightgbm as lgb
import opti
import pandas as pd
from mbo.algorithm import Algorithm
from entmoot.optimizer import Optimizer
from entmoot.optimizer.gurobi_utils import get_core_gurobi_model
from entmoot.space.space import Categorical, Integer, Real, Space
class EntmootOpti(Algorithm):
    """
    This class serves as connector between the package mopti (https://github.com/basf/mopti) and entmoot.

    Mopti is a Python package for specifying problems in a number of closely related fields, including experimental
    design, multiobjective optimization, decision making and Bayesian optimization.
    EntmootOpti inherits from mbo.algorithm (https://github.com/basf/mbo) and migrates problems specified in mopti to
    entmoot.

    :param problem: opti.Problem
        contains all information about the mopti problem
    :param base_est_params: dict
        base estimator parameters which are handed over to entmoot's Optimizer object
    :param gurobi_env: Optional[Callable]
        calls a function that returns a Gurobi CloudEnv object, if None: use local license instead
    """

    def __init__(self, problem: opti.Problem, base_est_params: dict = None, gurobi_env: Optional[Callable] = None):
        self.problem: opti.Problem = problem
        # Avoid a mutable default argument: fall back to an empty dict.
        if base_est_params is None:
            self._base_est_params: dict = {}
        else:
            self._base_est_params: dict = base_est_params
        self.model: lgb.Booster = None
        self.num_obj = len(self.problem.outputs.names)
        # Gurobi environment handling in case you are using the Gurobi Cloud service
        self.gurobi_env = gurobi_env
        # Categorical feature bookkeeping; populated elsewhere if needed.
        self.cat_names: list[str] = None
        self.cat_idx: list[int] = None
        # The optimizer is created with n_initial_points=0, so it needs
        # existing observations to fit its surrogate model.
        if self.problem.data is None:
            raise ValueError("No initial data points provided.")
        dimensions: list = self._build_dimensions_list()
        self.space = Space(dimensions)
        self.entmoot_optimizer: Optimizer = Optimizer(
            dimensions=dimensions,
            base_estimator="ENTING",
            n_initial_points=0,
            num_obj=self.num_obj,
            random_state=73,  # fixed seed for reproducible proposals
            base_estimator_kwargs=self._base_est_params,
        )
        # Fit the surrogate on the problem's initial data right away.
        self._fit_model()

    def _build_dimensions_list(self) -> list:
        """
        Builds a list with information (variable bounds and variable type) about input variables (decision variables)
        from mopti. This is then later used by the Optimizer object.
        """
        dimensions = []
        for parameter in self.problem.inputs:
            if isinstance(parameter, opti.Continuous):
                dimensions.append(Real(*parameter.bounds, name=parameter.name))
            elif isinstance(parameter, opti.Categorical):
                dimensions.append(Categorical(parameter.domain, name=parameter.name))
            elif isinstance(parameter, opti.Discrete):
                # skopt only supports integer variables [1, 2, 3, 4], not discrete ones [1, 2, 4]
                # We handle this by rounding the proposals
                dimensions.append(Integer(*parameter.bounds, name=parameter.name))
        return dimensions

    def _fit_model(self) -> None:
        """Fit a probabilistic model to the available data."""
        X = self.problem.data[self.problem.inputs.names]
        # Single objective gets a 1-D target; multi-objective a 2-D one.
        if self.num_obj == 1:
            y = self.problem.data[self.problem.outputs.names[0]]
        else:
            y = self.problem.data[self.problem.outputs.names]
        self.entmoot_optimizer.tell(x=X.to_numpy().tolist(), y=y.to_numpy().tolist(), fit=True)

    def predict(self, X: pd.DataFrame) -> pd.DataFrame:
        """
        Yields prediction y from surrogate model(s) for provided X.
        """
        return self.entmoot_optimizer.predict_with_est(X.to_numpy().tolist())

    def predict_pareto_front(
        self, sampling_strategy="random", num_samples=10, num_levels=10, add_model_core=None
    ) -> pd.DataFrame:
        """Predict Pareto-optimal points from the surrogate and return them
        as one DataFrame with input columns followed by output columns."""
        pf_res = self.entmoot_optimizer.predict_pareto(
            sampling_strategy=sampling_strategy,
            num_samples=num_samples,
            num_levels=num_levels,
            add_model_core=add_model_core
        )
        # Each result is an (inputs, outputs) pair; flatten into one row.
        pf_list = [list(x)+y for x, y in pf_res]
        pf_df = pd.DataFrame(pf_list, columns=self.problem.inputs.names + self.problem.outputs.names)
        return pf_df

    def propose(self, n_proposals: int = 1) -> pd.DataFrame:
        """
        Suggests next proposal by optimizing the acquisition function.

        Translates the opti problem's constraints into Gurobi constraints
        on a core model, then asks the entmoot optimizer for proposals.
        Raises ValueError for unsupported constraint types.
        """
        gurobi_model = get_core_gurobi_model(self.space)
        # Migrate constraints from opti to gurobi
        if self.problem.constraints:
            for c in self.problem.constraints:
                if isinstance(c, opti.constraint.LinearInequality):
                    # sum_i a_i * x_i <= rhs over the named variables.
                    coef = {x: a for (x, a) in zip(c.names, c.lhs)}
                    gurobi_model.addConstr(
                        (
                            sum(
                                coef[v.varname] * v
                                for v in gurobi_model.getVars()
                                if v.varname in coef
                            )
                            <= c.rhs
                        ),
                        name="LinearInequalityOpti"
                    )
                elif isinstance(c, opti.constraint.LinearEquality):
                    # sum_i a_i * x_i == rhs over the named variables.
                    coef = {x: a for (x, a) in zip(c.names, c.lhs)}
                    gurobi_model.addConstr(
                        (
                            sum(
                                coef[v.varname] * v
                                for v in gurobi_model.getVars()
                                if v.varname in coef
                            )
                            == c.rhs
                        ),
                        name="LinearEqualityOpti"
                    )
                elif isinstance(c, opti.constraint.NChooseK):
                    # Big-M implementation of n-choose-k constraint
                    # Binary indicator y[name] == 1 iff variable is "active";
                    # each x is forced inside [y*lb, y*ub], so at most
                    # max_active variables can be nonzero.
                    y = gurobi_model.addVars(c.names, vtype=gurobipy.GRB.BINARY)
                    gurobi_model.addConstrs(
                        (
                            y[v.varname] * v.lb <= v
                            for v in gurobi_model.getVars()
                            if v.varname in c.names
                        ),
                        name="n-choose-k-constraint LB",
                    )
                    gurobi_model.addConstrs(
                        (
                            y[v.varname] * v.ub >= v
                            for v in gurobi_model.getVars()
                            if v.varname in c.names
                        ),
                        name="n-choose-k-constraint UB",
                    )
                    gurobi_model.addConstr(
                        y.sum() == c.max_active, name="max active components"
                    )
                else:
                    raise ValueError(f"Constraint of type {type(c)} not supported.")
        X_res = self.entmoot_optimizer.ask(n_points=n_proposals)
        return pd.DataFrame(X_res, columns=self.problem.inputs.names)
import gurobipy
import lightgbm as lgb
import opti
import pandas as pd
from mbo.algorithm import Algorithm
from entmoot.optimizer import Optimizer
from entmoot.optimizer.gurobi_utils import get_core_gurobi_model
from entmoot.space.space import Categorical, Integer, Real, Space
class EntmootOpti(Algorithm):
""""
This class serves as connector between the package mopti (https://github.com/basf/mopti) and entmoot.
Mopti is a Python package for specifying problems in a number of closely related fields, including experimental
design, multiobjective optimization, decision making and Bayesian optimization.
EntmootOpti inherits from mbo.algorithm (https://github.com/basf/mbo) and migrates problems specified in mopti to
entmoot.
:param problem: opti.Problem
contains all information about the mopti problem
: param base_est_params: dict
base estimator parameters which are handed over to entmoot's Optimizer object
: param gurobi_env: Optional[Callable]
calls a function that returns a Gurobi CloudEnv object, if None: use local license instead
"""
def __init__(self, problem: opti.Problem, base_est_params: dict = None, gurobi_env: Optional[Callable] = None):
self.problem: opti.Problem = problem
if base_est_params is None:
self._base_est_params: dict = {}
else:
self._base_est_params: dict = base_est_params
self.model: lgb.Booster = None
self.num_obj = len(self.problem.outputs.names)
# Gurobi environment handling in case you are using the Gurobi Cloud service
self.gurobi_env = gurobi_env
self.cat_names: list[str] = None
self.cat_idx: list[int] = None
if self.problem.data is None:
raise ValueError("No initial data points provided.")
dimensions: list = self._build_dimensions_list()
self.space = Space(dimensions)
self.entmoot_optimizer: Optimizer = Optimizer(
dimensions=dimensions,
base_estimator="ENTING",
n_initial_points=0,
num_obj=self.num_obj,
random_state=73,
base_estimator_kwargs=self._base_est_params,
)
self._fit_model()
def _build_dimensions_list(self) -> list:
"""
Builds a list with information (variable bounds and variable type) about input variables (decision variables)
from mopti. This is then later used by the Optimizer object.
"""
dimensions = []
for parameter in self.problem.inputs:
if isinstance(parameter, opti.Continuous):
dimensions.append(Real(*parameter.bounds, name=parameter.name))
elif isinstance(parameter, opti.Categorical):
dimensions.append(Categorical(parameter.domain, name=parameter.name))
elif isinstance(parameter, opti.Discrete):
# skopt only supports integer variables [1, 2, 3, 4], not discrete ones [1, 2, 4]
# We handle this by rounding the proposals
dimensions.append(Integer(*parameter.bounds, name=parameter.name))
return dimensions
def _fit_model(self) -> None:
"""Fit a probabilistic model to the available data."""
X = self.problem.data[self.problem.inputs.names]
if self.num_obj == 1:
y = self.problem.data[self.problem.outputs.names[0]]
else:
y = self.problem.data[self.problem.outputs.names]
self.entmoot_optimizer.tell(x=X.to_numpy().tolist(), y=y.to_numpy().tolist(), fit=True)
def predict(self, X: pd.DataFrame) -> pd.DataFrame:
"""
Yields prediction y from surrogate model(s) for provided X.
"""
return self.entmoot_optimizer.predict_with_est(X.to_numpy().tolist())
def predict_pareto_front(
self, sampling_strategy="random", num_samples=10, num_levels=10, add_model_core=None
) -> pd.DataFrame:
pf_res = self.entmoot_optimizer.predict_pareto(
sampling_strategy=sampling_strategy,
num_samples=num_samples,
num_levels=num_levels,
add_model_core=add_model_core
)
pf_list = [list(x)+y for x, y in pf_res]
pf_df = pd.DataFrame(pf_list, columns=self.problem.inputs.names + self.problem.outputs.names)
return pf_df
def propose(self, n_proposals: int = 1) -> pd.DataFrame:
    """
    Suggests next proposal by optimizing the acquisition function.
    """
    # Build a Gurobi model over the search space so the opti constraints can
    # be attached to it as Gurobi constraints.
    gurobi_model = get_core_gurobi_model(self.space)
    # Migrate constraints from opti to gurobi
    if self.problem.constraints:
        for c in self.problem.constraints:
            if isinstance(c, opti.constraint.LinearInequality):
                # sum_i coef_i * x_i <= rhs over the variables named in c.
                coef = {x: a for (x, a) in zip(c.names, c.lhs)}
                gurobi_model.addConstr(
                    (
                        sum(
                            coef[v.varname] * v
                            for v in gurobi_model.getVars()
                            if v.varname in coef
                        )
                        <= c.rhs
                    ),
                    name="LinearInequalityOpti"
                )
            elif isinstance(c, opti.constraint.LinearEquality):
                # sum_i coef_i * x_i == rhs over the variables named in c.
                coef = {x: a for (x, a) in zip(c.names, c.lhs)}
                gurobi_model.addConstr(
                    (
                        sum(
                            coef[v.varname] * v
                            for v in gurobi_model.getVars()
                            if v.varname in coef
                        )
                        == c.rhs
                    ),
                    name="LinearEqualityOpti"
                )
            elif isinstance(c, opti.constraint.NChooseK):
                # Big-M implementation of n-choose-k constraint
                # One binary indicator per component: with y=0 the LB/UB
                # constraints below pin the variable to 0; with y=1 it may
                # range within [lb, ub].
                y = gurobi_model.addVars(c.names, vtype=gurobipy.GRB.BINARY)
                gurobi_model.addConstrs(
                    (
                        y[v.varname] * v.lb <= v
                        for v in gurobi_model.getVars()
                        if v.varname in c.names
                    ),
                    name="n-choose-k-constraint LB",
                )
                gurobi_model.addConstrs(
                    (
                        y[v.varname] * v.ub >= v
                        for v in gurobi_model.getVars()
                        if v.varname in c.names
                    ),
                    name="n-choose-k-constraint UB",
                )
                # Exactly max_active indicators may be switched on.
                gurobi_model.addConstr(
                    y.sum() == c.max_active, name="max active components"
                )
            else:
                raise ValueError(f"Constraint of type {type(c)} not supported.")
    # NOTE(review): `gurobi_model` (with the migrated constraints) is never
    # handed to `ask`, so the constraints built above may not restrict the
    # proposals — verify whether it should be passed to the optimizer
    # (e.g. as a model core).
    X_res = self.entmoot_optimizer.ask(n_points=n_proposals)
    return pd.DataFrame(X_res, columns=self.problem.inputs.names) | 0.850267 | 0.407569
from __future__ import absolute_import, division, print_function
import re
import os
import subprocess
import tempfile
import requests
def to_snake_case(s, sep="_"):
    # type: (str, str) -> str
    """Convert a CamelCase name to snake_case (or any separator).

    Two regex passes: first split before a capitalized word, then split
    between a lower-case letter/digit and an upper-case letter; finally
    the whole string is lower-cased.
    """
    replacement = r"\1" + sep + r"\2"
    step_one = re.sub("(.)([A-Z][a-z]+)", replacement, s)
    step_two = re.sub("([a-z0-9])([A-Z])", replacement, step_one)
    return step_two.lower()
def is_gcs_path(path):
    # type: (str) -> bool
    """Returns True if given path is GCS path, False otherwise.

    Surrounding whitespace is ignored and the scheme check is
    case-insensitive.
    """
    normalized = path.strip().lower()
    return normalized.startswith("gs://")
def get_uri(target):
    """Extract a URI from a target object.

    A ``uri()`` method takes precedence over a ``path`` attribute; any
    other object is rejected.
    """
    if hasattr(target, "uri"):
        return target.uri()
    if hasattr(target, "path"):
        return target.path
    raise ValueError("Unknown input target type: %s" % target.__class__.__name__)
def run_with_logging(cmd, logger):
    """Run *cmd* and wait for it to finish, streaming its output to *logger*.

    stderr is merged into stdout so a single stream carries everything;
    each line is logged at INFO level as it is produced.

    :param cmd: command (argument list) passed to ``subprocess.Popen``.
    :param logger: object with an ``info(msg)`` method.
    :returns: the exit code (always 0 on success).
    :raises subprocess.CalledProcessError: if the process exits non-zero;
        the captured output is attached to the exception.
    """
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    output_lines = []
    # Read line-by-line so output shows up in the log while the process is
    # still running. The `with` closes the pipe even if logging raises
    # (the original leaked the stdout file object).
    with process.stdout:
        for raw_line in iter(process.stdout.readline, b""):
            line = raw_line.decode("utf-8")
            output_lines.append(line)
            logger.info(line.rstrip("\n"))
    exit_code = process.wait()
    if exit_code:
        raise subprocess.CalledProcessError(exit_code, cmd, output="".join(output_lines))
    return exit_code
def _fetch_file(url, output_path=None):
    # type: (str, str) -> str
    """Fetches a file from the url and saves it to a temp file (or at the provided output path).

    :param url: URL to download; redirects are followed.
    :param output_path: target file path. When None, a NamedTemporaryFile is
        created and left on disk — the caller is responsible for cleanup.
    :returns: the path the content was written to.
    :raises Exception: if the response status is not a 2xx success.
    """
    rep = requests.get(url, allow_redirects=True)
    # BUG FIX: the original used `rep.status_code / 100 != 2`, which is true
    # division under `from __future__ import division`, so every 2xx status
    # except exactly 200 was rejected (e.g. 201 / 100 == 2.01). Integer
    # division accepts the whole 2xx class.
    if rep.status_code // 100 != 2:
        raise Exception("Got [status_code:{}] fetching file at [url:{}]".format(rep.status_code,
                                                                                url))
    if output_path is None:
        output_path = tempfile.NamedTemporaryFile(delete=False).name
    with open(output_path, "wb") as out:
        out.write(rep.content)
    return output_path
def fetch_tfdv_whl(version=None, output_path=None, platform="manylinux1"):
    # type: (str, str, str) -> str
    """Fetches the TFDV pip package from PyPI and saves it to a temporary file (or the provided
    output path). Returns the path to the fetched package."""
    package_name = "tensorflow_data_validation"
    if version is None:
        # Default to the version of the locally installed TFDV package.
        import tensorflow_data_validation as tfdv
        version = tfdv.__version__
    pypi_base = "https://pypi.org/simple/{}".format(package_name)
    package_url = None
    # Scrape the PyPI "simple" HTML index for a wheel link matching both the
    # requested version and the platform tag.
    # NOTE(review): _fetch_file writes the listing to a temp file that is
    # never deleted afterwards.
    with open(_fetch_file(pypi_base)) as listing_html:
        for line in listing_html:
            if version in line and platform in line:
                # Take the href up to the '#' fragment (drops the hash suffix).
                package_url = re.findall(".*href=\"([^ ]*)#[^ ]*\".*", line)[0]
                break
    if package_url is None:
        raise Exception("Problem fetching package. Couldn't parse listing at [url:{}]"
                        .format(pypi_base))
    if output_path is None:
        temp_dir = tempfile.mkdtemp()
        # Note: output_path file name must exactly match the remote wheel name.
        output_path = os.path.join(temp_dir, package_url.split("/")[-1])
    return _fetch_file(package_url, output_path=output_path) | spotify_tensorflow/luigi/utils.py |
from __future__ import absolute_import, division, print_function
import re
import os
import subprocess
import tempfile
import requests
def to_snake_case(s, sep="_"):
# type: (str, str) -> str
p = r"\1" + sep + r"\2"
s1 = re.sub("(.)([A-Z][a-z]+)", p, s)
return re.sub("([a-z0-9])([A-Z])", p, s1).lower()
def is_gcs_path(path):
# type: (str) -> bool
"""Returns True if given path is GCS path, False otherwise."""
return path.strip().lower().startswith("gs://")
def get_uri(target):
if hasattr(target, "uri"):
return target.uri()
elif hasattr(target, "path"):
return target.path
else:
raise ValueError("Unknown input target type: %s" % target.__class__.__name__)
def run_with_logging(cmd, logger):
"""
Run cmd and wait for it to finish. While cmd is running, we read it's
output and print it to a logger.
"""
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output_lines = []
while True:
line = process.stdout.readline()
if not line:
break
line = line.decode("utf-8")
output_lines += [line]
logger.info(line.rstrip("\n"))
exit_code = process.wait()
if exit_code:
output = "".join(output_lines)
raise subprocess.CalledProcessError(exit_code, cmd, output=output)
return exit_code
def _fetch_file(url, output_path=None):
# type: (str, str) -> str
"""Fetches a file from the url and saves it to a temp file (or at the provided output path)."""
rep = requests.get(url, allow_redirects=True)
if rep.status_code / 100 != 2:
raise Exception("Got [status_code:{}] fetching file at [url:{}]".format(rep.status_code,
url))
if output_path is None:
output_path = tempfile.NamedTemporaryFile(delete=False).name
with open(output_path, "wb") as out:
out.write(rep.content)
return output_path
def fetch_tfdv_whl(version=None, output_path=None, platform="manylinux1"):
# type: (str, str, str) -> str
"""Fetches the TFDV pip package from PyPI and saves it to a temporary file (or the provided
output path). Returns the path to the fetched package."""
package_name = "tensorflow_data_validation"
if version is None:
import tensorflow_data_validation as tfdv
version = tfdv.__version__
pypi_base = "https://pypi.org/simple/{}".format(package_name)
package_url = None
with open(_fetch_file(pypi_base)) as listing_html:
for line in listing_html:
if version in line and platform in line:
package_url = re.findall(".*href=\"([^ ]*)#[^ ]*\".*", line)[0]
break
if package_url is None:
raise Exception("Problem fetching package. Couldn't parse listing at [url:{}]"
.format(pypi_base))
if output_path is None:
temp_dir = tempfile.mkdtemp()
# Note: output_path file name must exactly match the remote wheel name.
output_path = os.path.join(temp_dir, package_url.split("/")[-1])
return _fetch_file(package_url, output_path=output_path) | 0.663342 | 0.14627 |
from contextlib import contextmanager
from crl.interactivesessions._terminalpools import _TerminalPools
from ._process import (
_AsyncProcessWithoutPty,
_ForegroundProcessWithoutPty,
_BackgroundProcessWithoutPty,
_NoCommBackgroudProcess)
from ._targetproperties import _TargetProperties
__copyright__ = 'Copyright (C) 2019, Nokia'
class _RunnerInTarget(object):
    """Runs commands in a target reached through the shell stack ``shelldicts``.

    Terminals are borrowed from (and returned to) a ``_TerminalPools``
    instance; per-target tunables live in ``_TargetProperties``.
    """

    def __init__(self, shelldicts):
        self.shelldicts = shelldicts
        # Per-target settings (e.g. the default executable used below).
        self.properties = _TargetProperties()
        self.terminalpools = _TerminalPools()

    @contextmanager
    def active_terminal(self):
        # Borrow a terminal for the duration of the with-block only.
        with self.terminalpools.active_terminal(self.shelldicts,
                                                self.properties) as terminal:
            yield terminal

    def run(self, cmd, timeout, executable=None, progress_log=False):
        """Run *cmd* and wait for the result; with ``progress_log=True`` the
        asynchronous process class is used instead of the foreground one."""
        processcls = (
            _AsyncProcessWithoutPty
            if progress_log else
            _ForegroundProcessWithoutPty)
        return processcls(
            cmd,
            executable=self._get_executable(executable),
            shelldicts=self.shelldicts,
            properties=self.properties,
            timeout=timeout).run()

    def run_in_background(self, cmd, executable=None):
        # Background process with a communication channel kept open.
        return _BackgroundProcessWithoutPty(
            **self._get_background_kwargs(cmd, executable)).run()

    def run_in_nocomm_background(self, cmd, executable=None):
        # Fire-and-forget background process (no communication channel).
        return _NoCommBackgroudProcess(
            **self._get_background_kwargs(cmd, executable)).run()

    def _get_background_kwargs(self, cmd, executable):
        # Constructor kwargs shared by both background process flavours.
        return {'cmd': cmd,
                'executable': self._get_executable(executable),
                'shelldicts': self.shelldicts,
                'properties': self.properties}

    def _get_executable(self, executable):
        # Fall back to the target's default executable when none is given.
        return (self.properties.default_executable
                if executable is None else
                executable)

    def get_terminal(self):
        """Obtain a terminal from the pool; pair with put_terminal()."""
        return self.terminalpools.get(shelldicts=self.shelldicts,
                                      properties=self.properties)

    def put_terminal(self, terminal):
        # Return a previously obtained terminal to the pool.
        return self.terminalpools.put(terminal) | src/crl/interactivesessions/_runnerintarget.py | from contextlib import contextmanager
from crl.interactivesessions._terminalpools import _TerminalPools
from ._process import (
_AsyncProcessWithoutPty,
_ForegroundProcessWithoutPty,
_BackgroundProcessWithoutPty,
_NoCommBackgroudProcess)
from ._targetproperties import _TargetProperties
__copyright__ = 'Copyright (C) 2019, Nokia'
class _RunnerInTarget(object):
def __init__(self, shelldicts):
self.shelldicts = shelldicts
self.properties = _TargetProperties()
self.terminalpools = _TerminalPools()
@contextmanager
def active_terminal(self):
with self.terminalpools.active_terminal(self.shelldicts,
self.properties) as terminal:
yield terminal
def run(self, cmd, timeout, executable=None, progress_log=False):
processcls = (
_AsyncProcessWithoutPty
if progress_log else
_ForegroundProcessWithoutPty)
return processcls(
cmd,
executable=self._get_executable(executable),
shelldicts=self.shelldicts,
properties=self.properties,
timeout=timeout).run()
def run_in_background(self, cmd, executable=None):
return _BackgroundProcessWithoutPty(
**self._get_background_kwargs(cmd, executable)).run()
def run_in_nocomm_background(self, cmd, executable=None):
return _NoCommBackgroudProcess(
**self._get_background_kwargs(cmd, executable)).run()
def _get_background_kwargs(self, cmd, executable):
return {'cmd': cmd,
'executable': self._get_executable(executable),
'shelldicts': self.shelldicts,
'properties': self.properties}
def _get_executable(self, executable):
return (self.properties.default_executable
if executable is None else
executable)
def get_terminal(self):
return self.terminalpools.get(shelldicts=self.shelldicts,
properties=self.properties)
def put_terminal(self, terminal):
return self.terminalpools.put(terminal) | 0.650689 | 0.070304 |
from abc import ABC, abstractmethod
from io import StringIO
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Union, overload
from pydantic import root_validator
from typing_extensions import Literal
from vkbottle_types.objects import (
AudioAudio,
DocsDoc,
MessagesForward,
MessagesMessage,
PhotosPhoto,
UsersUserFull,
VideoVideo,
WallWallComment,
WallWallpostFull,
)
from vkbottle.dispatch.dispenser.base import StatePeer
from vkbottle.modules import json, logger
if TYPE_CHECKING:
from vkbottle_types.responses.messages import MessagesSendUserIdsResponseItem
from vkbottle.api import ABCAPI, API
from .foreign_message import BaseForeignMessageMin
from .mention import Mention, replace_mention_validator
class BaseMessageMin(MessagesMessage, ABC):
    """Minimal message wrapper shared by the concrete message types.

    Extends the raw ``MessagesMessage`` model with the API handle, FSM
    state, mention handling, attachment accessors and reply helpers.
    """

    # API instance injected by the framework (exposed via `ctx_api`).
    unprepared_ctx_api: Optional[Any] = None
    # FSM state attached to this peer, if any.
    state_peer: Optional["StatePeer"] = None
    reply_message: Optional["BaseForeignMessageMin"] = None
    fwd_messages: Optional[List["BaseForeignMessageMin"]] = []
    # Whether mention extraction/stripping was enabled by the labeler.
    replace_mention: Optional[bool] = None
    _mention: Optional[Mention] = None
    # Validator that populates `_mention` (and strips the mention from text).
    __replace_mention = root_validator(replace_mention_validator, allow_reuse=True, pre=False)  # type: ignore

    @property
    def ctx_api(self) -> Union["ABCAPI", "API"]:
        # getattr keeps pydantic from treating this as a model field access.
        return getattr(self, "unprepared_ctx_api")

    @property
    def mention(self) -> Optional[Mention]:
        """Returns `Mention` object if message contains mention,
        eg if message is `@username text` returns `Mention(id=123, text="text")`,
        also mention is automatically removes from message text"""
        if not self.replace_mention:
            logger.warning(
                "labeler.message_view.replace_mention is set to False, the mention will not be processed"
            )
            return None
        return self._mention

    @property
    @abstractmethod
    def is_mentioned(self) -> bool:
        """Returns True if current bot is mentioned in message"""
        pass

    @overload
    async def get_user(self, raw_mode: Literal[False] = ..., **kwargs) -> UsersUserFull:
        ...

    @overload
    async def get_user(self, raw_mode: Literal[True] = ..., **kwargs) -> dict:
        ...

    async def get_user(self, raw_mode: bool = False, **kwargs) -> Union[UsersUserFull, dict]:
        """Fetch the sender (``from_id``) via users.get; ``raw_mode`` returns
        the raw response dict instead of the typed model."""
        raw_user = (await self.ctx_api.request("users.get", {"user_ids": self.from_id, **kwargs}))[
            "response"
        ][0]
        return raw_user if raw_mode else UsersUserFull(**raw_user)

    @property
    def chat_id(self) -> int:
        # Chat peers are offset by 2e9; subtracting yields the chat id.
        return self.peer_id - 2_000_000_000

    @property
    def message_id(self) -> int:
        # NOTE(review): preference order here (conversation_message_id first)
        # is the opposite of get_message_id() below — confirm which is intended.
        return self.conversation_message_id or self.id

    def get_attachment_strings(self) -> Optional[List[str]]:
        """Serialize attachments to "type<owner>_<id>[_<access_key>]" strings,
        skipping attachment objects without id/owner_id."""
        if self.attachments is None:
            return None
        attachments = []
        for attachment in self.attachments:
            attachment_type = attachment.type.value
            attachment_object = getattr(attachment, attachment_type)
            if not hasattr(attachment_object, "id") or not hasattr(attachment_object, "owner_id"):
                continue
            attachment_string = (
                f"{attachment_type}{attachment_object.owner_id}_{attachment_object.id}"
            )
            if attachment_object.access_key:
                attachment_string += f"_{attachment_object.access_key}"
            attachments.append(attachment_string)
        return attachments

    def get_wall_attachment(self) -> Optional[List["WallWallpostFull"]]:
        if self.attachments is None:
            return None
        result = [attachment.wall for attachment in self.attachments if attachment.wall]
        return result or None

    def get_wall_reply_attachment(self) -> Optional[List["WallWallComment"]]:
        if self.attachments is None:
            return None
        result = [
            attachment.wall_reply for attachment in self.attachments if attachment.wall_reply
        ]
        return result or None

    def get_photo_attachments(self) -> Optional[List["PhotosPhoto"]]:
        if self.attachments is None:
            return None
        return [attachment.photo for attachment in self.attachments if attachment.photo]

    def get_video_attachments(self) -> Optional[List["VideoVideo"]]:
        if self.attachments is None:
            return None
        return [attachment.video for attachment in self.attachments if attachment.video]

    def get_doc_attachments(self) -> Optional[List["DocsDoc"]]:
        if self.attachments is None:
            return None
        return [attachment.doc for attachment in self.attachments if attachment.doc]

    def get_audio_attachments(self) -> Optional[List["AudioAudio"]]:
        if self.attachments is None:
            return None
        return [attachment.audio for attachment in self.attachments if attachment.audio]

    def get_message_id(self) -> Optional[int]:
        return self.id or self.conversation_message_id

    def get_payload_json(
        self,
        throw_error: bool = False,
        unpack_failure: Callable[[str], Union[dict, str]] = lambda payload: payload,
    ) -> Optional[Union[dict, str]]:
        """Parse the message payload as JSON; on failure either re-raise
        (``throw_error=True``) or return ``unpack_failure(payload)``."""
        if self.payload is None:
            return None
        try:
            return json.loads(self.payload)
        except (ValueError, TypeError) as e:
            if throw_error:
                raise e from e
            return unpack_failure(self.payload)

    async def answer(
        self,
        message: Optional[str] = None,
        attachment: Optional[str] = None,
        random_id: Optional[int] = 0,
        lat: Optional[float] = None,
        long: Optional[float] = None,
        reply_to: Optional[int] = None,
        forward_messages: Optional[List[int]] = None,
        forward: Optional[str] = None,
        sticker_id: Optional[int] = None,
        keyboard: Optional[str] = None,
        template: Optional[str] = None,
        payload: Optional[str] = None,
        content_source: Optional[str] = None,
        dont_parse_links: Optional[bool] = None,
        disable_mentions: Optional[bool] = None,
        intent: Optional[str] = None,
        subscribe_id: Optional[int] = None,
        **kwargs,
    ) -> "MessagesSendUserIdsResponseItem":
        """Send a message back to this message's peer; the last chunk's send
        response is returned."""
        # Fold extra kwargs into the locals() snapshot so the comprehension
        # below picks them up together with the named parameters
        # (relies on CPython's locals() caching behaviour).
        locals().update(kwargs)
        data = {k: v for k, v in locals().items() if k not in ("self", "kwargs") and v is not None}
        # Addressing params are derived from the message itself; strip any
        # that the caller passed through kwargs.
        deprecated_params = ("peer_id", "user_id", "domain", "chat_id", "user_ids")
        deprecated = [k for k in data if k in deprecated_params]
        if deprecated:
            logger.warning(
                "Params like peer_id or user_id is deprecated in Message.answer()."
                "Use API.messages.send() instead"
            )
            for k in deprecated:
                data.pop(k)
        if message is None:
            message = ""
        elif not isinstance(message, str):
            message = str(message)
        # Texts longer than VK's limit are sent in 4096-character chunks.
        stream = StringIO(message)
        while True:
            msg = stream.read(4096)
            if msg:
                data["message"] = msg
            response = (await self.ctx_api.messages.send(peer_ids=[self.peer_id], **data))[0]  # type: ignore
            if stream.tell() == len(message or ""):
                break
        return response

    async def reply(
        self,
        message: Optional[str] = None,
        attachment: Optional[str] = None,
        **kwargs,
    ) -> "MessagesSendUserIdsResponseItem":
        """Answer with this message attached as the reply target."""
        locals().update(kwargs)
        data = {k: v for k, v in locals().items() if k not in ("self", "kwargs") and v is not None}
        data["forward"] = MessagesForward(
            conversation_message_ids=[self.conversation_message_id],  # type: ignore
            peer_id=self.peer_id,
            is_reply=True,
        ).json()
        return await self.answer(**data)

    async def forward(
        self,
        message: Optional[str] = None,
        attachment: Optional[str] = None,
        **kwargs,
    ) -> "MessagesSendUserIdsResponseItem":
        """Answer with this message attached as a forwarded message."""
        locals().update(kwargs)
        data = {
            k: v
            for k, v in locals().items()
            if k not in ("self", "kwargs", "forward_message_ids") and v is not None
        }
        data["forward"] = MessagesForward(
            conversation_message_ids=[self.conversation_message_id], peer_id=self.peer_id  # type: ignore
        ).json()
        return await self.answer(**data)


# Resolve the string forward references used in the annotations above.
BaseMessageMin.update_forward_refs() | vkbottle/tools/dev/mini_types/base/message.py | from abc import ABC, abstractmethod
from io import StringIO
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Union, overload
from pydantic import root_validator
from typing_extensions import Literal
from vkbottle_types.objects import (
AudioAudio,
DocsDoc,
MessagesForward,
MessagesMessage,
PhotosPhoto,
UsersUserFull,
VideoVideo,
WallWallComment,
WallWallpostFull,
)
from vkbottle.dispatch.dispenser.base import StatePeer
from vkbottle.modules import json, logger
if TYPE_CHECKING:
from vkbottle_types.responses.messages import MessagesSendUserIdsResponseItem
from vkbottle.api import ABCAPI, API
from .foreign_message import BaseForeignMessageMin
from .mention import Mention, replace_mention_validator
class BaseMessageMin(MessagesMessage, ABC):
unprepared_ctx_api: Optional[Any] = None
state_peer: Optional["StatePeer"] = None
reply_message: Optional["BaseForeignMessageMin"] = None
fwd_messages: Optional[List["BaseForeignMessageMin"]] = []
replace_mention: Optional[bool] = None
_mention: Optional[Mention] = None
__replace_mention = root_validator(replace_mention_validator, allow_reuse=True, pre=False) # type: ignore
@property
def ctx_api(self) -> Union["ABCAPI", "API"]:
return getattr(self, "unprepared_ctx_api")
@property
def mention(self) -> Optional[Mention]:
"""Returns `Mention` object if message contains mention,
eg if message is `@username text` returns `Mention(id=123, text="text")`,
also mention is automatically removes from message text"""
if not self.replace_mention:
logger.warning(
"labeler.message_view.replace_mention is set to False, the mention will not be processed"
)
return None
return self._mention
@property
@abstractmethod
def is_mentioned(self) -> bool:
"""Returns True if current bot is mentioned in message"""
pass
@overload
async def get_user(self, raw_mode: Literal[False] = ..., **kwargs) -> UsersUserFull:
...
@overload
async def get_user(self, raw_mode: Literal[True] = ..., **kwargs) -> dict:
...
async def get_user(self, raw_mode: bool = False, **kwargs) -> Union[UsersUserFull, dict]:
raw_user = (await self.ctx_api.request("users.get", {"user_ids": self.from_id, **kwargs}))[
"response"
][0]
return raw_user if raw_mode else UsersUserFull(**raw_user)
@property
def chat_id(self) -> int:
return self.peer_id - 2_000_000_000
@property
def message_id(self) -> int:
return self.conversation_message_id or self.id
def get_attachment_strings(self) -> Optional[List[str]]:
if self.attachments is None:
return None
attachments = []
for attachment in self.attachments:
attachment_type = attachment.type.value
attachment_object = getattr(attachment, attachment_type)
if not hasattr(attachment_object, "id") or not hasattr(attachment_object, "owner_id"):
continue
attachment_string = (
f"{attachment_type}{attachment_object.owner_id}_{attachment_object.id}"
)
if attachment_object.access_key:
attachment_string += f"_{attachment_object.access_key}"
attachments.append(attachment_string)
return attachments
def get_wall_attachment(self) -> Optional[List["WallWallpostFull"]]:
if self.attachments is None:
return None
result = [attachment.wall for attachment in self.attachments if attachment.wall]
return result or None
def get_wall_reply_attachment(self) -> Optional[List["WallWallComment"]]:
if self.attachments is None:
return None
result = [
attachment.wall_reply for attachment in self.attachments if attachment.wall_reply
]
return result or None
def get_photo_attachments(self) -> Optional[List["PhotosPhoto"]]:
if self.attachments is None:
return None
return [attachment.photo for attachment in self.attachments if attachment.photo]
def get_video_attachments(self) -> Optional[List["VideoVideo"]]:
if self.attachments is None:
return None
return [attachment.video for attachment in self.attachments if attachment.video]
def get_doc_attachments(self) -> Optional[List["DocsDoc"]]:
if self.attachments is None:
return None
return [attachment.doc for attachment in self.attachments if attachment.doc]
def get_audio_attachments(self) -> Optional[List["AudioAudio"]]:
if self.attachments is None:
return None
return [attachment.audio for attachment in self.attachments if attachment.audio]
def get_message_id(self) -> Optional[int]:
return self.id or self.conversation_message_id
def get_payload_json(
self,
throw_error: bool = False,
unpack_failure: Callable[[str], Union[dict, str]] = lambda payload: payload,
) -> Optional[Union[dict, str]]:
if self.payload is None:
return None
try:
return json.loads(self.payload)
except (ValueError, TypeError) as e:
if throw_error:
raise e from e
return unpack_failure(self.payload)
async def answer(
self,
message: Optional[str] = None,
attachment: Optional[str] = None,
random_id: Optional[int] = 0,
lat: Optional[float] = None,
long: Optional[float] = None,
reply_to: Optional[int] = None,
forward_messages: Optional[List[int]] = None,
forward: Optional[str] = None,
sticker_id: Optional[int] = None,
keyboard: Optional[str] = None,
template: Optional[str] = None,
payload: Optional[str] = None,
content_source: Optional[str] = None,
dont_parse_links: Optional[bool] = None,
disable_mentions: Optional[bool] = None,
intent: Optional[str] = None,
subscribe_id: Optional[int] = None,
**kwargs,
) -> "MessagesSendUserIdsResponseItem":
locals().update(kwargs)
data = {k: v for k, v in locals().items() if k not in ("self", "kwargs") and v is not None}
deprecated_params = ("peer_id", "user_id", "domain", "chat_id", "user_ids")
deprecated = [k for k in data if k in deprecated_params]
if deprecated:
logger.warning(
"Params like peer_id or user_id is deprecated in Message.answer()."
"Use API.messages.send() instead"
)
for k in deprecated:
data.pop(k)
if message is None:
message = ""
elif not isinstance(message, str):
message = str(message)
stream = StringIO(message)
while True:
msg = stream.read(4096)
if msg:
data["message"] = msg
response = (await self.ctx_api.messages.send(peer_ids=[self.peer_id], **data))[0] # type: ignore
if stream.tell() == len(message or ""):
break
return response
async def reply(
self,
message: Optional[str] = None,
attachment: Optional[str] = None,
**kwargs,
) -> "MessagesSendUserIdsResponseItem":
locals().update(kwargs)
data = {k: v for k, v in locals().items() if k not in ("self", "kwargs") and v is not None}
data["forward"] = MessagesForward(
conversation_message_ids=[self.conversation_message_id], # type: ignore
peer_id=self.peer_id,
is_reply=True,
).json()
return await self.answer(**data)
async def forward(
self,
message: Optional[str] = None,
attachment: Optional[str] = None,
**kwargs,
) -> "MessagesSendUserIdsResponseItem":
locals().update(kwargs)
data = {
k: v
for k, v in locals().items()
if k not in ("self", "kwargs", "forward_message_ids") and v is not None
}
data["forward"] = MessagesForward(
conversation_message_ids=[self.conversation_message_id], peer_id=self.peer_id # type: ignore
).json()
return await self.answer(**data)
BaseMessageMin.update_forward_refs() | 0.846006 | 0.12408 |
from __future__ import print_function
import argparse
import codecs
import fnmatch
import os
import sys
import yamale
import yaml
def find_question_files(root_directory):
    """Yield paths of all question YAML files under *root_directory*.

    The tree is walked recursively; files whose name starts with an
    underscore are skipped.
    """
    for dirpath, _, filenames in os.walk(root_directory):
        for name in fnmatch.filter(filenames, "[!_]*.yml"):
            yield os.path.join(dirpath, name)
def get_uid(filename):
    """Return the ``uid`` field of the YAML document in *filename*.

    :raises KeyError: if the document has no ``uid`` key.
    """
    with codecs.open(filename, 'r', encoding="utf-8") as f:
        # safe_load instead of bare load: yaml.load without an explicit
        # Loader is deprecated and can instantiate arbitrary Python objects
        # from the file.
        doc = yaml.safe_load(f)
    return doc["uid"]
def validate(schema_filename, data_filename, seen_uids):
    """Validate a YAML file according to the supplied schema.

    Also enforces that the file's ``uid`` is unique across *seen_uids*,
    which is updated in place.

    Returns 0 on success, 1 on a schema violation, 2 on a duplicate UID.
    """
    schema = yamale.make_schema(schema_filename)
    data = yamale.make_data(data_filename)
    try:
        print("")
        print("Checking file '{}'...".format(data_filename))
        yamale.validate(schema, data)
        curr_uid = get_uid(data_filename)
        if curr_uid in seen_uids:
            print("Invalid data: Non-unique UID '{:s}'".format(curr_uid))
            return 2
        else:
            seen_uids.add(curr_uid)
        print("Everything ok.")
        return 0
    except ValueError as err:
        # yamale signals schema violations with a ValueError subclass.
        print("Invalid data. Yamale says:")
        print(err)
        print("")
        print("Probable error cause:")
        # The last line of yamale's message carries the concrete failure.
        print(str(err).splitlines()[-1])
        return 1
if __name__ == "__main__":
    parser = argparse.ArgumentParser(prog='Validate web quiz questions')
    parser.add_argument('schema', type=str, help='path to the schema')
    parser.add_argument('path', type=str,
                        help='file or a directory with files to be validated')
    args = parser.parse_args()
    if os.path.isfile(args.path):
        # Single file: the validation result becomes the exit code directly.
        sys.exit(validate(args.schema, args.path, set()))
    elif os.path.isdir(args.path):
        # Shared UID set so duplicates are detected across all files.
        uids = set()
        # Use eager evaluation here, otherwise program exits after
        # the first invalid file
        exit_codes = [validate(args.schema, d, uids)
                      for d in find_question_files(args.path)]
        if all(ec == 0 for ec in exit_codes):
            sys.exit(0)
        else:
            sys.exit(1)
    else:
        # Path is neither an existing file nor a directory.
        print("Invalid data filename.")
        sys.exit(1)
from __future__ import print_function
import argparse
import codecs
import fnmatch
import os
import sys
import yamale
import yaml
def find_question_files(root_directory):
"""Yield all YAML files recursively."""
for root, _, files in os.walk(root_directory):
for basename in fnmatch.filter(files, "[!_]*.yml"):
yield os.path.join(root, basename)
def get_uid(filename):
with codecs.open(filename, 'r', encoding="utf-8") as f:
doc = yaml.load(f)
return doc["uid"]
def validate(schema_filename, data_filename, seen_uids):
"""Validate a YAML file according to the supplied schema."""
schema = yamale.make_schema(schema_filename)
data = yamale.make_data(data_filename)
try:
print("")
print("Checking file '{}'...".format(data_filename))
yamale.validate(schema, data)
curr_uid = get_uid(data_filename)
if curr_uid in seen_uids:
print("Invalid data: Non-unique UID '{:s}'".format(curr_uid))
return 2
else:
seen_uids.add(curr_uid)
print("Everything ok.")
return 0
except ValueError as err:
print("Invalid data. Yamale says:")
print(err)
print("")
print("Probable error cause:")
print(str(err).splitlines()[-1])
return 1
if __name__ == "__main__":
parser = argparse.ArgumentParser(prog='Validate web quiz questions')
parser.add_argument('schema', type=str, help='path to the schema')
parser.add_argument('path', type=str,
help='file or a directory with files to be validated')
args = parser.parse_args()
if os.path.isfile(args.path):
sys.exit(validate(args.schema, args.path, set()))
elif os.path.isdir(args.path):
uids = set()
# Use eager evaluation here, otherwise program exits after
# the first invalid file
exit_codes = [validate(args.schema, d, uids)
for d in find_question_files(args.path)]
if all(ec == 0 for ec in exit_codes):
sys.exit(0)
else:
sys.exit(1)
else:
print("Invalid data filename.")
sys.exit(1) | 0.434941 | 0.179981 |
from collections import namedtuple
import numpy as np
from roifile import ImagejRoi
from skimage.draw import polygon, polygon_perimeter
from tifffile import TiffFile, TiffWriter
from . import REGION_BACKGROUND, REGION_BORDER, REGION_FOREGROUND
# Re-export TiffWriter so callers can import it from this module directly.
TiffWriter = TiffWriter
# Lightweight summary of a TIFF stack: page count, width, height, channel
# count and pixel dtype.
TiffInfo = namedtuple('TiffInfo', 'pages, w, h, c, dtype')
def _inner_tiff_peek(tiff):
    """Build a TiffInfo summary from an already-open TiffFile.

    The page count is derived from ImageJ metadata when present: start
    from 'images' (defaulting to 1) and clamp to 'frames' if that is
    smaller; non-ImageJ files are treated as single-page.
    """
    first_page = tiff.pages[0]
    imagej_metadata = tiff.imagej_metadata
    if imagej_metadata:
        try:
            count = imagej_metadata['images']
        except KeyError:
            count = 1
        try:
            # 'frames' (the time axis) wins when smaller than 'images'.
            if imagej_metadata['frames'] < count:
                count = imagej_metadata['frames']
        except KeyError:
            pass
    else:
        count = 1
    return TiffInfo(
        pages=count,
        h=first_page.imagelength,
        w=first_page.imagewidth,
        # Channel count is hard-coded to 1 here.
        c=1,
        dtype=first_page.dtype,
    )
def tiff_peek(file_name_or_tiff):
    """
    Fetch some information about a TIFF file and returns a TiffInfo named tuple.

    Accepts either an already-open TiffFile or a file name (str or bytes).

    :param file_name_or_tiff:
    :return:
    """
    if isinstance(file_name_or_tiff, TiffFile):
        return _inner_tiff_peek(file_name_or_tiff)
    file_name = file_name_or_tiff
    if isinstance(file_name, bytes):  # why?!
        file_name = file_name.decode('utf8')
    with TiffFile(file_name) as tiff:
        return _inner_tiff_peek(tiff)
def guess_frame_identifier(all_overlays):
    """
    Guess the right attribute which identifies the frame.

    ImageJ ROIs can store the (temporal) frame number in different
    attributes depending on the (hyper)stack type.

    :param all_overlays: List of overlays
    :return: 't_position' or 'position'
    """
    positions = np.array([overlay.position for overlay in all_overlays])
    if (positions == 0).all():
        # Every plain position is zero -> the hyperstack time axis is used.
        return 't_position'
    return 'position'
def _get_overlays(all_overlays):
    """Decode raw ImageJ overlay bytes and group the ROIs by frame number."""
    if not isinstance(all_overlays, list):
        all_overlays = [all_overlays]
    decoded = [ImagejRoi.frombytes(raw) for raw in all_overlays]
    # Which attribute carries the frame number depends on the stack type.
    frame_attr = guess_frame_identifier(decoded)
    grouped = {}
    for roi in decoded:
        grouped.setdefault(getattr(roi, frame_attr), []).append(roi)
    return grouped
def tiff_to_array(tiff):
    """
    Open a TIFF file as an array, normalizing the dimensions.

    Memory-mapped access is used when the first page supports it, and 2-D
    images are promoted to a single-page 3-D stack.

    :param tiff: an open TiffFile
    :return: array with at least 3 dimensions
    """
    if tiff.pages[0].is_memmappable:
        array = tiff.asarray(out='memmap')
    else:
        array = tiff.asarray()
    if array.ndim < 3:
        array = array[np.newaxis, ...]
    return array
def tiff_masks(
    file_name,
    background=REGION_BACKGROUND,
    foreground=REGION_FOREGROUND,
    border=REGION_BORDER,
    skip_empty=False,
):
    """
    Read a TIFF file with ImageJ ROIs, generate and yield tuples of image and mask.

    :param file_name: path to the TIFF file (str or bytes)
    :param background: mask value used for background pixels
    :param foreground: mask value used for ROI interiors
    :param border: mask value used for ROI outlines
    :param skip_empty: when True, frames without overlays are skipped
    :return: generator of (image, mask) pairs
    """
    if isinstance(file_name, bytes):  # why?!
        file_name = file_name.decode('utf8')
    with TiffFile(file_name) as tiff:
        tiff_info = tiff_peek(tiff)
        count = tiff_info.pages
        array = tiff_to_array(tiff)
        if tiff.imagej_metadata:
            overlays = _get_overlays(tiff.imagej_metadata['Overlays'])
        else:
            overlays = {}
        # One background-filled prototype mask, copied per frame.
        buffer_prototype = np.empty((tiff_info.h, tiff_info.w), dtype=np.uint8)
        buffer_prototype.fill(background)
        for num in range(count):
            buffer = buffer_prototype.copy()
            # Overlay frame numbers are 1-based.
            overlay_num = num + 1
            if overlay_num == 1 and count == 1:
                # Single-image files sometimes store their overlays under
                # frame 0 instead of 1; prefer 1 when both exist.
                if 0 in overlays:
                    overlay_num = 0  # weird corner case?
                if 1 in overlays:
                    overlay_num = 1
            if overlay_num not in overlays and skip_empty:
                continue
            if overlay_num in overlays:
                draw_overlays(
                    overlays[overlay_num], buffer, foreground=foreground, border=border
                )
            yield array[num], buffer
def draw_overlays(overlays, buffer, foreground=REGION_FOREGROUND, border=REGION_BORDER):
    """
    Draws overlays onto a pre-allocated buffer.

    :param overlays: Iterable of overlays
    :param buffer: Buffer to draw unto
    :param foreground: Foreground value to use
    :param border: Border value to use
    :return:
    """
    foregrounds = np.zeros(buffer.shape, dtype=bool)
    borders = np.zeros(buffer.shape, dtype=bool)
    for overlay in overlays:
        # ROIs named by TrackMate are skipped.
        if overlay.name.startswith('TrackMate'):
            continue
        xy = overlay.coordinates()
        # Swap (x, y) columns into (row, col) order for skimage's drawing.
        xy = xy[:, ::-1]
        if len(xy) < 3:
            # Fewer than three points cannot form a polygon.
            continue
        rr, cc = polygon(xy[:, 0], xy[:, 1], shape=buffer.shape)
        foregrounds[rr, cc] = True
        rr, cc = polygon_perimeter(xy[:, 0], xy[:, 1], shape=buffer.shape)
        borders[rr, cc] = True
    # Borders are written after foregrounds, so outline pixels win.
    buffer[foregrounds] = foreground
    buffer[borders] = border | junn/io/tiffmasks.py | from collections import namedtuple
import numpy as np
from roifile import ImagejRoi
from skimage.draw import polygon, polygon_perimeter
from tifffile import TiffFile, TiffWriter
from . import REGION_BACKGROUND, REGION_BORDER, REGION_FOREGROUND
TiffWriter = TiffWriter
TiffInfo = namedtuple('TiffInfo', 'pages, w, h, c, dtype')
def _inner_tiff_peek(tiff):
    """
    Build a TiffInfo named tuple from an already-open TiffFile handle.

    The page count comes from the ImageJ metadata when present ('images',
    capped by a smaller 'frames' entry), otherwise defaults to 1.
    """
    page = tiff.pages[0]
    metadata = tiff.imagej_metadata
    page_count = 1
    if metadata:
        page_count = metadata.get('images', 1)
        # A smaller 'frames' entry overrides the 'images' count.
        if 'frames' in metadata and metadata['frames'] < page_count:
            page_count = metadata['frames']
    return TiffInfo(
        pages=page_count,
        h=page.imagelength,
        w=page.imagewidth,
        c=1,
        dtype=page.dtype,
    )
def tiff_peek(file_name_or_tiff):
    """
    Fetch some information about a TIFF file and return a TiffInfo named tuple.

    :param file_name_or_tiff: An open TiffFile, or a file name (str or bytes)
    :return: TiffInfo named tuple
    """
    if isinstance(file_name_or_tiff, TiffFile):
        return _inner_tiff_peek(file_name_or_tiff)
    file_name = file_name_or_tiff
    if isinstance(file_name, bytes):  # tolerate byte-string file names
        file_name = file_name.decode('utf8')
    with TiffFile(file_name) as tiff:
        return _inner_tiff_peek(tiff)
def guess_frame_identifier(all_overlays):
    """
    Guess the right attribute which identifies the frame.

    ImageJ ROIs can store the (temporal) frame number in different
    attributes depending on the (hyper)stack type: when every overlay has
    position == 0 the frame lives in 't_position', otherwise in 'position'.

    :param all_overlays: List of overlays
    :return: 't_position' or 'position'
    """
    if all(overlay.position == 0 for overlay in all_overlays):
        return 't_position'
    return 'position'
def _get_overlays(all_overlays):
    """
    Decode raw ImageJ overlay bytes and group the ROIs by frame number.

    :param all_overlays: Raw overlay bytes (a single item or a list)
    :return: Dict mapping frame number to list of ImagejRoi objects
    """
    if not isinstance(all_overlays, list):
        all_overlays = [all_overlays]
    decoded = [ImagejRoi.frombytes(raw) for raw in all_overlays]
    # The attribute holding the frame number depends on the stack type.
    frame_attribute = guess_frame_identifier(decoded)
    grouped = {}
    for roi in decoded:
        grouped.setdefault(getattr(roi, frame_attribute), []).append(roi)
    return grouped
def tiff_to_array(tiff):
    """
    Open a TIFF file as an array, normalizing the dimensions.

    :param tiff: An open tifffile.TiffFile handle (not a file name)
    :return: Array with at least 3 dimensions (pages first)
    """
    # Memory-map when the on-disk layout allows it, so large stacks are not
    # loaded into RAM wholesale.
    array = (
        tiff.asarray(out='memmap') if tiff.pages[0].is_memmappable else tiff.asarray()
    )
    # Promote single-page files to a 1-page stack so callers can always
    # index array[num].
    if array.ndim < 3:
        array = array[np.newaxis, ...]
    return array
def tiff_masks(
file_name,
background=REGION_BACKGROUND,
foreground=REGION_FOREGROUND,
border=REGION_BORDER,
skip_empty=False,
):
"""
Read a TIFF file with ImageJ ROIs, generate and yield tuples of image and mask.
:param file_name:
:param background:
:param foreground:
:param border:
:param skip_empty:
:return:
"""
if isinstance(file_name, bytes): # why?!
file_name = file_name.decode('utf8')
with TiffFile(file_name) as tiff:
tiff_info = tiff_peek(tiff)
count = tiff_info.pages
array = tiff_to_array(tiff)
if tiff.imagej_metadata:
overlays = _get_overlays(tiff.imagej_metadata['Overlays'])
else:
overlays = {}
buffer_prototype = np.empty((tiff_info.h, tiff_info.w), dtype=np.uint8)
buffer_prototype.fill(background)
for num in range(count):
buffer = buffer_prototype.copy()
overlay_num = num + 1
if overlay_num == 1 and count == 1:
if 0 in overlays:
overlay_num = 0 # weird corner case?
if 1 in overlays:
overlay_num = 1
if overlay_num not in overlays and skip_empty:
continue
if overlay_num in overlays:
draw_overlays(
overlays[overlay_num], buffer, foreground=foreground, border=border
)
yield array[num], buffer
def draw_overlays(overlays, buffer, foreground=REGION_FOREGROUND, border=REGION_BORDER):
"""
Draws overlays onto a pre-allocated buffer.
:param overlays: Iterable of overlays
:param buffer: Buffer to draw unto
:param foreground: Foreground value to use
:param border: Border value to use
:return:
"""
foregrounds = np.zeros(buffer.shape, dtype=bool)
borders = np.zeros(buffer.shape, dtype=bool)
for overlay in overlays:
if overlay.name.startswith('TrackMate'):
continue
xy = overlay.coordinates()
xy = xy[:, ::-1]
if len(xy) < 3:
continue
rr, cc = polygon(xy[:, 0], xy[:, 1], shape=buffer.shape)
foregrounds[rr, cc] = True
rr, cc = polygon_perimeter(xy[:, 0], xy[:, 1], shape=buffer.shape)
borders[rr, cc] = True
buffer[foregrounds] = foreground
buffer[borders] = border | 0.70202 | 0.336331 |
import sys, os, xml.sax, re
from xml.dom.minidom import parse, parseString, getDOMImplementation
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
SCRIPT_NAME = os.path.splitext(os.path.split(__file__)[1])[0]
DESCRIPTION = 'Merge two idlak output files to have matching initial / end breaks'
FRAMESHIFT=0.005
# Add to path
sys.path = sys.path + [SCRIPT_DIR + '/../modules']
logopts = {'logging':{
'nolog':"False",
'logdir':".",
'logname':'idlak_util',
'loglevel':"Debug",
'logtofile':"False",
'logtostderr':"True"}
}
from alignsetup_def import saxhandler as idlak_saxhandler
from build_configuration import Logger
# TODO: Should be rewritten to use a sax parser as DOM takes a massive amount of memory
# (about 8Gb for 30Mo label files)
def merge_breaks(input_fname, input_fname2, output_fname):
    """
    Copy the initial/final break types of one idlak XML file onto another.

    Reads the first and last <break> element of every <file_id> spurt in
    *input_fname*, overwrites the corresponding break "type" attributes in
    *input_fname2*, and writes the merged DOM to *output_fname*.

    :param input_fname: reference XML file the break types are taken from
    :param input_fname2: XML file whose break types are overwritten
    :param output_fname: path the merged XML document is written to
    """
    # Pass 1: record (type, time) of the first and last break of each spurt.
    dom = parse(input_fname)
    break_dict = {}
    spurts = dom.getElementsByTagName('file_id')
    for spt in spurts:
        sid = spt.getAttribute("id")
        pauses = spt.getElementsByTagName('break')
        ipause, epause = pauses[0], pauses[-1]
        ipause_type = int(ipause.getAttribute("type"))
        ipause_time = float(ipause.getAttribute("time"))
        epause_type = int(epause.getAttribute("type"))
        epause_time = float(epause.getAttribute("time"))
        break_dict[sid] = [(ipause_type, ipause_time), (epause_type, epause_time)]
    # Pass 2: overwrite the break types in the second document.
    dom2 = parse(input_fname2)
    spurts = dom2.getElementsByTagName('file_id')
    for spt in spurts:
        sid = spt.getAttribute("id")
        pauses = spt.getElementsByTagName('break')
        ipause, epause = pauses[0], pauses[-1]
        tipause, tepause = break_dict[sid]
        # minidom attribute values must be strings; passing the raw int made
        # dom2.toxml() fail with AttributeError ('int' has no 'replace').
        ipause.setAttribute("type", str(tipause[0]))
        epause.setAttribute("type", str(tepause[0]))
    # Close the output file deterministically instead of leaking the handle.
    with open(output_fname, 'w') as fp:
        fp.write(dom2.toxml())
def main():
    """Command-line entry point: expects exactly three positional XML paths."""
    from optparse import OptionParser
    usage="usage: %prog [options] text_norm.xml text_anorm.xml text_anorm_merged.xml\n" \
    "Merge two idlak norm files to have same initial and end break types."
    parser = OptionParser(usage=usage)
    opts, args = parser.parse_args()
    if len(args) == 3:
        merge_breaks(args[0], args[1], args[2])
    else:
        # parser.error prints the usage string and exits with status 2.
        parser.error('Mandatory arguments missing or excessive number of arguments')
if __name__ == '__main__':
main() | idlak-egs/tts_tangle_arctic/s2/local/merge_breaks.py | import sys, os, xml.sax, re
from xml.dom.minidom import parse, parseString, getDOMImplementation
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
SCRIPT_NAME = os.path.splitext(os.path.split(__file__)[1])[0]
DESCRIPTION = 'Merge two idlak output files to have matching initial / end breaks'
FRAMESHIFT=0.005
# Add to path
sys.path = sys.path + [SCRIPT_DIR + '/../modules']
logopts = {'logging':{
'nolog':"False",
'logdir':".",
'logname':'idlak_util',
'loglevel':"Debug",
'logtofile':"False",
'logtostderr':"True"}
}
from alignsetup_def import saxhandler as idlak_saxhandler
from build_configuration import Logger
# TODO: Should be rewritten to use a sax parser as DOM takes a massive amount of memory
# (about 8Gb for 30Mo label files)
def merge_breaks(input_fname, input_fname2, output_fname):
# Input
dom = parse(input_fname)
break_dict = {}
spurts = dom.getElementsByTagName('file_id')
for spt in spurts:
sid = spt.getAttribute("id")
pauses = spt.getElementsByTagName('break')
ipause, epause = pauses[0], pauses[-1]
ipause_type = int(ipause.getAttribute("type"))
ipause_time = float(ipause.getAttribute("time"))
epause_type = int(epause.getAttribute("type"))
epause_time = float(epause.getAttribute("time"))
break_dict[sid] = [(ipause_type, ipause_time), (epause_type, epause_time)]
dom2 = parse(input_fname2)
spurts = dom2.getElementsByTagName('file_id')
for spt in spurts:
sid = spt.getAttribute("id")
pauses = spt.getElementsByTagName('break')
ipause, epause = pauses[0], pauses[-1]
tipause, tepause = break_dict[sid]
ipause.setAttribute("type", tipause[0])
epause.setAttribute("type", tepause[0])
fp = open(output_fname, 'w')
fp.write(dom2.toxml())
def main():
from optparse import OptionParser
usage="usage: %prog [options] text_norm.xml text_anorm.xml text_anorm_merged.xml\n" \
"Merge two idlak norm files to have same initial and end break types."
parser = OptionParser(usage=usage)
opts, args = parser.parse_args()
if len(args) == 3:
merge_breaks(args[0], args[1], args[2])
else:
parser.error('Mandatory arguments missing or excessive number of arguments')
if __name__ == '__main__':
main() | 0.126326 | 0.088939 |
from __future__ import absolute_import
import os
import vcr
import unittest
from hatchbuck.api import HatchbuckAPI, HatchbuckAPIAuthenticationError
class TestSearchContacts(unittest.TestCase):
    """Tests for HatchbuckAPI.search_contacts, replayed from VCR cassettes.

    The api_key query parameter is filtered out of the cassettes, so a fake
    key works as long as the recorded responses are present.
    """

    def setUp(self):
        # Fake key can be used with existing cassettes
        self.test_api_key = os.environ.get("HATCHBUCK_API_KEY", "ABC123")

    @vcr.use_cassette(
        'tests/fixtures/cassettes/test_search_by_email_with_results.yml',
        filter_query_parameters=['api_key']
    )
    def test_search_by_email_with_results(self):
        """Searching by a known email returns one fully-populated contact."""
        hatchbuck = HatchbuckAPI(self.test_api_key)
        contacts = hatchbuck.search_contacts(emails=["<EMAIL>"])
        self.assertEqual(len(contacts), 1)
        self.assertEqual(contacts[0].firstName, "Jack")
        self.assertEqual(contacts[0].lastName, "Spratt")
        self.assertEqual(contacts[0].salesRep.username, "jakesen")
        self.assertEqual(contacts[0].status.name, "Lead")
        self.assertEqual(contacts[0].emails[0].address, "<EMAIL>")
        self.assertEqual(contacts[0].emails[0].type, "Work")
        self.assertEqual(contacts[0].subscribed, True)
        self.assertEqual(contacts[0].timezone, "Central Standard Time")

    @vcr.use_cassette(
        'tests/fixtures/cassettes/test_search_by_email_with_no_results.yml',
        filter_query_parameters=['api_key']
    )
    def test_search_by_email_with_no_results(self):
        """An unknown email yields None rather than an empty list."""
        hatchbuck = HatchbuckAPI(self.test_api_key)
        contacts = hatchbuck.search_contacts(emails=["<EMAIL>"])
        self.assertEqual(contacts, None)

    @vcr.use_cassette(
        'tests/fixtures/cassettes/test_search_by_contact_id_with_results.yml',
        filter_query_parameters=['api_key']
    )
    def test_search_by_contact_id_with_results(self):
        """Searching by contactId returns the matching contact."""
        hatchbuck = HatchbuckAPI(self.test_api_key)
        contact_id = "NlNfOTJrVFFtd0E4NVhXWGdmSy0ySVdBSHhpZ01hS1NCSFFxMVBTTmlKVTE1"
        contacts = hatchbuck.search_contacts(contactId=contact_id)
        self.assertEqual(len(contacts), 1)
        self.assertEqual(contacts[0].contactId, contact_id)
        self.assertEqual(contacts[0].firstName, "Jack")
        self.assertEqual(contacts[0].lastName, "Spratt")

    @vcr.use_cassette(
        'tests/fixtures/cassettes/test_search_by_name_with_results.yml',
        filter_query_parameters=['api_key']
    )
    def test_search_by_name_with_results(self):
        """Searching by first/last name returns the matching contact."""
        hatchbuck = HatchbuckAPI(self.test_api_key)
        contacts = hatchbuck.search_contacts(
            firstName="Jack",
            lastName="Spratt"
        )
        self.assertEqual(len(contacts), 1)
        self.assertEqual(contacts[0].firstName, "Jack")
        self.assertEqual(contacts[0].lastName, "Spratt")

    @vcr.use_cassette(
        'tests/fixtures/cassettes/test_invalid_api_key_raises_exception.yml',
        filter_query_parameters=['api_key']
    )
    def test_invalid_api_key_raises_exception(self):
        """A rejected API key surfaces as HatchbuckAPIAuthenticationError."""
        hatchbuck = HatchbuckAPI("ABC123")
        self.assertRaises(
            HatchbuckAPIAuthenticationError,
            hatchbuck.search_contacts,
            emails=["<EMAIL>"]
        )
if __name__ == '__main__':
unittest.main() | tests/test_search_contacts.py | from __future__ import absolute_import
import os
import vcr
import unittest
from hatchbuck.api import HatchbuckAPI, HatchbuckAPIAuthenticationError
class TestSearchContacts(unittest.TestCase):
def setUp(self):
# Fake key can be used with existing cassettes
self.test_api_key = os.environ.get("HATCHBUCK_API_KEY", "ABC123")
@vcr.use_cassette(
'tests/fixtures/cassettes/test_search_by_email_with_results.yml',
filter_query_parameters=['api_key']
)
def test_search_by_email_with_results(self):
hatchbuck = HatchbuckAPI(self.test_api_key)
contacts = hatchbuck.search_contacts(emails=["<EMAIL>"])
self.assertEqual(len(contacts), 1)
self.assertEqual(contacts[0].firstName, "Jack")
self.assertEqual(contacts[0].lastName, "Spratt")
self.assertEqual(contacts[0].salesRep.username, "jakesen")
self.assertEqual(contacts[0].status.name, "Lead")
self.assertEqual(contacts[0].emails[0].address, "<EMAIL>")
self.assertEqual(contacts[0].emails[0].type, "Work")
self.assertEqual(contacts[0].subscribed, True)
self.assertEqual(contacts[0].timezone, "Central Standard Time")
@vcr.use_cassette(
'tests/fixtures/cassettes/test_search_by_email_with_no_results.yml',
filter_query_parameters=['api_key']
)
def test_search_by_email_with_no_results(self):
hatchbuck = HatchbuckAPI(self.test_api_key)
contacts = hatchbuck.search_contacts(emails=["<EMAIL>"])
self.assertEqual(contacts, None)
@vcr.use_cassette(
'tests/fixtures/cassettes/test_search_by_contact_id_with_results.yml',
filter_query_parameters=['api_key']
)
def test_search_by_contact_id_with_results(self):
hatchbuck = HatchbuckAPI(self.test_api_key)
contact_id = "NlNfOTJrVFFtd0E4NVhXWGdmSy0ySVdBSHhpZ01hS1NCSFFxMVBTTmlKVTE1"
contacts = hatchbuck.search_contacts(contactId=contact_id)
self.assertEqual(len(contacts), 1)
self.assertEqual(contacts[0].contactId, contact_id)
self.assertEqual(contacts[0].firstName, "Jack")
self.assertEqual(contacts[0].lastName, "Spratt")
@vcr.use_cassette(
'tests/fixtures/cassettes/test_search_by_name_with_results.yml',
filter_query_parameters=['api_key']
)
def test_search_by_name_with_results(self):
hatchbuck = HatchbuckAPI(self.test_api_key)
contacts = hatchbuck.search_contacts(
firstName="Jack",
lastName="Spratt"
)
self.assertEqual(len(contacts), 1)
self.assertEqual(contacts[0].firstName, "Jack")
self.assertEqual(contacts[0].lastName, "Spratt")
@vcr.use_cassette(
'tests/fixtures/cassettes/test_invalid_api_key_raises_exception.yml',
filter_query_parameters=['api_key']
)
def test_invalid_api_key_raises_exception(self):
hatchbuck = HatchbuckAPI("ABC123")
self.assertRaises(
HatchbuckAPIAuthenticationError,
hatchbuck.search_contacts,
emails=["<EMAIL>"]
)
if __name__ == '__main__':
unittest.main() | 0.500732 | 0.132318 |
import gzip
import json
from typing import cast
from unittest.mock import Mock
import pytest
from pytest_wdl.config import UserConfiguration
from pytest_wdl.core import (
DefaultDataFile, DataDirs, DataManager, DataResolver, create_data_file
)
from pytest_wdl.localizers import LinkLocalizer, UrlLocalizer
from pytest_wdl.utils import tempdir
from . import GOOD_URL, setenv
def test_data_file():
    """DefaultDataFile comparison honors the allowed_diff_lines option."""
    with tempdir() as d:
        foo = d / "foo.txt"
        # A data file with neither an existing path nor a localizer is invalid.
        with pytest.raises(ValueError):
            DefaultDataFile(foo, None)
        bar = d / "bar.txt"
        with open(foo, "wt") as out:
            out.write("foo\nbar")
        df = DefaultDataFile(bar, LinkLocalizer(foo))
        assert str(df) == str(bar)
        baz = d / "baz.txt"
        with open(baz, "wt") as out:
            out.write("foo\nbar")
        # Identical contents compare equal via path, str, or DataFile.
        df.assert_contents_equal(baz)
        df.assert_contents_equal(str(baz))
        df.assert_contents_equal(DefaultDataFile(baz))
        blorf = d / "blorf.txt"
        with open(blorf, "wt") as out:
            out.write("foo\nblorf\nbork")
        # Differing contents fail until allowed_diff_lines is raised enough.
        with pytest.raises(AssertionError):
            df.assert_contents_equal(blorf)
        df.compare_opts["allowed_diff_lines"] = 1
        with pytest.raises(AssertionError):
            df.assert_contents_equal(blorf)
        df.compare_opts["allowed_diff_lines"] = 2
        df.assert_contents_equal(blorf)
def test_data_file_gz():
    """DefaultDataFile comparisons transparently decompress gzipped files."""
    with tempdir() as d:
        foo = d / "foo.txt.gz"
        with gzip.open(foo, "wt") as out:
            out.write("foo\nbar")
        df = DefaultDataFile(foo, allowed_diff_lines=0)
        # Compare identical files
        bar = d / "bar.txt.gz"
        with gzip.open(bar, "wt") as out:
            out.write("foo\nbar")
        df.assert_contents_equal(bar)
        df.assert_contents_equal(str(bar))
        df.assert_contents_equal(DefaultDataFile(bar))
        # Compare differing files: one changed line is within the allowance
        df.set_compare_opts(allowed_diff_lines=1)
        baz = d / "baz.txt.gz"
        with gzip.open(baz, "wt") as out:
            out.write("foo\nbaz")
        # Bug fix: the original compared `bar` again here, so the differing
        # file `baz` was never actually exercised.
        df.assert_contents_equal(baz)
        df.assert_contents_equal(str(baz))
        df.assert_contents_equal(DefaultDataFile(baz))
def test_data_file_dict_type():
    """create_data_file accepts a dict `type` carrying compare options."""
    with tempdir() as d:
        foo = d / "foo.txt.gz"
        with gzip.open(foo, "wt") as out:
            out.write("foo\nbar")
        df = create_data_file(
            user_config=UserConfiguration(),
            path=foo,
            type={
                "name": "default",
                "allowed_diff_lines": 1
            }
        )
        bar = d / "bar.txt.gz"
        with gzip.open(bar, "wt") as out:
            out.write("foo\nbaz")
        # One differing line is tolerated because of allowed_diff_lines=1.
        df.assert_contents_equal(bar)
        df.assert_contents_equal(str(bar))
        df.assert_contents_equal(DefaultDataFile(bar))
def test_data_file_class():
    """A descriptor with a 'class' key resolves through that class plugin."""
    dd = DataResolver(data_descriptors={
        "foo": {
            "class": "bar",
            "value": 1
        }
    }, user_config=UserConfiguration())
    # Resolution yields the plain value, not a DataFile wrapper.
    assert dd.resolve("foo") == 1
def test_data_file_json_contents():
    """Dict `contents` are serialized as JSON into the created data file."""
    with tempdir() as d:
        foo = d / "foo.json"
        df = create_data_file(
            user_config=UserConfiguration(),
            path=foo,
            contents={
                "a": 1,
                "b": "foo"
            }
        )
        # Round-trip: the file on disk parses back to the original dict.
        with open(df.path, "rt") as inp:
            assert json.load(inp) == {
                "a": 1,
                "b": "foo"
            }
def test_data_dirs():
    """DataDirs builds the search-path list from module/class/function names."""
    with tempdir() as d:
        # Mocks stand in for the pytest module/class/function objects.
        mod = Mock()
        mod.__name__ = "foo.bar"
        cls = Mock()
        cls.__name__ = "baz"
        fun = Mock()
        fun.__name__ = "blorf"
        mod_cls_fun = d / "foo" / "bar" / "baz" / "blorf"
        mod_cls_fun.mkdir(parents=True)
        data_mod_cls_fun = d / "data" / "foo" / "bar" / "baz" / "blorf"
        data_mod_cls_fun.mkdir(parents=True)
        # The base path must match the module path, otherwise it is rejected.
        with pytest.raises(RuntimeError):
            DataDirs(d, mod, fun, cls)
        dd = DataDirs(d / "foo", mod, fun, cls)
        # Most-specific first: function dir, then class, module, data mirrors.
        assert dd.paths == [
            mod_cls_fun,
            d / "foo" / "bar" / "baz",
            d / "foo" / "bar",
            data_mod_cls_fun,
            d / "data" / "foo" / "bar" / "baz",
            d / "data" / "foo" / "bar",
            d / "data"
        ]
        # Without a class, the class-level directories drop out of the list.
        mod_cls_fun = d / "foo" / "bar" / "blorf"
        mod_cls_fun.mkdir(parents=True)
        data_mod_cls_fun = d / "data" / "foo" / "bar" / "blorf"
        data_mod_cls_fun.mkdir(parents=True)
        dd = DataDirs(d / "foo", mod, fun)
        assert dd.paths == [
            mod_cls_fun,
            d / "foo" / "bar",
            data_mod_cls_fun,
            d / "data" / "foo" / "bar",
            d / "data"
        ]
def test_data_resolver():
    """DataResolver finds named files in DataDirs and passes scalars through."""
    with tempdir() as d:
        test_data = {
            "foo": {
                "name": "foo.txt"
            },
            "bar": 1
        }
        foo_txt = d / "data" / "foo.txt"
        foo_txt.parent.mkdir()
        with open(foo_txt, "wt") as out:
            out.write("bar")
        mod = Mock()
        mod.__name__ = ""
        fun = Mock()
        fun.__name__ = "test_foo"
        dd = DataDirs(d, mod, fun)
        resolver = DataResolver(test_data, UserConfiguration(None, cache_dir=d))
        # Unknown keys raise; file entries resolve to paths; scalars pass through.
        with pytest.raises(FileNotFoundError):
            resolver.resolve("bork", dd)
        assert resolver.resolve("foo", dd).path == foo_txt
        assert resolver.resolve("bar", dd) == 1
def test_data_resolver_env():
    """An 'env' descriptor resolves from the environment; 'path' wins over it."""
    with tempdir() as d:
        path = d / "foo.txt"
        with open(path, "wt") as out:
            out.write("foo")
        with setenv({"FOO": str(path)}):
            resolver = DataResolver({
                "foo": {
                    "env": "FOO"
                }
            }, UserConfiguration(None, cache_dir=d))
            assert resolver.resolve("foo").path == path
            # An explicit 'path' takes precedence over the env variable.
            bar = d / "bar.txt"
            resolver = DataResolver({
                "foo": {
                    "env": "FOO",
                    "path": bar
                }
            }, UserConfiguration(None, cache_dir=d))
            assert resolver.resolve("foo").path == bar
def test_data_resolver_local_path():
    """Relative 'path' descriptors resolve against the cache dir, with env expansion."""
    with tempdir() as d:
        path = d / "foo.txt"
        with open(path, "wt") as out:
            out.write("foo")
        resolver = DataResolver({
            "foo": {
                "path": "foo.txt"
            }
        }, UserConfiguration(None, cache_dir=d))
        assert resolver.resolve("foo").path == path
        # ${VAR} references inside the path are expanded from the environment.
        with setenv({"MYPATH": str(d)}):
            resolver = DataResolver({
                "foo": {
                    "path": "${MYPATH}/foo.txt"
                }
            }, UserConfiguration(None, cache_dir=d))
            assert resolver.resolve("foo").path == path
def test_data_resolver_create_from_contents():
    """'contents' descriptors materialize files under path, name, or a temp name."""
    # Case 1: explicit relative path -- parent directories are created.
    with tempdir() as d:
        resolver = DataResolver({
            "foo": {
                "path": "dir1/dir2/foo.txt",
                "contents": "foo"
            }
        }, UserConfiguration(None, cache_dir=d))
        parent = d / "dir1" / "dir2"
        foo = resolver.resolve("foo")
        assert foo.path == parent / "foo.txt"
        with open(foo.path, "rt") as inp:
            assert inp.read() == "foo"
    # Case 2: only a name -- the file lands directly in the cache dir.
    with tempdir() as d:
        resolver = DataResolver({
            "foo": {
                "name": "foo.txt",
                "contents": "foo"
            }
        }, UserConfiguration(None, cache_dir=d))
        foo = resolver.resolve("foo")
        assert foo.path == d / "foo.txt"
        with open(foo.path, "rt") as inp:
            assert inp.read() == "foo"
    # Case 3: contents only -- a generated file name inside the cache dir.
    with tempdir() as d:
        resolver = DataResolver({
            "foo": {
                "contents": "foo"
            }
        }, UserConfiguration(None, cache_dir=d))
        foo = resolver.resolve("foo")
        assert foo.path.parent == d
        assert foo.path.exists()
        with open(foo.path, "rt") as inp:
            assert inp.read() == "foo"
def test_data_resolver_create_from_url():
    """'url' descriptors download into path, name, or the URL's basename."""
    # Case 1: explicit relative path.
    with tempdir() as d:
        resolver = DataResolver({
            "foo": {
                "url": GOOD_URL,
                "path": "dir1/dir2/sample.vcf"
            }
        }, UserConfiguration(None, cache_dir=d))
        foo = resolver.resolve("foo")
        assert foo.path == d / "dir1" / "dir2" / "sample.vcf"
        with open(foo.path, "rt") as inp:
            assert inp.read() == "foo"
    # Case 2: only a name -- stored under the cache dir.
    with tempdir() as d:
        resolver = DataResolver({
            "foo": {
                "url": GOOD_URL,
                "name": "sample.vcf"
            }
        }, UserConfiguration(None, cache_dir=d))
        foo = resolver.resolve("foo")
        assert foo.path == d / "sample.vcf"
        with open(foo.path, "rt") as inp:
            assert inp.read() == "foo"
    # Case 3: neither -- the file name comes from the URL itself.
    with tempdir() as d:
        resolver = DataResolver({
            "foo": {
                "url": GOOD_URL
            }
        }, UserConfiguration(None, cache_dir=d))
        foo = resolver.resolve("foo")
        assert foo.path == d / "test_file"
        with open(foo.path, "rt") as inp:
            assert inp.read() == "foo"
def test_data_resolver_create_from_datadir():
    """Files named in descriptors are found in DataDirs and linked into the cache."""
    with tempdir() as d, tempdir() as d1:
        mod = Mock()
        mod.__name__ = "foo.bar"
        cls = Mock()
        cls.__name__ = "baz"
        fun = Mock()
        fun.__name__ = "blorf"
        mod_cls_fun = d / "foo" / "bar" / "baz" / "blorf"
        mod_cls_fun.mkdir(parents=True)
        data_mod_cls_fun = d / "data" / "foo" / "bar" / "baz" / "blorf"
        data_mod_cls_fun.mkdir(parents=True)
        dd = DataDirs(d / "foo", mod, fun, cls)
        resolver = DataResolver({
            "boink": {
                "name": "boink.txt",
            },
            "bobble": {
                "name": "bobble.txt"
            },
            "burp": {
                "name": "burp.txt",
                "path": "burp.txt"
            }
        }, UserConfiguration(None, cache_dir=d1))
        # A file that exists in a data dir resolves to its original location.
        boink = d / "foo" / "bar" / "boink.txt"
        with open(boink, "wt") as out:
            out.write("boink")
        assert boink == resolver.resolve("boink", dd).path
        # A missing file raises even when data dirs are searched.
        with pytest.raises(FileNotFoundError):
            resolver.resolve("bobble", dd)
        # With a 'path', the found file is symlinked into the cache dir.
        burp = d / "foo" / "bar" / "burp.txt"
        with open(burp, "wt") as out:
            out.write("burp")
        burp_resolved = resolver.resolve("burp", dd).path
        assert burp_resolved == d1 / "burp.txt"
        assert burp_resolved.is_symlink()
        # Without data dirs there is nowhere to search, so resolution fails.
        with pytest.raises(FileNotFoundError):
            resolver.resolve("bobble")
def test_data_manager():
    """DataManager exposes resolved values as positional lists and keyed dicts."""
    dm = DataManager(
        data_resolver=DataResolver(
            {
                "foo": {
                    "class": "x",
                    "value": 1
                },
                "bar": {
                    "class": "x",
                    "value": 2
                }
            }, UserConfiguration()
        ),
        datadirs=None
    )
    assert [1, 2] == dm.get_list("foo", "bar")
    # Keyword form renames the entry: value of "bar" is returned under "bork".
    assert {"foo": 1, "bork": 2} == dm.get_dict("foo", bork="bar")
def test_http_header_set_in_workflow_data():
    """
    Test that workflow data file can define the HTTP Headers. This is
    important because the URLs referenced can be from different hosts and
    require different headers, so setting them at this level allows that
    fine-grained control.
    """
    # Without the env var set, the download still works (headers are optional).
    with tempdir() as d:
        config = UserConfiguration(cache_dir=d)
        assert not config.default_http_headers
        resolver = DataResolver({
            "foo": {
                "url": GOOD_URL,
                "path": "sample.vcf",
                "http_headers": {
                    "Auth-Header-Token": "TOKEN"
                }
            }
        }, config)
        foo = resolver.resolve("foo")
        assert foo.path == d / "sample.vcf"
        with open(foo.path, "rt") as inp:
            assert inp.read() == "foo"
    # With the env var set, the header value is substituted from TOKEN.
    with setenv({"TOKEN": "this_is_the_token"}), tempdir() as d:
        config = UserConfiguration(cache_dir=d)
        assert not config.default_http_headers
        resolver = DataResolver({
            "foo": {
                "url": GOOD_URL,
                "path": "sample.vcf",
                "http_headers": {
                    "Auth-Header-Token": "TOKEN"
                }
            }
        }, config)
        foo = resolver.resolve("foo")
        assert foo.path == d / "sample.vcf"
        assert isinstance(foo.localizer, UrlLocalizer)
        assert cast(UrlLocalizer, foo.localizer).http_headers == {
            "Auth-Header-Token": "<PASSWORD>"
        }
        with open(foo.path, "rt") as inp:
assert inp.read() == "foo" | tests/test_core.py |
import gzip
import json
from typing import cast
from unittest.mock import Mock
import pytest
from pytest_wdl.config import UserConfiguration
from pytest_wdl.core import (
DefaultDataFile, DataDirs, DataManager, DataResolver, create_data_file
)
from pytest_wdl.localizers import LinkLocalizer, UrlLocalizer
from pytest_wdl.utils import tempdir
from . import GOOD_URL, setenv
def test_data_file():
with tempdir() as d:
foo = d / "foo.txt"
with pytest.raises(ValueError):
DefaultDataFile(foo, None)
bar = d / "bar.txt"
with open(foo, "wt") as out:
out.write("foo\nbar")
df = DefaultDataFile(bar, LinkLocalizer(foo))
assert str(df) == str(bar)
baz = d / "baz.txt"
with open(baz, "wt") as out:
out.write("foo\nbar")
df.assert_contents_equal(baz)
df.assert_contents_equal(str(baz))
df.assert_contents_equal(DefaultDataFile(baz))
blorf = d / "blorf.txt"
with open(blorf, "wt") as out:
out.write("foo\nblorf\nbork")
with pytest.raises(AssertionError):
df.assert_contents_equal(blorf)
df.compare_opts["allowed_diff_lines"] = 1
with pytest.raises(AssertionError):
df.assert_contents_equal(blorf)
df.compare_opts["allowed_diff_lines"] = 2
df.assert_contents_equal(blorf)
def test_data_file_gz():
with tempdir() as d:
foo = d / "foo.txt.gz"
with gzip.open(foo, "wt") as out:
out.write("foo\nbar")
df = DefaultDataFile(foo, allowed_diff_lines=0)
# Compare identical files
bar = d / "bar.txt.gz"
with gzip.open(bar, "wt") as out:
out.write("foo\nbar")
df.assert_contents_equal(bar)
df.assert_contents_equal(str(bar))
df.assert_contents_equal(DefaultDataFile(bar))
# Compare differing files
df.set_compare_opts(allowed_diff_lines=1)
baz = d / "baz.txt.gz"
with gzip.open(baz, "wt") as out:
out.write("foo\nbaz")
df.assert_contents_equal(bar)
df.assert_contents_equal(str(bar))
df.assert_contents_equal(DefaultDataFile(bar))
def test_data_file_dict_type():
with tempdir() as d:
foo = d / "foo.txt.gz"
with gzip.open(foo, "wt") as out:
out.write("foo\nbar")
df = create_data_file(
user_config=UserConfiguration(),
path=foo,
type={
"name": "default",
"allowed_diff_lines": 1
}
)
bar = d / "bar.txt.gz"
with gzip.open(bar, "wt") as out:
out.write("foo\nbaz")
df.assert_contents_equal(bar)
df.assert_contents_equal(str(bar))
df.assert_contents_equal(DefaultDataFile(bar))
def test_data_file_class():
dd = DataResolver(data_descriptors={
"foo": {
"class": "bar",
"value": 1
}
}, user_config=UserConfiguration())
assert dd.resolve("foo") == 1
def test_data_file_json_contents():
with tempdir() as d:
foo = d / "foo.json"
df = create_data_file(
user_config=UserConfiguration(),
path=foo,
contents={
"a": 1,
"b": "foo"
}
)
with open(df.path, "rt") as inp:
assert json.load(inp) == {
"a": 1,
"b": "foo"
}
def test_data_dirs():
with tempdir() as d:
mod = Mock()
mod.__name__ = "foo.bar"
cls = Mock()
cls.__name__ = "baz"
fun = Mock()
fun.__name__ = "blorf"
mod_cls_fun = d / "foo" / "bar" / "baz" / "blorf"
mod_cls_fun.mkdir(parents=True)
data_mod_cls_fun = d / "data" / "foo" / "bar" / "baz" / "blorf"
data_mod_cls_fun.mkdir(parents=True)
with pytest.raises(RuntimeError):
DataDirs(d, mod, fun, cls)
dd = DataDirs(d / "foo", mod, fun, cls)
assert dd.paths == [
mod_cls_fun,
d / "foo" / "bar" / "baz",
d / "foo" / "bar",
data_mod_cls_fun,
d / "data" / "foo" / "bar" / "baz",
d / "data" / "foo" / "bar",
d / "data"
]
mod_cls_fun = d / "foo" / "bar" / "blorf"
mod_cls_fun.mkdir(parents=True)
data_mod_cls_fun = d / "data" / "foo" / "bar" / "blorf"
data_mod_cls_fun.mkdir(parents=True)
dd = DataDirs(d / "foo", mod, fun)
assert dd.paths == [
mod_cls_fun,
d / "foo" / "bar",
data_mod_cls_fun,
d / "data" / "foo" / "bar",
d / "data"
]
def test_data_resolver():
with tempdir() as d:
test_data = {
"foo": {
"name": "foo.txt"
},
"bar": 1
}
foo_txt = d / "data" / "foo.txt"
foo_txt.parent.mkdir()
with open(foo_txt, "wt") as out:
out.write("bar")
mod = Mock()
mod.__name__ = ""
fun = Mock()
fun.__name__ = "test_foo"
dd = DataDirs(d, mod, fun)
resolver = DataResolver(test_data, UserConfiguration(None, cache_dir=d))
with pytest.raises(FileNotFoundError):
resolver.resolve("bork", dd)
assert resolver.resolve("foo", dd).path == foo_txt
assert resolver.resolve("bar", dd) == 1
def test_data_resolver_env():
with tempdir() as d:
path = d / "foo.txt"
with open(path, "wt") as out:
out.write("foo")
with setenv({"FOO": str(path)}):
resolver = DataResolver({
"foo": {
"env": "FOO"
}
}, UserConfiguration(None, cache_dir=d))
assert resolver.resolve("foo").path == path
bar = d / "bar.txt"
resolver = DataResolver({
"foo": {
"env": "FOO",
"path": bar
}
}, UserConfiguration(None, cache_dir=d))
assert resolver.resolve("foo").path == bar
def test_data_resolver_local_path():
with tempdir() as d:
path = d / "foo.txt"
with open(path, "wt") as out:
out.write("foo")
resolver = DataResolver({
"foo": {
"path": "foo.txt"
}
}, UserConfiguration(None, cache_dir=d))
assert resolver.resolve("foo").path == path
with setenv({"MYPATH": str(d)}):
resolver = DataResolver({
"foo": {
"path": "${MYPATH}/foo.txt"
}
}, UserConfiguration(None, cache_dir=d))
assert resolver.resolve("foo").path == path
def test_data_resolver_create_from_contents():
with tempdir() as d:
resolver = DataResolver({
"foo": {
"path": "dir1/dir2/foo.txt",
"contents": "foo"
}
}, UserConfiguration(None, cache_dir=d))
parent = d / "dir1" / "dir2"
foo = resolver.resolve("foo")
assert foo.path == parent / "foo.txt"
with open(foo.path, "rt") as inp:
assert inp.read() == "foo"
with tempdir() as d:
resolver = DataResolver({
"foo": {
"name": "foo.txt",
"contents": "foo"
}
}, UserConfiguration(None, cache_dir=d))
foo = resolver.resolve("foo")
assert foo.path == d / "foo.txt"
with open(foo.path, "rt") as inp:
assert inp.read() == "foo"
with tempdir() as d:
resolver = DataResolver({
"foo": {
"contents": "foo"
}
}, UserConfiguration(None, cache_dir=d))
foo = resolver.resolve("foo")
assert foo.path.parent == d
assert foo.path.exists()
with open(foo.path, "rt") as inp:
assert inp.read() == "foo"
def test_data_resolver_create_from_url():
with tempdir() as d:
resolver = DataResolver({
"foo": {
"url": GOOD_URL,
"path": "dir1/dir2/sample.vcf"
}
}, UserConfiguration(None, cache_dir=d))
foo = resolver.resolve("foo")
assert foo.path == d / "dir1" / "dir2" / "sample.vcf"
with open(foo.path, "rt") as inp:
assert inp.read() == "foo"
with tempdir() as d:
resolver = DataResolver({
"foo": {
"url": GOOD_URL,
"name": "sample.vcf"
}
}, UserConfiguration(None, cache_dir=d))
foo = resolver.resolve("foo")
assert foo.path == d / "sample.vcf"
with open(foo.path, "rt") as inp:
assert inp.read() == "foo"
with tempdir() as d:
resolver = DataResolver({
"foo": {
"url": GOOD_URL
}
}, UserConfiguration(None, cache_dir=d))
foo = resolver.resolve("foo")
assert foo.path == d / "test_file"
with open(foo.path, "rt") as inp:
assert inp.read() == "foo"
def test_data_resolver_create_from_datadir():
with tempdir() as d, tempdir() as d1:
mod = Mock()
mod.__name__ = "foo.bar"
cls = Mock()
cls.__name__ = "baz"
fun = Mock()
fun.__name__ = "blorf"
mod_cls_fun = d / "foo" / "bar" / "baz" / "blorf"
mod_cls_fun.mkdir(parents=True)
data_mod_cls_fun = d / "data" / "foo" / "bar" / "baz" / "blorf"
data_mod_cls_fun.mkdir(parents=True)
dd = DataDirs(d / "foo", mod, fun, cls)
resolver = DataResolver({
"boink": {
"name": "boink.txt",
},
"bobble": {
"name": "bobble.txt"
},
"burp": {
"name": "burp.txt",
"path": "burp.txt"
}
}, UserConfiguration(None, cache_dir=d1))
boink = d / "foo" / "bar" / "boink.txt"
with open(boink, "wt") as out:
out.write("boink")
assert boink == resolver.resolve("boink", dd).path
with pytest.raises(FileNotFoundError):
resolver.resolve("bobble", dd)
burp = d / "foo" / "bar" / "burp.txt"
with open(burp, "wt") as out:
out.write("burp")
burp_resolved = resolver.resolve("burp", dd).path
assert burp_resolved == d1 / "burp.txt"
assert burp_resolved.is_symlink()
with pytest.raises(FileNotFoundError):
resolver.resolve("bobble")
def test_data_manager():
dm = DataManager(
data_resolver=DataResolver(
{
"foo": {
"class": "x",
"value": 1
},
"bar": {
"class": "x",
"value": 2
}
}, UserConfiguration()
),
datadirs=None
)
assert [1, 2] == dm.get_list("foo", "bar")
assert {"foo": 1, "bork": 2} == dm.get_dict("foo", bork="bar")
def test_http_header_set_in_workflow_data():
"""
Test that workflow data file can define the HTTP Headers. This is
important because the URLs referenced can be from different hosts and
require different headers, so setting them at this level allows that
fine-grained control.
"""
with tempdir() as d:
config = UserConfiguration(cache_dir=d)
assert not config.default_http_headers
resolver = DataResolver({
"foo": {
"url": GOOD_URL,
"path": "sample.vcf",
"http_headers": {
"Auth-Header-Token": "TOKEN"
}
}
}, config)
foo = resolver.resolve("foo")
assert foo.path == d / "sample.vcf"
with open(foo.path, "rt") as inp:
assert inp.read() == "foo"
with setenv({"TOKEN": "this_is_the_token"}), tempdir() as d:
config = UserConfiguration(cache_dir=d)
assert not config.default_http_headers
resolver = DataResolver({
"foo": {
"url": GOOD_URL,
"path": "sample.vcf",
"http_headers": {
"Auth-Header-Token": "TOKEN"
}
}
}, config)
foo = resolver.resolve("foo")
assert foo.path == d / "sample.vcf"
assert isinstance(foo.localizer, UrlLocalizer)
assert cast(UrlLocalizer, foo.localizer).http_headers == {
"Auth-Header-Token": "<PASSWORD>"
}
with open(foo.path, "rt") as inp:
assert inp.read() == "foo" | 0.67822 | 0.314129 |
import os
import glob
import shutil
import tarfile
import argparse
import mapred_utils as util
# NOTE(review): this script is Python 2 only (print statements, xrange);
# it will not run under Python 3 without porting.
# Backs up one map/reduce "save folder": snapshots config, worker/reduce
# outputs, logs, and optionally the job_* folders into data/<name>/.
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--folder', default=os.getcwd(), help='The "save folder" of the map/reduce task being backed up')
    parser.add_argument('--name', default=util.sortable_timestamp(), help='Name of the subfolder to create in the data directory')
    parser.add_argument('--force', action='store_true', help='Go on with the backup even if folder already exists')
    parser.add_argument('--no-jobs', dest='jobs', action='store_false', help='Exclude jobs from backup')
    parser.add_argument('--compress', action='store_true', help='Compress jobs using bz2')
    args = parser.parse_args()
    # Make sure save folder exists
    saveFolder = args.folder
    assert os.path.isdir(saveFolder), 'Folder not found: ' + saveFolder
    # Find config file
    cfgFile = os.path.join( saveFolder, 'config', 'config.json' )
    assert os.path.isfile(cfgFile), 'Could not find config file: ' + cfgFile
    config = util.read_json(cfgFile)
    # Create backup folder
    backupFolder = os.path.join( saveFolder, 'data', args.name )
    if os.path.isdir(backupFolder):
        # Refuse to overwrite an existing backup unless --force was given.
        assert args.force, 'Folder "%s" already exists, aborting.' % (backupFolder)
    else:
        os.makedirs( backupFolder )
    # Copy current config
    shutil.copy2( cfgFile, backupFolder )
    # Move workers output (filenames templated with a 1-based worker index).
    nworkers = len(config['exec']['workers'])
    wmove = []
    for i in xrange(nworkers):
        wname = config['files']['worker'] % (i+1)
        wfile = os.path.join( saveFolder, wname )
        if os.path.isfile(wfile):
            wmove.append(wname)
            os.rename( wfile, os.path.join(backupFolder,wname) )
    # Move reduced output
    if 'reduced' in config['files']:
        rname = config['files']['reduced'] # compatibility issue
    else:
        rname = config['files']['reduce']
    rfile = os.path.join( saveFolder, rname )
    if os.path.isfile(rfile):
        os.rename( rfile, os.path.join(backupFolder,rname) )
    # Move log folder (should match substitution in mapred_build)
    try:
        logFolder = os.path.join(saveFolder,'logs')
        shutil.move( logFolder, backupFolder )
        os.makedirs( logFolder ) # make a new one
    except:
        # NOTE(review): bare except hides unrelated failures (permissions,
        # disk full); catching OSError/IOError would be safer.
        print "Could not find or move logs folder: " + logFolder
    # Compress job folders
    jmove = []
    if args.jobs:
        # cx selects archive extension and tarfile open mode together.
        if args.compress: cx = {'ext': '.tar.bz2', 'fmt': 'w:bz2'}
        else: cx = {'ext': '.tar', 'fmt': 'w'}
        jobFolders = glob.glob(os.path.join( saveFolder, 'job_*' ))
        jobArchive = os.path.join( backupFolder, 'jobs' + cx['ext'] )
        print "Compressing %d jobs outputs to archive %s (please wait)..." % ( len(jobFolders), jobArchive )
        with tarfile.open( jobArchive, cx['fmt'] ) as tar:
            for job in jobFolders:
                jobName = os.path.basename(job)
                jmove.append( jobName )
                tar.add( job, arcname=jobName )
print 'Backed up to folder "%s" (%d output(s), %d folder(s))' % (backupFolder,len(wmove),len(jmove)) | external_packages/matlab/non_default_packages/Gaussian_Process/deck/+dk/+mapred/python/mapred_backup.py |
import os
import glob
import shutil
import tarfile
import argparse
import mapred_utils as util
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--folder', default=os.getcwd(), help='The "save folder" of the map/reduce task being backed up')
parser.add_argument('--name', default=util.sortable_timestamp(), help='Name of the subfolder to create in the data directory')
parser.add_argument('--force', action='store_true', help='Go on with the backup even if folder already exists')
parser.add_argument('--no-jobs', dest='jobs', action='store_false', help='Exclude jobs from backup')
parser.add_argument('--compress', action='store_true', help='Compress jobs using bz2')
args = parser.parse_args()
# Make sure save folder exists
saveFolder = args.folder
assert os.path.isdir(saveFolder), 'Folder not found: ' + saveFolder
# Find config file
cfgFile = os.path.join( saveFolder, 'config', 'config.json' )
assert os.path.isfile(cfgFile), 'Could not find config file: ' + cfgFile
config = util.read_json(cfgFile)
# Create backup folder
backupFolder = os.path.join( saveFolder, 'data', args.name )
if os.path.isdir(backupFolder):
assert args.force, 'Folder "%s" already exists, aborting.' % (backupFolder)
else:
os.makedirs( backupFolder )
# Copy current config
shutil.copy2( cfgFile, backupFolder )
# Move workers output
nworkers = len(config['exec']['workers'])
wmove = []
for i in xrange(nworkers):
wname = config['files']['worker'] % (i+1)
wfile = os.path.join( saveFolder, wname )
if os.path.isfile(wfile):
wmove.append(wname)
os.rename( wfile, os.path.join(backupFolder,wname) )
# Move reduced output
if 'reduced' in config['files']:
rname = config['files']['reduced'] # compatibility issue
else:
rname = config['files']['reduce']
rfile = os.path.join( saveFolder, rname )
if os.path.isfile(rfile):
os.rename( rfile, os.path.join(backupFolder,rname) )
# Move log folder (should match substitution in mapred_build)
try:
logFolder = os.path.join(saveFolder,'logs')
shutil.move( logFolder, backupFolder )
os.makedirs( logFolder ) # make a new one
except:
print "Could not find or move logs folder: " + logFolder
# Compress job folders
jmove = []
if args.jobs:
if args.compress: cx = {'ext': '.tar.bz2', 'fmt': 'w:bz2'}
else: cx = {'ext': '.tar', 'fmt': 'w'}
jobFolders = glob.glob(os.path.join( saveFolder, 'job_*' ))
jobArchive = os.path.join( backupFolder, 'jobs' + cx['ext'] )
print "Compressing %d jobs outputs to archive %s (please wait)..." % ( len(jobFolders), jobArchive )
with tarfile.open( jobArchive, cx['fmt'] ) as tar:
for job in jobFolders:
jobName = os.path.basename(job)
jmove.append( jobName )
tar.add( job, arcname=jobName )
# Write summary
print 'Backed up to folder "%s" (%d output(s), %d folder(s))' % (backupFolder,len(wmove),len(jmove)) | 0.099284 | 0.077518 |
import requests
import pyexcel as pe
from bs4 import BeautifulSoup
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
import csv
import re
from datetime import datetime
class Text:
    """A piece of campaign text with sentiment and word-count helpers."""

    def __init__(self, body):
        self.body = body

    def calculate_sentiment(self, analyzer):
        """Return the compound VADER polarity score of the body."""
        scores = analyzer.polarity_scores(self.body)
        return scores["compound"]

    def calculate_length(self):
        """Return the number of whitespace-separated words in the body."""
        words = self.body.strip().lower().split()
        return len(words)
class Goal:
    """Fundraising progress: amount raised toward a target goal."""

    def __init__(self, raised, goal):
        # raised: amount collected so far; goal: target amount (0 when the
        # page showed no target).
        self.raised = raised
        self.goal = goal

    def pct_raised(self):
        """Fraction of the goal raised, rounded to 2 places; 0 when no goal.

        Bug fix: the original tested ``self.goal is not 0`` — an identity
        check that is only incidentally correct for the int 0 (CPython small
        int caching) and wrong for 0.0, which then divided by zero.
        """
        if self.goal != 0:
            return float(round(self.raised / self.goal, 2))
        return self.goal
class Campaign:
    """Plain record of one scraped GoFundMe campaign.

    campaignTitle and campaignDesc are Text instances; goal is a Goal;
    the remaining fields are plain values. No validation is performed.
    """
    def __init__(self, url, campaignTitle, goal, shareCount, campaignDesc, donorCount, timePeriod):
        self.url = url
        self.campaignTitle = campaignTitle
        self.goal = goal
        self.shareCount = shareCount
        self.campaignDesc = campaignDesc
        self.donorCount = donorCount
        self.timePeriod = timePeriod
def clean_goal(goalText):
    """Parse a GoFundMe goal string like "$1,500 of $10k goal".

    Returns (raised, goal): raised as an int dollar amount (0 when only a
    goal is shown), goal as a float with 'k'/'M' suffixes expanded.

    Bug fix: the original passed the raised amount straight to int(), which
    crashed on suffixed values such as "1.5k"; both amounts now go through
    the same suffix-aware parser.
    """
    def _to_dollars(text):
        # Expand the thousand/million suffixes GoFundMe uses ("2.5k", "1M").
        text = text.replace(' ', '').strip('\n')
        if 'k' in text:
            return float(text.replace('k', '')) * 1000
        if 'M' in text:
            return float(text.replace('M', '')) * 1000000
        return float(text)

    cleaned = goalText.strip('\n').rstrip(' goal').replace('$', '').replace(',', '')
    if ' of ' in cleaned:
        raised_text, goal_text = cleaned.split(' of ')
        raised = int(_to_dollars(raised_text))
    else:
        goal_text = cleaned
        raised = 0
    return raised, _to_dollars(goal_text)
def clean_share_count(shareCountText):
    """Turn the share-count element into an int; 0 when the element is missing.

    Accepts the bs4 tag returned by soup.find() (reads its .text) and
    expands a trailing 'k' as thousands.
    """
    if not shareCountText:
        return 0
    raw = shareCountText.text.replace(' ', '').strip('\n')
    if 'k' in raw:
        raw = float(raw.replace('k', '')) * 1000
    return int(raw)
def clean_donor_count(donorCountText):
    """Parse the campaign-status line into (donor_count, time_fundraising).

    "Campaign created N month(s)/day(s) ..." lines carry no donors yet, so
    the count is 0; "Raised by X people in Y" lines carry both values.
    The time string is returned with spaces/newlines stripped (e.g. "5months").
    """
    if 'Campaign created ' in donorCountText:
        remainder = donorCountText.replace('Campaign created ', '')
        if 'month' in remainder:
            time = remainder.split(' month')[0].replace(' ', '').strip('\n')
            donorCountText = 0
        elif 'day' in remainder:
            time = remainder.split(' day')[0].replace(' ', '').strip('\n')
            donorCountText = 0
    else:
        donorCountText, time = donorCountText.replace('Raised by ', '').split(' in ')
        for junk in (' ', ',', 'people', 'person'):
            donorCountText = donorCountText.replace(junk, '')
        donorCountText = donorCountText.strip('\n')
        time = time.replace(' ', '').strip('\n')
    return (int(donorCountText), time)
def generate_urls(city, state, urls):
    """Append campaign links found on the city/state search page to *urls*.

    Mutates *urls* in place and also returns it; duplicates and empty hrefs
    are skipped. Network-bound: one HTTP GET per call.
    """
    # Note: "-" "fundraising" is implicit string concatenation -> "-fundraising".
    url = "https://www.gofundme.com/search/us/" + city + "-" + state + "-" "fundraising"
    req = requests.get(url)
    soup = BeautifulSoup(req.text, "lxml")
    # Each search result is wrapped in a react-campaign-tile-details div.
    data = soup.findAll('div',attrs={'class':'react-campaign-tile-details'})
    for div in data:
        links = div.findAll('a')
        for a in links:
            this_link = a['href']
            if this_link not in urls and this_link != '':
                urls.append(this_link)
    return urls
def scrape():
    """Scrape GoFundMe campaigns for the configured Texas cities.

    Builds the URL list via generate_urls(), then fetches each campaign page
    and extracts title, goal, share, donor and timing data into a list of
    dicts. Network-bound: one HTTP request per campaign.
    """
    urls = []
    campaigns = []
    analyzer = SentimentIntensityAnalyzer()
    # Place (city, stateAbbreviation) tuples in a list that you would like to be scraped
    locations = [["austin","tx"], ["san-antonio", "tx"], ["dallas", "tx"], ["houston", "tx"], ["fort-worth","tx"], ["el-paso", "tx"], ["arlington", "tx"]]
    for city, state in locations:
        generate_urls(city, state, urls)
    for url in urls:
        print(url)
        req = requests.get(url)
        soup = BeautifulSoup(req.text, "lxml")
        # Exclude archived campaigns.
        active = soup.find('div', class_="var-width-column")
        if active and "no longer active" in active.text:
            # BUG FIX: this used to `break`, silently abandoning every
            # remaining URL after the first inactive campaign; `continue`
            # skips only this one.
            print(url)
            continue
        # Grabbing title
        title = soup.find('h1', class_='campaign-title')
        ctitle = Text('') if title is None else Text(title.text)
        # Grabbing goal info
        goal_class = soup.find('h2', class_='goal')
        if goal_class is None:
            cgoal = Goal(0, 0)
        else:
            raised, goal = clean_goal(goal_class.text)
            cgoal = Goal(raised, goal)
        # Grabbing share count
        cShareCount = clean_share_count(soup.find('strong', class_='js-share-count-text'))
        # Grabbing description (whitespace collapsed to single spaces)
        desc = soup.find('div', class_='co-story')
        if desc is None:
            cDesc = Text('')
        else:
            cDesc = Text(re.sub('\\s+', ' ', desc.text))
        # Grabbing donor count and time spent fundraising
        donor_count = soup.find('div', class_='campaign-status text-small')
        if donor_count is None:
            donor = ''
            time = ''
        else:
            donor, time = clean_donor_count(donor_count.text)
        c = Campaign(url, ctitle, cgoal, cShareCount, cDesc, donor, time)
        cData = {
            "url": c.url,
            "title": c.campaignTitle.body,
            "title-length": c.campaignTitle.calculate_length(),
            "title-sentiment": c.campaignTitle.calculate_sentiment(analyzer),
            "description": c.campaignDesc.body,
            "description-length": c.campaignDesc.calculate_length(),
            "description-sentiment": c.campaignDesc.calculate_sentiment(analyzer),
            "share-count": c.shareCount,
            "donor-count": c.donorCount,
            # NOTE(review): "raised" maps c.goal.goal (the target), not
            # c.goal.raised — looks mislabeled; confirm intent before changing.
            "raised": c.goal.goal,
            "pct-goal-met": c.goal.pct_raised()
        }
        campaigns.append(cData)
    return campaigns
import pyexcel as pe
from bs4 import BeautifulSoup
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
import csv
import re
from datetime import datetime
class Text:
def __init__(self, body):
self.body = body
def calculate_sentiment(self, analyzer):
vs = analyzer.polarity_scores(self.body)
return(vs["compound"])
def calculate_length(self):
return len(self.body.strip().lower().split())
class Goal:
    """Fundraising progress: amount raised toward a target goal."""

    def __init__(self, raised, goal):
        # raised: amount collected so far; goal: target amount (0 when the
        # page showed no target).
        self.raised = raised
        self.goal = goal

    def pct_raised(self):
        """Fraction of the goal raised, rounded to 2 places; 0 when no goal.

        Bug fix: the original tested ``self.goal is not 0`` — an identity
        check that is only incidentally correct for the int 0 (CPython small
        int caching) and wrong for 0.0, which then divided by zero.
        """
        if self.goal != 0:
            return float(round(self.raised / self.goal, 2))
        return self.goal
class Campaign:
def __init__(self, url, campaignTitle, goal, shareCount, campaignDesc, donorCount, timePeriod):
self.url = url
self.campaignTitle = campaignTitle
self.goal = goal
self.shareCount = shareCount
self.campaignDesc = campaignDesc
self.donorCount = donorCount
self.timePeriod = timePeriod
def clean_goal(goalText):
if ' of ' in goalText:
raised, goal = goalText.strip('\n').rstrip(' goal').replace('$', '').replace(',', '').split(' of ')
raised = int(raised.replace(' ','').strip('\n'))
goal = goal.replace(' ','').strip('\n')
else:
goal = goalText.strip('\n').rstrip(' goal').replace('$', '').replace(',', '')
raised = 0
thousand = 'k'
million = 'M'
if thousand in goal:
goal = float(goal.replace('k',''))*1000
elif million in goal:
goal = float(goal.replace('M',''))*1000000
else:
goal = float(goal)
return raised, goal
def clean_share_count(shareCountText):
if shareCountText:
shareCountText = shareCountText.text.replace(' ','').strip('\n')
if 'k' in shareCountText:
shareCountText = float(shareCountText.replace('k',''))*1000
else:
shareCountText = 0
return int(shareCountText)
def clean_donor_count(donorCountText):
if 'Campaign created ' in donorCountText:
if 'month' in donorCountText:
time, suffix = donorCountText.replace('Campaign created ','').split(' month')
time = time.replace(' ','').strip('\n')
donorCountText = 0
elif 'day' in donorCountText:
time, suffix = donorCountText.replace('Campaign created ','').split(' day')
time = time.replace(' ','').strip('\n')
donorCountText = 0
else:
donorCountText, time = donorCountText.replace('Raised by ','').split(' in ')
donorCountText = donorCountText.replace(' ','').replace(',','').strip('\n').replace('people','').replace('person','')
time = time.replace(' ','').strip('\n')
return(int(donorCountText), time)
def generate_urls(city, state, urls):
url = "https://www.gofundme.com/search/us/" + city + "-" + state + "-" "fundraising"
req = requests.get(url)
soup = BeautifulSoup(req.text, "lxml")
data = soup.findAll('div',attrs={'class':'react-campaign-tile-details'})
for div in data:
links = div.findAll('a')
for a in links:
this_link = a['href']
if this_link not in urls and this_link != '':
urls.append(this_link)
return urls
def scrape():
urls = []
campaigns = []
analyzer = SentimentIntensityAnalyzer()
#Place (city, stateAbbreviation) tuples in a list that you would like to be scraped
locations = [["austin","tx"], ["san-antonio", "tx"], ["dallas", "tx"], ["houston", "tx"], ["fort-worth","tx"], ["el-paso", "tx"], ["arlington", "tx"]]
for city, state in locations:
generate_urls(city, state, urls)
for url in urls:
print(url)
req = requests.get(url)
soup = BeautifulSoup(req.text, "lxml")
#Exclude archived campaigns
active = soup.find('div', class_="var-width-column")
if active:
if "no longer active" in active.text:
print(url)
break
#Grabbing title
title = soup.find('h1', class_='campaign-title')
if title is None:
ctitle=Text('')
else:
ctitle = Text(title.text)
#Grabbing goal info
goal_class = soup.find('h2', class_='goal')
if goal_class is None:
cgoal = Goal(0, 0)
else:
raised, goal = clean_goal(goal_class.text)
cgoal = Goal(raised, goal)
#Grabbing share count
cShareCount = clean_share_count(soup.find('strong', class_='js-share-count-text'))
#Grabbing description
desc = soup.find('div', class_='co-story')
if desc is None:
cDesc = Text('')
else:
desc = re.sub('\\s+',' ',desc.text)
cDesc = Text(desc)
#Grabbing donor count and time spent fundraising
donor_count = soup.find('div', class_='campaign-status text-small')
if donor_count is None:
donor = ''
time = ''
else:
donor, time = clean_donor_count(donor_count.text)
c = Campaign(url, ctitle, cgoal, cShareCount, cDesc, donor, time)
cData = {
"url": c.url,
"title": c.campaignTitle.body,
"title-length": c.campaignTitle.calculate_length(),
"title-sentiment": c.campaignTitle.calculate_sentiment(analyzer),
"description": c.campaignDesc.body,
"description-length": c.campaignDesc.calculate_length(),
"description-sentiment": c.campaignDesc.calculate_sentiment(analyzer),
"share-count": c.shareCount,
"donor-count": c.donorCount,
"raised": c.goal.goal,
"pct-goal-met": c.goal.pct_raised()
}
campaigns.append(cData)
return campaigns | 0.299515 | 0.220384 |
import dash_core_components as dcc
import dash_html_components as html
def render(title="Earnings Overview", xl_size=8, lg_size=7, dropdown_id="dropdownMenuLink", graph=None):
    """Wrap *graph* in an SB-Admin-2 card with a header dropdown.

    Arguments:
        title: card header text.
        xl_size/lg_size: bootstrap column widths at the xl/lg breakpoints.
        dropdown_id: DOM id linking the dropdown toggle to its menu.
        graph: dash component for the card body; a fresh dcc.Graph() when None.

    Bug fix: the default used to be ``graph=dcc.Graph()``, a single component
    built once at import time and shared by every call; ``None`` plus
    per-call construction gives each card its own component.
    """
    if graph is None:
        graph = dcc.Graph()
    return html.Div(
        className=f'col-xl-{xl_size} col-lg-{lg_size}',
        children=html.Div(
            className='card shadow mb-4',
            children=[
                # Card Header - Dropdown
                html.Div(
                    className="card-header py-3 d-flex flex-row align-items-center justify-content-between",
                    children=[
                        html.H6(
                            title, className="m-0 font-weight-bold text-primary"),
                        html.Div(
                            className="dropdown no-arrow",
                            children=[html.A(
                                className="dropdown-toggle",
                                href="#",
                                role="button",
                                id=dropdown_id,
                                **{"data-toggle": "dropdown", "aria-haspopup": "true", "aria-expanded": "false"},
                                children=html.I(
                                    className="fas fa-ellipsis-v fa-sm fa-fw text-gray-400")
                            ),
                                html.Div(
                                    className="dropdown-menu dropdown-menu-right shadow animated--fade-in",
                                    **{"aria-labelledby": dropdown_id},
                                    children=[
                                        html.Div("Dropdown Header:",
                                                 className="dropdown-header"),
                                        html.A("Action", className="dropdown-item"),
                                        html.A("Another action",
                                               className="dropdown-item"),
                                        html.Div(className="dropdown-divider"),
                                        html.A("Something else here",
                                               className="dropdown-item")
                                    ]
                                )
                            ],
                        )
                    ]
                ),
                # Card Body
                html.Div(
                    className="card-body",
                    children=graph
                )
            ]
        )
    )
import dash_html_components as html
def render(title="Earnings Overview", xl_size=8, lg_size=7, dropdown_id="dropdownMenuLink", graph=None):
    """Wrap *graph* in an SB-Admin-2 card with a header dropdown.

    Arguments:
        title: card header text.
        xl_size/lg_size: bootstrap column widths at the xl/lg breakpoints.
        dropdown_id: DOM id linking the dropdown toggle to its menu.
        graph: dash component for the card body; a fresh dcc.Graph() when None.

    Bug fix: the default used to be ``graph=dcc.Graph()``, a single component
    built once at import time and shared by every call; ``None`` plus
    per-call construction gives each card its own component.
    """
    if graph is None:
        graph = dcc.Graph()
    return html.Div(
        className=f'col-xl-{xl_size} col-lg-{lg_size}',
        children=html.Div(
            className='card shadow mb-4',
            children=[
                # Card Header - Dropdown
                html.Div(
                    className="card-header py-3 d-flex flex-row align-items-center justify-content-between",
                    children=[
                        html.H6(
                            title, className="m-0 font-weight-bold text-primary"),
                        html.Div(
                            className="dropdown no-arrow",
                            children=[html.A(
                                className="dropdown-toggle",
                                href="#",
                                role="button",
                                id=dropdown_id,
                                **{"data-toggle": "dropdown", "aria-haspopup": "true", "aria-expanded": "false"},
                                children=html.I(
                                    className="fas fa-ellipsis-v fa-sm fa-fw text-gray-400")
                            ),
                                html.Div(
                                    className="dropdown-menu dropdown-menu-right shadow animated--fade-in",
                                    **{"aria-labelledby": dropdown_id},
                                    children=[
                                        html.Div("Dropdown Header:",
                                                 className="dropdown-header"),
                                        html.A("Action", className="dropdown-item"),
                                        html.A("Another action",
                                               className="dropdown-item"),
                                        html.Div(className="dropdown-divider"),
                                        html.A("Something else here",
                                               className="dropdown-item")
                                    ]
                                )
                            ],
                        )
                    ]
                ),
                # Card Body
                html.Div(
                    className="card-body",
                    children=graph
                )
            ]
        )
    )
import typing, math, os
import multiprocessing, tempfile, pickle
def divideChunks(lis: typing.Iterable, n_size: int) -> typing.Iterator:
    """Yield successive slices of *lis* of length *n_size* (last may be shorter)."""
    start = 0
    while start < len(lis):
        yield lis[start:start + n_size]
        start += n_size
def inferWorkers() -> int:
    """Return the worker-process count: all CPUs but one, never fewer than 1.

    Bug fix: the original returned cpu_count() - 1 unclamped, which is 0 on a
    single-core machine and caused a ZeroDivisionError downstream when the
    chunk size was computed as ceil(len(data) / n_workers).
    """
    return max(1, multiprocessing.cpu_count() - 1)
def lisJobParallel(func: typing.Callable, list_like: typing.Iterable, use_buffer:bool = False, n_workers: int = -1) -> list:
    """
    parallel a job that is applied to a list-like object
    The paralleled function should only take one argument which is the list_like
    the function should be able to be run on subset of the list_like
    and return list-like results or None
    i.e.
    func(list_like) -> list_like2 | None
    - list_like: list-like argument
    - n_workers: number of process, set to -1 for using auto-inferring
    - use_buffer: use hard disk as buffer for each subprocess output, enable when the data exchange is large
    """
    if n_workers <= 0:
        n_workers = inferWorkers()
    data = list_like
    # Ceiling division: the last chunk may be smaller; at most n_workers chunks.
    chunk_size = math.ceil(len(data)/n_workers)
    conns = []
    procs = []
    # define the function for multiprocessing.Process
    def processFunc(conn, func_, data_):
        out_ = func_(data_)
        if use_buffer:
            # NOTE(review): only the *name* of the NamedTemporaryFile is used
            # and the file is reopened — this is race-prone and the temp file
            # relies on the explicit os.remove below for cleanup.
            f_path = tempfile.NamedTemporaryFile(mode = "w+b").name
            with open(f_path, "wb") as fp:
                bytes = pickle.dumps(out_)
                fp.write(bytes)
            # Send only the path over the pipe, not the (large) payload.
            conn.send(f_path)
        else:
            conn.send(out_)
    # Create and run the processes
    for d in divideChunks(data, chunk_size):
        conn1, conn2 = multiprocessing.Pipe()
        process = multiprocessing.Process(\
            target=processFunc, args=(conn1, func, d))
        conns.append(conn2)
        procs.append(process)
        process.start()
    # NOTE(review): joining before recv() can deadlock when use_buffer is
    # False and a child's payload exceeds the OS pipe buffer — the child
    # blocks in send() and never exits; confirm payload sizes or recv first.
    for p in procs:
        p.join()
    # Concatenate results
    out = []
    for conn_ in conns:
        obj = conn_.recv()
        if use_buffer:
            # obj is a file path: load the pickled chunk result from disk.
            with open(obj, "rb") as fp:
                out_ = pickle.loads(fp.read())
            os.remove(obj) # delete the temporary buffer file
        else:
            out_ = obj
        # concatenate
        if out_ is not None:
            out = [*out, *out_]
        else:
            out.append(None)
return out | b64ImConverter/multiProcess.py | import typing, math, os
import multiprocessing, tempfile, pickle
def divideChunks(lis: typing.Iterable, n_size: int) -> typing.Iterator:
for i in range(0, len(lis), n_size):
yield lis[i:i + n_size]
def inferWorkers() -> int:
    """Return the worker-process count: all CPUs but one, never fewer than 1.

    Bug fix: the original returned cpu_count() - 1 unclamped, which is 0 on a
    single-core machine and caused a ZeroDivisionError downstream when the
    chunk size was computed as ceil(len(data) / n_workers).
    """
    return max(1, multiprocessing.cpu_count() - 1)
def lisJobParallel(func: typing.Callable, list_like: typing.Iterable, use_buffer:bool = False, n_workers: int = -1) -> list:
"""
parallel a job that is applied to a list-like object
The paralleled function should only take one argument which is the list_like
the function should be able to be run on subset of the list_like
and return list-like results or None
i.e.
func(list_like) -> list_like2 | None
- list_like: list-like argument
- n_workers: number of process, set to -1 for using auto-inferring
- use_buffer: use hard disk as buffer for each subprocess output, enable when the data exchange is large
"""
if n_workers <= 0:
n_workers = inferWorkers()
data = list_like
chunk_size = math.ceil(len(data)/n_workers)
conns = []
procs = []
# define the function for multiprocessing.Process
def processFunc(conn, func_, data_):
out_ = func_(data_)
if use_buffer:
f_path = tempfile.NamedTemporaryFile(mode = "w+b").name
with open(f_path, "wb") as fp:
bytes = pickle.dumps(out_)
fp.write(bytes)
conn.send(f_path)
else:
conn.send(out_)
# Create and run the processes
for d in divideChunks(data, chunk_size):
conn1, conn2 = multiprocessing.Pipe()
process = multiprocessing.Process(\
target=processFunc, args=(conn1, func, d))
conns.append(conn2)
procs.append(process)
process.start()
for p in procs:
p.join()
# Concatenate results
out = []
for conn_ in conns:
obj = conn_.recv()
if use_buffer:
with open(obj, "rb") as fp:
out_ = pickle.loads(fp.read())
os.remove(obj) # delete the temporary buffer file
else:
out_ = obj
# concatenate
if out_ is not None:
out = [*out, *out_]
else:
out.append(None)
return out | 0.363082 | 0.283639 |
from .conf import settings
import urllib.request
import urllib.error
import logging
import sys
import time
log = logging.getLogger('svp_integration')
def list_request(path):
    """GET <svp_url>?<path> and return the response body as a list of lines.

    Returns None (after logging) when the SVP API server is unreachable.
    """
    try:
        response = urllib.request.urlopen(settings.svp_url + "?" + path)
        # Normalize CRLF before splitting so each entry is one clean line.
        return response.read().decode('utf-8').replace('\r\n', '\n').split('\n')
    except urllib.error.URLError as ex:
        # 'ex' is unused; exc_info=1 attaches the traceback to the log entry.
        log.error("Could not reach SVP API server.", exc_info=1)
        return None
def simple_request(path):
    """Return the value of a single "key = value" API response, else None."""
    lines = list_request(path)
    if lines is None or len(lines) != 1:
        return None
    line = lines[0]
    if " = " not in line:
        return None
    return line.split(" = ")[1]
def get_profiles():
    """Return {profile_guid: display_name} for every enabled SVP profile.

    Network-bound: issues one API request per profile attribute.
    """
    profile_ids = list_request("list=profiles")
    profiles = {}
    for profile_id in profile_ids:
        profile_id = profile_id.replace("profiles.", "")
        # "predef" is a reserved pseudo-profile, not user-selectable.
        if profile_id == "predef":
            continue
        # The all-ones id is SVP's built-in automatic profile.
        if profile_id == "P10000001_1001_1001_1001_100000000001":
            profile_name = "Automatic"
        else:
            profile_name = simple_request("profiles.{0}.title".format(profile_id))
        # Skip profiles the user has switched off.
        if simple_request("profiles.{0}.on".format(profile_id)) == "false":
            continue
        # Convert "P1234_..." ids to the "{1234-...}" GUID form used elsewhere.
        profile_guid = "{" + profile_id[1:].replace("_", "-") + "}"
        profiles[profile_guid] = profile_name
    return profiles
def get_name_from_guid(profile_id):
    """Map a profile GUID ("{...-...}") to its display name."""
    # GUID "{1234-...}" -> internal id "P1234_..."
    key = "P" + profile_id[1:-1].replace("-", "_")
    if key == "P10000001_1001_1001_1001_100000000001":
        # The all-ones id is SVP's built-in automatic profile.
        return "Automatic"
    return simple_request("profiles.{0}.title".format(key))
def get_last_profile():
    """Return the id of the most recently used playback profile."""
    last = simple_request("rt.playback.last_profile")
    return last
def is_svp_alive():
    """Return True when the SVP API answers at all."""
    try:
        return list_request("") is not None
    except Exception:
        log.error("Could not reach SVP API server.", exc_info=1)
        return False
def is_svp_enabled():
    """Return True unless SVP reports itself disabled."""
    disabled = simple_request("rt.disabled")
    return disabled == "false"
def is_svp_active():
    """Return True when SVP is actively processing a playback."""
    response = simple_request("rt.playback.active")
    # None (unreachable) and "" (idle) both mean "not active".
    return response is not None and response != ""
def set_active_profile(profile_id):
    """Cycle through profiles until *profile_id* is active; True on success."""
    # As far as I know, there is no way to directly set the profile, so we
    # step with !profile_next at most once per known profile.
    if not is_svp_active():
        return False
    if profile_id == get_last_profile():
        return True
    attempts = len(list_request("list=profiles"))
    for _ in range(attempts):
        list_request("!profile_next")
        if get_last_profile() == profile_id:
            return True
    return False
def set_disabled(disabled):
    """Set SVP's disabled flag; returns True when the API echoes "true"."""
    flag = "true" if disabled else "false"
    return simple_request("rt.disabled={0}".format(flag)) == "true"
class SVPManager:
    """Bridges the player's on-screen menu with a running SVP instance."""
    def __init__(self, menu, playerManager):
        # menu: shared menu UI object; playerManager owns the mpv player.
        self.menu = menu
        if settings.svp_enable:
            socket = settings.svp_socket
            if socket is None:
                # Default IPC endpoint differs per platform.
                if sys.platform.startswith("win32") or sys.platform.startswith("cygwin"):
                    socket = "mpvpipe"
                else:
                    socket = "/tmp/mpvsocket"
            # This actually *adds* another ipc server.
            playerManager._player.input_ipc_server = socket
        if settings.svp_enable and not is_svp_alive():
            log.error("SVP is not reachable. Please make sure you have the API enabled.")
    def is_available(self):
        # Usable only when enabled in settings AND the API is reachable.
        if not settings.svp_enable:
            return False
        if not is_svp_alive():
            return False
        return True
    def menu_set_profile(self):
        # Third tuple element of the selected menu entry is the profile GUID
        # (None encodes the "Disabled" entry).
        profile_id = self.menu.menu_list[self.menu.menu_selection][2]
        if profile_id is None:
            set_disabled(True)
        else:
            set_active_profile(profile_id)
        # Need to re-render menu. SVP has a race condition so we wait a second.
        time.sleep(1)
        self.menu.menu_action("back")
        self.menu_action()
    def menu_set_enabled(self):
        set_disabled(False)
        # Need to re-render menu. SVP has a race condition so we wait a second.
        time.sleep(1)
        self.menu.menu_action("back")
        self.menu_action()
    def menu_action(self):
        # Build the SVP submenu for the current state: a profile picker when
        # active, otherwise retry/enable prompts.
        if is_svp_active():
            selected = 0
            active_profile = get_last_profile()
            profile_option_list = [
                ("Disabled", self.menu_set_profile, None)
            ]
            for i, (profile_id, profile_name) in enumerate(get_profiles().items()):
                profile_option_list.append(
                    (profile_name, self.menu_set_profile, profile_id)
                )
                # Preselect the currently active profile (offset by the
                # leading "Disabled" entry).
                if profile_id == active_profile:
                    selected = i+1
            self.menu.put_menu("Select SVP Profile", profile_option_list, selected)
        else:
            if is_svp_enabled():
                self.menu.put_menu("SVP is Not Active", [
                    ("Disable", self.menu_set_profile, None),
                    ("Retry", self.menu_set_enabled)
                ], selected=1)
            else:
                self.menu.put_menu("SVP is Disabled", [
                    ("Enable SVP", self.menu_set_enabled)
]) | plex_mpv_shim/svp_integration.py | from .conf import settings
import urllib.request
import urllib.error
import logging
import sys
import time
log = logging.getLogger('svp_integration')
def list_request(path):
try:
response = urllib.request.urlopen(settings.svp_url + "?" + path)
return response.read().decode('utf-8').replace('\r\n', '\n').split('\n')
except urllib.error.URLError as ex:
log.error("Could not reach SVP API server.", exc_info=1)
return None
def simple_request(path):
response_list = list_request(path)
if response_list is None:
return None
if len(response_list) != 1 or " = " not in response_list[0]:
return None
return response_list[0].split(" = ")[1]
def get_profiles():
profile_ids = list_request("list=profiles")
profiles = {}
for profile_id in profile_ids:
profile_id = profile_id.replace("profiles.", "")
if profile_id == "predef":
continue
if profile_id == "P10000001_1001_1001_1001_100000000001":
profile_name = "Automatic"
else:
profile_name = simple_request("profiles.{0}.title".format(profile_id))
if simple_request("profiles.{0}.on".format(profile_id)) == "false":
continue
profile_guid = "{" + profile_id[1:].replace("_", "-") + "}"
profiles[profile_guid] = profile_name
return profiles
def get_name_from_guid(profile_id):
profile_id = "P" + profile_id[1:-1].replace("-", "_")
if profile_id == "P10000001_1001_1001_1001_100000000001":
return "Automatic"
else:
return simple_request("profiles.{0}.title".format(profile_id))
def get_last_profile():
return simple_request("rt.playback.last_profile")
def is_svp_alive():
try:
response = list_request("")
return response is not None
except Exception:
log.error("Could not reach SVP API server.", exc_info=1)
return False
def is_svp_enabled():
return simple_request("rt.disabled") == "false"
def is_svp_active():
response = simple_request("rt.playback.active")
if response is None:
return False
return response != ""
def set_active_profile(profile_id):
# As far as I know, there is no way to directly set the profile.
if not is_svp_active():
return False
if profile_id == get_last_profile():
return True
for i in range(len(list_request("list=profiles"))):
list_request("!profile_next")
if get_last_profile() == profile_id:
return True
return False
def set_disabled(disabled):
return simple_request("rt.disabled={0}".format("true" if disabled else "false")) == "true"
class SVPManager:
def __init__(self, menu, playerManager):
self.menu = menu
if settings.svp_enable:
socket = settings.svp_socket
if socket is None:
if sys.platform.startswith("win32") or sys.platform.startswith("cygwin"):
socket = "mpvpipe"
else:
socket = "/tmp/mpvsocket"
# This actually *adds* another ipc server.
playerManager._player.input_ipc_server = socket
if settings.svp_enable and not is_svp_alive():
log.error("SVP is not reachable. Please make sure you have the API enabled.")
def is_available(self):
if not settings.svp_enable:
return False
if not is_svp_alive():
return False
return True
def menu_set_profile(self):
profile_id = self.menu.menu_list[self.menu.menu_selection][2]
if profile_id is None:
set_disabled(True)
else:
set_active_profile(profile_id)
# Need to re-render menu. SVP has a race condition so we wait a second.
time.sleep(1)
self.menu.menu_action("back")
self.menu_action()
def menu_set_enabled(self):
set_disabled(False)
# Need to re-render menu. SVP has a race condition so we wait a second.
time.sleep(1)
self.menu.menu_action("back")
self.menu_action()
def menu_action(self):
if is_svp_active():
selected = 0
active_profile = get_last_profile()
profile_option_list = [
("Disabled", self.menu_set_profile, None)
]
for i, (profile_id, profile_name) in enumerate(get_profiles().items()):
profile_option_list.append(
(profile_name, self.menu_set_profile, profile_id)
)
if profile_id == active_profile:
selected = i+1
self.menu.put_menu("Select SVP Profile", profile_option_list, selected)
else:
if is_svp_enabled():
self.menu.put_menu("SVP is Not Active", [
("Disable", self.menu_set_profile, None),
("Retry", self.menu_set_enabled)
], selected=1)
else:
self.menu.put_menu("SVP is Disabled", [
("Enable SVP", self.menu_set_enabled)
]) | 0.181372 | 0.071106 |
import sys
import numpy as np
import pylab as pl
import scipy.signal
from UFL.common import DataInputOutput, DataNormalization, Visualization
from UFL.PCA import PCA
from UFL.SoftICA import SoftICA
from UFL.Softmax import Softmax
def convolveAndPool(images, W, poolDim):
''' Returns the convolution of the features given by W with
the given images.
Arguments
images : large images to convolve with, matrix in the form
images(r, c, image number)
W : filterbank, is of shape (filterDim,filterDim,numFilters)
poolDim : dimension of square pooling
Returns
features : matrix of convolved and pooled features in the form
features(imageRow, imageCol, featureNum, imageNum)
'''
imageDimX = np.shape(images)[0];
imageDimY = np.shape(images)[1];
numImages = np.shape(images)[2];
filterDimX = np.shape(W)[0];
filterDimY = np.shape(W)[1];
numFilters = np.shape(W)[2];
convDimX = imageDimX - filterDimX + 1;
convDimY = imageDimY - filterDimY + 1;
features = np.zeros([convDimX/poolDim, convDimY/poolDim, numFilters, numImages]);
poolMat = np.ones([poolDim]);
for imageNum in range(numImages):
for filterNum in range(numFilters):
filter = W[:,:,filterNum];
# Flip the feature matrix because of the definition of convolution
filter = np.rot90(filter, 2);
# Obtain the image
im = images[:, :, imageNum];
resp = scipy.signal.convolve2d(im, filter, mode='valid');
# Apply pooling on "resp" to get the hidden activation "act"
if 0:
# Mean pooling
poolingFilter = np.ones([poolDim, poolDim]) * (poolDim * poolDim)**(-1);
act = scipy.signal.convolve2d(resp, poolingFilter, mode='valid');
else:
# Square root pooling
poolingFilter = np.ones([poolDim, poolDim]);
aux1 = resp**2;
act = np.sqrt(scipy.signal.convolve2d(aux1, poolingFilter, 'valid'));
features[:, :, filterNum, imageNum] = act[0:convDimX-poolDim+1:poolDim, 0:convDimY-poolDim+1:poolDim];
return features
if __name__ == '__main__':
# --------------------------
# Example:
# Learning orthagonal bases of images of handwritten digits (MNIST dataset)
# --------------------------
mnist_img_filename_training = 'C://develop//python//UFL//data//train-images-idx3-ubyte';
mnist_lbl_filename_training = 'C://develop//python//UFL//data//train-labels-idx1-ubyte';
debug = 1;
imWidth = 28;
imHeight = 28;
imageChannels = 1;
numImages_unlabeled = 30000;
numImages_training = 5000;
numImages_test = 10000;
patchWidth = 9;
patchHeight = 9;
numPatches = 60000;
inputDim_patch = patchWidth * patchHeight * imageChannels;
inputDim_img = imWidth * imHeight * imageChannels;
numFeatures = 32;
nClasses = 10;
epsilon = 1e-2;
lambd = 0.99;
poolDim = 5;
#-------------------------
# Load Data
#-------------------------
if debug: print "Loading data..."
# Read data from file
numImages = numImages_unlabeled + numImages_training + numImages_test;
images = DataInputOutput.loadMNISTImages(mnist_img_filename_training, numImages);
images = np.reshape(images, [imHeight, imWidth, images.shape[1]]);
images_unlabeled = images[:,:,0:numImages_unlabeled];
images_training = images[:,:,numImages_unlabeled:numImages_unlabeled+numImages_training];
images_test = images[:,:,numImages_unlabeled+numImages_training:numImages_unlabeled+numImages_training+numImages_test];
labels = DataInputOutput.loadMNISTLabels(mnist_lbl_filename_training, numImages);
labels_training = labels[numImages_unlabeled:numImages_unlabeled+numImages_training];
labels_test = labels[numImages_unlabeled+numImages_training:numImages_unlabeled+numImages_training+numImages_test];
# Sample patches
patches = DataInputOutput.samplePatches(images_unlabeled, patchWidth, patchHeight, numPatches);
# Normalize data: ZCA whiten patches
patches = patches/255.0;
instance_pca = PCA.PCA(inputDim_patch, 0.99, debug);
ZCAwhite = instance_pca.computeZCAWhiteningMatrix(patches);
patches_ZCAwhite = instance_pca.doZCAWhitening(patches);
# Each patch should be normalized as x / ||x||_2 where x is the vector representation of the patch
patches_ZCAwhite = DataNormalization.normL2(patches_ZCAwhite, axis=0)
#-------------------------
# Learn Features
#-------------------------
if debug: print "Learning SoftICA features..."
sizeLayers = [inputDim_patch, numFeatures];
sica = SoftICA.SoftICA(sizeLayers, lambd, epsilon, debug=debug);
success = sica.optimizeParameters(patches_ZCAwhite);
weights = sica.getWeights();
# Visualize the learned bases
if debug>1:
Visualization.displayNetwork(np.transpose(weights));
#-------------------------
# Extract Features
#-------------------------
if debug: print "Extracting features..."
# Pre-multiply the weights with whitening matrix, equivalent to whitening each image patch before applying convolution.
weights = np.dot(weights, ZCAwhite);
# Reshape SoftICA weights to be convolutional weights.
weights = np.reshape(weights, [numFeatures, patchWidth, patchHeight]);
weights = np.transpose(weights, [2,1,0]);
activations_training = convolveAndPool(images_training, weights, poolDim);
activations_test = convolveAndPool(images_test, weights, poolDim);
if 0:
for i in range(activations_training.shape[2]):
pl.figure()
pl.imshow(activations_training[:,:,i,0], cmap='gray');
pl.show();
featureDim = activations_training.shape[0] * activations_training.shape[1] * activations_training.shape[2];
features_training = np.reshape(activations_training, [featureDim, activations_training.shape[3]])
features_test = np.reshape(activations_test, [featureDim, activations_test.shape[3]])
#-------------------------
# Train Softmax Classifier
#-------------------------
if debug: print "Learning classification model..."
softmaxModel = Softmax.Softmax(featureDim, nClasses, debug);
success = softmaxModel.optimizeParameters(features_training, labels_training);
#-------------------------
# Testing
#-------------------------
if debug: print "Testing..."
# Print out accuracy
correct_training = labels_training == np.argmax(softmaxModel.predict(features_training),0)
accuracy_training = np.sum(correct_training.astype(int)) * 100 / len(labels_training);
print 'Training accuracy: ', accuracy_training, '%'
correct_test = labels_test == np.argmax(softmaxModel.predict(features_test),0)
accuracy_test = np.sum(correct_test.astype(int)) * 100 / len(labels_test);
print 'Test accuracy: ', accuracy_test, '%' | examples/SelfTaughtLearning.py | import sys
import numpy as np
import pylab as pl
import scipy.signal
from UFL.common import DataInputOutput, DataNormalization, Visualization
from UFL.PCA import PCA
from UFL.SoftICA import SoftICA
from UFL.Softmax import Softmax
def convolveAndPool(images, W, poolDim):
''' Returns the convolution of the features given by W with
the given images.
Arguments
images : large images to convolve with, matrix in the form
images(r, c, image number)
W : filterbank, is of shape (filterDim,filterDim,numFilters)
poolDim : dimension of square pooling
Returns
features : matrix of convolved and pooled features in the form
features(imageRow, imageCol, featureNum, imageNum)
'''
imageDimX = np.shape(images)[0];
imageDimY = np.shape(images)[1];
numImages = np.shape(images)[2];
filterDimX = np.shape(W)[0];
filterDimY = np.shape(W)[1];
numFilters = np.shape(W)[2];
convDimX = imageDimX - filterDimX + 1;
convDimY = imageDimY - filterDimY + 1;
features = np.zeros([convDimX/poolDim, convDimY/poolDim, numFilters, numImages]);
poolMat = np.ones([poolDim]);
for imageNum in range(numImages):
for filterNum in range(numFilters):
filter = W[:,:,filterNum];
# Flip the feature matrix because of the definition of convolution
filter = np.rot90(filter, 2);
# Obtain the image
im = images[:, :, imageNum];
resp = scipy.signal.convolve2d(im, filter, mode='valid');
# Apply pooling on "resp" to get the hidden activation "act"
if 0:
# Mean pooling
poolingFilter = np.ones([poolDim, poolDim]) * (poolDim * poolDim)**(-1);
act = scipy.signal.convolve2d(resp, poolingFilter, mode='valid');
else:
# Square root pooling
poolingFilter = np.ones([poolDim, poolDim]);
aux1 = resp**2;
act = np.sqrt(scipy.signal.convolve2d(aux1, poolingFilter, 'valid'));
features[:, :, filterNum, imageNum] = act[0:convDimX-poolDim+1:poolDim, 0:convDimY-poolDim+1:poolDim];
return features
if __name__ == '__main__':
# --------------------------
# Example:
# Learning orthagonal bases of images of handwritten digits (MNIST dataset)
# --------------------------
mnist_img_filename_training = 'C://develop//python//UFL//data//train-images-idx3-ubyte';
mnist_lbl_filename_training = 'C://develop//python//UFL//data//train-labels-idx1-ubyte';
debug = 1;
imWidth = 28;
imHeight = 28;
imageChannels = 1;
numImages_unlabeled = 30000;
numImages_training = 5000;
numImages_test = 10000;
patchWidth = 9;
patchHeight = 9;
numPatches = 60000;
inputDim_patch = patchWidth * patchHeight * imageChannels;
inputDim_img = imWidth * imHeight * imageChannels;
numFeatures = 32;
nClasses = 10;
epsilon = 1e-2;
lambd = 0.99;
poolDim = 5;
#-------------------------
# Load Data
#-------------------------
if debug: print "Loading data..."
# Read data from file
numImages = numImages_unlabeled + numImages_training + numImages_test;
images = DataInputOutput.loadMNISTImages(mnist_img_filename_training, numImages);
images = np.reshape(images, [imHeight, imWidth, images.shape[1]]);
images_unlabeled = images[:,:,0:numImages_unlabeled];
images_training = images[:,:,numImages_unlabeled:numImages_unlabeled+numImages_training];
images_test = images[:,:,numImages_unlabeled+numImages_training:numImages_unlabeled+numImages_training+numImages_test];
labels = DataInputOutput.loadMNISTLabels(mnist_lbl_filename_training, numImages);
labels_training = labels[numImages_unlabeled:numImages_unlabeled+numImages_training];
labels_test = labels[numImages_unlabeled+numImages_training:numImages_unlabeled+numImages_training+numImages_test];
# Sample patches
patches = DataInputOutput.samplePatches(images_unlabeled, patchWidth, patchHeight, numPatches);
# Normalize data: ZCA whiten patches
patches = patches/255.0;
instance_pca = PCA.PCA(inputDim_patch, 0.99, debug);
ZCAwhite = instance_pca.computeZCAWhiteningMatrix(patches);
patches_ZCAwhite = instance_pca.doZCAWhitening(patches);
# Each patch should be normalized as x / ||x||_2 where x is the vector representation of the patch
patches_ZCAwhite = DataNormalization.normL2(patches_ZCAwhite, axis=0)
#-------------------------
# Learn Features
#-------------------------
if debug: print "Learning SoftICA features..."
sizeLayers = [inputDim_patch, numFeatures];
sica = SoftICA.SoftICA(sizeLayers, lambd, epsilon, debug=debug);
success = sica.optimizeParameters(patches_ZCAwhite);
weights = sica.getWeights();
# Visualize the learned bases
if debug>1:
Visualization.displayNetwork(np.transpose(weights));
#-------------------------
# Extract Features
#-------------------------
if debug: print "Extracting features..."
# Pre-multiply the weights with whitening matrix, equivalent to whitening each image patch before applying convolution.
weights = np.dot(weights, ZCAwhite);
# Reshape SoftICA weights to be convolutional weights.
weights = np.reshape(weights, [numFeatures, patchWidth, patchHeight]);
weights = np.transpose(weights, [2,1,0]);
activations_training = convolveAndPool(images_training, weights, poolDim);
activations_test = convolveAndPool(images_test, weights, poolDim);
if 0:
for i in range(activations_training.shape[2]):
pl.figure()
pl.imshow(activations_training[:,:,i,0], cmap='gray');
pl.show();
featureDim = activations_training.shape[0] * activations_training.shape[1] * activations_training.shape[2];
features_training = np.reshape(activations_training, [featureDim, activations_training.shape[3]])
features_test = np.reshape(activations_test, [featureDim, activations_test.shape[3]])
#-------------------------
# Train Softmax Classifier
#-------------------------
if debug: print "Learning classification model..."
softmaxModel = Softmax.Softmax(featureDim, nClasses, debug);
success = softmaxModel.optimizeParameters(features_training, labels_training);
#-------------------------
# Testing
#-------------------------
if debug: print "Testing..."
# Print out accuracy
correct_training = labels_training == np.argmax(softmaxModel.predict(features_training),0)
accuracy_training = np.sum(correct_training.astype(int)) * 100 / len(labels_training);
print 'Training accuracy: ', accuracy_training, '%'
correct_test = labels_test == np.argmax(softmaxModel.predict(features_test),0)
accuracy_test = np.sum(correct_test.astype(int)) * 100 / len(labels_test);
print 'Test accuracy: ', accuracy_test, '%' | 0.534612 | 0.601974 |
import numpy as np
import pyqtgraph as pg
import time
import csv
import sys
import thorlabs_apt as apt
from PyQt5.Qsci import QsciScintilla, QsciLexerPython
from spyre import Spyrelet, Task, Element
from spyre.widgets.task import TaskWidget
from spyre.plotting import LinePlotWidget
from spyre.widgets.rangespace import Rangespace
from spyre.widgets.param_widget import ParamWidget
from spyre.widgets.repository_widget import RepositoryWidget
from lantz import Q_
import time
from lantz.drivers.gwinstek.g3303s import GPD3303S
from lantz.drivers.thorlabs.pm100d import PM100D
class FiberPulling(Spyrelet):
xs = []
ys = []
requires = {
'gpd': GPD3303S,
'pmd': PM100D
}
@Task()
def readVoltage(self):
print(str(self.gpd.voltage()))
return
@Element()
def setVoltage(self, value):
self.gpd.set_voltage(value)
return
@Element()
def setOutput(self, value):
self.gpd.set_output(value)
return
@Task()
def HardPull(self):
elements = apt.list_available_devices()
serials = [x[1] for x in elements]
serial1 = serials[0]
serial2 = serials[1]
print(elements)
motor1 = apt.Motor(serial1)
motor2 = apt.Motor(serial2)
motor1.move_home()
motor2.move_home(True)
print("homed")
time.sleep(2)
motor1.move_to(50)
motor2.move_to(50, True)
print("ready")
input("Press any key to start pulling")
print("pulling")
motor1.move_velocity(0.2)
motor1.move_to(20)
motor2.move_velocity(0.2)
motor2.move_to(20)
input("Press any key to start stop")
motor1.stop_profiled()
motor2.stop_profiled()
t0 = time.time()
while True:
t1 = time.time()
t = t1 - t0
self.xs.append(t)
self.ys.append(self.pmd.power.magnitude * 1000)
values = {
'x': self.xs,
'y': self.ys,
}
self.HardPull.acquire(values)
sleep(0.5)
if len(xs) < 10:
continue
else:
tail = ys[-10:]
maxi = max(tail)
mini = min(tail)
variance = maxi - mini
if variance < 0.001 and t > 20:
self.gpd.set_voltage(12)
self.gpd.set_output(1)
sleep(2)
self.gpd.set_output(0)
break
return
@Element(name='Histogram')
def averaged(self):
p = LinePlotWidget()
p.plot('Transmission Power')
return p
@averaged.on(HardPull.acquired)
def averaged_update(self, ev):
w = ev.widget
xs = np.array(self.xs)
ys = np.array(self.ys)
w.set('Transmission Power', xs=xs, ys=ys)
return
def initialize(self):
return
def finalize(self):
return | spyre/spyre/spyrelets/fiberpulling_spyrelet.py | import numpy as np
import pyqtgraph as pg
import time
import csv
import sys
import thorlabs_apt as apt
from PyQt5.Qsci import QsciScintilla, QsciLexerPython
from spyre import Spyrelet, Task, Element
from spyre.widgets.task import TaskWidget
from spyre.plotting import LinePlotWidget
from spyre.widgets.rangespace import Rangespace
from spyre.widgets.param_widget import ParamWidget
from spyre.widgets.repository_widget import RepositoryWidget
from lantz import Q_
import time
from lantz.drivers.gwinstek.g3303s import GPD3303S
from lantz.drivers.thorlabs.pm100d import PM100D
class FiberPulling(Spyrelet):
xs = []
ys = []
requires = {
'gpd': GPD3303S,
'pmd': PM100D
}
@Task()
def readVoltage(self):
print(str(self.gpd.voltage()))
return
@Element()
def setVoltage(self, value):
self.gpd.set_voltage(value)
return
@Element()
def setOutput(self, value):
self.gpd.set_output(value)
return
@Task()
def HardPull(self):
elements = apt.list_available_devices()
serials = [x[1] for x in elements]
serial1 = serials[0]
serial2 = serials[1]
print(elements)
motor1 = apt.Motor(serial1)
motor2 = apt.Motor(serial2)
motor1.move_home()
motor2.move_home(True)
print("homed")
time.sleep(2)
motor1.move_to(50)
motor2.move_to(50, True)
print("ready")
input("Press any key to start pulling")
print("pulling")
motor1.move_velocity(0.2)
motor1.move_to(20)
motor2.move_velocity(0.2)
motor2.move_to(20)
input("Press any key to start stop")
motor1.stop_profiled()
motor2.stop_profiled()
t0 = time.time()
while True:
t1 = time.time()
t = t1 - t0
self.xs.append(t)
self.ys.append(self.pmd.power.magnitude * 1000)
values = {
'x': self.xs,
'y': self.ys,
}
self.HardPull.acquire(values)
sleep(0.5)
if len(xs) < 10:
continue
else:
tail = ys[-10:]
maxi = max(tail)
mini = min(tail)
variance = maxi - mini
if variance < 0.001 and t > 20:
self.gpd.set_voltage(12)
self.gpd.set_output(1)
sleep(2)
self.gpd.set_output(0)
break
return
@Element(name='Histogram')
def averaged(self):
p = LinePlotWidget()
p.plot('Transmission Power')
return p
@averaged.on(HardPull.acquired)
def averaged_update(self, ev):
w = ev.widget
xs = np.array(self.xs)
ys = np.array(self.ys)
w.set('Transmission Power', xs=xs, ys=ys)
return
def initialize(self):
return
def finalize(self):
return | 0.222785 | 0.242441 |
import unittest
from pkg_resources import resource_filename
import numpy as np
try:
import fitsio
missing_fitsio = False
except ImportError:
missing_fitsio = True
from desisim import lya_spectra
class TestLya(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.infile = resource_filename('desisim', 'test/data/simpleLyaSpec.fits.gz')
if not missing_fitsio:
fx = fitsio.FITS(cls.infile)
cls.nspec = len(fx) - 1
fx.close()
cls.wavemin = 3550
cls.wavemax = 8000
cls.dwave = 2.0
cls.wave = np.arange(cls.wavemin, cls.wavemax+cls.dwave/2, cls.dwave)
cls.nspec = 5
cls.templateid = [3, 10, 500]
cls.seed = 12311423
#cls.seed = np.random.randint(2**31)
cls.rand = np.random.RandomState(cls.seed)
@unittest.skipIf(missing_fitsio, 'fitsio not installed; skipping lya_spectra tests')
def test_read_lya(self):
flux, wave, meta, objmeta = lya_spectra.get_spectra(self.infile, wave=self.wave, seed=self.seed)
self.assertEqual(flux.shape[0], self.nspec)
self.assertEqual(wave.shape[0], flux.shape[1])
self.assertEqual(len(meta), self.nspec)
self.assertEqual(len(objmeta), self.nspec)
templateid = [0,1,2]
nqso = len(templateid)
flux, wave, meta, objmeta = lya_spectra.get_spectra(self.infile, templateid=templateid,
wave=self.wave, seed=self.seed)
self.assertEqual(flux.shape[0], nqso)
self.assertEqual(wave.shape[0], flux.shape[1])
self.assertEqual(len(meta), nqso)
self.assertEqual(len(objmeta), nqso)
@unittest.skipIf(missing_fitsio, 'fitsio not installed; skipping lya_spectra tests')
def test_read_lya_seed(self):
flux1a, wave1a, meta1a, objmeta1a = lya_spectra.get_spectra(self.infile, wave=self.wave, nqso=3, seed=1)
flux1b, wave1b, meta1b, objmeta1b = lya_spectra.get_spectra(self.infile, wave=self.wave, nqso=3, seed=1)
flux2, wave2, meta2, objmeta2 = lya_spectra.get_spectra(self.infile, wave=self.wave, nqso=3, seed=2)
self.assertTrue(np.all(flux1a == flux1b))
self.assertTrue(np.any(flux1a != flux2))
@unittest.skipIf(missing_fitsio, 'fitsio not installed; skipping lya_spectra tests')
def test_insert_dla(self):
flux, wave, meta, objmeta, dla_meta = lya_spectra.get_spectra(
self.infile, wave=self.wave, seed=self.seed, add_dlas=True)
self.assertEqual(flux.shape[0], self.nspec)
self.assertEqual(wave.shape[0], flux.shape[1])
self.assertEqual(len(meta), self.nspec)
self.assertEqual(len(objmeta), self.nspec)
self.assertGreater(len(dla_meta), 0)
self.assertIn('NHI', dla_meta.keys())
templateid = [0,1,2]
nqso = len(templateid)
flux, wave, meta, objmeta = lya_spectra.get_spectra(self.infile, templateid=templateid,
wave=self.wave, seed=self.seed)
self.assertEqual(flux.shape[0], nqso)
self.assertEqual(wave.shape[0], flux.shape[1])
self.assertEqual(len(meta), nqso)
self.assertEqual(len(objmeta), nqso)
#flux, wave, meta = lya_spectra.get_spectra(self.infile, nqso=nqso, first=2)
def test_suite():
"""Allows testing of only this module with the command::
python setup.py test -m <modulename>
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main() | py/desisim/test/test_lya.py | import unittest
from pkg_resources import resource_filename
import numpy as np
try:
import fitsio
missing_fitsio = False
except ImportError:
missing_fitsio = True
from desisim import lya_spectra
class TestLya(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.infile = resource_filename('desisim', 'test/data/simpleLyaSpec.fits.gz')
if not missing_fitsio:
fx = fitsio.FITS(cls.infile)
cls.nspec = len(fx) - 1
fx.close()
cls.wavemin = 3550
cls.wavemax = 8000
cls.dwave = 2.0
cls.wave = np.arange(cls.wavemin, cls.wavemax+cls.dwave/2, cls.dwave)
cls.nspec = 5
cls.templateid = [3, 10, 500]
cls.seed = 12311423
#cls.seed = np.random.randint(2**31)
cls.rand = np.random.RandomState(cls.seed)
@unittest.skipIf(missing_fitsio, 'fitsio not installed; skipping lya_spectra tests')
def test_read_lya(self):
flux, wave, meta, objmeta = lya_spectra.get_spectra(self.infile, wave=self.wave, seed=self.seed)
self.assertEqual(flux.shape[0], self.nspec)
self.assertEqual(wave.shape[0], flux.shape[1])
self.assertEqual(len(meta), self.nspec)
self.assertEqual(len(objmeta), self.nspec)
templateid = [0,1,2]
nqso = len(templateid)
flux, wave, meta, objmeta = lya_spectra.get_spectra(self.infile, templateid=templateid,
wave=self.wave, seed=self.seed)
self.assertEqual(flux.shape[0], nqso)
self.assertEqual(wave.shape[0], flux.shape[1])
self.assertEqual(len(meta), nqso)
self.assertEqual(len(objmeta), nqso)
@unittest.skipIf(missing_fitsio, 'fitsio not installed; skipping lya_spectra tests')
def test_read_lya_seed(self):
flux1a, wave1a, meta1a, objmeta1a = lya_spectra.get_spectra(self.infile, wave=self.wave, nqso=3, seed=1)
flux1b, wave1b, meta1b, objmeta1b = lya_spectra.get_spectra(self.infile, wave=self.wave, nqso=3, seed=1)
flux2, wave2, meta2, objmeta2 = lya_spectra.get_spectra(self.infile, wave=self.wave, nqso=3, seed=2)
self.assertTrue(np.all(flux1a == flux1b))
self.assertTrue(np.any(flux1a != flux2))
@unittest.skipIf(missing_fitsio, 'fitsio not installed; skipping lya_spectra tests')
def test_insert_dla(self):
flux, wave, meta, objmeta, dla_meta = lya_spectra.get_spectra(
self.infile, wave=self.wave, seed=self.seed, add_dlas=True)
self.assertEqual(flux.shape[0], self.nspec)
self.assertEqual(wave.shape[0], flux.shape[1])
self.assertEqual(len(meta), self.nspec)
self.assertEqual(len(objmeta), self.nspec)
self.assertGreater(len(dla_meta), 0)
self.assertIn('NHI', dla_meta.keys())
templateid = [0,1,2]
nqso = len(templateid)
flux, wave, meta, objmeta = lya_spectra.get_spectra(self.infile, templateid=templateid,
wave=self.wave, seed=self.seed)
self.assertEqual(flux.shape[0], nqso)
self.assertEqual(wave.shape[0], flux.shape[1])
self.assertEqual(len(meta), nqso)
self.assertEqual(len(objmeta), nqso)
#flux, wave, meta = lya_spectra.get_spectra(self.infile, nqso=nqso, first=2)
def test_suite():
"""Allows testing of only this module with the command::
python setup.py test -m <modulename>
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main() | 0.561455 | 0.449695 |
import numpy as np
import ref
from .img import Transform
def getPreds(hm):
assert len(hm.shape) == 4, 'Input must be a 4-D tensor'
res = hm.shape[2]
hm = hm.reshape(hm.shape[0], hm.shape[1], hm.shape[2] * hm.shape[3])
idx = np.argmax(hm, axis = 2)
preds = np.zeros((hm.shape[0], hm.shape[1], 2))
for i in range(hm.shape[0]):
for j in range(hm.shape[1]):
preds[i, j, 0], preds[i, j, 1] = idx[i, j] % res, idx[i, j] / res
return preds
def calcDists(preds, gt, normalize):
dists = np.zeros((preds.shape[1], preds.shape[0]))
for i in range(preds.shape[0]):
for j in range(preds.shape[1]):
if gt[i, j, 0] > 0 and gt[i, j, 1] > 0:
dists[j][i] = ((gt[i][j] - preds[i][j]) ** 2).sum() ** 0.5 / normalize[i]
else:
dists[j][i] = -1
return dists
def distAccuracy(dist, thr = 0.5):
dist = dist[dist != -1]
if len(dist) > 0:
return 1.0 * (dist < thr).sum() / len(dist)
else:
return -1
def Accuracy(output, target):
preds = getPreds(output)
gt = getPreds(target)
dists = calcDists(preds, gt, np.ones(preds.shape[0]) * ref.outputRes / 10)
acc = np.zeros(len(ref.accIdxs))
avgAcc = 0
badIdxCount = 0
for i in range(len(ref.accIdxs)):
acc[i] = distAccuracy(dists[ref.accIdxs[i]])
if acc[i] >= 0:
avgAcc = avgAcc + acc[i]
else:
badIdxCount = badIdxCount + 1
if badIdxCount == len(ref.accIdxs):
return 0
else:
return avgAcc / (len(ref.accIdxs) - badIdxCount)
def finalPreds(output, center, scale, rotate):
p = getPreds(output).copy()
hm = output.reshape(output.shape[0], output.shape[1], ref.outputRes, ref.outputRes)
for i in range(hm.shape[0]):
for j in range(hm.shape[1]):
pX, pY = int(p[i, j, 0]), int(p[i, j, 1])
scores = hm[i, j, pX, pY]
if pX > 0 and pX < ref.outputRes - 1 and pY > 0 and pY < ref.outputRes - 1:
diffY = hm[i, j, pX, pY + 1] - hm[i, j, pX, pY - 1]
diffX = hm[i, j, pX + 1, pY] - hm[i, j, pX - 1, pY]
p[i, j, 0] = p[i, j, 0] + 0.25 * (1 if diffX >=0 else -1)
p[i, j, 1] = p[i, j, 1] + 0.25 * (1 if diffY >=0 else -1)
p = p + 0.5
preds = np.zeros((p.shape[0], p.shape[1], 2))
for i in range(p.shape[0]):
for j in range(p.shape[1]):
preds[i, j] = Transform(p[i, j], center[i], scale[i], rotate[i], ref.outputRes, invert = True)
return preds | utils/eval.py | import numpy as np
import ref
from .img import Transform
def getPreds(hm):
assert len(hm.shape) == 4, 'Input must be a 4-D tensor'
res = hm.shape[2]
hm = hm.reshape(hm.shape[0], hm.shape[1], hm.shape[2] * hm.shape[3])
idx = np.argmax(hm, axis = 2)
preds = np.zeros((hm.shape[0], hm.shape[1], 2))
for i in range(hm.shape[0]):
for j in range(hm.shape[1]):
preds[i, j, 0], preds[i, j, 1] = idx[i, j] % res, idx[i, j] / res
return preds
def calcDists(preds, gt, normalize):
dists = np.zeros((preds.shape[1], preds.shape[0]))
for i in range(preds.shape[0]):
for j in range(preds.shape[1]):
if gt[i, j, 0] > 0 and gt[i, j, 1] > 0:
dists[j][i] = ((gt[i][j] - preds[i][j]) ** 2).sum() ** 0.5 / normalize[i]
else:
dists[j][i] = -1
return dists
def distAccuracy(dist, thr = 0.5):
dist = dist[dist != -1]
if len(dist) > 0:
return 1.0 * (dist < thr).sum() / len(dist)
else:
return -1
def Accuracy(output, target):
preds = getPreds(output)
gt = getPreds(target)
dists = calcDists(preds, gt, np.ones(preds.shape[0]) * ref.outputRes / 10)
acc = np.zeros(len(ref.accIdxs))
avgAcc = 0
badIdxCount = 0
for i in range(len(ref.accIdxs)):
acc[i] = distAccuracy(dists[ref.accIdxs[i]])
if acc[i] >= 0:
avgAcc = avgAcc + acc[i]
else:
badIdxCount = badIdxCount + 1
if badIdxCount == len(ref.accIdxs):
return 0
else:
return avgAcc / (len(ref.accIdxs) - badIdxCount)
def finalPreds(output, center, scale, rotate):
p = getPreds(output).copy()
hm = output.reshape(output.shape[0], output.shape[1], ref.outputRes, ref.outputRes)
for i in range(hm.shape[0]):
for j in range(hm.shape[1]):
pX, pY = int(p[i, j, 0]), int(p[i, j, 1])
scores = hm[i, j, pX, pY]
if pX > 0 and pX < ref.outputRes - 1 and pY > 0 and pY < ref.outputRes - 1:
diffY = hm[i, j, pX, pY + 1] - hm[i, j, pX, pY - 1]
diffX = hm[i, j, pX + 1, pY] - hm[i, j, pX - 1, pY]
p[i, j, 0] = p[i, j, 0] + 0.25 * (1 if diffX >=0 else -1)
p[i, j, 1] = p[i, j, 1] + 0.25 * (1 if diffY >=0 else -1)
p = p + 0.5
preds = np.zeros((p.shape[0], p.shape[1], 2))
for i in range(p.shape[0]):
for j in range(p.shape[1]):
preds[i, j] = Transform(p[i, j], center[i], scale[i], rotate[i], ref.outputRes, invert = True)
return preds | 0.356671 | 0.634685 |
import requests
from ..classes import Champion
class DataDragonAPI:
def __init__(self):
self.latest = self.get_versions()[0]
def get_versions(self):
"""
Get a list of all versions.
:rtype: List[str]
"""
list = requests.get('https://ddragon.leagueoflegends.com/api/versions.json').json()
return list
def get_languages(self):
"""
Get a list of all languages.
:rtype: List[str]
"""
list = requests.get('https://ddragon.leagueoflegends.com/cdn/languages.json').json()
return list
def get_champions_list(self, version: str = None, language: str = 'en_US'):
"""
Get a dictionary containing each champion's ID, key and name.
:param str version: League version
:param str language: League language
The syntax for this dictionary is as follows:
.. code-block:: python
{champion_id (int): {'key': champion_key (str), 'name':champion_name (str)}, ...}
"""
if not version:
version = self.latest
champions_dict_raw = requests.get(f'http://ddragon.leagueoflegends.com/cdn/{version}/data/{language}/champion.json').json()['data']
champions_dict = {int(champ['key']): {"key": champ['id'], "name": champ['name']} for champ in champions_dict_raw.values()}
return champions_dict
def get_champion_from_id(self, id: int, version: str = None, language: str = 'en_US'):
"""
Get the :class:`~riot_apy.classes.Champion` given its ID.
:param int id: Champion ID
:param str version: League version
:param str language: League language
:rtype: Champion
"""
if not version:
version = self.latest
key = self.get_champions_list(version=version, language=language)[id]['key']
raw = requests.get(f'http://ddragon.leagueoflegends.com/cdn/{version}/data/{language}/champion/{key}.json').json()['data'][key]
return Champion(raw) | riot_apy/apis/DataDragonAPI.py | import requests
from ..classes import Champion
class DataDragonAPI:
def __init__(self):
self.latest = self.get_versions()[0]
def get_versions(self):
"""
Get a list of all versions.
:rtype: List[str]
"""
list = requests.get('https://ddragon.leagueoflegends.com/api/versions.json').json()
return list
def get_languages(self):
"""
Get a list of all languages.
:rtype: List[str]
"""
list = requests.get('https://ddragon.leagueoflegends.com/cdn/languages.json').json()
return list
def get_champions_list(self, version: str = None, language: str = 'en_US'):
"""
Get a dictionary containing each champion's ID, key and name.
:param str version: League version
:param str language: League language
The syntax for this dictionary is as follows:
.. code-block:: python
{champion_id (int): {'key': champion_key (str), 'name':champion_name (str)}, ...}
"""
if not version:
version = self.latest
champions_dict_raw = requests.get(f'http://ddragon.leagueoflegends.com/cdn/{version}/data/{language}/champion.json').json()['data']
champions_dict = {int(champ['key']): {"key": champ['id'], "name": champ['name']} for champ in champions_dict_raw.values()}
return champions_dict
def get_champion_from_id(self, id: int, version: str = None, language: str = 'en_US'):
"""
Get the :class:`~riot_apy.classes.Champion` given its ID.
:param int id: Champion ID
:param str version: League version
:param str language: League language
:rtype: Champion
"""
if not version:
version = self.latest
key = self.get_champions_list(version=version, language=language)[id]['key']
raw = requests.get(f'http://ddragon.leagueoflegends.com/cdn/{version}/data/{language}/champion/{key}.json').json()['data'][key]
return Champion(raw) | 0.611034 | 0.2227 |
# Application id shared by every vlive "global" web API endpoint below.
AppId = "8c6cc7b45d2568fb668be6e05b6e5a3b"
# locale parameter(url postfix)
LocaleParam = "&gcc=KR&locale=ko_KR"
PlatformPCParam = "&platformType=PC"

# Common prefix of the vam-web API endpoints.
_BASE = "https://www.vlive.tv/globalv-web/vam-web"


def APIPostUrl(post):
    """Return the post-info API URL for post id *post*."""
    return f"{_BASE}/post/v1.0/post-{post}?appId={AppId}&fields=title,attachments,officialVideo{LocaleParam}"


def APIPostReferer(post):
    """Return the Referer header dict for post id *post*."""
    return {"Referer": f"https://www.vlive.tv/post/{post}"}


# API: Get user session (sign-in)
APISignInUrl = "https://www.vlive.tv/auth/email/login"
APISignInReferer = {'Referer': 'https://www.vlive.tv/auth/email/login'}


def APIInkeyUrl(videoSeq):
    """Return the VOD inkey API URL for video sequence *videoSeq*."""
    return f"{_BASE}/video/v1.0/vod/{videoSeq}/inkey?appId={AppId}{LocaleParam}{PlatformPCParam}"


# Field list requested from the officialVideoPost endpoint (kept verbatim).
_OFFICIAL_VIDEO_POST_FIELDS = (
    "attachments,author,authorId,availableActions,"
    "board{boardId,title,boardType,readAllowedLabel,payRequired,"
    "includedCountries,excludedCountries},boardId,body,channel{channelName,channelCode},"
    "channelCode,commentCount,contentType,createdAt,emotionCount,excludedCountries,"
    "includedCountries,isViewerBookmarked,isCommentEnabled,isHiddenFromStar,lastModifierMember,"
    "notice,officialVideo,originPost,plainBody,postId,postVersion,reservation,starReactions,"
    "targetMember,targetMemberId,thumbnail,title,url,smartEditorAsHtml,viewerEmotionId,"
    "writtenIn"
)


def APIofficialVideoPostUrl(videoSeq):
    """Return the officialVideoPost API URL for video sequence *videoSeq*."""
    return (f"{_BASE}/post/v1.0/officialVideoPost-{videoSeq}"
            f"?appId={AppId}&fields={_OFFICIAL_VIDEO_POST_FIELDS}{LocaleParam}")


def APIofficialVideoPostReferer(videoSeq):
    """Return the referer header dict for video sequence *videoSeq*."""
    return {"referer": f"https://www.vlive.tv/video/{videoSeq}"}


def APILiveV3PlayInfoUrl(videoSeq):
    """Return the live v3 playInfo API URL. Optional: vpdid2."""
    return f"{_BASE}/old/v3/live/{videoSeq}/playInfo?appId={AppId}{PlatformPCParam}{LocaleParam}"


def APILiveV2StatusUrl(videoSeq):
    """Return the live v2 status API URL."""
    return f"{_BASE}/old/v2/live/{videoSeq}/status?appId={AppId}{LocaleParam}"


def APIVodPlayInfoUrl(vodId, inkey):
    """Return the naver VOD play-info API URL for *vodId* keyed by *inkey*."""
    return f"https://apis.naver.com/rmcnmv/rmcnmv/vod/play/v2.0/{vodId}?key={inkey}&videoId={vodId}"


APIVodPlayInfoReferer = {"referer": "https://www.vlive.tv/"}

# User-Agent header for requests module
HeaderUserAgent = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                                 "AppleWebKit/537.36 (KHTML, like Gecko) "
                                 "Chrome/87.0.4280.88 Safari/537.36"}
# Accept-Language header for requests module
HeaderAcceptLang = {"Accept-Language": "ko-KR,ko;q=0.9,en-US;q=0.8,en;q=0.7"}
# Header for common use
HeaderCommon = {**HeaderUserAgent, **HeaderAcceptLang} | vlivepy/variables.py | AppId = "8c6cc7b45d2568fb668be6e05b6e5a3b"
# locale parameter(url postfix)
LocaleParam = "&gcc=KR&locale=ko_KR"
PlatformPCParam = "&platformType=PC"
# API: Post Info API
# APIPostUrl("POST-ID"): str
# APIPostReferer("POST-ID"): dict
def APIPostUrl(post):
return "https://www.vlive.tv/globalv-web/vam-web/post/v1.0/post-%s?" \
"appId=%s&fields=title,attachments,officialVideo%s" \
% (post, AppId, LocaleParam)
def APIPostReferer(post):
return {"Referer": "https://www.vlive.tv/post/%s" % post}
# API: Get user session (sign-in)
# APISignInUrl: str
# APISignInReferer: dict
APISignInUrl = "https://www.vlive.tv/auth/email/login"
APISignInReferer = {'Referer': 'https://www.vlive.tv/auth/email/login'}
def APIInkeyUrl(videoSeq):
return ("https://www.vlive.tv/globalv-web/vam-web/video/v1.0/vod/%s/inkey?appId=%s%s%s" %
(videoSeq, AppId, LocaleParam, PlatformPCParam))
# API: officialVideoPost
def APIofficialVideoPostUrl(videoSeq):
return ("https://www.vlive.tv/globalv-web/vam-web/post/v1.0/officialVideoPost-"
"%s?appId=%s&fields=attachments,author,authorId,availableActions,"
"board{boardId,title,boardType,readAllowedLabel,payRequired,"
"includedCountries,excludedCountries},boardId,body,channel{channelName,channelCode},"
"channelCode,commentCount,contentType,createdAt,emotionCount,excludedCountries,"
"includedCountries,isViewerBookmarked,isCommentEnabled,isHiddenFromStar,lastModifierMember,"
"notice,officialVideo,originPost,plainBody,postId,postVersion,reservation,starReactions,"
"targetMember,targetMemberId,thumbnail,title,url,smartEditorAsHtml,viewerEmotionId,"
"writtenIn"
"%s" % (videoSeq, AppId, LocaleParam))
def APIofficialVideoPostReferer(videoSeq):
return {"referer": "https://www.vlive.tv/video/%s" % videoSeq}
def APILiveV3PlayInfoUrl(videoSeq):
# Optional: vpdid2
return ("https://www.vlive.tv/globalv-web/vam-web/old/v3/live/%s/playInfo?appId=%s%s%s" %
(videoSeq, AppId, PlatformPCParam, LocaleParam))
def APILiveV2StatusUrl(videoSeq):
return ("https://www.vlive.tv/globalv-web/vam-web/old/v2/live/%s/status?appId=%s%s" %
(videoSeq, AppId, LocaleParam))
def APIVodPlayInfoUrl(vodId, inkey):
return "https://apis.naver.com/rmcnmv/rmcnmv/vod/play/v2.0/%s?key=%s&videoId=%s" % (vodId, inkey, vodId)
APIVodPlayInfoReferer = {"referer": "https://www.vlive.tv/"}
# User-Agent header for requests module
HeaderUserAgent = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/87.0.4280.88 Safari/537.36"}
# Accept-Language header for requests module
HeaderAcceptLang = {"Accept-Language": "ko-KR,ko;q=0.9,en-US;q=0.8,en;q=0.7"}
# Header for common use
HeaderCommon = {**HeaderUserAgent, **HeaderAcceptLang} | 0.417271 | 0.117826 |
import json
import os
import time
from traceback import print_exc
from typing import TypedDict, cast
import click
from flask import Flask, redirect, url_for, session
from flask.cli import with_appcontext
from flask.wrappers import Response
from flask_dance.contrib.google import make_google_blueprint
from flask_dance.consumer import oauth_authorized, oauth_before_login, oauth_error
from flask_dance.consumer.oauth2 import OAuth2ConsumerBlueprint
from flask_dance.consumer.storage.sqla import OAuthConsumerMixin, SQLAlchemyStorage
import flask_sqlalchemy as fsql
from flask_sqlalchemy import SQLAlchemy
from flask_security import (
UserMixin, RoleMixin, SQLAlchemyUserDatastore, Security, current_user, login_user
)
from requests import Session
from sqlalchemy.orm.exc import NoResultFound
def login(self):
    """Replacement for OAuth2ConsumerBlueprint.login.

    Instead of redirecting the browser, hand back the provider's
    authorization URL and CSRF state so an SPA/API client can navigate itself.
    """
    print('In overridden login method')
    self.session.redirect_uri = url_for(".authorized", _external=True)
    auth_url, oauth_state = self.session.authorization_url(
        self.authorization_url, state=self.state, **self.authorization_url_params
    )
    # Stash the state in the Flask session under the same key flask-dance uses.
    session["%s_oauth_state" % self.name] = oauth_state
    oauth_before_login.send(self, url=auth_url)
    return {'url': auth_url, 'state': oauth_state}
# Install the payload-returning login defined above in place of flask-dance's
# redirecting implementation.
OAuth2ConsumerBlueprint.login = login

# Short aliases for SQLAlchemy constructors/types, used only in the model
# annotations below.
C = fsql.sqlalchemy.Column
Int = fsql.sqlalchemy.Integer
Str = fsql.sqlalchemy.String
Bool = fsql.sqlalchemy.Boolean
DT = fsql.sqlalchemy.DateTime
FK = fsql.sqlalchemy.ForeignKey
Rel = fsql.sqlalchemy.orm.RelationshipProperty

db = SQLAlchemy()

# Association table backing the User<->Role many-to-many relationship.
roles_users: fsql.sqlalchemy.Table = db.Table(
    'roles_users',
    db.Column('user_id', db.Integer(), db.ForeignKey('user.id')),
    db.Column('role_id', db.Integer(), db.ForeignKey('role.id'))
)
class Role(db.Model, RoleMixin):
    """Flask-Security role assignable to users via the roles_users table."""
    id: C[Int] = db.Column(db.Integer(), primary_key=True)
    name: C[Str] = db.Column(db.String(80), unique=True)
    description: C[Str] = db.Column(db.String(255))
class User(UserMixin, db.Model):
    """Local user account; may be created automatically from a Google login."""
    id: C[Int] = db.Column(db.Integer, primary_key=True)
    email: C[Str] = db.Column(db.String(255), unique=True)
    # Empty for OAuth-only accounts created in google_logged_in below.
    password: C[Str] = db.Column(db.String(255))
    active: C[Bool] = db.Column(db.Boolean())
    confirmed_at: C[DT] = db.Column(db.DateTime())
    # Many-to-many link to Role through the roles_users association table.
    roles = db.relationship(
        Role, secondary=roles_users, backref=db.backref('users', lazy='dynamic')
    )
class OAuth(OAuthConsumerMixin, db.Model):
    """Stored OAuth token (via OAuthConsumerMixin) linked to its local User."""
    # Google's stable user id for this account.
    provider_user_id: C[Str] = db.Column(db.String(256), unique=True, nullable=False)
    user_id: C[Int] = db.Column(db.Integer, db.ForeignKey(User.id), nullable=False)
    user: Rel = db.relationship(User)
# Flask-Security datastore over the models above; bound to the app further down.
user_datastore = SQLAlchemyUserDatastore(db, User, Role)
security = Security(datastore=user_datastore)
class GoogleWebCredentials(TypedDict):
    """Shape of the "web" object inside Google's client_secrets.json."""
    client_id: str
    project_id: str
    auth_uri: str
    token_uri: str
    auth_provider_x509_cert_url: str
    client_secret: str
    redirect_uris: list[str]


class GoogleCredentials(TypedDict):
    """Top-level shape of client_secrets.json as loaded below."""
    web: GoogleWebCredentials
    # NOTE(review): a requests ``Session`` under key "google" is not part of
    # Google's client_secrets.json format -- looks vestigial; confirm before use.
    google: Session
app = Flask(__name__)
# Credentials are read at import time; missing client_secrets.json aborts import.
with open('./client_secrets.json', 'rb') as f:
    creds: GoogleCredentials = json.load(f)
class Config(object):
    """Flask configuration (development settings)."""
    # NOTE(review): hard-coded fallback secret plus OAUTHLIB_INSECURE_TRANSPORT
    # and DEBUG are development-only -- must not ship to production.
    SECRET_KEY = os.getenv('FLASK_SECRET_KEY') or 'asdf'
    SQLALCHEMY_DATABASE_URI = 'sqlite:///app.sqlite3'
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    GOOGLE_OAUTH_CLIENT_ID = creds['web']['client_id']
    GOOGLE_OAUTH_CLIENT_SECRET = creds['web']['client_secret']
    OAUTHLIB_RELAX_TOKEN_SCOPE = True
    OAUTHLIB_INSECURE_TRANSPORT = True
    DEBUG = True
# Google OAuth blueprint; tokens are persisted per-user through the OAuth model.
blueprint = make_google_blueprint(
    scope=['profile', 'email', 'https://www.googleapis.com/auth/youtube.force-ssl'],
    storage=SQLAlchemyStorage(OAuth, db.session, user=current_user)
)
@click.command(name='createdb')
@with_appcontext
def create_db():
    """CLI command: create all database tables (best-effort, idempotent)."""
    try:
        db.create_all()
        db.session.commit()
    except Exception:
        print_exc()
        print("Database tables already created or something went wrong")
    else:
        print("Database tables created")
@oauth_authorized.connect_via(blueprint)
def google_logged_in(blueprint, token):
    """Signal handler run after Google authorizes: log the user in.

    Creates the local User and OAuth rows on first login. Always returns
    False so flask-dance's default token storage is skipped.
    """
    if not token:
        return False
    print(token)
    resp = blueprint.session.get("/oauth2/v1/userinfo")
    if not resp.ok:
        return False
    info = resp.json()
    user_id = info["id"]
    # Find this OAuth token in the database, or create it
    query = OAuth.query.filter_by(provider=blueprint.name, provider_user_id=user_id)
    try:
        oauth = query.one()
    except NoResultFound:
        oauth = OAuth(provider=blueprint.name, provider_user_id=user_id, token=token)
    if oauth.user:
        # NOTE(review): an existing row keeps its previously stored token; the
        # fresh `token` from this login is never persisted -- confirm intended.
        login_user(oauth.user)
    else:
        # Create a new local user account for this user
        user = User(email=info["email"], active=True)
        # Associate the new local user account with the OAuth token
        oauth.user = user
        # Save and commit our database models
        db.session.add_all([user, oauth])
        db.session.commit()
        # Log in the new local user account
        login_user(user)
    # Disable Flask-Dance's default behavior for saving the OAuth token
    return False
# Bind config, OAuth blueprint, CLI command and extensions to the app.
app.config.from_object(Config)
app.register_blueprint(blueprint, url_prefix='/ytsp')
app.cli.add_command(create_db)
db.init_app(app)
security.init_app(app, user_datastore)
@app.route("/")
def index():
    """Return the stored Google token if still fresh, else start OAuth login."""
    if current_user.is_authenticated:
        # NOTE(review): .one() raises NoResultFound for users without an OAuth
        # row, and returning the raw token exposes it to the browser -- confirm
        # both are intended.
        token = OAuth.query.filter_by(user_id=current_user.id).one().token
        if token['expires_at'] > time.time():
            return token
    return redirect(url_for('google.login'))
app.run() | server.py | import json
import os
import time
from traceback import print_exc
from typing import TypedDict, cast
import click
from flask import Flask, redirect, url_for, session
from flask.cli import with_appcontext
from flask.wrappers import Response
from flask_dance.contrib.google import make_google_blueprint
from flask_dance.consumer import oauth_authorized, oauth_before_login, oauth_error
from flask_dance.consumer.oauth2 import OAuth2ConsumerBlueprint
from flask_dance.consumer.storage.sqla import OAuthConsumerMixin, SQLAlchemyStorage
import flask_sqlalchemy as fsql
from flask_sqlalchemy import SQLAlchemy
from flask_security import (
UserMixin, RoleMixin, SQLAlchemyUserDatastore, Security, current_user, login_user
)
from requests import Session
from sqlalchemy.orm.exc import NoResultFound
def login(self):
print('In overridden login method')
self.session.redirect_uri = url_for(".authorized", _external=True)
url, state = self.session.authorization_url(
self.authorization_url, state=self.state, **self.authorization_url_params
)
state_key = f"{self.name}_oauth_state"
session[state_key] = state
oauth_before_login.send(self, url=url)
return {'url': url, 'state': state}
OAuth2ConsumerBlueprint.login = login
C = fsql.sqlalchemy.Column
Int = fsql.sqlalchemy.Integer
Str = fsql.sqlalchemy.String
Bool = fsql.sqlalchemy.Boolean
DT = fsql.sqlalchemy.DateTime
FK = fsql.sqlalchemy.ForeignKey
Rel = fsql.sqlalchemy.orm.RelationshipProperty
db = SQLAlchemy()
roles_users: fsql.sqlalchemy.Table = db.Table(
'roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id'))
)
class Role(db.Model, RoleMixin):
id: C[Int] = db.Column(db.Integer(), primary_key=True)
name: C[Str] = db.Column(db.String(80), unique=True)
description: C[Str] = db.Column(db.String(255))
class User(UserMixin, db.Model):
id: C[Int] = db.Column(db.Integer, primary_key=True)
email: C[Str] = db.Column(db.String(255), unique=True)
password: C[Str] = db.Column(db.String(255))
active: C[Bool] = db.Column(db.Boolean())
confirmed_at: C[DT] = db.Column(db.DateTime())
roles = db.relationship(
Role, secondary=roles_users, backref=db.backref('users', lazy='dynamic')
)
class OAuth(OAuthConsumerMixin, db.Model):
provider_user_id: C[Str] = db.Column(db.String(256), unique=True, nullable=False)
user_id: C[Int] = db.Column(db.Integer, db.ForeignKey(User.id), nullable=False)
user: Rel = db.relationship(User)
user_datastore = SQLAlchemyUserDatastore(db, User, Role)
security = Security(datastore=user_datastore)
class GoogleWebCredentials(TypedDict):
client_id: str
project_id: str
auth_uri: str
token_uri: str
auth_provider_x509_cert_url: str
client_secret: str
redirect_uris: list[str]
class GoogleCredentials(TypedDict):
web: GoogleWebCredentials
google: Session
app = Flask(__name__)
with open('./client_secrets.json', 'rb') as f:
creds: GoogleCredentials = json.load(f)
class Config(object):
SECRET_KEY = os.getenv('FLASK_SECRET_KEY') or 'asdf'
SQLALCHEMY_DATABASE_URI = 'sqlite:///app.sqlite3'
SQLALCHEMY_TRACK_MODIFICATIONS = False
GOOGLE_OAUTH_CLIENT_ID = creds['web']['client_id']
GOOGLE_OAUTH_CLIENT_SECRET = creds['web']['client_secret']
OAUTHLIB_RELAX_TOKEN_SCOPE = True
OAUTHLIB_INSECURE_TRANSPORT = True
DEBUG = True
blueprint = make_google_blueprint(
scope=['profile', 'email', 'https://www.googleapis.com/auth/youtube.force-ssl'],
storage=SQLAlchemyStorage(OAuth, db.session, user=current_user)
)
@click.command(name='createdb')
@with_appcontext
def create_db():
try:
db.create_all()
db.session.commit()
print("Database tables created")
except Exception:
print_exc()
print("Database tables already created or something went wrong")
@oauth_authorized.connect_via(blueprint)
def google_logged_in(blueprint, token):
if not token:
return False
print(token)
resp = blueprint.session.get("/oauth2/v1/userinfo")
if not resp.ok:
return False
info = resp.json()
user_id = info["id"]
# Find this OAuth token in the database, or create it
query = OAuth.query.filter_by(provider=blueprint.name, provider_user_id=user_id)
try:
oauth = query.one()
except NoResultFound:
oauth = OAuth(provider=blueprint.name, provider_user_id=user_id, token=token)
if oauth.user:
login_user(oauth.user)
else:
# Create a new local user account for this user
user = User(email=info["email"], active=True)
# Associate the new local user account with the OAuth token
oauth.user = user
# Save and commit our database models
db.session.add_all([user, oauth])
db.session.commit()
# Log in the new local user account
login_user(user)
# Disable Flask-Dance's default behavior for saving the OAuth token
return False
app.config.from_object(Config)
app.register_blueprint(blueprint, url_prefix='/ytsp')
app.cli.add_command(create_db)
db.init_app(app)
security.init_app(app, user_datastore)
@app.route("/")
def index():
if current_user.is_authenticated:
token = OAuth.query.filter_by(user_id=current_user.id).one().token
if token['expires_at'] > time.time():
return token
return redirect(url_for('google.login'))
app.run() | 0.423577 | 0.054651 |
from abc import ABC, abstractmethod
from functools import lru_cache
from typing import Iterator
class Team(ABC):
    """Abstract base class for an NBA team's static identity data."""
    pass


class Teams(ABC):
    """Abstract iterator interface over a collection of teams."""

    @abstractmethod
    def __next__(self) -> Team:
        pass

    @abstractmethod
    def __iter__(self) -> Iterator[Team]:
        pass


# NOTE: the full_name/nick_name literals below had been corrupted to '<NAME>'
# placeholders; they are restored from each class's own docstring.

class Atlanta(Team):
    """Represent `Atlanta Hawks` nba team."""
    full_name: str = 'Atlanta Hawks'
    tri_code: str = 'ATL'
    team_id: str = '1610612737'
    nick_name: str = 'Hawks'
    url_name: str = 'hawks'


class Boston(Team):
    """Represent `Boston Celtics` nba team."""
    full_name: str = 'Boston Celtics'
    tri_code: str = 'BOS'
    team_id: str = '1610612738'
    nick_name: str = 'Celtics'
    url_name: str = 'celtics'


class Brooklyn(Team):
    """Represent `Brooklyn Nets` nba team."""
    full_name: str = 'Brooklyn Nets'
    tri_code: str = 'BKN'
    team_id: str = '1610612751'
    nick_name: str = 'Brooklyn'
    url_name: str = 'nets'


class Charlotte(Team):
    """Represent `Charlotte Hornets` nba team."""
    full_name: str = 'Charlotte Hornets'
    tri_code: str = 'CHA'
    team_id: str = '1610612766'
    nick_name: str = 'Hornets'
    url_name: str = 'hornets'


class Chicago(Team):
    """Represent `Chicago Bulls` nba team."""
    full_name: str = 'Chicago Bulls'
    tri_code: str = 'CHI'
    team_id: str = '1610612741'
    nick_name: str = 'Bulls'
    url_name: str = 'bulls'


class Cleveland(Team):
    """Represent `Cleveland Cavaliers` nba team."""
    full_name: str = 'Cleveland Cavaliers'
    tri_code: str = 'CLE'
    team_id: str = '1610612739'
    nick_name: str = 'Cavaliers'
    url_name: str = 'cavaliers'


class Dallas(Team):
    """Represent `Dallas Mavericks` nba team."""
    full_name: str = 'Dallas Mavericks'
    tri_code: str = 'DAL'
    team_id: str = '1610612742'
    nick_name: str = 'Mavericks'
    url_name: str = 'mavericks'


class Denver(Team):
    """Represent `Denver Nuggets` nba team."""
    full_name: str = 'Denver Nuggets'
    tri_code: str = 'DEN'
    team_id: str = '1610612743'
    nick_name: str = 'Nuggets'
    url_name: str = 'nuggets'


class Detroit(Team):
    """Represent `Detroit Pistons` nba team."""
    full_name: str = 'Detroit Pistons'
    tri_code: str = 'DET'
    team_id: str = '1610612765'
    nick_name: str = 'Pistons'
    url_name: str = 'pistons'


class GoldenState(Team):
    """Represent `Golden State Warriors` nba team."""
    full_name: str = 'Golden State Warriors'
    tri_code: str = 'GSW'
    team_id: str = '1610612744'
    nick_name: str = 'Warriors'
    url_name: str = 'warriors'


class Houston(Team):
    """Represent `Houston Rockets` nba team."""
    full_name: str = 'Houston Rockets'
    tri_code: str = 'HOU'
    team_id: str = '1610612745'
    nick_name: str = 'Rockets'
    url_name: str = 'rockets'


class Indiana(Team):
    """Represent `Indiana Pacers` nba team."""
    full_name: str = 'Indiana Pacers'
    tri_code: str = 'IND'
    team_id: str = '1610612754'
    nick_name: str = 'Pacers'
    url_name: str = 'pacers'


class Clippers(Team):
    """Represent `LA Clippers` nba team."""
    full_name: str = 'LA Clippers'
    tri_code: str = 'LAC'
    team_id: str = '1610612746'
    nick_name: str = 'Clippers'
    url_name: str = 'clippers'


class Lakers(Team):
    """Represent `Los Angeles Lakers` nba team."""
    full_name: str = 'Los Angeles Lakers'
    tri_code: str = 'LAL'
    team_id: str = '1610612747'
    nick_name: str = 'Lakers'
    url_name: str = 'lakers'


class Memphis(Team):
    """Represent `Memphis Grizzlies` nba team."""
    full_name: str = 'Memphis Grizzlies'
    tri_code: str = 'MEM'
    team_id: str = '1610612763'
    nick_name: str = 'Grizzlies'
    url_name: str = 'grizzlies'


class Miami(Team):
    """Represent `Miami Heat` nba team."""
    full_name: str = 'Miami Heat'
    tri_code: str = 'MIA'
    team_id: str = '1610612748'
    nick_name: str = 'Heat'
    url_name: str = 'heat'


class Milwaukee(Team):
    """Represent `Milwaukee Bucks` nba team."""
    full_name: str = 'Milwaukee Bucks'
    tri_code: str = 'MIL'
    team_id: str = '1610612749'
    nick_name: str = 'Bucks'
    url_name: str = 'bucks'


class Minnesota(Team):
    """Represent `Minnesota Timberwolves` nba team."""
    full_name: str = 'Minnesota Timberwolves'
    tri_code: str = 'MIN'
    team_id: str = '1610612750'
    nick_name: str = 'Timberwolves'
    url_name: str = 'timberwolves'


class NewOrleans(Team):
    """Represent `New Orleans Pelicans` nba team."""
    full_name: str = 'New Orleans Pelicans'
    tri_code: str = 'NOP'
    team_id: str = '1610612740'
    nick_name: str = 'Pelicans'
    url_name: str = 'pelicans'


class NewYork(Team):
    """Represent `New York Knicks` nba team."""
    full_name: str = 'New York Knicks'
    tri_code: str = 'NYK'
    team_id: str = '1610612752'
    nick_name: str = 'Knicks'
    url_name: str = 'knicks'


class OklahomaCity(Team):
    """Represent `Oklahoma City Thunder` nba team."""
    full_name: str = 'Oklahoma City Thunder'
    tri_code: str = 'OKC'
    team_id: str = '1610612760'
    nick_name: str = 'Thunder'
    url_name: str = 'thunder'


class Orlando(Team):
    """Represent `Orlando Magic` nba team."""
    full_name: str = 'Orlando Magic'
    tri_code: str = 'ORL'
    team_id: str = '1610612753'
    nick_name: str = 'Magic'
    url_name: str = 'magic'


class Philadelphia(Team):
    """Represent `Philadelphia 76ers` nba team."""
    full_name: str = 'Philadelphia 76ers'
    tri_code: str = 'PHI'
    team_id: str = '1610612755'
    nick_name: str = '76ers'
    url_name: str = 'sixers'


class Phoenix(Team):
    """Represent `Phoenix Suns` nba team."""
    full_name: str = 'Phoenix Suns'
    tri_code: str = 'PHX'
    team_id: str = '1610612756'
    nick_name: str = 'Suns'
    url_name: str = 'suns'


class Portland(Team):
    """Represent `Portland Trail Blazers` nba team."""
    full_name: str = 'Portland Trail Blazers'
    tri_code: str = 'POR'
    team_id: str = '1610612757'
    nick_name: str = 'Trail Blazers'
    url_name: str = 'blazers'


class Sacramento(Team):
    """Represent `Sacramento Kings` nba team."""
    full_name: str = 'Sacramento Kings'
    tri_code: str = 'SAC'
    team_id: str = '1610612758'
    nick_name: str = 'Kings'
    url_name: str = 'kings'


class SanAntonio(Team):
    """Represent `San Antonio Spurs` nba team."""
    full_name: str = 'San Antonio Spurs'
    tri_code: str = 'SAS'
    team_id: str = '1610612759'
    nick_name: str = 'Spurs'
    url_name: str = 'spurs'


class Toronto(Team):
    """Represent `Toronto Raptors` nba team."""
    full_name: str = 'Toronto Raptors'
    tri_code: str = 'TOR'
    team_id: str = '1610612761'
    nick_name: str = 'Raptors'
    url_name: str = 'raptors'


class Utah(Team):
    """Represent `Utah Jazz` nba team."""
    full_name: str = 'Utah Jazz'
    tri_code: str = 'UTA'
    team_id: str = '1610612762'
    nick_name: str = 'Jazz'
    url_name: str = 'jazz'


class Washington(Team):
    """Represent `Washington Wizards` nba team."""
    full_name: str = 'Washington Wizards'
    tri_code: str = 'WAS'
    team_id: str = '1610612764'
    nick_name: str = 'Wizards'
    url_name: str = 'wizards'
class NbaTeams(Teams):
    """Single-pass iterator over the 30 concrete nba team classes.

    Fixes: Miami was missing from the original yield tuple (29 of 30 teams),
    and the lru_cache-wrapped generator is replaced by a plain iterator with
    the same one-shot iteration semantics.
    """

    def __init__(self) -> None:
        self._iter: Iterator[Team] = iter((
            Atlanta, Boston, Brooklyn, Charlotte, Chicago, Cleveland, Dallas, Denver, Detroit,
            GoldenState, Houston, Indiana, Clippers, Lakers, Memphis, Miami, Milwaukee,
            Minnesota, NewOrleans, NewYork, OklahomaCity, Orlando, Philadelphia, Phoenix,
            Portland, Sacramento, SanAntonio, Toronto, Utah, Washington
        ))

    def __next__(self) -> Team:
        return next(self._iter)

    def __iter__(self) -> Iterator[Team]:
        return self
from functools import lru_cache
from typing import Iterator
class Team(ABC):
"""Abstract interface for some teams."""
pass
class Teams(ABC):
"""Abstract interface for some teams."""
@abstractmethod
def __next__(self) -> Team:
pass
@abstractmethod
def __iter__(self) -> Iterator[Team]:
pass
class Atlanta(Team):
"""Represent `Atlanta Hawks` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'ATL'
team_id: str = '1610612737'
nick_name: str = 'Hawks'
url_name: str = 'hawks'
class Boston(Team):
"""Represent `Boston Celtics` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'BOS'
team_id: str = '1610612738'
nick_name: str = 'Celtics'
url_name: str = 'celtics'
class Brooklyn(Team):
"""Represent `Brooklyn Nets` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'BKN'
team_id: str = '1610612751'
nick_name: str = 'Brooklyn'
url_name: str = 'nets'
class Charlotte(Team):
"""Represent `Charlotte Hornets` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'CHA'
team_id: str = '1610612766'
nick_name: str = 'Hornets'
url_name: str = 'hornets'
class Chicago(Team):
"""Represent `Chicago Bulls` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'CHI'
team_id: str = '1610612741'
nick_name: str = 'Bulls'
url_name: str = 'bulls'
class Cleveland(Team):
"""Represent `Cleveland Cavaliers` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'CLE'
team_id: str = '1610612739'
nick_name: str = 'Cavaliers'
url_name: str = 'cavaliers'
class Dallas(Team):
"""Represent `Dallas Mavericks` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'DAL'
team_id: str = '1610612742'
nick_name: str = 'Mavericks'
url_name: str = 'mavericks'
class Denver(Team):
"""Represent `Denver Nuggets` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'DEN'
team_id: str = '1610612743'
nick_name: str = 'Nuggets'
url_name: str = 'nuggets'
class Detroit(Team):
"""Represent `Detroit Pistons` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'DET'
team_id: str = '1610612765'
nick_name: str = 'Pistons'
url_name: str = 'pistons'
class GoldenState(Team):
"""Represent `Golden State` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'GSW'
team_id: str = '1610612744'
nick_name: str = 'Warriors'
url_name: str = 'warriors'
class Houston(Team):
"""Represent `Houston Rockets` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'HOU'
team_id: str = '1610612745'
nick_name: str = 'Rockets'
url_name: str = 'rockets'
class Indiana(Team):
"""Represent `Indiana Pacers` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'IND'
team_id: str = '1610612754'
nick_name: str = 'Pacers'
url_name: str = 'pacers'
class Clippers(Team):
"""Represent `LA Clippers` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'LAC'
team_id: str = '1610612746'
nick_name: str = 'Clippers'
url_name: str = 'clippers'
class Lakers(Team):
"""Represent `Los Angeles Lakers` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'LAL'
team_id: str = '1610612747'
nick_name: str = 'Lakers'
url_name: str = 'lakers'
class Memphis(Team):
"""Represent `Memphis Grizzlies` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'MEM'
team_id: str = '1610612763'
nick_name: str = 'Grizzlies'
url_name: str = 'grizzlies'
class Miami(Team):
"""Represent `Miami Heat` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'MIA'
team_id: str = '1610612748'
nick_name: str = 'Heat'
url_name: str = 'heat'
class Milwaukee(Team):
"""Represent `Milwaukee Bucks` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'MIL'
team_id: str = '1610612749'
nick_name: str = 'Bucks'
url_name: str = 'bucks'
class Minnesota(Team):
"""Represent `Minnesota Timberwolves` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'MIN'
team_id: str = '1610612750'
nick_name: str = 'Timberwolves'
url_name: str = 'timberwolves'
class NewOrleans(Team):
"""Represent `New Orleans Pelicans` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'NOP'
team_id: str = '1610612740'
nick_name: str = 'Pelicans'
url_name: str = 'pelicans'
class NewYork(Team):
"""Represent `New York Knicks` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'NYK'
team_id: str = '1610612752'
nick_name: str = 'Knicks'
url_name: str = 'knicks'
class OklahomaCity(Team):
"""Represent `Oklahoma City` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'OKC'
team_id: str = '1610612760'
nick_name: str = 'Thunder'
url_name: str = 'thunder'
class Orlando(Team):
"""Represent `Orlando Magic` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'ORL'
team_id: str = '1610612753'
nick_name: str = 'Magic'
url_name: str = 'magic'
class Philadelphia(Team):
"""Represent `Philadelphia 76ers` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'PHI'
team_id: str = '1610612755'
nick_name: str = '76ers'
url_name: str = 'sixers'
class Phoenix(Team):
"""Represent `Phoenix Suns` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'PHX'
team_id: str = '1610612756'
nick_name: str = 'Suns'
url_name: str = 'suns'
class Portland(Team):
"""Represent `Portland Trail Blazers` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'POR'
team_id: str = '1610612757'
nick_name: str = '<NAME>'
url_name: str = 'blazers'
class Sacramento(Team):
"""Represent `Sacramento Kings` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'SAC'
team_id: str = '1610612758'
nick_name: str = 'Kings'
url_name: str = 'kings'
class SanAntonio(Team):
"""Represent `San Antonio Spurs` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'SAS'
team_id: str = '1610612759'
nick_name: str = 'Spurs'
url_name: str = 'spurs'
class Toronto(Team):
"""Represent `Toronto Raptors` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'TOR'
team_id: str = '1610612761'
nick_name: str = 'Raptors'
url_name: str = 'raptors'
class Utah(Team):
"""Represent `Utah Jazz` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'UTA'
team_id: str = '1610612762'
nick_name: str = 'Jazz'
url_name: str = 'jazz'
class Washington(Team):
"""Represent `Washington Wizards` nba team."""
full_name: str = '<NAME>'
tri_code: str = 'WAS'
team_id: str = '1610612764'
nick_name: str = 'Wizards'
url_name: str = 'wizards'
class NbaTeams(Teams):
"""Concrete interface for nba teams."""
def __init__(self) -> None:
@lru_cache()
def teams() -> Iterator[Team]:
yield from (
Atlanta, Boston, Brooklyn, Charlotte, Chicago, Cleveland, Dallas, Denver, Detroit,
GoldenState, Houston, Indiana, Clippers, Lakers, Memphis, Milwaukee, Minnesota,
NewOrleans, NewYork, OklahomaCity, Orlando, Philadelphia, Phoenix, Portland,
Sacramento, SanAntonio, Toronto, Utah, Washington
)
self._teams = teams
def __next__(self) -> Team:
return next(self._teams())
def __iter__(self) -> Iterator[Team]:
return self | 0.872863 | 0.128635 |
import serial
import time
import socket
import struct
import msvcrt
# Ground-station side of an XBee serial link (Python 2 script, Windows COM port).
ser = serial.Serial('com7', 9600, timeout = 0.5)
# UDP endpoint for the (currently disabled) ground-control socket feed below.
UDP_IP = "10.6.3.1"
UDP_PORT = 5005
#sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#sock.bind((UDP_IP, UDP_PORT))
#sock.listen(1)
#conn, addr = sock.accept()
#print "Connected by: ", addr
def send_full_data(Name, arg1, arg2, arg3):
    """Send a named triple over serial: announce Name until the drone answers
    "Go", then send each arg and wait for its "Received argN" acknowledgement.

    NOTE(review): leading indentation was lost in this dump; nesting below is
    reconstructed from statement order -- confirm against the original file.
    """
    while True:
        ser.write("%s\n" % Name)
        print Name
        time.sleep(.5)
        incoming = ser.readline().strip()
        if incoming == "Go":
            print "writing arg1"
            time.sleep(1)
            ser.write("%s\n" % arg1)
            time.sleep(1)
            becoming = ser.readline().strip()
            print "should receive"
            print becoming
            if becoming == "Received arg1":
                print "writing arg2"
                time.sleep(1)
                ser.write("%s\n" % arg2)
                time.sleep(1)
                #print "I got to here"
                becoming = ser.readline().strip()
                print becoming
                if becoming == "Received arg2":
                    print "writing arg3"
                    time.sleep(1)
                    ser.write("%s\n" % arg3)
                    #print "I got to here"
                    time.sleep(1)
                    becoming = ser.readline().strip()
                    print becoming
                    if becoming == "Received arg3":
                        break
            print "woohoo"
            break
def rec_full_data(Name):
while True:
ser.write("%s\n" % Name)
time.sleep(1)
if ser.readline().strip() == Name:
time.sleep(.5)
ser.write("Go\n")
print "ready for arg1"
time.sleep(1)
incoming = ser.readline().strip()
while True:
try:
incoming = ser.readline().strip()
print "incoming is: %s" % incoming
arg1 = float(incoming)
time.sleep(.5)
ser.write("Received arg1\n")
print "Arg1 worked!"
break
except ValueError, e:
print "error", e
time.sleep(1)
#ready for arg2
time.sleep(.5)
incoming = ser.readline().strip()
print incoming
while True:
try:
incoming = ser.readline().strip()
print "incoming is: %s" % incoming
arg2 = float(incoming)
time.sleep(.5)
ser.write("Received arg2\n")
print "Arg2 worked!"
break
except ValueError, e:
print "error", e
time.sleep(1)
#ready for arg3
time.sleep(.5)
incoming = ser.readline().strip()
print incoming
while True:
try:
incoming = ser.readline().strip()
print "incoming is: %s" % incoming
arg3 = float(incoming)
time.sleep(.5)
ser.write("Received arg3\n")
print "Arg3 worked!"
break
except ValueError, e:
print "error", e
time.sleep(1)
incoming = ser.readline().strip()
print "incoming is: %s" % incoming
return Name, arg1, arg2, arg3
#How to receive chars thru XBee
def rec_key(Name):
    """Handshake on *Name* and receive a single keypress string.

    NOTE(review): leading indentation was lost in this dump; nesting below is
    reconstructed from statement order -- confirm against the original file.
    """
    while True:
        ser.write("%s\n" % Name)
        time.sleep(1)
        if ser.readline().strip() == Name:
            time.sleep(.5)
            print Name
            ser.write("Go\n")
            print "ready for arg1"
            time.sleep(.5)
            incoming = ser.readline().strip()
            print "incoming is: %s" % incoming
            while True:
                # If we only read our own echoed Name, read again for the key.
                if incoming == Name:
                    time.sleep(.5)
                    print "still failing"
                    bc = ser.readline().strip()
                    if bc != Name:
                        arg1 = bc
                        print "got it"
                        ser.write("Received arg1\n")
                        break
                else:
                    arg1 = incoming
                    print "got it"
                    ser.write("Received arg1\n")
                    break
            return Name, arg1
#How to send chars thru XBee
def send_key(Name, arg1):
    """Send a single keypress string over serial after a "Go" handshake.

    NOTE(review): leading indentation was lost in this dump; nesting below is
    reconstructed from statement order -- confirm against the original file.
    """
    while True:
        ser.write("%s\n" % Name)
        time.sleep(.5)
        incoming = ser.readline().strip()
        print "waiting for GO"
        if incoming == "Go":
            print "writing arg1"
            time.sleep(.5)
            # Re-send the key until the drone acknowledges it.
            while True:
                ser.write("%s\n" % arg1)
                print "wrote"
                time.sleep(.5)
                becoming = ser.readline().strip()
                print "becoming is: %s" % becoming
                if becoming == "Received arg1":
                    return
#i = 1
# Waypoint state stays None until the disabled UDP feed above is re-enabled,
# so the drone currently receives None for every coordinate.
lat = None
lon = None
alt = None
elat = None
elon = None
ealt = None
# Main dispatch loop: answer the drone's serial requests forever.
while True:
    """
    data = conn.recv(1024)
    lat,lon,alt,elat,elon,ealt = data.split(",")
    print "received message: ", data
    lat = float(lat)
    lon = float(lon)
    alt = float(alt)
    elat = float(elat)
    elon = float(elon)
    ealt = float(ealt)
    """
    incoming = ser.readline().strip()
    print "Drone says: %s" % incoming
    if incoming == "WP":
        print "Asked for WP"
        time.sleep(.5)
        send_full_data("WP", lat, lon, alt)
        #send_full_data("WP", 39.793828, -84.171092, 12)
    elif incoming == "EnemyWP":
        print "Asked for EnemyWP"
        time.sleep(.5)
        send_full_data("EnemyWP", elat, elon, ealt)
        #send_full_data("EnemyWP", 42, 42, 3)
    elif incoming == "Key":
        print "asked for key"
        time.sleep(.5)
        print "Type Key Now"
        key = msvcrt.getch()
        send_key("key", key)
# NOTE(review): unreachable -- the loop above never breaks.
ser.close()
#sock.close() | gcs_code.py | import serial
import time
import socket
import struct
import msvcrt
ser = serial.Serial('com7', 9600, timeout = 0.5)
UDP_IP = "10.6.3.1"
UDP_PORT = 5005
#sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#sock.bind((UDP_IP, UDP_PORT))
#sock.listen(1)
#conn, addr = sock.accept()
#print "Connected by: ", addr
def send_full_data(Name, arg1, arg2, arg3):
    """Send three values over the XBee link with a stop-and-wait handshake.

    Announces ``Name``, waits for "Go", then writes arg1..arg3 in turn,
    expecting "Received argN" after each before sending the next.
    """
    while True:
        ser.write("%s\n" % Name)  # announce the message type
        print Name
        time.sleep(.5)
        incoming = ser.readline().strip()
        if incoming == "Go":  # receiver is ready for the three arguments
            print "writing arg1"
            time.sleep(1)
            ser.write("%s\n" % arg1)
            time.sleep(1)
            becoming = ser.readline().strip()
            print "should receive"
            print becoming
            if becoming == "Received arg1":
                print "writing arg2"
                time.sleep(1)
                ser.write("%s\n" % arg2)
                time.sleep(1)
                #print "I got to here"
                becoming = ser.readline().strip()
                print becoming
                if becoming == "Received arg2":
                    print "writing arg3"
                    time.sleep(1)
                    ser.write("%s\n" % arg3)
                    #print "I got to here"
                    time.sleep(1)
                    becoming = ser.readline().strip()
                    print becoming
                    if becoming == "Received arg3":
                        break
            # NOTE(review): reached even when arg1 was never acknowledged,
            # so an unacknowledged transfer still exits the outer loop.
            print "woohoo"
            break
def rec_full_data(Name):
    """Receive three floats over the XBee link.

    Mirrors ``send_full_data``: echoes ``Name`` until the sender repeats
    it back, replies "Go", then reads arg1..arg3, retrying each read
    until the line parses as a float, acknowledging with "Received argN".
    Returns ``(Name, arg1, arg2, arg3)``.
    """
    while True:
        ser.write("%s\n" % Name)
        time.sleep(1)
        if ser.readline().strip() == Name:  # sender echoed the message type
            time.sleep(.5)
            ser.write("Go\n")
            print "ready for arg1"
            time.sleep(1)
            incoming = ser.readline().strip()
            while True:  # retry until a parseable float arrives
                try:
                    incoming = ser.readline().strip()
                    print "incoming is: %s" % incoming
                    arg1 = float(incoming)
                    time.sleep(.5)
                    ser.write("Received arg1\n")
                    print "Arg1 worked!"
                    break
                except ValueError, e:
                    print "error", e
                    time.sleep(1)
            #ready for arg2
            time.sleep(.5)
            incoming = ser.readline().strip()
            print incoming
            while True:
                try:
                    incoming = ser.readline().strip()
                    print "incoming is: %s" % incoming
                    arg2 = float(incoming)
                    time.sleep(.5)
                    ser.write("Received arg2\n")
                    print "Arg2 worked!"
                    break
                except ValueError, e:
                    print "error", e
                    time.sleep(1)
            #ready for arg3
            time.sleep(.5)
            incoming = ser.readline().strip()
            print incoming
            while True:
                try:
                    incoming = ser.readline().strip()
                    print "incoming is: %s" % incoming
                    arg3 = float(incoming)
                    time.sleep(.5)
                    ser.write("Received arg3\n")
                    print "Arg3 worked!"
                    break
                except ValueError, e:
                    print "error", e
                    time.sleep(1)
            # Drain one trailing line before returning the parsed values.
            incoming = ser.readline().strip()
            print "incoming is: %s" % incoming
            return Name, arg1, arg2, arg3
#How to receive chars thru XBee
def rec_key(Name):
    """Receive a single keypress character over the XBee serial link.

    Echoes ``Name`` until the sender repeats it back, replies "Go",
    then reads lines until one differs from ``Name`` (the sender keeps
    re-announcing while waiting) and acknowledges with "Received arg1".
    Returns ``(Name, arg1)``.
    """
    while True:
        ser.write("%s\n" % Name)
        time.sleep(1)
        if ser.readline().strip() == Name:  # sender echoed the message type
            time.sleep(.5)
            print Name
            ser.write("Go\n")
            print "ready for arg1"
            time.sleep(.5)
            incoming = ser.readline().strip()
            print "incoming is: %s" % incoming
            while True:
                if incoming == Name:
                    # Still reading the sender's announcement; the real
                    # payload should be on the next line.
                    time.sleep(.5)
                    print "still failing"
                    bc = ser.readline().strip()
                    if bc != Name:
                        arg1 = bc
                        print "got it"
                        ser.write("Received arg1\n")
                        break
                else:
                    arg1 = incoming
                    print "got it"
                    ser.write("Received arg1\n")
                    break
            return Name, arg1
#How to send chars thru XBee
def send_key(Name, arg1):
while True:
ser.write("%s\n" % Name)
time.sleep(.5)
incoming = ser.readline().strip()
print "waiting for GO"
if incoming == "Go":
print "writing arg1"
time.sleep(.5)
while True:
ser.write("%s\n" % arg1)
print "wrote"
time.sleep(.5)
becoming = ser.readline().strip()
print "becoming is: %s" % becoming
if becoming == "Received arg1":
return
#i = 1
lat = None
lon = None
alt = None
elat = None
elon = None
ealt = None
while True:
"""
data = conn.recv(1024)
lat,lon,alt,elat,elon,ealt = data.split(",")
print "received message: ", data
lat = float(lat)
lon = float(lon)
alt = float(alt)
elat = float(elat)
elon = float(elon)
ealt = float(ealt)
"""
incoming = ser.readline().strip()
print "Drone says: %s" % incoming
if incoming == "WP":
print "Asked for WP"
time.sleep(.5)
send_full_data("WP", lat, lon, alt)
#send_full_data("WP", 39.793828, -84.171092, 12)
elif incoming == "EnemyWP":
print "Asked for EnemyWP"
time.sleep(.5)
send_full_data("EnemyWP", elat, elon, ealt)
#send_full_data("EnemyWP", 42, 42, 3)
elif incoming == "Key":
print "asked for key"
time.sleep(.5)
print "Type Key Now"
key = msvcrt.getch()
send_key("key", key)
ser.close()
#sock.close() | 0.05634 | 0.072834 |
#Scraps a page from Amazon website and collects all the product related information from the website and store them in a data frame.
import pandas as pd
import numpy as np
import re
from urllib.request import urlopen
from bs4 import BeautifulSoup
import requests
no_pages = 1
def get_data(pageNo):
    """Scrape one page of Amazon.in DVD best sellers.

    Args:
        pageNo: 1-based page number inserted into the best-sellers URL.

    Returns:
        A list of ``[movie name, user rating, stars, no. of ratings]``
        rows, one per product block found on the page.  Missing fields
        fall back to "Movie name cannot be found" / '0'.
    """
    # NOTE(review): the '?' placement in this URL looks odd (pageNo is
    # appended right after 'pg_?') -- preserved as-is; verify intent.
    r = requests.get('https://www.amazon.in/gp/bestsellers/dvd/21360334031/ref=zg_bs_pg_?'+str(pageNo)+'ie=UTF8&pg='+str(pageNo))
    content = r.content
    # Name the parser explicitly: letting bs4 auto-pick emits a warning
    # and can choose different parsers on different machines.
    soup = BeautifulSoup(content, 'html.parser')
    alls = []
    for d in soup.findAll('div', attrs={'class': 'a-section a-spacing-none aok-relative'}):
        name = d.find('a', attrs={'class': 'a-link-normal'})
        userRatings = d.find('span', attrs={'class': 'zg-badge-text'})
        stars = d.find('span', attrs={'class': 'a-icon-alt'})
        NoOfRatings = d.find('a', attrs={'class': 'a-size-small a-link-normal'})
        all1 = []
        if name is not None:
            # BUGFIX: find_all was previously called on `name` before the
            # None check, crashing with AttributeError for nameless blocks.
            # The movie name lives in the thumbnail image's alt text.
            n = name.find_all('img', alt=True)
            all1.append(n[0]['alt'])
        else:
            all1.append("Movie name cannot be found")
        all1.append(userRatings.text if userRatings is not None else '0')
        all1.append(stars.text if stars is not None else '0')
        all1.append(NoOfRatings.text if NoOfRatings is not None else '0')
        alls.append(all1)
    return alls
# Scrape every requested page, flatten the per-page row lists into one
# table, and persist it as CSV.
results = [get_data(i) for i in range(1, no_pages + 1)]

def flatten(pages):
    """Concatenate per-page row lists into a single flat list of rows."""
    # A named def replaces the previous lambda assignment (PEP 8 E731).
    return [row for page in pages for row in page]

df = pd.DataFrame(flatten(results),
                  columns=['Movie Name', 'User Rating', 'Stars', 'No of User Ratings'])
df.to_csv('actionMovies.csv', index=False, encoding='utf-8')
# Round-trip through the CSV so df matches exactly what was written.
df = pd.read_csv("actionMovies.csv")
df.head(5) | PYTHON/web_scraping_Amazon.py |
#Scraps a page from Amazon website and collects all the product related information from the website and store them in a data frame.
import pandas as pd
import numpy as np
import re
from urllib.request import urlopen
from bs4 import BeautifulSoup
import requests
no_pages = 1
def get_data(pageNo):
r = requests.get('https://www.amazon.in/gp/bestsellers/dvd/21360334031/ref=zg_bs_pg_?'+str(pageNo)+'ie=UTF8&pg='+str(pageNo))
content = r.content
soup = BeautifulSoup(content)
alls = []
for d in soup.findAll('div', attrs={'class':'a-section a-spacing-none aok-relative'}):
name = d.find('a', attrs={'class':'a-link-normal'})
n = name.find_all('img', alt=True)
userRatings = d.find('span', attrs={'class':'zg-badge-text'})
stars = d.find('span', attrs={'class':'a-icon-alt'})
NoOfRatings = d.find('a', attrs={'class':'a-size-small a-link-normal'})
all1=[]
if name is not None:
#print(n[0]['alt'])
all1.append(n[0]['alt'])
else:
all1.append("Movie name cannot be found")
if userRatings is not None:
#print(rating.text)
all1.append(userRatings.text)
else:
all1.append('0')
if stars is not None:
#print(rating.text)
all1.append(stars.text)
else:
all1.append('0')
if NoOfRatings is not None:
all1.append(NoOfRatings.text)
else:
all1.append('0')
alls.append(all1)
return alls
results = []
for i in range(1, no_pages+1):
results.append(get_data(i))
flatten = lambda l: [item for sublist in l for item in sublist]
df = pd.DataFrame(flatten(results),columns=['Movie Name', 'User Rating', 'Stars', 'No of User Ratings'])
df.to_csv('actionMovies.csv', index=False, encoding='utf-8')
df = pd.read_csv("actionMovies.csv")
df.head(5) | 0.1254 | 0.247879 |
import os
import shutil
import sys
import tinify
import settings
SUPPORTED_FORMATS = ('jpg', 'jpeg', 'png')
def create_dirs(raw_images_dir=settings.USER_INPUT_PATH,
                save_dir=settings.USER_OUTPUT_PATH):
    """Creates the necessary directories if they do not exist.

    Mirrors every first-level structure of the raw-images tree into the
    save tree so compressed copies can be written per sub-directory.

    Args
        raw_images_dir (str): raw directory path
        save_dir (str): save directory path
    """
    # Checking raw-images directory
    if not os.path.isdir(raw_images_dir):
        os.makedirs(raw_images_dir)
    # Collect user directories in raw-images dir
    custom_dirs = []
    for root, directories, files in os.walk(raw_images_dir):
        for directory in directories:
            custom_dirs.append(os.path.join(save_dir, directory))
    # Creation of all necessary dirs in the dir with compressed images.
    # BUGFIX: "(save_dir, (*custom_dirs))" was a SyntaxError -- a bare
    # parenthesised starred expression is invalid; unpack into the tuple.
    compress_dirs = (save_dir, *custom_dirs)
    for dir_ in compress_dirs:
        if not os.path.isdir(dir_):
            os.makedirs(dir_)
def get_raw_images(raw_images_dir=settings.USER_INPUT_PATH):
    """Gets images path from the user directory.

    Walks the raw-images tree and returns the paths of all supported
    (jpg/jpeg/png) images.  Exits the process when none are found.

    Arg
        raw_images_dir (str): raw directory path
    """
    print('\n[*] Looking for images...\n')
    raw_images = []
    # Walk the tree
    for root, directories, files in os.walk(raw_images_dir):
        for filename in files:
            if filename.startswith('.'):
                continue  # skip hidden files such as .DS_Store
            # Compare extensions case-insensitively so "IMG.JPG" is found.
            file_type = filename.split('.')[-1].lower()
            if file_type in SUPPORTED_FORMATS:
                raw_images.append(os.path.join(root, filename))
    if not raw_images:
        # Nothing to do without input; previously this raised an OSError
        # only to catch it on the next line -- just report and exit.
        dir_name = os.path.basename(raw_images_dir)
        print(f'[!] Please add images to “{dir_name}” and try again...\n')
        sys.exit()
    return raw_images
def change_dir(abs_image_path,
               raw_images_dir=settings.USER_INPUT_PATH,
               save_dir=settings.USER_OUTPUT_PATH):
    """Changes the directory to the save location.

    Args
        abs_image_path (str): absolute image path
        raw_images_dir (str): raw directory path
        save_dir (str): save directory path
    """
    source_dir = os.path.dirname(abs_image_path)
    if source_dir == raw_images_dir:
        # Image sits directly in the raw dir: use the default save dir.
        target = save_dir
    else:
        # Image came from a custom sub-directory: mirror it under save_dir.
        target = os.path.join(save_dir, os.path.basename(source_dir))
    os.chdir(target)
def compress_and_save(abs_image_path,
                      metadata=settings.METADATA):
    """Compresses and saves result image.

    Skips files whose optimized copy already exists in the cwd.

    Args
        abs_image_path (str): absolute image path
        metadata (bool): when True, preserve copyright/creation metadata
    """
    # Get image info
    only_image_path, image_info = os.path.split(abs_image_path)
    # BUGFIX: rsplit keeps dots inside the name intact ("my.photo.jpg"
    # crashed the previous plain split('.') with a ValueError).
    image_name, image_type = image_info.rsplit('.', 1)
    if metadata:
        out_name = f'{image_name}_optimized_copyright.{image_type}'
    else:
        out_name = f'{image_name}_optimized.{image_type}'
    if os.path.isfile(out_name):
        return  # already compressed on a previous run
    print(f'[*] Compressing {image_name}')
    source = tinify.from_file(abs_image_path)
    if metadata:
        # Carry copyright/creation metadata over to the optimized copy.
        source = source.preserve('copyright', 'creation')
    print(f'[*] Saving {out_name}\n')
    source.to_file(out_name)
def delete_after_compress(raw_images_dir=settings.USER_INPUT_PATH):
    """Deletes all uncompressed images.

    Creates an empty directory if the main directory is deleted

    Arg
        raw_images_dir (str): raw directory path
    """
    # Drop the whole raw tree, then recreate an empty root for next time.
    shutil.rmtree(raw_images_dir, ignore_errors=True)
    os.makedirs(raw_images_dir, exist_ok=True)
def main():
    """Entry point: validate the Tinify key, then compress every raw image."""
    try:
        # Prepare tinify
        tinify.key = settings.API_KEY
        tinify.validate()  # fails fast on a bad key / exhausted quota
        # Main logic
        create_dirs()
        raw_image_pull = get_raw_images()
        for image in raw_image_pull:
            change_dir(image)  # cwd determines where the copy is saved
            compress_and_save(image)
        print('[!] All optimized images have been saved')
        if settings.DELETE_RAW_AFTER_COMPRESS:
            delete_after_compress()
            print('\n[×] All the uncompressed images have been removed [×]\n')
    # Most specific Tinify failures first; generic fallback last.
    except tinify.AccountError:
        print('[AccountError]: Please verify your Tinify API key and account limit.')
    except tinify.ClientError:
        print('[ClientError]: Please check your source image.')
    except tinify.ServerError:
        print('[ServerError]: Temporary issue with the Tinify API.')
    except tinify.ConnectionError:
        print('[ConnectionError]: A network connection error occurred.')
    except Exception as e:
        print('[UnknownError]: Something went wrong. Please try again...\n', e)
if __name__ == "__main__":
main() | image_optimizer.py | import os
import shutil
import sys
import tinify
import settings
SUPPORTED_FORMATS = ('jpg', 'jpeg', 'png')
def create_dirs(raw_images_dir=settings.USER_INPUT_PATH,
save_dir=settings.USER_OUTPUT_PATH):
"""Creates the necessary directories if they do not exist.
Args
raw_images_dir (str): raw directory path
save_dir (str): save directory path
"""
# Checking raw-images directory
if not os.path.isdir(raw_images_dir):
os.makedirs(raw_images_dir)
# Collect user directories in raw-images dir
custom_dirs = []
for root, directories, files in os.walk(raw_images_dir):
for directory in directories:
custom_path = os.path.join(save_dir, directory)
custom_dirs.append(custom_path)
# Creation of all necessary dirs in the dir with compressed images
compress_dirs = (save_dir, (*custom_dirs))
for dir_ in compress_dirs:
if not os.path.isdir(dir_):
os.makedirs(dir_)
def get_raw_images(raw_images_dir=settings.USER_INPUT_PATH):
"""Gets images path from the user directory.
If supported images are found, return a list :raw_images:
Else raises an exception
Arg
raw_images_dir (str): raw directory path
"""
print('\n[*] Looking for images...\n')
raw_images = []
# Walk the tree
for root, directories, files in os.walk(raw_images_dir):
for filename in files:
if not filename.startswith('.'):
file_type = filename.split('.')[-1]
if file_type in SUPPORTED_FORMATS:
filepath = os.path.join(root, filename)
raw_images.append(filepath)
# If no images found → raise exception
if not raw_images:
try:
raise OSError('No images found')
except OSError:
dir_name = os.path.basename(raw_images_dir)
print(f'[!] Please add images to “{dir_name}” and try again...\n')
sys.exit()
return raw_images
def change_dir(abs_image_path,
raw_images_dir=settings.USER_INPUT_PATH,
save_dir=settings.USER_OUTPUT_PATH):
"""Changes the directory to the save location.
Args
abs_image_path (str): absolute image path
raw_images_dir (str): raw directory path
save_dir (str): save directory path
"""
# If the original image is not saved in the custom direcory,
# change the directory to the default save-directory
if os.path.dirname(abs_image_path) == raw_images_dir:
os.chdir(save_dir)
else: # Else change the directory to a custom
custom_dir_path = os.path.dirname(abs_image_path)
custom_dir_name = os.path.basename(custom_dir_path)
compressed_custom_dir_path = os.path.join(save_dir, custom_dir_name)
os.chdir(compressed_custom_dir_path)
def compress_and_save(abs_image_path,
metadata=settings.METADATA):
"""Compresses and saves result image.
Args
abs_image_path (str): absolute image path
metadata (bool): user metadata flag
"""
# Get image info
only_image_path, image_info = os.path.split(abs_image_path)
image_name, image_type = image_info.split('.')
if metadata: # Transfer the metadata (if this op. selected in the settings)
meta_filename = f'{image_name}_optimized_copyright.{image_type}'
if not os.path.isfile(meta_filename):
print(f'[*] Compressing {image_name}')
source = tinify.from_file(abs_image_path)
copyrighted = source.preserve('copyright', 'creation')
print(f'[*] Saving {meta_filename}\n')
copyrighted.to_file(meta_filename)
else: # Just save image without metadata
optimized_filename = f'{image_name}_optimized.{image_type}'
if not os.path.isfile(optimized_filename):
print(f'[*] Compressing {image_name}')
source = tinify.from_file(abs_image_path)
print(f'[*] Saving {optimized_filename}\n')
source.to_file(optimized_filename)
def delete_after_compress(raw_images_dir=settings.USER_INPUT_PATH):
"""Deletes all uncompressed images.
Creates an empty directory if the main directory is deleted
Arg
raw_images_dir (str): raw directory path
"""
shutil.rmtree(raw_images_dir, ignore_errors=True)
if not os.path.isdir(raw_images_dir):
os.makedirs(raw_images_dir)
def main():
try:
# Prepare tinify
tinify.key = settings.API_KEY
tinify.validate()
# Main logic
create_dirs()
raw_image_pull = get_raw_images()
for image in raw_image_pull:
change_dir(image)
compress_and_save(image)
print('[!] All optimized images have been saved')
if settings.DELETE_RAW_AFTER_COMPRESS:
delete_after_compress()
print('\n[×] All the uncompressed images have been removed [×]\n')
except tinify.AccountError:
print('[AccountError]: Please verify your Tinify API key and account limit.')
except tinify.ClientError:
print('[ClientError]: Please check your source image.')
except tinify.ServerError:
print('[ServerError]: Temporary issue with the Tinify API.')
except tinify.ConnectionError:
print('[ConnectionError]: A network connection error occurred.')
except Exception as e:
print('[UnknownError]: Something went wrong. Please try again...\n', e)
if __name__ == "__main__":
main() | 0.375936 | 0.177829 |
from libqtile.config import Group, Key
from libqtile.lazy import lazy
from variables.commands import Commands, mod
# A list of available commands that can be bound to keys can be found
# at https://docs.qtile.org/en/latest/manual/config/lazy.html
# Qtile keyboard shortcuts
keys = [
# Qtile
Key([mod, 'control'], 'r', lazy.reload_config(), 'Reload config Qtile'),
Key([mod, 'control'], 'q', lazy.shutdown(), 'Quite Qtile'),
# Window
Key([mod], 'q',lazy.window.kill(), 'Close window'),
Key([mod], 'space',lazy.window.toggle_fullscreen(), 'Fullscreen window'),
Key([mod], 'f', lazy.window.toggle_floating(), 'Floating window'),
Key([mod], 'Tab', lazy.next_layout(), 'Switch window layout'),
Key([mod, 'shift'], 'Tab', lazy.layout.toggle_split(), 'Split window'),
Key([mod, 'control'], 'Tab', lazy.layout.normalize(), 'Normalize window'),
# Volume
Key([mod], 'v', lazy.spawn(Commands.volumeUp), 'Increase Volume'),
Key([mod, 'shift'], 'v', lazy.spawn(Commands.volumeDown),'Decrease Volume'),
Key([mod, 'control'], 'v', lazy.spawn(Commands.volumeMute), 'Mute Volume'),
# Mic
Key([mod], 'm', lazy.spawn(Commands.micUp), 'Increase mic sensitivity'),
Key([mod, 'shift'], 'm', lazy.spawn(Commands.micDown),
'Decrease mic sensitivity'),
Key([mod, 'control'], 'm', lazy.spawn(Commands.micMute), 'Mute mic'),
# Brightness
Key([mod], 'b', lazy.spawn(Commands.brightnessUp), 'Increase brightness'),
Key([mod, 'shift'], 'b', lazy.spawn(Commands.brightnessDown),
'Decrease brightness'),
# Screenshot
Key([mod], 's', lazy.spawn(Commands.screenshort), "Screenshot of area"),
Key([mod, 'shift'], 's', lazy.spawn(Commands.screenshortFull),
"Screenshot fullscreen"),
# Menu
Key([mod], 'Return', lazy.spawn(Commands.dmenu), 'Menu'),
Key([mod], 'e', lazy.spawn(Commands.emoji), 'Emoji menu'),
Key([mod], 'c', lazy.spawn(Commands.clipboard), desc='Clipboard menu'),
# Application
Key([mod], 't', lazy.spawn(Commands.terminal), desc='Terminal'),
Key([mod], 'i', lazy.spawn(Commands.browser), 'Browser'),
]
for key in ['up', 'down', 'left', 'right']:
keys.extend([
Key([mod], key.capitalize(),
getattr(lazy.layout, key)(), f'Move window focus {key}'),
Key([mod, 'shift'], key.capitalize(),
getattr(lazy.layout, 'shuffle_' + key)(), f'Move window {key}'),
Key([mod, 'control'], key.capitalize(),
getattr(lazy.layout, 'grow_' + key)(), f'Grow window size {key}'),
])
# Keyboard shortcut for Workspaces
groups = [Group(i) for i in '123456789']
for i in groups:
keys.extend([
Key([mod], i.name, lazy.group[i.name].toscreen(),
f'Go to group {i.name}'),
Key([mod, 'shift'], i.name,
lazy.window.togroup(i.name, switch_group=True),
f'Move window & Switch focus to group {i.name}'),
Key([mod, 'control'], i.name, lazy.window.togroup(i.name),
f'Move window to group {i.name}'),
]) | qtile/modules/shortcuts.py | from libqtile.config import Group, Key
from libqtile.lazy import lazy
from variables.commands import Commands, mod
# A list of available commands that can be bound to keys can be found
# at https://docs.qtile.org/en/latest/manual/config/lazy.html
# Qtile keyboard shortcuts
keys = [
# Qtile
Key([mod, 'control'], 'r', lazy.reload_config(), 'Reload config Qtile'),
Key([mod, 'control'], 'q', lazy.shutdown(), 'Quite Qtile'),
# Window
Key([mod], 'q',lazy.window.kill(), 'Close window'),
Key([mod], 'space',lazy.window.toggle_fullscreen(), 'Fullscreen window'),
Key([mod], 'f', lazy.window.toggle_floating(), 'Floating window'),
Key([mod], 'Tab', lazy.next_layout(), 'Switch window layout'),
Key([mod, 'shift'], 'Tab', lazy.layout.toggle_split(), 'Split window'),
Key([mod, 'control'], 'Tab', lazy.layout.normalize(), 'Normalize window'),
# Volume
Key([mod], 'v', lazy.spawn(Commands.volumeUp), 'Increase Volume'),
Key([mod, 'shift'], 'v', lazy.spawn(Commands.volumeDown),'Decrease Volume'),
Key([mod, 'control'], 'v', lazy.spawn(Commands.volumeMute), 'Mute Volume'),
# Mic
Key([mod], 'm', lazy.spawn(Commands.micUp), 'Increase mic sensitivity'),
Key([mod, 'shift'], 'm', lazy.spawn(Commands.micDown),
'Decrease mic sensitivity'),
Key([mod, 'control'], 'm', lazy.spawn(Commands.micMute), 'Mute mic'),
# Brightness
Key([mod], 'b', lazy.spawn(Commands.brightnessUp), 'Increase brightness'),
Key([mod, 'shift'], 'b', lazy.spawn(Commands.brightnessDown),
'Decrease brightness'),
# Screenshot
Key([mod], 's', lazy.spawn(Commands.screenshort), "Screenshot of area"),
Key([mod, 'shift'], 's', lazy.spawn(Commands.screenshortFull),
"Screenshot fullscreen"),
# Menu
Key([mod], 'Return', lazy.spawn(Commands.dmenu), 'Menu'),
Key([mod], 'e', lazy.spawn(Commands.emoji), 'Emoji menu'),
Key([mod], 'c', lazy.spawn(Commands.clipboard), desc='Clipboard menu'),
# Application
Key([mod], 't', lazy.spawn(Commands.terminal), desc='Terminal'),
Key([mod], 'i', lazy.spawn(Commands.browser), 'Browser'),
]
for key in ['up', 'down', 'left', 'right']:
keys.extend([
Key([mod], key.capitalize(),
getattr(lazy.layout, key)(), f'Move window focus {key}'),
Key([mod, 'shift'], key.capitalize(),
getattr(lazy.layout, 'shuffle_' + key)(), f'Move window {key}'),
Key([mod, 'control'], key.capitalize(),
getattr(lazy.layout, 'grow_' + key)(), f'Grow window size {key}'),
])
# Keyboard shortcut for Workspaces
groups = [Group(i) for i in '123456789']
for i in groups:
keys.extend([
Key([mod], i.name, lazy.group[i.name].toscreen(),
f'Go to group {i.name}'),
Key([mod, 'shift'], i.name,
lazy.window.togroup(i.name, switch_group=True),
f'Move window & Switch focus to group {i.name}'),
Key([mod, 'control'], i.name, lazy.window.togroup(i.name),
f'Move window to group {i.name}'),
]) | 0.556641 | 0.184694 |
from __future__ import unicode_literals
from django.db import models, migrations
import msgvis.apps.enhance.fields
class Migration(migrations.Migration):
    """Initial schema for the ``enhance`` app.

    Auto-generated migration; code left byte-identical, comments only.
    Creates Dictionary, Topic, TopicModel, Word and the Message<->Word /
    Message<->Topic association tables, then wires up foreign keys and
    composite indexes.
    """
    dependencies = [
        ('corpus', '0014_auto_20150221_0240'),
    ]
    operations = [
        # --- model creation ---
        migrations.CreateModel(
            name='Dictionary',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('dataset', models.CharField(max_length=100)),
                ('settings', models.TextField()),
                ('time', models.DateTimeField(auto_now_add=True)),
                ('num_docs', msgvis.apps.enhance.fields.PositiveBigIntegerField(default=0)),
                ('num_pos', msgvis.apps.enhance.fields.PositiveBigIntegerField(default=0)),
                ('num_nnz', msgvis.apps.enhance.fields.PositiveBigIntegerField(default=0)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='MessageTopic',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('probability', models.FloatField()),
                ('source', models.ForeignKey(related_name='topics', to='corpus.Message')),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='MessageWord',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('word_index', models.IntegerField()),
                ('count', models.FloatField()),
                ('tfidf', models.FloatField()),
                ('dictionary', models.ForeignKey(to='enhance.Dictionary', db_index=False)),
                ('source', models.ForeignKey(related_name='words', to='corpus.Message')),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Topic',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('description', models.CharField(max_length=200)),
                ('index', models.IntegerField()),
                ('alpha', models.FloatField()),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='TopicModel',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100)),
                ('description', models.CharField(max_length=200)),
                ('time', models.DateTimeField(auto_now_add=True)),
                ('perplexity', models.FloatField(default=0)),
                ('dictionary', models.ForeignKey(to='enhance.Dictionary')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='TopicWord',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('word_index', models.IntegerField()),
                ('probability', models.FloatField()),
                ('topic', models.ForeignKey(related_name='words', to='enhance.Topic')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Word',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('index', models.IntegerField()),
                ('text', models.CharField(max_length=100)),
                ('document_frequency', models.IntegerField()),
                ('dictionary', models.ForeignKey(related_name='words', to='enhance.Dictionary')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # --- foreign keys added after all models exist ---
        migrations.AddField(
            model_name='topicword',
            name='word',
            field=models.ForeignKey(to='enhance.Word'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='topic',
            name='model',
            field=models.ForeignKey(related_name='topics', to='enhance.TopicModel'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='messageword',
            name='word',
            field=models.ForeignKey(to='enhance.Word'),
            preserve_default=True,
        ),
        migrations.AlterIndexTogether(
            name='messageword',
            index_together=set([('dictionary', 'source')]),
        ),
        migrations.AddField(
            model_name='messagetopic',
            name='topic',
            field=models.ForeignKey(to='enhance.Topic'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='messagetopic',
            name='topic_model',
            field=models.ForeignKey(to='enhance.TopicModel', db_index=False),
            preserve_default=True,
        ),
        migrations.AlterIndexTogether(
            name='messagetopic',
            index_together=set([('topic_model', 'source')]),
        ),
] | msgvis/apps/enhance/migrations/0001_initial.py | from __future__ import unicode_literals
from django.db import models, migrations
import msgvis.apps.enhance.fields
class Migration(migrations.Migration):
dependencies = [
('corpus', '0014_auto_20150221_0240'),
]
operations = [
migrations.CreateModel(
name='Dictionary',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100)),
('dataset', models.CharField(max_length=100)),
('settings', models.TextField()),
('time', models.DateTimeField(auto_now_add=True)),
('num_docs', msgvis.apps.enhance.fields.PositiveBigIntegerField(default=0)),
('num_pos', msgvis.apps.enhance.fields.PositiveBigIntegerField(default=0)),
('num_nnz', msgvis.apps.enhance.fields.PositiveBigIntegerField(default=0)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='MessageTopic',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('probability', models.FloatField()),
('source', models.ForeignKey(related_name='topics', to='corpus.Message')),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='MessageWord',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('word_index', models.IntegerField()),
('count', models.FloatField()),
('tfidf', models.FloatField()),
('dictionary', models.ForeignKey(to='enhance.Dictionary', db_index=False)),
('source', models.ForeignKey(related_name='words', to='corpus.Message')),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Topic',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100)),
('description', models.CharField(max_length=200)),
('index', models.IntegerField()),
('alpha', models.FloatField()),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='TopicModel',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100)),
('description', models.CharField(max_length=200)),
('time', models.DateTimeField(auto_now_add=True)),
('perplexity', models.FloatField(default=0)),
('dictionary', models.ForeignKey(to='enhance.Dictionary')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='TopicWord',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('word_index', models.IntegerField()),
('probability', models.FloatField()),
('topic', models.ForeignKey(related_name='words', to='enhance.Topic')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Word',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('index', models.IntegerField()),
('text', models.CharField(max_length=100)),
('document_frequency', models.IntegerField()),
('dictionary', models.ForeignKey(related_name='words', to='enhance.Dictionary')),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='topicword',
name='word',
field=models.ForeignKey(to='enhance.Word'),
preserve_default=True,
),
migrations.AddField(
model_name='topic',
name='model',
field=models.ForeignKey(related_name='topics', to='enhance.TopicModel'),
preserve_default=True,
),
migrations.AddField(
model_name='messageword',
name='word',
field=models.ForeignKey(to='enhance.Word'),
preserve_default=True,
),
migrations.AlterIndexTogether(
name='messageword',
index_together=set([('dictionary', 'source')]),
),
migrations.AddField(
model_name='messagetopic',
name='topic',
field=models.ForeignKey(to='enhance.Topic'),
preserve_default=True,
),
migrations.AddField(
model_name='messagetopic',
name='topic_model',
field=models.ForeignKey(to='enhance.TopicModel', db_index=False),
preserve_default=True,
),
migrations.AlterIndexTogether(
name='messagetopic',
index_together=set([('topic_model', 'source')]),
),
] | 0.638948 | 0.171859 |
import unittest
import numpy as np
class ExtendedTestCase(unittest.TestCase):
    # pylint: disable=invalid-name
    """
    ``unittest.TestCase`` augmented with NumPy-array and ranking asserts.
    """

    def assertAlmostEqualArrays(self, obtained_array, expected_array):
        """
        Assert that two NumPy arrays are element-wise almost equal and
        share the same data type.
        """
        np.testing.assert_allclose(obtained_array, expected_array)
        self.assertEqual(obtained_array.dtype, expected_array.dtype)

    def assertAlmostEqualRankings(self, obtained_ranking, expected_ranking):
        """
        Assert that two (alternative, score) rankings list the same
        alternatives in the same order with almost equal scores.
        """
        self.assertEqual(len(obtained_ranking), len(expected_ranking))
        for obtained, expected in zip(obtained_ranking, expected_ranking):
            self.assertEqual(obtained[0], expected[0])
            self.assertAlmostEqual(obtained[1], expected[1], places=6)
def get_labels01():
    """
    Return the labels with ID 01: alternatives ``a1`` .. ``a12``.
    """
    return [f"a{index}" for index in range(1, 13)]
def get_labels02():
    """
    Return the labels with ID 02: the six letters ``A`` .. ``F``.
    """
    return list("ABCDEF")
def get_labels03():
    """
    Return the labels with ID 03: the four letters ``A`` .. ``D``.
    """
    return list("ABCD")
def get_labels04():
    """
    Return the labels with ID 04.

    38 labels; presumably routing-protocol variant names (Epidemic,
    PRoPHET, SnW, ...) -- verify against the experiments consuming them.
    """
    return [
        "Epidemic",
        "Direct",
        "CnF.LTS",
        "CnF.DestEnc",
        "CnF.Enc",
        "CnF.PRoPHET",
        "CnR.LTS",
        "CnR.DestEnc",
        "CnR.Enc",
        "CnR.PRoPHET",
        "DF.LTS",
        "DF.DestEnc",
        "DF.Enc",
        "DF.PRoPHET",
        "COORD.LTS",
        "COORD.DestEnc",
        "COORD.Enc",
        "COORD.PRoPHET",
        "SnW.L2",
        "SnW.L4",
        "SnW.L8",
        "SnW.L16",
        "LSF-SnW.L2",
        "LSF-SnW.L4",
        "LSF-SnW.L8",
        "LSF-SnW.L16",
        "SnF.L2",
        "SnF.L4",
        "SnF.L8",
        "SnF.L16",
        "SimBetTS.L2",
        "SimBetTS.L4",
        "SimBetTS.L8",
        "SimBetTS.L16",
        "EBR.L2",
        "EBR.L4",
        "EBR.L8",
        "EBR.L16",
    ]
def get_labels05():
    """
    Return the labels with ID 05: the five letters ``A`` .. ``E``.
    """
    return list("ABCDE")
def get_matrix01():
    """
    Return the matrix with ID 01.

    A 7x3 fixture: the first two columns are non-decreasing from 0 to 1,
    the third is non-increasing from 1 to 0.
    """
    return [
        [0.0, 0.0, 1.0],
        [0.1, 0.2, 0.8],
        [0.2, 0.4, 0.6],
        [0.3, 0.7, 0.3],
        [0.6, 0.8, 0.2],
        [0.8, 0.9, 0.1],
        [1.0, 1.0, 0.0],
    ]
def get_matrix02():
    """
    Return the matrix with ID 02.

    A 12x3 fixture: the third column alternates 0/1 while the first two
    step through six value pairs.
    """
    return [
        [0.0, 0.0, 0.0],
        [0.0, 0.0, 1.0],
        [0.2, 0.5, 0.0],
        [0.2, 0.5, 1.0],
        [0.4, 1.0, 0.0],
        [0.4, 1.0, 1.0],
        [0.6, 1.0, 0.0],
        [0.6, 1.0, 1.0],
        [0.8, 0.5, 0.0],
        [0.8, 0.5, 1.0],
        [1.0, 0.0, 0.0],
        [1.0, 0.0, 1.0],
    ]
def get_matrix03():
    """
    Return the matrix with ID 03.

    A 5x2 fixture whose first column steps 0 -> 1 in quarters while the
    second column holds the complement.
    """
    # Multiples of 0.25 are exact in binary floating point, so the
    # computed rows reproduce the original literal table exactly.
    return [[0.25 * step, 1.0 - 0.25 * step] for step in range(5)]
def get_matrix04():
"""
Return the matrix with ID 04.
"""
return [
[ 2.0, 12.0, 7.0, 7.0], # noqa: E201
[ 4.0, 100.0, 7.0, 7.0], # noqa: E201
[10.0, 200.0, 7.0, 7.0], # noqa: E201
[ 0.0, 300.0, 7.0, 7.0], # noqa: E201
[ 6.0, 400.0, 7.0, 7.0], # noqa: E201
[ 1.0, 600.0, 7.0, 7.0], # noqa: E201
]
def get_matrix05():
"""
Return the matrix with ID 05.
"""
return [
[ 8.0, 8.0, -1.0, -1.0, 5.0, 5.0], # noqa: E201
[24.0, 24.0, -11.0, -11.0, 0.0, 0.0], # noqa: E201
[ 4.0, 4.0, -10.0, -10.0, 40.0, 40.0], # noqa: E201
[14.0, 14.0, -9.0, -9.0, 15.0, 15.0], # noqa: E201
[ 6.0, 6.0, -7.0, -7.0, -5.0, -5.0], # noqa: E201
[18.0, 18.0, -5.0, -5.0, -10.0, -10.0], # noqa: E201
]
def get_matrix06():
"""
Return the matrix with ID 06.
"""
return [
[0.5, 0.6, 0.3, 0.2, 0.9],
[0.5, 0.5, 0.5, 0.5, 0.5],
[0.5, 0.4, 0.7, 0.8, 0.1],
]
def get_matrix07():
"""
Return the matrix with ID 07.
"""
return [
[0.9, 30.0, 500.0, 4.0],
[0.1, 50.0, 5.0, 6.0],
[0.5, 80.0, 8.0, 6.0],
[0.8, 40.0, 100.0, 4.0],
[0.7, 60.0, 20.0, 5.0],
[0.6, 60.0, 10.0, 5.0],
]
def get_matrix08():
"""
Return the matrix with ID 08.
"""
return [
[4.0, 5.0, 10.0],
[3.0, 10.0, 6.0],
[3.0, 20.0, 2.0],
[2.0, 15.0, 5.0],
]
def get_matrix09():
"""
Return the matrix with ID 09.
"""
return [
[1.000000, 1.000000, 0.017276],
[0.046296, 0.022222, 1.000000],
[0.259295, 0.106985, 0.783554],
[0.260509, 0.107106, 0.801962],
[0.090419, 0.044763, 0.245226],
[0.563999, 0.239328, 0.288358],
[0.320434, 0.147798, 0.738850],
[0.314969, 0.144773, 0.751384],
[0.714533, 0.364252, 0.092688],
[0.972336, 0.706954, 0.091856],
[0.283518, 0.127236, 0.805858],
[0.296781, 0.132676, 0.797796],
[0.265469, 0.122640, 0.202089],
[0.839930, 0.461981, 0.304980],
[0.282103, 0.126395, 0.808264],
[0.296100, 0.132096, 0.799922],
[0.212761, 0.104337, 0.229227],
[0.798002, 0.429797, 0.335956],
[0.068258, 0.035742, 0.519465],
[0.102412, 0.055489, 0.281905],
[0.155229, 0.085050, 0.163012],
[0.238498, 0.128995, 0.103688],
[0.177178, 0.075565, 0.854643],
[0.257650, 0.112055, 0.811516],
[0.294934, 0.131563, 0.781283],
[0.310552, 0.140593, 0.762520],
[0.368115, 0.159646, 0.449073],
[0.498578, 0.228317, 0.296180],
[0.635688, 0.310778, 0.210340],
[0.759518, 0.402583, 0.149893],
[0.499916, 0.188975, 0.302964],
[0.717516, 0.306092, 0.249340],
[0.790702, 0.359737, 0.221402],
[0.848093, 0.415040, 0.193533],
[0.068414, 0.035866, 0.519542],
[0.102469, 0.055554, 0.282188],
[0.155261, 0.085064, 0.162956],
[0.238748, 0.129114, 0.103684],
]
def get_matrix10():
"""
Return the matrix with ID 10.
"""
return [
[0.00, 1.00],
[0.25, 0.75],
[0.50, 0.50],
[0.75],
[1.00, 0.00],
]
def get_matrix11():
"""
Return the matrix with ID 11.
"""
return [
[0.0, 0.0, 1.0],
[0.1, 0.2, 0.8],
[0.2, 0.4, 0.6],
[0.3, 0.7, 0.3],
[0.6, 0.8, 0.2],
[0.8, 0.9],
[1.0, 1.0, 0.0],
]
def get_matrix12():
"""
Return the matrix with ID 12.
"""
return [
[0.0, 0.0, 1.1],
[0.1, 0.2, 0.8],
[0.2, 0.4, 0.6],
[0.3, 0.7, 0.3],
[0.6, 0.8, 0.2],
[0.8, 0.9, 0.1],
[1.0, 1.0, 0.0],
]
def get_matrix13():
"""
Return the matrix with ID 13.
"""
return [
[ 0.0, 0.0, 1.0], # noqa: E201
[-0.1, 0.2, 0.8], # noqa: E201
[ 0.2, 0.4, 0.6], # noqa: E201
[ 0.3, 0.7, 0.3], # noqa: E201
[ 0.6, 0.8, 0.2], # noqa: E201
[ 0.8, 0.9, 0.1], # noqa: E201
[ 1.0, 1.0, 0.0], # noqa: E201
]
def get_matrix14():
"""
Return the matrix with ID 14.
"""
return [
[0.2, 1.00, 1.0, 1.0],
[0.4, 0.12, 1.0, 1.0],
[1.0, 0.06, 1.0, 1.0],
[0.0, 0.04, 1.0, 1.0],
[0.6, 0.03, 1.0, 1.0],
[0.1, 0.02, 1.0, 1.0],
]
def get_matrix15():
"""
Return the matrix with ID 15.
"""
return [
[ 2.0, 12.0, 7.0, 7.0], # noqa: E201
[ 4.0, 100.0, 7.0, 7.0], # noqa: E201
[10.0, 200.0, 7.0, 7.0], # noqa: E201
[ 0.0, 300.0, 7.0, 7.0], # noqa: E201
[ 6.0, 400.0, 7.0], # noqa: E201
[ 1.0, 600.0, 7.0, 7.0], # noqa: E201
]
def get_matrix16():
"""
Return the matrix with ID 16.
"""
return [
[ 2.0, 12.0, 7.0, 7.0], # noqa: E201
[-4.0, 100.0, 7.0, 7.0], # noqa: E201
[10.0, 200.0, 7.0, 7.0], # noqa: E201
[ 0.0, 300.0, 7.0, 7.0], # noqa: E201
[ 6.0, 400.0, 7.0, 7.0], # noqa: E201
[ 1.0, 600.0, 7.0, 7.0], # noqa: E201
]
def get_matrix17():
"""
Return the matrix with ID 17.
"""
return [
[ 2.0, 12.0, 0.0, 7.0], # noqa: E201
[ 4.0, 100.0, 0.0, 7.0], # noqa: E201
[10.0, 200.0, 0.0, 7.0], # noqa: E201
[ 0.0, 300.0, 0.0, 7.0], # noqa: E201
[ 6.0, 400.0, 0.0, 7.0], # noqa: E201
[ 1.0, 600.0, 0.0, 7.0], # noqa: E201
]
def get_matrix18():
"""
Return the matrix with ID 18.
"""
return [
[ 2.0, 12.0, 7.0, 0.0], # noqa: E201
[ 4.0, 100.0, 7.0, 0.0], # noqa: E201
[10.0, 200.0, 7.0, 0.0], # noqa: E201
[ 0.0, 300.0, 7.0, 0.0], # noqa: E201
[ 6.0, 400.0, 7.0, 0.0], # noqa: E201
[ 1.0, 600.0, 7.0, 0.0], # noqa: E201
]
def get_matrix19():
"""
Return the matrix with ID 19.
"""
return [
[0.2, 0.8, 1.0, 0.0, 0.3, 0.7],
[1.0, 0.0, 0.0, 1.0, 0.2, 0.8],
[0.0, 1.0, 0.1, 0.9, 1.0, 0.0],
[0.5, 0.5, 0.2, 0.8, 0.5, 0.5],
[0.1, 0.9, 0.4, 0.6, 0.1, 0.9],
[0.7, 0.3, 0.6, 0.4, 0.0, 1.0],
]
def get_matrix20():
"""
Return the matrix with ID 20.
"""
return [
[ 8.0, 8.0, -1.0, -1.0, 5.0, 5.0], # noqa: E201
[24.0, 24.0, -11.0, -11.0, 0.0, 0.0], # noqa: E201
[ 4.0, 4.0, -10.0, -10.0, 40.0, 40.0], # noqa: E201
[14.0, 14.0, -9.0, -9.0, 15.0, 15.0], # noqa: E201
[ 6.0, 6.0, -7.0, -7.0, -5.0], # noqa: E201
[18.0, 18.0, -5.0, -5.0, -10.0, -10.0], # noqa: E201
]
def get_matrix21():
"""
Return the matrix with ID 21.
"""
return [
[7.0, 8.0, -1.0, -1.0, 5.0, 5.0],
[7.0, 24.0, -11.0, -11.0, 0.0, 0.0],
[7.0, 4.0, -10.0, -10.0, 40.0, 40.0],
[7.0, 14.0, -9.0, -9.0, 15.0, 15.0],
[7.0, 6.0, -7.0, -7.0, -5.0, -5.0],
[7.0, 18.0, -5.0, -5.0, -10.0, -10.0],
]
def get_matrix22():
"""
Return the matrix with ID 22.
"""
return [
[-7.0, 8.0, -1.0, -1.0, 5.0, 5.0],
[-7.0, 24.0, -11.0, -11.0, 0.0, 0.0],
[-7.0, 4.0, -10.0, -10.0, 40.0, 40.0],
[-7.0, 14.0, -9.0, -9.0, 15.0, 15.0],
[-7.0, 6.0, -7.0, -7.0, -5.0, -5.0],
[-7.0, 18.0, -5.0, -5.0, -10.0, -10.0],
]
def get_matrix23():
"""
Return the matrix with ID 23.
"""
return [
[0.0, 8.0, -1.0, -1.0, 5.0, 5.0],
[0.0, 24.0, -11.0, -11.0, 0.0, 0.0],
[0.0, 4.0, -10.0, -10.0, 40.0, 40.0],
[0.0, 14.0, -9.0, -9.0, 15.0, 15.0],
[0.0, 6.0, -7.0, -7.0, -5.0, -5.0],
[0.0, 18.0, -5.0, -5.0, -10.0, -10.0],
]
def get_matrix24():
"""
Return the matrix with ID 24.
"""
return [
[4.0, 4.0, 7.0, 7.0],
[3.0, 3.0, 7.0, 7.0],
[2.0, 2.0, 7.0, 7.0],
[1.0, 1.0, 7.0, 7.0],
[0.0, 0.0, 7.0, 7.0],
]
def get_matrix25():
"""
Return the matrix with ID 25.
"""
return [
[0.4, 0.4, 0.2, 0.2],
[0.3, 0.3, 0.2, 0.2],
[0.2, 0.2, 0.2, 0.2],
[0.1, 0.1, 0.2, 0.2],
[0.0, 0.0, 0.2, 0.2],
]
def get_matrix26():
"""
Return the matrix with ID 26.
"""
return [
[4.0, 4.0, 7.0, 7.0],
[3.0, 3.0, 7.0, 7.0],
[2.0, 2.0, 7.0, 7.0],
[1.0, 1.0, 7.0],
[0.0, 0.0, 7.0, 7.0],
]
def get_matrix27():
"""
Return the matrix with ID 27.
"""
return [
[ 4.0, 4.0, 7.0, 7.0], # noqa: E201
[ 3.0, 3.0, 7.0, 7.0], # noqa: E201
[-2.0, 2.0, 7.0, 7.0], # noqa: E201
[ 1.0, 1.0, 7.0, 7.0], # noqa: E201
[ 0.0, 0.0, 7.0, 7.0], # noqa: E201
]
def get_matrix28():
"""
Return the matrix with ID 28.
"""
return [
[4.0, 4.0, 7.0, 0.0],
[3.0, 3.0, 7.0, 0.0],
[2.0, 2.0, 7.0, 0.0],
[1.0, 1.0, 7.0, 0.0],
[0.0, 0.0, 7.0, 0.0],
]
def get_matrix29():
"""
Return the matrix with ID 29.
"""
return [
[0.0, 0.0, 5.0, 5.0],
[6.0, 6.0, 5.0, 5.0],
[0.0, 0.0, 5.0, 5.0],
[8.0, 8.0, 5.0, 5.0],
]
def get_matrix30():
"""
Return the matrix with ID 30.
"""
return [
[0.0, 0.0, 0.5, 0.5],
[0.6, 0.6, 0.5, 0.5],
[0.0, 0.0, 0.5, 0.5],
[0.8, 0.8, 0.5, 0.5],
]
def get_matrix31():
"""
Return the matrix with ID 31.
"""
return [
[0.0, 0.0, 5.0, 5.0],
[6.0, 6.0, 5.0, 5.0],
[0.0, 0.0, 5.0],
[8.0, 8.0, 5.0, 5.0],
]
def get_matrix32():
"""
Return the matrix with ID 32.
"""
return [
[0.0, 0.0, 5.0, 5.0],
[6.0, -6.0, 5.0, 5.0],
[0.0, 0.0, 5.0, 5.0],
[8.0, 8.0, 5.0, 5.0],
]
def get_matrix33():
"""
Return the matrix with ID 33.
"""
return [
[0.0, 0.0, 5.0, 0.0],
[6.0, 6.0, 5.0, 0.0],
[0.0, 0.0, 5.0, 0.0],
[8.0, 8.0, 5.0, 0.0],
]
def get_matrix34():
"""
Return the matrix with ID 34.
"""
return [
[ 1.0000000, 0.9314381, -0.9314381], # noqa: E201
[ 0.9314381, 1.0000000, -1.0000000], # noqa: E201
[-0.9314381, -1.0000000, 1.0000000], # noqa: E201
]
def get_matrix35():
"""
Return the matrix with ID 35.
"""
return [
[1.0, 0.0, 0.0],
[0.0, 1.0, 0.0],
[0.0, 0.0, 1.0],
]
def get_matrix36():
"""
Return the matrix with ID 36.
"""
return [
[1.0000000, 0.9314381, 0.9314381],
[0.9314381, 1.0000000, 1.0000000],
[0.9314381, 1.0000000, 1.0000000],
]
def get_matrix37():
"""
Return the matrix with ID 37.
"""
return [
[1.0000000, 0.9369189, 0.9369189],
[0.9369189, 1.0000000, 1.0000000],
[0.9369189, 1.0000000, 1.0000000],
]
def get_matrix38():
"""
Return the matrix with ID 38.
"""
return [
[1.0000000, 0.5186014, 0.0000000],
[0.5186014, 1.0000000, 0.0000000],
[0.0000000, 0.0000000, 1.0000000],
]
def get_matrix39():
"""
Return the matrix with ID 39.
"""
return [
[0.0, 0.0],
[0.0, 1.0],
]
def get_matrix40():
"""
Return the matrix with ID 40.
"""
return [
[1.0000000, 0.0000000],
[0.0000000, 1.0000000],
]
def get_matrix41():
"""
Return the matrix with ID 41.
"""
return [
[0.000, 0.000, 0.333],
[0.033, 0.050, 0.267],
[0.067, 0.100, 0.200],
[0.100, 0.175, 0.100],
[0.200, 0.200, 0.067],
[0.267, 0.225, 0.033],
[0.333, 0.250, 0.000],
]
def get_matrix42():
"""
Return the matrix with ID 42.
"""
return [
[0.00000000, 0.00000000, 0.00000000],
[0.00000000, 0.00000000, 0.16666667],
[0.03333333, 0.08333333, 0.00000000],
[0.03333333, 0.08333333, 0.16666667],
[0.06666667, 0.16666667, 0.00000000],
[0.06666667, 0.16666667, 0.16666667],
[0.10000000, 0.16666667, 0.00000000],
[0.10000000, 0.16666667, 0.16666667],
[0.13333333, 0.08333333, 0.00000000],
[0.13333333, 0.08333333, 0.16666667],
[0.16666667, 0.00000000, 0.00000000],
[0.16666667, 0.00000000, 0.16666667],
]
def get_matrix43():
"""
Return the matrix with ID 43.
"""
return [
[0.000, 0.000, 0.333],
[0.033, 0.050, 0.267],
[0.067, 0.100, 0.200],
[0.100, 0.175, 0.100],
[0.200, 0.200, 0.067],
[0.267, 0.225],
[0.333, 0.250, 0.000],
]
def get_matrix44():
"""
Return the matrix with ID 44.
"""
return [
[0.000, 0.000, 1.333],
[0.033, 0.050, 0.267],
[0.067, 0.100, 0.200],
[0.100, 0.175, 0.100],
[0.200, 0.200, 0.067],
[0.267, 0.225, 0.033],
[0.333, 0.250, 0.000],
]
def get_matrix45():
"""
Return the matrix with ID 45.
"""
return [
[ 0.000, 0.000, 0.333], # noqa: E201
[-0.033, 0.050, 0.267], # noqa: E201
[ 0.067, 0.100, 0.200], # noqa: E201
[ 0.100, 0.175, 0.100], # noqa: E201
[ 0.200, 0.200, 0.067], # noqa: E201
[ 0.267, 0.225, 0.033], # noqa: E201
[ 0.333, 0.250, 0.000], # noqa: E201
]
def get_matrix46():
"""
Return the matrix with ID 46.
"""
return [
[0.000, 0.0, 0.333],
[0.033, 0.2, 0.267],
[0.067, 0.4, 0.200],
[0.100, 0.7, 0.100],
[0.200, 0.8, 0.067],
[0.267, 0.9, 0.033],
[0.333, 1.0, 0.000],
]
def get_matrix47():
"""
Return the matrix with ID 47.
"""
return [
[0.00, 1.01],
[0.25, 0.75],
[0.50, 0.50],
[0.75, 0.25],
[1.00, 0.00],
]
def get_matrix48():
"""
Return the matrix with ID 48.
"""
return [
[ 0.00, 1.00], # noqa: E201
[-0.25, 0.75], # noqa: E201
[ 0.50, 0.50], # noqa: E201
[ 0.75, 0.25], # noqa: E201
[ 1.00, 0.00], # noqa: E201
]
def get_ranking01():
"""
Return the ranking with ID 01.
"""
return [
("a1", 0.500000),
("a2", 0.500000),
("a3", 0.500000),
("a4", 0.500000),
("a5", 0.500000),
]
def get_ranking02():
"""
Return the ranking with ID 02.
"""
return [
("a5", 0.700000),
("a4", 0.600000),
("a3", 0.500000),
("a2", 0.400000),
("a1", 0.300000),
]
def get_ranking03():
"""
Return the ranking with ID 03.
"""
return [
("a1", 0.300000),
("a2", 0.400000),
("a3", 0.500000),
("a4", 0.600000),
("a5", 0.700000),
]
def get_ranking04():
"""
Return the ranking with ID 04.
"""
return [
("a2", 0.677778),
("a1", 0.669167),
("a3", 0.638889),
("a6", 0.625000),
("a5", 0.590278),
("a4", 0.588889),
]
def get_ranking05():
"""
Return the ranking with ID 05.
"""
return [
("a2", 0.653952),
("a3", 0.604472),
("a1", 0.601574),
("a6", 0.595749),
("a5", 0.539665),
("a4", 0.530537),
]
def get_ranking06():
"""
Return the ranking with ID 06.
"""
return [
("a2", 0.650527),
("a1", 0.612074),
("a3", 0.599994),
("a6", 0.594459),
("a5", 0.540496),
("a4", 0.537186),
]
def get_ranking07():
"""
Return the ranking with ID 07.
"""
return [
("a2", 0.644440),
("a1", 0.623018),
("a3", 0.593228),
("a6", 0.591963),
("a4", 0.543750),
("a5", 0.540097),
]
def get_ranking08():
"""
Return the ranking with ID 08.
"""
return [
("a6", 0.583347),
("a3", 0.574199),
("a5", 0.480220),
("a2", 0.469420),
("a4", 0.304194),
("a1", 0.192606),
]
def get_ranking09():
"""
Return the ranking with ID 09.
"""
return [
("a2", 0.669839),
("a5", 0.647361),
("a3", 0.645343),
("a6", 0.622660),
("a4", 0.587153),
("a1", 0.471261),
]
def get_ranking10():
"""
Return the ranking with ID 10.
"""
return [
("a2", 0.677366),
("a5", 0.675493),
("a3", 0.658395),
("a6", 0.652317),
("a4", 0.622630),
("a1", 0.456501),
]
def get_ranking11():
"""
Return the ranking with ID 11.
"""
return [
("a6", 0.983188),
("a3", 0.980454),
("a5", 0.968182),
("a2", 0.967595),
("a4", 0.808142),
("a1", 0.033316),
]
def get_ranking12():
"""
Return the ranking with ID 12.
"""
return [
("a6", 0.955577),
("a5", 0.954078),
("a3", 0.938579),
("a2", 0.909531),
("a4", 0.808416),
("a1", 0.096521),
]
def get_ranking13():
"""
Return the ranking with ID 13.
"""
return [
("a5", 0.868655),
("a6", 0.846338),
("a4", 0.812076),
("a3", 0.789327),
("a2", 0.718801),
("a1", 0.300742),
]
def get_ranking14():
"""
Return the ranking with ID 14.
"""
return [
("a5", 0.836287),
("a6", 0.814430),
("a4", 0.805387),
("a3", 0.745801),
("a2", 0.688769),
("a1", 0.341532),
]
def get_ranking15():
"""
Return the ranking with ID 15.
"""
return [
("Direct", 0.554250),
("COORD.DestEnc", 0.535107),
("COORD.LTS", 0.534726),
("DF.DestEnc", 0.534260),
("DF.LTS", 0.533976),
("LSF-SnW.L4", 0.527126),
("LSF-SnW.L8", 0.524672),
("CnF.DestEnc", 0.521799),
("LSF-SnW.L2", 0.521617),
("LSF-SnW.L16", 0.520533),
("CnR.DestEnc", 0.516544),
("CnR.LTS", 0.511861),
("CnF.LTS", 0.511555),
("DF.PRoPHET", 0.479107),
("COORD.PRoPHET", 0.478254),
("Epidemic", 0.471779),
("CnR.PRoPHET", 0.447615),
("SimBetTS.L16", 0.412294),
("SimBetTS.L8", 0.401135),
("SimBetTS.L4", 0.386093),
("SnF.L2", 0.371208),
("SnF.L16", 0.362631),
("CnF.PRoPHET", 0.352886),
("SnF.L8", 0.344061),
("SnF.L4", 0.337384),
("SimBetTS.L2", 0.333762),
("CnR.Enc", 0.312368),
("EBR.L2", 0.304587),
("SnW.L2", 0.304480),
("DF.Enc", 0.203707),
("COORD.Enc", 0.200588),
("EBR.L4", 0.189972),
("SnW.L4", 0.189792),
("CnF.Enc", 0.164776),
("SnW.L8", 0.145805),
("EBR.L8", 0.145786),
("EBR.L16", 0.144892),
("SnW.L16", 0.144804),
]
def get_ranking16():
"""
Return the ranking with ID 16.
"""
return [
("COORD.PRoPHET", 0.475401),
("DF.PRoPHET", 0.472054),
("CnR.LTS", 0.380770),
("SimBetTS.L8", 0.380006),
("SimBetTS.L16", 0.379992),
("CnR.DestEnc", 0.379448),
("LSF-SnW.L16", 0.377400),
("DF.DestEnc", 0.373788),
("COORD.DestEnc", 0.373536),
("SimBetTS.L4", 0.372440),
("LSF-SnW.L8", 0.368945),
("DF.LTS", 0.366043),
("COORD.LTS", 0.365320),
("LSF-SnW.L4", 0.344986),
("CnF.PRoPHET", 0.344899),
("CnF.DestEnc", 0.340809),
("CnF.LTS", 0.336824),
("SnF.L8", 0.333813),
("SnF.L4", 0.331080),
("CnR.PRoPHET", 0.328371),
("SnF.L2", 0.328271),
("SnF.L16", 0.325965),
("SimBetTS.L2", 0.319820),
("LSF-SnW.L2", 0.283363),
("CnR.Enc", 0.253889),
("DF.Enc", 0.196428),
("COORD.Enc", 0.185271),
("Epidemic", 0.176182),
("Direct", 0.144637),
("EBR.L16", 0.144275),
("SnW.L16", 0.144196),
("EBR.L2", 0.139577),
("SnW.L2", 0.139347),
("SnW.L8", 0.137288),
("EBR.L8", 0.137283),
("EBR.L4", 0.136547),
("SnW.L4", 0.136425),
("CnF.Enc", 0.117134),
]
def get_ranking17():
"""
Return the ranking with ID 17.
"""
return [
("a3", 0.500000),
("a2", 0.433013),
("a4", 0.433013),
("a1", 0.000000),
("a5", 0.000000),
]
def get_ranking18():
"""
Return the ranking with ID 18.
"""
return [
("a5", 0.700000),
("a4", 0.650413),
("a3", 0.500000),
("a2", 0.349587),
("a1", 0.300000),
]
def get_ranking19():
"""
Return the ranking with ID 19.
"""
return [
("a5", 1.000000),
("a4", 0.750000),
("a3", 0.500000),
("a2", 0.250000),
("a1", 0.000000),
]
def get_ranking20():
"""
Return the ranking with ID 20.
"""
return [
("A", 0.562314),
("D", 0.472564),
("C", 0.447428),
("B", 0.438744),
]
def get_ranking21():
"""
Return the ranking with ID 21.
"""
return [
("C", 0.586404),
("A", 0.536356),
("B", 0.422726),
("D", 0.418160),
]
def get_ranking22():
"""
Return the ranking with ID 22.
"""
return [
("A", 0.567198),
("D", 0.473771),
("B", 0.440236),
("C", 0.439791),
]
def get_ranking23():
"""
Return the ranking with ID 23.
"""
return [
("A", 0.596199),
("B", 0.592651),
("D", 0.581653),
("C", 0.507066),
]
def get_vector01():
"""
Return the vector with ID 01.
"""
return [
0.7,
0.3,
]
def get_vector02():
"""
Return the vector with ID 02.
"""
return [
0.3,
0.2,
0.4,
0.1,
]
def get_vector03():
"""
Return the vector with ID 03.
"""
return [
0.25,
0.25,
0.25,
0.25,
]
def get_vector04():
"""
Return the vector with ID 04.
"""
return [
0.5,
0.4,
]
def get_vector05():
"""
Return the vector with ID 05.
"""
return [
0.5,
0.5,
]
def get_vector06():
"""
Return the vector with ID 06.
"""
return [
0.5,
0.5,
0.5,
0.5,
0.5,
]
def get_vector07():
"""
Return the vector with ID 07.
"""
return [
0.0,
0.1,
0.2,
0.3,
0.4,
]
def get_vector08():
"""
Return the vector with ID 07.
"""
return [
0.54,
0.5,
0.46,
]
def get_vector09():
"""
Return the vector with ID 09.
"""
return [
0.0000000,
0.4330127,
0.5000000,
0.4330127,
0.0000000,
]
def get_vector10():
"""
Return the vector with ID 10.
"""
return [
0.4418200,
0.5000000,
0.3163389,
]
def get_vector11():
"""
Return the vector with ID 11.
"""
return [
0.6194425,
0.5000000,
0.3805575,
]
def get_vector12():
"""
Return the vector with ID 12.
"""
return [
0.3805575,
0.5000000,
0.6194425,
]
def get_vector13():
"""
Return the vector with ID 13.
"""
return [
0.6177727,
0.5000000,
0.3822273,
]
def get_vector14():
"""
Return the vector with ID 14.
"""
return [
0.5767680,
0.5000000,
0.4232320,
]
def get_vector15():
"""
Return the vector with ID 15.
"""
return [
0.4232320,
0.5000000,
0.5767680,
]
def get_vector16():
"""
Return the vector with ID 16.
"""
return [
0.5714286,
0.5000000,
0.4285714,
]
def get_vector17():
"""
Return the vector with ID 17.
"""
return [
0.33333333,
0.33333333,
0.33333333,
]
def get_vector18():
"""
Return the vector with ID 18.
"""
return [
0.37406776,
0.25186448,
0.37406776,
]
def get_vector19():
"""
Return the vector with ID 19.
"""
return [
0.20724531,
0.31710188,
0.47565280,
]
def get_vector20():
"""
Return the vector with ID 20.
"""
return [
0.27329284,
0.32664742,
0.40005975,
]
def get_vector21():
"""
Return the vector with ID 21.
"""
return [
0.25000000,
0.25857023,
0.49142977,
]
def get_vector22():
"""
Return the vector with ID 22.
"""
return [
0.50000000,
0.25000000,
0.25000000,
]
def get_vector23():
"""
Return the vector with ID 23.
"""
return [
0.23971980,
0.28651997,
0.47376023,
]
def get_vector24():
"""
Return the vector with ID 24.
"""
return [
0.33817571,
0.33091215,
0.33091215,
]
def get_vector25():
"""
Return the vector with ID 25.
"""
return [
0.22633480,
0.27052183,
0.50314336,
]
def get_vector26():
"""
Return the vector with ID 26.
"""
return [
0.33861310,
0.33069345,
0.33069345,
] | mcdm/tests/helper_testing.py | import unittest
import numpy as np
class ExtendedTestCase(unittest.TestCase):
# pylint: disable=invalid-name
"""
Extended ``TestCase`` class of the ``unittest`` module.
"""
def assertAlmostEqualArrays(self, obtained_array, expected_array):
"""
Assert that two NumPy arrays are element-wise almost equal and use the
same data type.
"""
np.testing.assert_allclose(obtained_array, expected_array)
self.assertEqual(obtained_array.dtype, expected_array.dtype)
def assertAlmostEqualRankings(self, obtained_ranking, expected_ranking):
"""
Assert that two lists of tuples contain the same alternatives in the
same order with almost equal scores.
"""
self.assertEqual(len(obtained_ranking), len(expected_ranking))
for i, tmp in enumerate(obtained_ranking):
self.assertEqual(tmp[0], expected_ranking[i][0])
self.assertAlmostEqual(tmp[1], expected_ranking[i][1], places=6)
def get_labels01():
"""
Return the labels with ID 01.
"""
return [
"a1",
"a2",
"a3",
"a4",
"a5",
"a6",
"a7",
"a8",
"a9",
"a10",
"a11",
"a12",
]
def get_labels02():
"""
Return the labels with ID 02.
"""
return [
"A",
"B",
"C",
"D",
"E",
"F",
]
def get_labels03():
"""
Return the labels with ID 03.
"""
return [
"A",
"B",
"C",
"D",
]
def get_labels04():
"""
Return the labels with ID 04.
"""
return [
"Epidemic",
"Direct",
"CnF.LTS",
"CnF.DestEnc",
"CnF.Enc",
"CnF.PRoPHET",
"CnR.LTS",
"CnR.DestEnc",
"CnR.Enc",
"CnR.PRoPHET",
"DF.LTS",
"DF.DestEnc",
"DF.Enc",
"DF.PRoPHET",
"COORD.LTS",
"COORD.DestEnc",
"COORD.Enc",
"COORD.PRoPHET",
"SnW.L2",
"SnW.L4",
"SnW.L8",
"SnW.L16",
"LSF-SnW.L2",
"LSF-SnW.L4",
"LSF-SnW.L8",
"LSF-SnW.L16",
"SnF.L2",
"SnF.L4",
"SnF.L8",
"SnF.L16",
"SimBetTS.L2",
"SimBetTS.L4",
"SimBetTS.L8",
"SimBetTS.L16",
"EBR.L2",
"EBR.L4",
"EBR.L8",
"EBR.L16",
]
def get_labels05():
"""
Return the labels with ID 05.
"""
return [
"A",
"B",
"C",
"D",
"E",
]
def get_matrix01():
"""
Return the matrix with ID 01.
"""
return [
[0.0, 0.0, 1.0],
[0.1, 0.2, 0.8],
[0.2, 0.4, 0.6],
[0.3, 0.7, 0.3],
[0.6, 0.8, 0.2],
[0.8, 0.9, 0.1],
[1.0, 1.0, 0.0],
]
def get_matrix02():
"""
Return the matrix with ID 02.
"""
return [
[0.0, 0.0, 0.0],
[0.0, 0.0, 1.0],
[0.2, 0.5, 0.0],
[0.2, 0.5, 1.0],
[0.4, 1.0, 0.0],
[0.4, 1.0, 1.0],
[0.6, 1.0, 0.0],
[0.6, 1.0, 1.0],
[0.8, 0.5, 0.0],
[0.8, 0.5, 1.0],
[1.0, 0.0, 0.0],
[1.0, 0.0, 1.0],
]
def get_matrix03():
"""
Return the matrix with ID 03.
"""
return [
[0.00, 1.00],
[0.25, 0.75],
[0.50, 0.50],
[0.75, 0.25],
[1.00, 0.00],
]
def get_matrix04():
"""
Return the matrix with ID 04.
"""
return [
[ 2.0, 12.0, 7.0, 7.0], # noqa: E201
[ 4.0, 100.0, 7.0, 7.0], # noqa: E201
[10.0, 200.0, 7.0, 7.0], # noqa: E201
[ 0.0, 300.0, 7.0, 7.0], # noqa: E201
[ 6.0, 400.0, 7.0, 7.0], # noqa: E201
[ 1.0, 600.0, 7.0, 7.0], # noqa: E201
]
def get_matrix05():
"""
Return the matrix with ID 05.
"""
return [
[ 8.0, 8.0, -1.0, -1.0, 5.0, 5.0], # noqa: E201
[24.0, 24.0, -11.0, -11.0, 0.0, 0.0], # noqa: E201
[ 4.0, 4.0, -10.0, -10.0, 40.0, 40.0], # noqa: E201
[14.0, 14.0, -9.0, -9.0, 15.0, 15.0], # noqa: E201
[ 6.0, 6.0, -7.0, -7.0, -5.0, -5.0], # noqa: E201
[18.0, 18.0, -5.0, -5.0, -10.0, -10.0], # noqa: E201
]
def get_matrix06():
"""
Return the matrix with ID 06.
"""
return [
[0.5, 0.6, 0.3, 0.2, 0.9],
[0.5, 0.5, 0.5, 0.5, 0.5],
[0.5, 0.4, 0.7, 0.8, 0.1],
]
def get_matrix07():
"""
Return the matrix with ID 07.
"""
return [
[0.9, 30.0, 500.0, 4.0],
[0.1, 50.0, 5.0, 6.0],
[0.5, 80.0, 8.0, 6.0],
[0.8, 40.0, 100.0, 4.0],
[0.7, 60.0, 20.0, 5.0],
[0.6, 60.0, 10.0, 5.0],
]
def get_matrix08():
"""
Return the matrix with ID 08.
"""
return [
[4.0, 5.0, 10.0],
[3.0, 10.0, 6.0],
[3.0, 20.0, 2.0],
[2.0, 15.0, 5.0],
]
def get_matrix09():
"""
Return the matrix with ID 09.
"""
return [
[1.000000, 1.000000, 0.017276],
[0.046296, 0.022222, 1.000000],
[0.259295, 0.106985, 0.783554],
[0.260509, 0.107106, 0.801962],
[0.090419, 0.044763, 0.245226],
[0.563999, 0.239328, 0.288358],
[0.320434, 0.147798, 0.738850],
[0.314969, 0.144773, 0.751384],
[0.714533, 0.364252, 0.092688],
[0.972336, 0.706954, 0.091856],
[0.283518, 0.127236, 0.805858],
[0.296781, 0.132676, 0.797796],
[0.265469, 0.122640, 0.202089],
[0.839930, 0.461981, 0.304980],
[0.282103, 0.126395, 0.808264],
[0.296100, 0.132096, 0.799922],
[0.212761, 0.104337, 0.229227],
[0.798002, 0.429797, 0.335956],
[0.068258, 0.035742, 0.519465],
[0.102412, 0.055489, 0.281905],
[0.155229, 0.085050, 0.163012],
[0.238498, 0.128995, 0.103688],
[0.177178, 0.075565, 0.854643],
[0.257650, 0.112055, 0.811516],
[0.294934, 0.131563, 0.781283],
[0.310552, 0.140593, 0.762520],
[0.368115, 0.159646, 0.449073],
[0.498578, 0.228317, 0.296180],
[0.635688, 0.310778, 0.210340],
[0.759518, 0.402583, 0.149893],
[0.499916, 0.188975, 0.302964],
[0.717516, 0.306092, 0.249340],
[0.790702, 0.359737, 0.221402],
[0.848093, 0.415040, 0.193533],
[0.068414, 0.035866, 0.519542],
[0.102469, 0.055554, 0.282188],
[0.155261, 0.085064, 0.162956],
[0.238748, 0.129114, 0.103684],
]
def get_matrix10():
"""
Return the matrix with ID 10.
"""
return [
[0.00, 1.00],
[0.25, 0.75],
[0.50, 0.50],
[0.75],
[1.00, 0.00],
]
def get_matrix11():
"""
Return the matrix with ID 11.
"""
return [
[0.0, 0.0, 1.0],
[0.1, 0.2, 0.8],
[0.2, 0.4, 0.6],
[0.3, 0.7, 0.3],
[0.6, 0.8, 0.2],
[0.8, 0.9],
[1.0, 1.0, 0.0],
]
def get_matrix12():
"""
Return the matrix with ID 12.
"""
return [
[0.0, 0.0, 1.1],
[0.1, 0.2, 0.8],
[0.2, 0.4, 0.6],
[0.3, 0.7, 0.3],
[0.6, 0.8, 0.2],
[0.8, 0.9, 0.1],
[1.0, 1.0, 0.0],
]
def get_matrix13():
"""
Return the matrix with ID 13.
"""
return [
[ 0.0, 0.0, 1.0], # noqa: E201
[-0.1, 0.2, 0.8], # noqa: E201
[ 0.2, 0.4, 0.6], # noqa: E201
[ 0.3, 0.7, 0.3], # noqa: E201
[ 0.6, 0.8, 0.2], # noqa: E201
[ 0.8, 0.9, 0.1], # noqa: E201
[ 1.0, 1.0, 0.0], # noqa: E201
]
def get_matrix14():
"""
Return the matrix with ID 14.
"""
return [
[0.2, 1.00, 1.0, 1.0],
[0.4, 0.12, 1.0, 1.0],
[1.0, 0.06, 1.0, 1.0],
[0.0, 0.04, 1.0, 1.0],
[0.6, 0.03, 1.0, 1.0],
[0.1, 0.02, 1.0, 1.0],
]
def get_matrix15():
"""
Return the matrix with ID 15.
"""
return [
[ 2.0, 12.0, 7.0, 7.0], # noqa: E201
[ 4.0, 100.0, 7.0, 7.0], # noqa: E201
[10.0, 200.0, 7.0, 7.0], # noqa: E201
[ 0.0, 300.0, 7.0, 7.0], # noqa: E201
[ 6.0, 400.0, 7.0], # noqa: E201
[ 1.0, 600.0, 7.0, 7.0], # noqa: E201
]
def get_matrix16():
"""
Return the matrix with ID 16.
"""
return [
[ 2.0, 12.0, 7.0, 7.0], # noqa: E201
[-4.0, 100.0, 7.0, 7.0], # noqa: E201
[10.0, 200.0, 7.0, 7.0], # noqa: E201
[ 0.0, 300.0, 7.0, 7.0], # noqa: E201
[ 6.0, 400.0, 7.0, 7.0], # noqa: E201
[ 1.0, 600.0, 7.0, 7.0], # noqa: E201
]
def get_matrix17():
"""
Return the matrix with ID 17.
"""
return [
[ 2.0, 12.0, 0.0, 7.0], # noqa: E201
[ 4.0, 100.0, 0.0, 7.0], # noqa: E201
[10.0, 200.0, 0.0, 7.0], # noqa: E201
[ 0.0, 300.0, 0.0, 7.0], # noqa: E201
[ 6.0, 400.0, 0.0, 7.0], # noqa: E201
[ 1.0, 600.0, 0.0, 7.0], # noqa: E201
]
def get_matrix18():
"""
Return the matrix with ID 18.
"""
return [
[ 2.0, 12.0, 7.0, 0.0], # noqa: E201
[ 4.0, 100.0, 7.0, 0.0], # noqa: E201
[10.0, 200.0, 7.0, 0.0], # noqa: E201
[ 0.0, 300.0, 7.0, 0.0], # noqa: E201
[ 6.0, 400.0, 7.0, 0.0], # noqa: E201
[ 1.0, 600.0, 7.0, 0.0], # noqa: E201
]
def get_matrix19():
"""
Return the matrix with ID 19.
"""
return [
[0.2, 0.8, 1.0, 0.0, 0.3, 0.7],
[1.0, 0.0, 0.0, 1.0, 0.2, 0.8],
[0.0, 1.0, 0.1, 0.9, 1.0, 0.0],
[0.5, 0.5, 0.2, 0.8, 0.5, 0.5],
[0.1, 0.9, 0.4, 0.6, 0.1, 0.9],
[0.7, 0.3, 0.6, 0.4, 0.0, 1.0],
]
def get_matrix20():
"""
Return the matrix with ID 20.
"""
return [
[ 8.0, 8.0, -1.0, -1.0, 5.0, 5.0], # noqa: E201
[24.0, 24.0, -11.0, -11.0, 0.0, 0.0], # noqa: E201
[ 4.0, 4.0, -10.0, -10.0, 40.0, 40.0], # noqa: E201
[14.0, 14.0, -9.0, -9.0, 15.0, 15.0], # noqa: E201
[ 6.0, 6.0, -7.0, -7.0, -5.0], # noqa: E201
[18.0, 18.0, -5.0, -5.0, -10.0, -10.0], # noqa: E201
]
def get_matrix21():
"""
Return the matrix with ID 21.
"""
return [
[7.0, 8.0, -1.0, -1.0, 5.0, 5.0],
[7.0, 24.0, -11.0, -11.0, 0.0, 0.0],
[7.0, 4.0, -10.0, -10.0, 40.0, 40.0],
[7.0, 14.0, -9.0, -9.0, 15.0, 15.0],
[7.0, 6.0, -7.0, -7.0, -5.0, -5.0],
[7.0, 18.0, -5.0, -5.0, -10.0, -10.0],
]
def get_matrix22():
"""
Return the matrix with ID 22.
"""
return [
[-7.0, 8.0, -1.0, -1.0, 5.0, 5.0],
[-7.0, 24.0, -11.0, -11.0, 0.0, 0.0],
[-7.0, 4.0, -10.0, -10.0, 40.0, 40.0],
[-7.0, 14.0, -9.0, -9.0, 15.0, 15.0],
[-7.0, 6.0, -7.0, -7.0, -5.0, -5.0],
[-7.0, 18.0, -5.0, -5.0, -10.0, -10.0],
]
def get_matrix23():
"""
Return the matrix with ID 23.
"""
return [
[0.0, 8.0, -1.0, -1.0, 5.0, 5.0],
[0.0, 24.0, -11.0, -11.0, 0.0, 0.0],
[0.0, 4.0, -10.0, -10.0, 40.0, 40.0],
[0.0, 14.0, -9.0, -9.0, 15.0, 15.0],
[0.0, 6.0, -7.0, -7.0, -5.0, -5.0],
[0.0, 18.0, -5.0, -5.0, -10.0, -10.0],
]
def get_matrix24():
"""
Return the matrix with ID 24.
"""
return [
[4.0, 4.0, 7.0, 7.0],
[3.0, 3.0, 7.0, 7.0],
[2.0, 2.0, 7.0, 7.0],
[1.0, 1.0, 7.0, 7.0],
[0.0, 0.0, 7.0, 7.0],
]
def get_matrix25():
"""
Return the matrix with ID 25.
"""
return [
[0.4, 0.4, 0.2, 0.2],
[0.3, 0.3, 0.2, 0.2],
[0.2, 0.2, 0.2, 0.2],
[0.1, 0.1, 0.2, 0.2],
[0.0, 0.0, 0.2, 0.2],
]
def get_matrix26():
"""
Return the matrix with ID 26.
"""
return [
[4.0, 4.0, 7.0, 7.0],
[3.0, 3.0, 7.0, 7.0],
[2.0, 2.0, 7.0, 7.0],
[1.0, 1.0, 7.0],
[0.0, 0.0, 7.0, 7.0],
]
def get_matrix27():
"""
Return the matrix with ID 27.
"""
return [
[ 4.0, 4.0, 7.0, 7.0], # noqa: E201
[ 3.0, 3.0, 7.0, 7.0], # noqa: E201
[-2.0, 2.0, 7.0, 7.0], # noqa: E201
[ 1.0, 1.0, 7.0, 7.0], # noqa: E201
[ 0.0, 0.0, 7.0, 7.0], # noqa: E201
]
def get_matrix28():
"""
Return the matrix with ID 28.
"""
return [
[4.0, 4.0, 7.0, 0.0],
[3.0, 3.0, 7.0, 0.0],
[2.0, 2.0, 7.0, 0.0],
[1.0, 1.0, 7.0, 0.0],
[0.0, 0.0, 7.0, 0.0],
]
def get_matrix29():
"""
Return the matrix with ID 29.
"""
return [
[0.0, 0.0, 5.0, 5.0],
[6.0, 6.0, 5.0, 5.0],
[0.0, 0.0, 5.0, 5.0],
[8.0, 8.0, 5.0, 5.0],
]
def get_matrix30():
"""
Return the matrix with ID 30.
"""
return [
[0.0, 0.0, 0.5, 0.5],
[0.6, 0.6, 0.5, 0.5],
[0.0, 0.0, 0.5, 0.5],
[0.8, 0.8, 0.5, 0.5],
]
def get_matrix31():
"""
Return the matrix with ID 31.
"""
return [
[0.0, 0.0, 5.0, 5.0],
[6.0, 6.0, 5.0, 5.0],
[0.0, 0.0, 5.0],
[8.0, 8.0, 5.0, 5.0],
]
def get_matrix32():
"""
Return the matrix with ID 32.
"""
return [
[0.0, 0.0, 5.0, 5.0],
[6.0, -6.0, 5.0, 5.0],
[0.0, 0.0, 5.0, 5.0],
[8.0, 8.0, 5.0, 5.0],
]
def get_matrix33():
"""
Return the matrix with ID 33.
"""
return [
[0.0, 0.0, 5.0, 0.0],
[6.0, 6.0, 5.0, 0.0],
[0.0, 0.0, 5.0, 0.0],
[8.0, 8.0, 5.0, 0.0],
]
def get_matrix34():
"""
Return the matrix with ID 34.
"""
return [
[ 1.0000000, 0.9314381, -0.9314381], # noqa: E201
[ 0.9314381, 1.0000000, -1.0000000], # noqa: E201
[-0.9314381, -1.0000000, 1.0000000], # noqa: E201
]
def get_matrix35():
"""
Return the matrix with ID 35.
"""
return [
[1.0, 0.0, 0.0],
[0.0, 1.0, 0.0],
[0.0, 0.0, 1.0],
]
def get_matrix36():
"""
Return the matrix with ID 36.
"""
return [
[1.0000000, 0.9314381, 0.9314381],
[0.9314381, 1.0000000, 1.0000000],
[0.9314381, 1.0000000, 1.0000000],
]
def get_matrix37():
"""
Return the matrix with ID 37.
"""
return [
[1.0000000, 0.9369189, 0.9369189],
[0.9369189, 1.0000000, 1.0000000],
[0.9369189, 1.0000000, 1.0000000],
]
def get_matrix38():
"""
Return the matrix with ID 38.
"""
return [
[1.0000000, 0.5186014, 0.0000000],
[0.5186014, 1.0000000, 0.0000000],
[0.0000000, 0.0000000, 1.0000000],
]
def get_matrix39():
"""
Return the matrix with ID 39.
"""
return [
[0.0, 0.0],
[0.0, 1.0],
]
def get_matrix40():
"""
Return the matrix with ID 40.
"""
return [
[1.0000000, 0.0000000],
[0.0000000, 1.0000000],
]
def get_matrix41():
"""
Return the matrix with ID 41.
"""
return [
[0.000, 0.000, 0.333],
[0.033, 0.050, 0.267],
[0.067, 0.100, 0.200],
[0.100, 0.175, 0.100],
[0.200, 0.200, 0.067],
[0.267, 0.225, 0.033],
[0.333, 0.250, 0.000],
]
def get_matrix42():
"""
Return the matrix with ID 42.
"""
return [
[0.00000000, 0.00000000, 0.00000000],
[0.00000000, 0.00000000, 0.16666667],
[0.03333333, 0.08333333, 0.00000000],
[0.03333333, 0.08333333, 0.16666667],
[0.06666667, 0.16666667, 0.00000000],
[0.06666667, 0.16666667, 0.16666667],
[0.10000000, 0.16666667, 0.00000000],
[0.10000000, 0.16666667, 0.16666667],
[0.13333333, 0.08333333, 0.00000000],
[0.13333333, 0.08333333, 0.16666667],
[0.16666667, 0.00000000, 0.00000000],
[0.16666667, 0.00000000, 0.16666667],
]
def get_matrix43():
"""
Return the matrix with ID 43.
"""
return [
[0.000, 0.000, 0.333],
[0.033, 0.050, 0.267],
[0.067, 0.100, 0.200],
[0.100, 0.175, 0.100],
[0.200, 0.200, 0.067],
[0.267, 0.225],
[0.333, 0.250, 0.000],
]
def get_matrix44():
"""
Return the matrix with ID 44.
"""
return [
[0.000, 0.000, 1.333],
[0.033, 0.050, 0.267],
[0.067, 0.100, 0.200],
[0.100, 0.175, 0.100],
[0.200, 0.200, 0.067],
[0.267, 0.225, 0.033],
[0.333, 0.250, 0.000],
]
def get_matrix45():
"""
Return the matrix with ID 45.
"""
return [
[ 0.000, 0.000, 0.333], # noqa: E201
[-0.033, 0.050, 0.267], # noqa: E201
[ 0.067, 0.100, 0.200], # noqa: E201
[ 0.100, 0.175, 0.100], # noqa: E201
[ 0.200, 0.200, 0.067], # noqa: E201
[ 0.267, 0.225, 0.033], # noqa: E201
[ 0.333, 0.250, 0.000], # noqa: E201
]
def get_matrix46():
"""
Return the matrix with ID 46.
"""
return [
[0.000, 0.0, 0.333],
[0.033, 0.2, 0.267],
[0.067, 0.4, 0.200],
[0.100, 0.7, 0.100],
[0.200, 0.8, 0.067],
[0.267, 0.9, 0.033],
[0.333, 1.0, 0.000],
]
def get_matrix47():
    """
    Return the matrix with ID 47.

    NOTE(review): the first row holds 1.01 where the otherwise-identical
    matrix with ID 48 holds 1.00 — presumably a slightly-out-of-range
    fixture; confirm it is intentional.
    """
    return [
        [0.00, 1.01],
        [0.25, 0.75],
        [0.50, 0.50],
        [0.75, 0.25],
        [1.00, 0.00],
    ]
def get_matrix48():
"""
Return the matrix with ID 48.
"""
return [
[ 0.00, 1.00], # noqa: E201
[-0.25, 0.75], # noqa: E201
[ 0.50, 0.50], # noqa: E201
[ 0.75, 0.25], # noqa: E201
[ 1.00, 0.00], # noqa: E201
]
def get_ranking01():
"""
Return the ranking with ID 01.
"""
return [
("a1", 0.500000),
("a2", 0.500000),
("a3", 0.500000),
("a4", 0.500000),
("a5", 0.500000),
]
def get_ranking02():
"""
Return the ranking with ID 02.
"""
return [
("a5", 0.700000),
("a4", 0.600000),
("a3", 0.500000),
("a2", 0.400000),
("a1", 0.300000),
]
def get_ranking03():
"""
Return the ranking with ID 03.
"""
return [
("a1", 0.300000),
("a2", 0.400000),
("a3", 0.500000),
("a4", 0.600000),
("a5", 0.700000),
]
def get_ranking04():
"""
Return the ranking with ID 04.
"""
return [
("a2", 0.677778),
("a1", 0.669167),
("a3", 0.638889),
("a6", 0.625000),
("a5", 0.590278),
("a4", 0.588889),
]
def get_ranking05():
"""
Return the ranking with ID 05.
"""
return [
("a2", 0.653952),
("a3", 0.604472),
("a1", 0.601574),
("a6", 0.595749),
("a5", 0.539665),
("a4", 0.530537),
]
def get_ranking06():
"""
Return the ranking with ID 06.
"""
return [
("a2", 0.650527),
("a1", 0.612074),
("a3", 0.599994),
("a6", 0.594459),
("a5", 0.540496),
("a4", 0.537186),
]
def get_ranking07():
"""
Return the ranking with ID 07.
"""
return [
("a2", 0.644440),
("a1", 0.623018),
("a3", 0.593228),
("a6", 0.591963),
("a4", 0.543750),
("a5", 0.540097),
]
def get_ranking08():
"""
Return the ranking with ID 08.
"""
return [
("a6", 0.583347),
("a3", 0.574199),
("a5", 0.480220),
("a2", 0.469420),
("a4", 0.304194),
("a1", 0.192606),
]
def get_ranking09():
"""
Return the ranking with ID 09.
"""
return [
("a2", 0.669839),
("a5", 0.647361),
("a3", 0.645343),
("a6", 0.622660),
("a4", 0.587153),
("a1", 0.471261),
]
def get_ranking10():
"""
Return the ranking with ID 10.
"""
return [
("a2", 0.677366),
("a5", 0.675493),
("a3", 0.658395),
("a6", 0.652317),
("a4", 0.622630),
("a1", 0.456501),
]
def get_ranking11():
"""
Return the ranking with ID 11.
"""
return [
("a6", 0.983188),
("a3", 0.980454),
("a5", 0.968182),
("a2", 0.967595),
("a4", 0.808142),
("a1", 0.033316),
]
def get_ranking12():
"""
Return the ranking with ID 12.
"""
return [
("a6", 0.955577),
("a5", 0.954078),
("a3", 0.938579),
("a2", 0.909531),
("a4", 0.808416),
("a1", 0.096521),
]
def get_ranking13():
"""
Return the ranking with ID 13.
"""
return [
("a5", 0.868655),
("a6", 0.846338),
("a4", 0.812076),
("a3", 0.789327),
("a2", 0.718801),
("a1", 0.300742),
]
def get_ranking14():
"""
Return the ranking with ID 14.
"""
return [
("a5", 0.836287),
("a6", 0.814430),
("a4", 0.805387),
("a3", 0.745801),
("a2", 0.688769),
("a1", 0.341532),
]
def get_ranking15():
"""
Return the ranking with ID 15.
"""
return [
("Direct", 0.554250),
("COORD.DestEnc", 0.535107),
("COORD.LTS", 0.534726),
("DF.DestEnc", 0.534260),
("DF.LTS", 0.533976),
("LSF-SnW.L4", 0.527126),
("LSF-SnW.L8", 0.524672),
("CnF.DestEnc", 0.521799),
("LSF-SnW.L2", 0.521617),
("LSF-SnW.L16", 0.520533),
("CnR.DestEnc", 0.516544),
("CnR.LTS", 0.511861),
("CnF.LTS", 0.511555),
("DF.PRoPHET", 0.479107),
("COORD.PRoPHET", 0.478254),
("Epidemic", 0.471779),
("CnR.PRoPHET", 0.447615),
("SimBetTS.L16", 0.412294),
("SimBetTS.L8", 0.401135),
("SimBetTS.L4", 0.386093),
("SnF.L2", 0.371208),
("SnF.L16", 0.362631),
("CnF.PRoPHET", 0.352886),
("SnF.L8", 0.344061),
("SnF.L4", 0.337384),
("SimBetTS.L2", 0.333762),
("CnR.Enc", 0.312368),
("EBR.L2", 0.304587),
("SnW.L2", 0.304480),
("DF.Enc", 0.203707),
("COORD.Enc", 0.200588),
("EBR.L4", 0.189972),
("SnW.L4", 0.189792),
("CnF.Enc", 0.164776),
("SnW.L8", 0.145805),
("EBR.L8", 0.145786),
("EBR.L16", 0.144892),
("SnW.L16", 0.144804),
]
def get_ranking16():
"""
Return the ranking with ID 16.
"""
return [
("COORD.PRoPHET", 0.475401),
("DF.PRoPHET", 0.472054),
("CnR.LTS", 0.380770),
("SimBetTS.L8", 0.380006),
("SimBetTS.L16", 0.379992),
("CnR.DestEnc", 0.379448),
("LSF-SnW.L16", 0.377400),
("DF.DestEnc", 0.373788),
("COORD.DestEnc", 0.373536),
("SimBetTS.L4", 0.372440),
("LSF-SnW.L8", 0.368945),
("DF.LTS", 0.366043),
("COORD.LTS", 0.365320),
("LSF-SnW.L4", 0.344986),
("CnF.PRoPHET", 0.344899),
("CnF.DestEnc", 0.340809),
("CnF.LTS", 0.336824),
("SnF.L8", 0.333813),
("SnF.L4", 0.331080),
("CnR.PRoPHET", 0.328371),
("SnF.L2", 0.328271),
("SnF.L16", 0.325965),
("SimBetTS.L2", 0.319820),
("LSF-SnW.L2", 0.283363),
("CnR.Enc", 0.253889),
("DF.Enc", 0.196428),
("COORD.Enc", 0.185271),
("Epidemic", 0.176182),
("Direct", 0.144637),
("EBR.L16", 0.144275),
("SnW.L16", 0.144196),
("EBR.L2", 0.139577),
("SnW.L2", 0.139347),
("SnW.L8", 0.137288),
("EBR.L8", 0.137283),
("EBR.L4", 0.136547),
("SnW.L4", 0.136425),
("CnF.Enc", 0.117134),
]
def get_ranking17():
"""
Return the ranking with ID 17.
"""
return [
("a3", 0.500000),
("a2", 0.433013),
("a4", 0.433013),
("a1", 0.000000),
("a5", 0.000000),
]
def get_ranking18():
"""
Return the ranking with ID 18.
"""
return [
("a5", 0.700000),
("a4", 0.650413),
("a3", 0.500000),
("a2", 0.349587),
("a1", 0.300000),
]
def get_ranking19():
"""
Return the ranking with ID 19.
"""
return [
("a5", 1.000000),
("a4", 0.750000),
("a3", 0.500000),
("a2", 0.250000),
("a1", 0.000000),
]
def get_ranking20():
"""
Return the ranking with ID 20.
"""
return [
("A", 0.562314),
("D", 0.472564),
("C", 0.447428),
("B", 0.438744),
]
def get_ranking21():
"""
Return the ranking with ID 21.
"""
return [
("C", 0.586404),
("A", 0.536356),
("B", 0.422726),
("D", 0.418160),
]
def get_ranking22():
"""
Return the ranking with ID 22.
"""
return [
("A", 0.567198),
("D", 0.473771),
("B", 0.440236),
("C", 0.439791),
]
def get_ranking23():
"""
Return the ranking with ID 23.
"""
return [
("A", 0.596199),
("B", 0.592651),
("D", 0.581653),
("C", 0.507066),
]
def get_vector01():
"""
Return the vector with ID 01.
"""
return [
0.7,
0.3,
]
def get_vector02():
"""
Return the vector with ID 02.
"""
return [
0.3,
0.2,
0.4,
0.1,
]
def get_vector03():
"""
Return the vector with ID 03.
"""
return [
0.25,
0.25,
0.25,
0.25,
]
def get_vector04():
"""
Return the vector with ID 04.
"""
return [
0.5,
0.4,
]
def get_vector05():
"""
Return the vector with ID 05.
"""
return [
0.5,
0.5,
]
def get_vector06():
"""
Return the vector with ID 06.
"""
return [
0.5,
0.5,
0.5,
0.5,
0.5,
]
def get_vector07():
"""
Return the vector with ID 07.
"""
return [
0.0,
0.1,
0.2,
0.3,
0.4,
]
def get_vector08():
    """
    Return the vector with ID 08.
    """
    # (docstring previously said "ID 07" — copy-paste typo corrected)
    return [
        0.54,
        0.5,
        0.46,
    ]
def get_vector09():
"""
Return the vector with ID 09.
"""
return [
0.0000000,
0.4330127,
0.5000000,
0.4330127,
0.0000000,
]
def get_vector10():
"""
Return the vector with ID 10.
"""
return [
0.4418200,
0.5000000,
0.3163389,
]
def get_vector11():
"""
Return the vector with ID 11.
"""
return [
0.6194425,
0.5000000,
0.3805575,
]
def get_vector12():
"""
Return the vector with ID 12.
"""
return [
0.3805575,
0.5000000,
0.6194425,
]
def get_vector13():
"""
Return the vector with ID 13.
"""
return [
0.6177727,
0.5000000,
0.3822273,
]
def get_vector14():
"""
Return the vector with ID 14.
"""
return [
0.5767680,
0.5000000,
0.4232320,
]
def get_vector15():
"""
Return the vector with ID 15.
"""
return [
0.4232320,
0.5000000,
0.5767680,
]
def get_vector16():
"""
Return the vector with ID 16.
"""
return [
0.5714286,
0.5000000,
0.4285714,
]
def get_vector17():
"""
Return the vector with ID 17.
"""
return [
0.33333333,
0.33333333,
0.33333333,
]
def get_vector18():
"""
Return the vector with ID 18.
"""
return [
0.37406776,
0.25186448,
0.37406776,
]
def get_vector19():
"""
Return the vector with ID 19.
"""
return [
0.20724531,
0.31710188,
0.47565280,
]
def get_vector20():
"""
Return the vector with ID 20.
"""
return [
0.27329284,
0.32664742,
0.40005975,
]
def get_vector21():
"""
Return the vector with ID 21.
"""
return [
0.25000000,
0.25857023,
0.49142977,
]
def get_vector22():
"""
Return the vector with ID 22.
"""
return [
0.50000000,
0.25000000,
0.25000000,
]
def get_vector23():
"""
Return the vector with ID 23.
"""
return [
0.23971980,
0.28651997,
0.47376023,
]
def get_vector24():
"""
Return the vector with ID 24.
"""
return [
0.33817571,
0.33091215,
0.33091215,
]
def get_vector25():
"""
Return the vector with ID 25.
"""
return [
0.22633480,
0.27052183,
0.50314336,
]
def get_vector26():
"""
Return the vector with ID 26.
"""
return [
0.33861310,
0.33069345,
0.33069345,
]
from __future__ import (
division, absolute_import, print_function, unicode_literals,
)
from builtins import * # noqa
from future.builtins.disabled import * # noqa
from magic_constraints.exception import MagicSyntaxError, MagicTypeError
def transform_to_slots(constraints_package, *args, **kwargs):
    """Map *args/**kwargs onto a positional slot list.

    Slots are filled in three passes — positional args, then keyword args
    (routed through constraints_package.name_hash), then declared defaults —
    and a MagicSyntaxError is raised for too many positionals, an unknown or
    duplicate keyword, or any slot still unfilled at the end.
    """

    # Sentinel class marking a slot that has not been assigned yet.
    class UnFill(object):
        pass

    plen = len(constraints_package.parameters)

    if len(args) > plen:
        raise MagicSyntaxError(
            'argument length unmatched.',
            parameters=constraints_package.parameters,
            args=args,
        )

    slots = [UnFill] * plen
    unfill_count = plen

    # 1. fill args.
    for i, val in enumerate(args):
        slots[i] = val
    unfill_count -= len(args)

    # 2. fill kwargs.
    for key, val in kwargs.items():
        if key not in constraints_package.name_hash:
            raise MagicSyntaxError(
                'invalid keyword argument',
                parameters=constraints_package.parameters,
                key=key,
            )
        i = constraints_package.name_hash[key]
        if slots[i] is not UnFill:
            # already set by a positional arg or an earlier keyword
            raise MagicSyntaxError(
                'key reassignment error.',
                parameters=constraints_package.parameters,
                key=key,
            )
        slots[i] = val
        unfill_count -= 1

    # 3. fill defaults if not set.
    # 3.1. deal with the case that default not exists.
    default_begin = constraints_package.start_of_defaults
    if default_begin < 0:
        # no parameter declares a default; skip pass 3.2 entirely
        default_begin = plen
    # 3.2 fill defaults.
    for i in range(default_begin, plen):
        parameter = constraints_package.parameters[i]
        j = constraints_package.name_hash[parameter.name]
        if slots[j] is UnFill:
            slots[j] = parameter.default
            unfill_count -= 1

    # 4. test if slots contains UnFill.
    if unfill_count != 0:
        raise MagicSyntaxError(
            'slots contains unfilled argument(s).',
            parameters=constraints_package.parameters,
            slots=slots,
        )

    return slots
def check_and_bind_arguments(parameters, slots, bind_callback):
    """Validate each slot value against its parameter, then bind it by name.

    A parameter that supplies a wrapper has its slot value wrapped for
    deferred checking; all others are checked immediately, raising
    MagicTypeError on mismatch. bind_callback(name, value) is always invoked
    with the ORIGINAL (unwrapped) argument.
    """
    plen = len(parameters)
    for i in range(plen):
        arg = slots[i]
        parameter = parameters[i]
        wrapper = parameter.wrapper_for_deferred_checking()
        # defer checking by wrapping the element of slot.
        if wrapper:
            slots[i] = wrapper(arg)
        # check now.
        elif not parameter.check_instance(arg):
            raise MagicTypeError(
                'argument unmatched.',
                parameter=parameter,
                argument=arg,
            )
        # bind.
        bind_callback(parameter.name, arg)
from __future__ import (
division, absolute_import, print_function, unicode_literals,
)
from builtins import * # noqa
from future.builtins.disabled import * # noqa
from magic_constraints.exception import MagicSyntaxError, MagicTypeError
def transform_to_slots(constraints_package, *args, **kwargs):
    """Resolve *args/**kwargs into a positional slot list.

    Positional arguments occupy the leading slots, keyword arguments are
    routed through name_hash, and declared defaults backfill the tail;
    MagicSyntaxError is raised on overflow, unknown/duplicate keywords, or
    any slot left unassigned.
    """

    # Sentinel marking a not-yet-assigned slot.
    class UnFill(object):
        pass

    total = len(constraints_package.parameters)
    if len(args) > total:
        raise MagicSyntaxError(
            'argument length unmatched.',
            parameters=constraints_package.parameters,
            args=args,
        )

    slots = [UnFill] * total
    remaining = total - len(args)

    # Positional arguments fill the leading slots in order.
    slots[:len(args)] = args

    # Keyword arguments land wherever name_hash points.
    for name, value in kwargs.items():
        if name not in constraints_package.name_hash:
            raise MagicSyntaxError(
                'invalid keyword argument',
                parameters=constraints_package.parameters,
                key=name,
            )
        idx = constraints_package.name_hash[name]
        if slots[idx] is not UnFill:
            # collides with a positional arg or an earlier keyword
            raise MagicSyntaxError(
                'key reassignment error.',
                parameters=constraints_package.parameters,
                key=name,
            )
        slots[idx] = value
        remaining -= 1

    # Defaults backfill the trailing parameters that remain unassigned.
    first_default = constraints_package.start_of_defaults
    if first_default >= 0:
        for parameter in constraints_package.parameters[first_default:]:
            idx = constraints_package.name_hash[parameter.name]
            if slots[idx] is UnFill:
                slots[idx] = parameter.default
                remaining -= 1

    # Anything still unassigned is a caller error.
    if remaining != 0:
        raise MagicSyntaxError(
            'slots contains unfilled argument(s).',
            parameters=constraints_package.parameters,
            slots=slots,
        )

    return slots
def check_and_bind_arguments(parameters, slots, bind_callback):
    """Check every slot value against its parameter, then bind it by name.

    Parameters providing a wrapper defer validation: their slot entry is
    replaced by wrapper(value). Others are validated right away, raising
    MagicTypeError on mismatch. bind_callback(name, value) always receives
    the original, unwrapped argument.
    """
    for idx, parameter in enumerate(parameters):
        value = slots[idx]

        deferred = parameter.wrapper_for_deferred_checking()
        if deferred:
            # Defer: the wrapped object validates on later use.
            slots[idx] = deferred(value)
        elif not parameter.check_instance(value):
            # Immediate check failed.
            raise MagicTypeError(
                'argument unmatched.',
                parameter=parameter,
                argument=value,
            )

        bind_callback(parameter.name, value)
from random import *
# Generates exact cover instance for target length n with t subsets
# Returns (s, w) where s is a t-length list of subsets and w is a l length list of witness indices of s
def generate(n, l, t):
    """Generate a random exact-cover instance over {1, ..., n}.

    Returns (subsets, witness_indices): t subsets in total, with
    witness_indices giving the sorted positions of the l subsets that
    together partition {1, ..., n} exactly.
    """
    assert t >= n
    assert 1 <= l <= n

    remaining = list(range(1, n + 1))

    # Seed each of the l witness subsets with one distinct element.
    cover = []
    for _ in range(l):
        picked = sample(remaining, 1)[0]
        remaining.remove(picked)
        cover.append([picked])

    # Scatter the leftover elements across the witness subsets.
    while remaining:
        picked = sample(remaining, 1)[0]
        remaining.remove(picked)
        cover[randrange(l)].append(picked)
    cover = [sorted(part) for part in cover]

    # Pad out to t subsets with random decoys (duplicates collapse via set).
    decoys = []
    for _ in range(t - len(cover)):
        members = set()
        for _ in range(randint(1, n)):
            members.add(randint(1, n))
        decoys.append(sorted(members))

    # Splice the witness subsets into random positions among the decoys.
    positions = []
    for part in cover:
        spot = randint(0, len(decoys))  # inclusive bound: may append at the end
        decoys.insert(spot, part)
        # Earlier witnesses at or past the insertion point shift up by one.
        positions = [p + 1 if p >= spot else p for p in positions]
        positions.append(spot)

    return (decoys, sorted(positions))
if __name__=='__main__':
    # Self-test harness: generate one instance and validate its invariants.
    import itertools
    import sys
    # run this code to test the exact cover instance generation. Ex: `python ecigen.py 10 5 20`
    n = int(sys.argv[1])
    l = int(sys.argv[2])
    t = int(sys.argv[3])
    subsets, witness = generate(n, l, t)
    # Check that we have the right number of subsets
    assert(len(subsets) == t)
    # Check witness is the right length
    assert(len(witness) == l)
    witness_subsets = [subsets[i] for i in witness]
    # Check that we have exactly n elements across all witness subsets
    assert(sum([len(i) for i in witness_subsets]) == n)
    # Check that their union is the {1, 2, ..., n}
    assert(set(itertools.chain.from_iterable(witness_subsets)) == set(range(1, n+1)))
    print(subsets)
print(witness)
from random import *
# Generates exact cover instance for target length n with t subsets
# Returns (s, w) where s is a t-length list of subsets and w is a l length list of witness indices of s
def generate(n, l, t):
    """Generate a random exact-cover instance over {1, ..., n}.

    Returns (subsets, witness_indices): t subsets, plus the sorted indices
    of the l subsets that form an exact cover (partition) of {1, ..., n}.
    """
    assert t >= n
    assert 1 <= l <= n
    target = list(range(1,n+1))
    witness = []
    # Randomly create a witness set of appropriate length
    for _ in range(l):
        next_element = sample(target, 1)[0]
        target.remove(next_element)
        witness.append([next_element])
    # Randomly partition remaining target set between partitions
    while target:
        next_element = sample(target, 1)[0]
        target.remove(next_element)
        next_partition = randrange(l)
        witness[next_partition].append(next_element)
    witness = [sorted(w) for w in witness]
    # Randomly generate other subsets
    subsets = []
    rem = t - len(witness)
    for _ in range(rem):
        subset = set()
        length = randint(1, n)
        for _ in range(length):
            # duplicates collapse via the set, so the decoy may be shorter
            subset.add(randint(1, n))
        subsets.append(sorted(subset))
    # Shuffle witnesses into subsets
    witness_indices = []
    for w in witness:
        # randint's upper bound is inclusive, so insertion at the end is possible
        index = randint(0, len(subsets))
        subsets.insert(index, w)
        # If this is inserted before any other witness element then those elements move up
        witness_indices = [i + 1 if i >= index else i for i in witness_indices]
        witness_indices.append(index)
    return (subsets, sorted(witness_indices))
if __name__=='__main__':
import itertools
import sys
# run this code to test the exact cover instance generation. Ex: `python ecigen.py 10 5 20`
n = int(sys.argv[1])
l = int(sys.argv[2])
t = int(sys.argv[3])
subsets, witness = generate(n, l, t)
# Check that we have the right number of subsets
assert(len(subsets) == t)
# Check witness is the right length
assert(len(witness) == l)
witness_subsets = [subsets[i] for i in witness]
# Check that we have exactly n elements across all witness subsets
assert(sum([len(i) for i in witness_subsets]) == n)
# Check that their union is the {1, 2, ..., n}
assert(set(itertools.chain.from_iterable(witness_subsets)) == set(range(1, n+1)))
print(subsets)
print(witness)
import logging
import re
from unittest.mock import patch
import pytest
from ebook_homebrew.exceptions import InvalidNumberParameterTypeError, \
TargetSrcFileNotFoundError, ChangeFileNameOSError, InvalidImageParameterTypeError
from ebook_homebrew.rename import ChangeFilename
_logger = logging.getLogger(name=__name__)
class TestChangeFilename(object):
    """Unit tests for ebook_homebrew.rename.ChangeFilename.

    Every filesystem call (os.chdir, os.listdir, os.path.isfile) and all
    user interaction (builtins.input) is mocked, so no real files are
    touched.
    """

    def setup_method(self, method):
        _logger.info("method{}".format(method.__name__))
        # Patch os.chdir so constructing the target never changes the CWD.
        with patch("os.chdir"):
            self.target = ChangeFilename(directory_path="test", digits="3", extension="jpg")

    @pytest.mark.parametrize("test_input, expected", [
        (["test001.jpg", 5], "00001.jpg"),
        (["test001foo2.jpg", 5], "00001.jpg"),
        (["001.jpg", 3], "001.jpg"),
        (["001.jpg", 2], "001.jpg")])
    def test_ok_create_new_name(self, test_input, expected):
        # The 3-digit match object drives zero-padding to the requested width.
        match_obj = re.search("\\d{3}", test_input[0])
        actual = self.target._create_new_name(match_obj, test_input[1], ".jpg")
        assert actual == expected

    def test_error_create_new_name(self):
        # A plain string instead of a regex match object must be rejected.
        with pytest.raises(InvalidNumberParameterTypeError):
            self.target._create_new_name("test", 5, ".jpg")

    @pytest.mark.parametrize("file_list, is_file, expected", [
        (["test001test.jpg", "test002foo.jpg"], False, 0),
        (["test001test.jpg", "test002foo.png"], False, 0),
        (["test001test.jpg", "testfoobar.jpg"], False, 0),
        (["test001test.jpg", "test001foo.jpg"], True, 2),
        ([], False, 0)])
    def test_ok_filename_to_digit_number(self, file_list, is_file, expected):
        with patch("os.listdir") as mock_listdir, patch("os.path.isfile") as mock_isfile, \
                patch.object(self.target, "_rename_file"):
            mock_listdir.return_value = file_list
            mock_isfile.return_value = is_file
            actual = self.target.filename_to_digit_number()
            # Use == rather than `is`: identity comparison between ints only
            # works by CPython's small-int caching and raises SyntaxWarning
            # on Python 3.8+.
            assert len(actual) == expected
            mock_listdir.assert_called_once_with("test")

    def test_file_not_found_error_filename_to_digit(self):
        # A missing directory is translated to the package's own exception.
        with patch("os.listdir") as mock_listdir:
            mock_listdir.side_effect = FileNotFoundError
            with pytest.raises(TargetSrcFileNotFoundError):
                self.target.filename_to_digit_number()

    def test_os_error_filename_to_digit(self):
        # Low-level OSError is translated to ChangeFileNameOSError.
        with patch("os.listdir") as mock_listdir, patch("os.path.isfile") as mock_isfile:
            mock_listdir.return_value = ["test001foo.jpg"]
            mock_isfile.side_effect = OSError
            with pytest.raises(ChangeFileNameOSError):
                self.target.filename_to_digit_number()

    @staticmethod
    def interactive_input(test_input):
        # Generator feeding scripted answers to the mocked input().
        for out in test_input:
            yield out

    @pytest.mark.parametrize("test_interactive, is_file_return, expected", [
        (["y", "foo.jpg"], [True, False], True),
        (["Y", ""], [True, False], True),
        (["Y", "foo.jpg", "bar.jpg"], [True, True, False], True),
        (["N", ""], [True, False], True),
        (["r", "y", "y"], [True, False], True),
        (["r", "r", "foo.jpg"], [True, False, False], True),
        (["r", "c"], [True, False], True),
        (["r", "n"], [True, False], True)])
    def test_ok_change_name_manually(self, test_interactive, is_file_return, expected):
        with patch("os.listdir") as mock_listdir, patch("os.path.isfile") as mock_isfile, \
                patch.object(self.target, "_rename_file"), \
                patch("builtins.input") as mock_input, \
                patch.object(self.target, "_remove_file"), \
                patch.object(self.target, "_check_image_file"):
            mock_listdir.return_value = ["test001test.jpg"]
            mock_isfile.side_effect = is_file_return
            mock_input.side_effect = self.interactive_input(test_interactive)
            self.target.filename_to_digit_number()
            actual = self.target.change_name_manually(overwrite=False)
            assert actual == expected

    @pytest.mark.parametrize("test_interactive, is_file_return", [
        (["r", "c"], [True, False])])
    def test_skip_change_name_manually(self, test_interactive, is_file_return):
        # An InvalidImageParameterTypeError during image checking is skipped,
        # not propagated.
        with patch("os.listdir") as mock_listdir, patch("os.path.isfile") as mock_isfile, \
                patch("builtins.input") as mock_input, \
                patch.object(self.target, "_check_image_file") as mock_image:
            mock_listdir.return_value = ["test001test.jpg"]
            mock_isfile.side_effect = is_file_return
            mock_input.side_effect = self.interactive_input(test_interactive)
            mock_image.side_effect = InvalidImageParameterTypeError
            self.target.filename_to_digit_number()
            actual = self.target.change_name_manually(overwrite=False)
            assert actual is True

    @pytest.mark.parametrize("test_file_list, is_file_return, test_input", [
        (["001.jpg", "002.jpg"], False, ["foo", "bar"]),
        (["001.jpg", "002.jpg"], False, ["foo", None]),
        (["001.jpg", "002.jpg"], False, [None, "bar"]),
        (["001.jpg", "002.txt"], False, ["foo", "bar"]),
        (["001.jpg", "aaa.jpg"], True, ["foo", "bar"]),
        (["001.jpg", "foo001bar.jpg"], [False, True], ["foo", "bar"])])
    def test_add_before_after_str(self, test_file_list, is_file_return, test_input):
        # NOTE(review): the last case assigns a LIST to return_value (always
        # truthy) rather than side_effect — confirm side_effect was intended.
        with patch("os.listdir") as mock_listdir, patch("os.path.isfile") as mock_isfile, \
                patch.object(self.target, "_rename_file"):
            mock_listdir.return_value = test_file_list
            mock_isfile.return_value = is_file_return
            actual = self.target.add_before_after_str(*test_input)
            assert actual is True


import logging
import re
from unittest.mock import patch
import pytest
from ebook_homebrew.exceptions import InvalidNumberParameterTypeError, \
TargetSrcFileNotFoundError, ChangeFileNameOSError, InvalidImageParameterTypeError
from ebook_homebrew.rename import ChangeFilename
_logger = logging.getLogger(name=__name__)
class TestChangeFilename(object):
def setup_method(self, method):
_logger.info("method{}".format(method.__name__))
with patch("os.chdir"):
self.target = ChangeFilename(directory_path="test", digits="3", extension="jpg")
@pytest.mark.parametrize("test_input, expected", [
(["test001.jpg", 5], "00001.jpg"),
(["test001foo2.jpg", 5], "00001.jpg"),
(["001.jpg", 3], "001.jpg"),
(["001.jpg", 2], "001.jpg")])
def test_ok_create_new_name(self, test_input, expected):
match_obj = re.search("\\d{3}", test_input[0])
actual = self.target._create_new_name(match_obj, test_input[1], ".jpg")
assert actual == expected
def test_error_create_new_name(self):
with pytest.raises(InvalidNumberParameterTypeError):
self.target._create_new_name("test", 5, ".jpg")
@pytest.mark.parametrize("file_list, is_file, expected", [
(["test001test.jpg", "test002foo.jpg"], False, 0),
(["test001test.jpg", "test002foo.png"], False, 0),
(["test001test.jpg", "testfoobar.jpg"], False, 0),
(["test001test.jpg", "test001foo.jpg"], True, 2),
([], False, 0)])
def test_ok_filename_to_digit_number(self, file_list, is_file, expected):
with patch("os.listdir") as mock_listdir, patch("os.path.isfile") as mock_isfile, \
patch.object(self.target, "_rename_file"):
mock_listdir.return_value = file_list
mock_isfile.return_value = is_file
actual = self.target.filename_to_digit_number()
assert len(actual) is expected
mock_listdir.assert_called_once_with("test")
def test_file_not_found_error_filename_to_digit(self):
with patch("os.listdir") as mock_listdir:
mock_listdir.side_effect = FileNotFoundError
with pytest.raises(TargetSrcFileNotFoundError):
self.target.filename_to_digit_number()
def test_os_error_filename_to_digit(self):
with patch("os.listdir") as mock_listdir, patch("os.path.isfile") as mock_isfile:
mock_listdir.return_value = ["test001foo.jpg"]
mock_isfile.side_effect = OSError
with pytest.raises(ChangeFileNameOSError):
self.target.filename_to_digit_number()
@staticmethod
def interactive_input(test_input):
for out in test_input:
yield out
@pytest.mark.parametrize("test_interactive, is_file_return, expected", [
(["y", "foo.jpg"], [True, False], True),
(["Y", ""], [True, False], True),
(["Y", "foo.jpg", "bar.jpg"], [True, True, False], True),
(["N", ""], [True, False], True),
(["r", "y", "y"], [True, False], True),
(["r", "r", "foo.jpg"], [True, False, False], True),
(["r", "c"], [True, False], True),
(["r", "n"], [True, False], True)])
def test_ok_change_name_manually(self, test_interactive, is_file_return, expected):
with patch("os.listdir") as mock_listdir, patch("os.path.isfile") as mock_isfile, \
patch.object(self.target, "_rename_file"), \
patch("builtins.input") as mock_input, \
patch.object(self.target, "_remove_file"), \
patch.object(self.target, "_check_image_file"):
mock_listdir.return_value = ["test001test.jpg"]
mock_isfile.side_effect = is_file_return
mock_input.side_effect = self.interactive_input(test_interactive)
self.target.filename_to_digit_number()
actual = self.target.change_name_manually(overwrite=False)
assert actual == expected
@pytest.mark.parametrize("test_interactive, is_file_return", [
(["r", "c"], [True, False])])
def test_skip_change_name_manually(self, test_interactive, is_file_return):
with patch("os.listdir") as mock_listdir, patch("os.path.isfile") as mock_isfile, \
patch("builtins.input") as mock_input, \
patch.object(self.target, "_check_image_file") as mock_image:
mock_listdir.return_value = ["test001test.jpg"]
mock_isfile.side_effect = is_file_return
mock_input.side_effect = self.interactive_input(test_interactive)
mock_image.side_effect = InvalidImageParameterTypeError
self.target.filename_to_digit_number()
actual = self.target.change_name_manually(overwrite=False)
assert actual is True
@pytest.mark.parametrize("test_file_list, is_file_return, test_input", [
(["001.jpg", "002.jpg"], False, ["foo", "bar"]),
(["001.jpg", "002.jpg"], False, ["foo", None]),
(["001.jpg", "002.jpg"], False, [None, "bar"]),
(["001.jpg", "002.txt"], False, ["foo", "bar"]),
(["001.jpg", "aaa.jpg"], True, ["foo", "bar"]),
(["001.jpg", "foo001bar.jpg"], [False, True], ["foo", "bar"])])
def test_add_before_after_str(self, test_file_list, is_file_return, test_input):
with patch("os.listdir") as mock_listdir, patch("os.path.isfile") as mock_isfile, \
patch.object(self.target, "_rename_file"):
mock_listdir.return_value = test_file_list
mock_isfile.return_value = is_file_return
actual = self.target.add_before_after_str(*test_input)
assert actual is True
import datetime
import os
import django.utils.timezone
from django.contrib.auth.models import User
from django.core.validators import RegexValidator
from django.db import models
from django.db.models import Q
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
class Semester(models.Model):
    """An academic term (season + year) used to scope events and membership."""
    SEASON_CHOICES = (
        ('0', 'Spring'),
        ('1', 'Summer'),
        ('2', 'Fall'),
    )
    # NOTE(review): built once at import time; a process started before a new
    # year begins will not offer that year until restart — confirm acceptable.
    YEAR_CHOICES = []
    for r in range(2010, (datetime.datetime.now().year + 6)):
        YEAR_CHOICES.append((r, r))
    season = models.CharField(
        max_length=1,
        choices=SEASON_CHOICES,
        default='0',
    )
    # NOTE(review): this default is evaluated once at import time, not per
    # save; the usual Django idiom is a callable default — confirm intent.
    year = models.IntegerField(
        choices=YEAR_CHOICES,
        default=datetime.datetime.now().year,
    )

    def __str__(self):
        return "%s - %s" % (self.year, self.get_season_display())
class Brother(models.Model):
    """Profile of a chapter member, from candidate through alumni status."""
    user = models.OneToOneField(User, on_delete=models.CASCADE, unique=True)

    # General profile information
    first_name = models.CharField(max_length=45)
    last_name = models.CharField(max_length=45)

    class PronounChoices(models.TextChoices):
        FEMININE = "FEM", _("she/her/hers")
        MASCULINE = "MASC", _("he/him/his")
        NONBINARY = "NON", _("they/them/theirs")

    pronouns = models.CharField(max_length=4, choices=PronounChoices.choices, blank=True)
    roster_number = models.IntegerField(blank=True, null=True)
    semester_joined = models.ForeignKey(
        Semester, on_delete=models.CASCADE, blank=True, null=True
    )
    semester_graduating = models.ForeignKey(
        Semester, on_delete=models.CASCADE, blank=True, null=True, related_name='brother_graduating'
    )
    date_pledged = models.DateField(blank=True, null=True)

    # School-status codes (stored as 2-char values).
    FRESHMAN = 'FR'
    SOPHOMORE = 'SO'
    JUNIOR = 'JR'
    SENIOR = 'SR'
    FIFTH_YEAR = 'FY'
    ALUMNI = 'AL'
    SCHOOL_STATUS_CHOICES = (
        (FRESHMAN, 'Freshman'),
        (SOPHOMORE, 'Sophomore'),
        (JUNIOR, 'Junior'),
        (SENIOR, 'Senior'),
        (FIFTH_YEAR, 'Fifth Year'),
        (ALUMNI, 'Alumni'),
    )
    school_status = models.CharField(
        max_length=2,
        choices=SCHOOL_STATUS_CHOICES,
        default=FRESHMAN,
    )
    BROTHER_STATUS_CHOICES = (
        ('0', 'Candidate'),
        ('1', 'Brother'),
        ('2', 'Alumni'),
    )
    brother_status = models.CharField(
        max_length=1,
        choices=BROTHER_STATUS_CHOICES,
        default='0',
    )

    # Secretary Information
    major = models.CharField(max_length=200, default="Undecided")
    minor = models.CharField(max_length=200, blank=True, null=True)
    case_ID = models.CharField(max_length=10)
    birthday = models.DateField()
    hometown = models.CharField(max_length=200, default="Cleveland, OH")
    t_shirt_size = models.CharField(max_length=5, default="M")

    # regex for proper phone number entry
    phone_regex = RegexValidator(
        regex=r'^\+?1?\d{9,15}$',
        message="Phone number must be entered in the format: "
                "'+999999999'. Up to 15 digits allowed.")
    # validators should be a list
    phone_number = models.CharField(
        validators=[phone_regex], blank=True, max_length=15
    )

    # President Information
    emergency_contact = models.CharField(
        max_length=200, default="Chapter President"
    )
    emergency_contact_phone_number = models.CharField(
        validators=[phone_regex], blank=True, max_length=15
    )

    # Vice President Information
    room_number = models.CharField(max_length=3, default="NA")
    address = models.CharField(max_length=200, default="Theta Chi House")

    # Treasurer Information
    # TODO: Add treasury models

    # Recruitment Information
    # TODO: determine if there are any recruitment models

    # Service Chair Information
    # TODO: determine if there are any service models

    # Philanthropy Chair Information
    # TODO: determine if there are any philanthropy models

    # Detail Manager Chair Information
    # TODO: determine if there are any detail manager models
    does_house_details = models.BooleanField(default=False)
    does_kitchen_details = models.BooleanField(default=False)
    in_house = models.BooleanField(default=True)

    def __str__(self):
        return self.first_name + " " + self.last_name

    # returns the brother's attendance fraction for the associated event
    def get_attendance(self, event_type):
        """Return "attended / eligible" for this brother in the current semester.

        event_type is an event model class defined elsewhere in this module
        (e.g. ChapterEvent); only mandatory events are counted.
        """
        month = datetime.datetime.now().month
        year = datetime.datetime.now().year
        # Jan-May -> Spring ('0'), Jun-Jul -> Summer ('1'), Aug-Dec -> Fall ('2')
        if month <= 5:
            season = '0'
        elif month <= 7:
            season = '1'
        else:
            season = '2'
        semester, _ = Semester.objects.get_or_create(season=season, year=year)
        # NOTE(review): the excuse term counts mandatory events with ANY
        # approved excuse (status=1), not an excuse belonging to this
        # brother — confirm whether a brother filter is missing here.
        return "%s / %s" % (
            event_type.objects.filter(semester=semester, mandatory=True, attendees_brothers=self).count() + event_type.objects.filter(
                semester=semester, mandatory=True, excuse__status=1).count(),
            event_type.objects.filter(semester=semester, mandatory=True, eligible_attendees=self, date__lt=datetime.datetime.now()).count()
        )

    def get_chapter_attendance(self):
        return self.get_attendance(ChapterEvent)

    def get_recruitment_attendance(self):
        return self.get_attendance(RecruitmentEvent)

    def get_hs_attendance(self):
        return self.get_attendance(HealthAndSafetyEvent)

    def get_philanthropy_attendance(self):
        return self.get_attendance(PhilanthropyEvent)

    def get_service_attendance(self):
        return self.get_attendance(ServiceEvent)
class MeetABrother(models.Model):
    """One candidate/brother 'meet a brother' pairing, unique per pair."""
    brother = models.ForeignKey(Brother, on_delete=models.CASCADE, related_name='brother_mab')
    candidate = models.ForeignKey(Brother, on_delete=models.CASCADE, related_name='candidate_mab')
    completed = models.BooleanField(default=False)
    # callable default: evaluated per save, timezone-aware
    week = models.DateField(default=django.utils.timezone.now)

    def __str__(self):
        return self.candidate.first_name + " " + self.candidate.last_name + " meeting with " + self.brother.first_name + " " + self.brother.last_name

    class Meta:
        constraints = [
            models.UniqueConstraint(fields=['brother', 'candidate'], name='unique_meet_a_brother')
        ]
class OnlineMedia(models.Model):
    """A social-media platform (name + icon) that brothers can link accounts on."""
    name = models.CharField(max_length=45, unique=True)
    icon = models.ImageField(upload_to='media_icons')

    def __str__(self):
        return "%s" % self.name
class MediaAccount(models.Model):
    """A brother's account on one OnlineMedia platform."""
    brother = models.ForeignKey(Brother, on_delete=models.CASCADE, related_name='media_accounts')
    media = models.ForeignKey(OnlineMedia, on_delete=models.CASCADE, related_name='media')
    username = models.CharField(max_length=45)
    profile_link = models.URLField(blank=True, null=True)

    def __str__(self):
        return str(self.brother) + "'s " + str(self.media) + " Account"
class CampusGroup(models.Model):
    """A campus organization with the brothers who belong to it."""
    name = models.CharField(max_length=45)
    brothers = models.ManyToManyField(Brother, related_name='groups')

    def __str__(self):
        return "%s" % self.name
class Classes(models.Model):
    """A course (department + number) with the brothers who have taken it."""
    department = models.CharField(max_length=4)
    number = models.CharField(max_length=4)
    brothers = models.ManyToManyField(Brother, related_name='classes')

    def ordered_brother_set(self):
        # Brothers sorted alphabetically for display.
        return self.brothers.order_by('last_name', 'first_name')

    class Meta:
        verbose_name_plural = "Classes"

    def __str__(self):
        return "%s" % self.department + " " + str(self.number)
class Grade(models.Model):
    """A brother's letter grade in one class (at most one grade per pair)."""
    class GradeChoices(models.TextChoices):
        A = 'A'
        B = 'B'
        C = 'C'
        D = 'D'
        F = 'F'
        # 'P' stored for AP credit.
        AP = 'P', "AP"

    grade = models.CharField(max_length=1, choices=GradeChoices.choices)
    class_taken = models.ForeignKey(Classes, on_delete=models.CASCADE)
    brother = models.ForeignKey(Brother, related_name='grades', on_delete=models.CASCADE)

    class Meta:
        constraints = [
            models.UniqueConstraint(fields=['class_taken', 'brother'], name='unique_grade')
        ]
def query_positions_with_committee():
    """Build a Q object matching any Position title in COMMITTEE_CHAIRS.

    Used as ``limit_choices_to`` for Committee.chair.
    """
    combined = Q()
    for chair_title in COMMITTEE_CHAIRS:
        combined |= Q(title=chair_title)
    return combined
class Position(models.Model):
    """An elected or appointed chapter position, held by zero or more brothers."""

    # Choice names live in PositionChoices.names; the values
    # (PositionChoices.values) are slugs that double as the url for each
    # position's main page, so the title field can drive
    # HTTPResponseRedirect('/' + title).  An optional _() second element is the
    # human-readable label; when omitted the label is derived from the name
    # (title case, '_' -> ' ').  Labels are listed in PositionChoices.labels.
    class PositionChoices(models.TextChoices):
        # Fixed: removed a stray trailing comma after 'president' that made the
        # raw assignment a 1-tuple.  The enum machinery unpacked it back to the
        # same string value, so behavior is unchanged, but it read as if the
        # stored value were ('president',).
        PRESIDENT = 'president'
        VICE_PRESIDENT = 'vice-president'
        VICE_PRESIDENT_OF_HEALTH_AND_SAFETY = 'vphs', _('Vice President of Health and Safety')
        SECRETARY = 'secretary'
        TREASURER = 'treasurer'
        MARSHAL = 'marshal'
        RECRUITMENT_CHAIR = 'recruitment-chair'
        SCHOLARSHIP_CHAIR = 'scholarship-chair'
        DETAIL_MANAGER = 'detail-manager'
        PHILANTHROPY_CHAIR = 'philanthropy-chair'
        PUBLIC_RELATIONS_CHAIR = 'pr-chair'
        SERVICE_CHAIR = 'service-chair'
        ALUMNI_RELATIONS_CHAIR = 'alumni-relations-chair'
        MEMBERSHIP_DEVELOPMENT_CHAIR = 'memdev-chair'
        SOCIAL_CHAIR = 'social-chair'
        COMMUNITY_STANDARDS_CHAIR = 'community-standards-chair'
        OX_ROAST_CHAIR = 'ox-roast-chair', _('OX Roast Chair')
        DAMAGE_CHAIR = 'damage-chair'
        GREEK_GAMES_CHAIR = 'greek-games-chair'
        HISTORIAN = 'historian'
        FIRST_GUARD = 'first-guard'
        SECOND_GUARD = 'second-guard'
        INTERNAL_CHANGE_CHAIR = 'internal-change-chair'
        STANDARDS_BOARD_JUSTICE = 'standards-board-justice'
        EXECUTIVE_COUNCIL_MEMBER_AT_LARGE = 'executive-council-member-at-large'
        HOUSE_MANAGER = 'house-manager'
        RISK_MANAGER = 'risk-manager'
        IFC_REP = 'ifc-rep', _('IFC Rep')
        AWARDS_CHAIR = 'awards-chair'
        FOOD_STEWARD = 'food-steward'
        ATHLETICS_CHAIR = 'athletics-chair'
        DASHBOARD_CHAIR = 'dashboard-chair'
        ADVISER = 'adviser'

    title = models.CharField(max_length=45, choices=PositionChoices.choices, unique=True, blank=False)

    @property
    def in_ec(self):
        """True when this position sits on the Executive Council."""
        return self.title in EC_POSITIONS

    brothers = models.ManyToManyField(Brother)

    def __str__(self):
        return str(self.PositionChoices(self.title).label)
# Positions that make up the Executive Council (used by Position.in_ec).
EC_POSITIONS = (
    Position.PositionChoices.PRESIDENT,
    Position.PositionChoices.VICE_PRESIDENT,
    Position.PositionChoices.VICE_PRESIDENT_OF_HEALTH_AND_SAFETY,
    Position.PositionChoices.SECRETARY,
    Position.PositionChoices.TREASURER,
    Position.PositionChoices.MARSHAL,
    Position.PositionChoices.RECRUITMENT_CHAIR,
    Position.PositionChoices.SCHOLARSHIP_CHAIR,
)

# Positions eligible to chair a Committee (see query_positions_with_committee).
COMMITTEE_CHAIRS = (
    Position.PositionChoices.VICE_PRESIDENT_OF_HEALTH_AND_SAFETY,
    Position.PositionChoices.RECRUITMENT_CHAIR,
    Position.PositionChoices.SCHOLARSHIP_CHAIR,
    Position.PositionChoices.PHILANTHROPY_CHAIR,
    Position.PositionChoices.PUBLIC_RELATIONS_CHAIR,
    Position.PositionChoices.ALUMNI_RELATIONS_CHAIR,
    Position.PositionChoices.MEMBERSHIP_DEVELOPMENT_CHAIR,
    Position.PositionChoices.SOCIAL_CHAIR
)

# Positions associated with running events (presumably used for event
# permissions elsewhere in the app -- not referenced in this module).
EVENT_CHAIRS = (
    Position.PositionChoices.VICE_PRESIDENT_OF_HEALTH_AND_SAFETY,
    Position.PositionChoices.SECRETARY,
    Position.PositionChoices.RECRUITMENT_CHAIR,
    Position.PositionChoices.PHILANTHROPY_CHAIR,
    Position.PositionChoices.SERVICE_CHAIR,
)
class Report(models.Model):
    """A written report from a brother, optionally tied to a Position."""
    is_officer = models.BooleanField(default=True)
    position = models.ForeignKey(Position, on_delete=models.CASCADE, blank=True, null=True, related_name="reports")
    brother = models.ForeignKey(Brother, on_delete=models.CASCADE, related_name="reports")
    information = models.TextField()
class PotentialNewMember(models.Model):
    """A rush contact (PNM) and the brothers responsible for contacting them."""
    first_name = models.CharField(max_length=45)
    last_name = models.CharField(max_length=45, blank=True, null=False)
    # NOTE(review): null=True on a CharField allows both '' and NULL as
    # "empty"; Django convention is blank=True only for text fields.
    case_ID = models.CharField(max_length=10, blank=True, null=True)
    # regex for proper phone number entry
    phone_regex = RegexValidator(
        regex=r'^\+?1?\d{9,15}$',
        message="Phone number must be entered in the format: "
                "'+999999999'. Up to 15 digits allowed.")
    phone_number = models.CharField(
        validators=[phone_regex], blank=True, null=True, max_length=15
    )
    # validators should be a list
    primary_contact = models.ForeignKey(
        Brother, on_delete=models.CASCADE, related_name="primary"
    )
    secondary_contact = models.ForeignKey(
        Brother, on_delete=models.CASCADE, blank=True, null=True,
        related_name="secondary"
    )
    tertiary_contact = models.ForeignKey(
        Brother, on_delete=models.CASCADE, blank=True, null=True,
        related_name="tertiary"
    )
    notes = models.TextField(blank=True, null=True)

    def __str__(self):
        return self.first_name + " " + self.last_name
class ServiceSubmission(models.Model):
    """A brother's self-reported service hours, pending review/approval."""
    name = models.CharField(max_length=200, default="Service Event")
    description = models.TextField(default="I did the service thing")
    hours = models.IntegerField(default=0)
    # When the submission was filed (the `date` field below is presumably when
    # the service itself occurred -- confirm against callers).
    date_applied = models.DateTimeField(default=django.utils.timezone.now)
    STATUS_CHOICES = (
        ('0', 'Pending'),
        ('1', 'Awaiting Approval'),
        ('2', 'Approved'),
        ('3', 'Denied'),
    )
    status = models.CharField(
        max_length=1,
        choices=STATUS_CHOICES,
        default='0',
    )
    date = models.DateField()
    semester = models.ForeignKey(Semester, on_delete=models.CASCADE)
    brother = models.ForeignKey(Brother, on_delete=models.CASCADE)

    def __str__(self):
        return self.name
# Given separate section to prevent accidental viewing while in admin views
class ScholarshipReport(models.Model):
    """A brother's GPA figures and scholarship plan for one semester."""
    brother = models.ForeignKey(Brother, on_delete=models.CASCADE)
    semester = models.ForeignKey(Semester, on_delete=models.CASCADE)
    active = models.BooleanField(default=False)
    past_semester_gpa = models.DecimalField(
        max_digits=5, decimal_places=2, default=4.0
    )
    cumulative_gpa = models.DecimalField(
        max_digits=5, decimal_places=2, default=4.0
    )
    # NOTE(review): default text has grammar typos ("if you past semester");
    # it is a stored default, so fixing it requires a migration.
    scholarship_plan = models.TextField(
        default="Scholarship plan has not been setup yet if you past semester "
                "GPA or cum GPA are below 3.0 you should "
                "setup a meeting to have this corrected"
    )

    def __str__(self):
        return "%s %s - %s %s" % (self.brother.first_name,
                                  self.brother.last_name,
                                  self.semester.get_season_display(),
                                  self.semester.year)
# method used to set the default for event.eligible_brothers
def all_actives_and_candidates():
    """Every Brother who is not an alumnus (brother_status '2')."""
    return Brother.objects.exclude(brother_status='2')
class TimeChoices(datetime.time, models.Choices):
    """Half-hour time-of-day choices from 9:00 A.M. to 11:30 P.M.

    Each member is ``hour[, minute], label``: the leading integers are passed
    to the datetime.time constructor and the trailing string is the
    human-readable label.
    """
    T_9 = 9, '9:00 A.M.'
    T_9_30 = 9, 30, '9:30 A.M.'
    T_10 = 10, '10:00 A.M.'
    T_10_30 = 10, 30, '10:30 A.M.'
    T_11 = 11, '11:00 A.M.'
    T_11_30 = 11, 30, '11:30 A.M.'
    T_12 = 12, '12:00 P.M.'
    T_12_30 = 12, 30, '12:30 P.M.'
    T_13 = 13, '1:00 P.M.'
    T_13_30 = 13, 30, '1:30 P.M.'
    T_14 = 14, '2:00 P.M.'
    T_14_30 = 14, 30, '2:30 P.M.'
    T_15 = 15, '3:00 P.M.'
    T_15_30 = 15, 30, '3:30 P.M.'
    T_16 = 16, '4:00 P.M.'
    T_16_30 = 16, 30, '4:30 P.M.'
    T_17 = 17, '5:00 P.M.'
    T_17_30 = 17, 30, '5:30 P.M.'
    T_18 = 18, '6:00 P.M.'
    T_18_30 = 18, 30, '6:30 P.M.'
    T_19 = 19, '7:00 P.M.'
    T_19_30 = 19, 30, '7:30 P.M.'
    T_20 = 20, '8:00 P.M.'
    T_20_30 = 20, 30, '8:30 P.M.'
    T_21 = 21, '9:00 P.M.'
    T_21_30 = 21, 30, '9:30 P.M.'
    T_22 = 22, '10:00 P.M.'
    T_22_30 = 22, 30, '10:30 P.M.'
    T_23 = 23, '11:00 P.M.'
    T_23_30 = 23, 30, '11:30 P.M.'
class Event(models.Model):
    """Base model for all chapter events."""
    name = models.CharField(max_length=200, default="Event")
    date = models.DateField(default=django.utils.timezone.now)
    all_day = models.BooleanField(default=False)
    start_time = models.TimeField(default=datetime.time(hour=0, minute=0), choices=TimeChoices.choices)
    end_time = models.TimeField(blank=True, null=True, choices=TimeChoices.choices)
    # Who actually showed up.
    attendees_brothers = models.ManyToManyField(Brother, blank=True)
    # Who was expected; defaults to every non-alumni brother.
    eligible_attendees = models.ManyToManyField(Brother, blank=False, related_name='+', default=all_actives_and_candidates)
    semester = models.ForeignKey(
        Semester, on_delete=models.CASCADE, blank=True, null=True
    )
    description = models.TextField(blank=True, null=True)
    minutes = models.URLField(blank=True, null=True)
    mandatory = models.BooleanField(default=True)
    slug = models.SlugField(blank=True)  # a field which stores the url to redirect to after running operations on the event

    def __str__(self):
        return self.name + " " + str(self.date)
def set_event_kwarg_defaults(kwargs, slug, name):
    """Fill in 'slug' and 'name' in *kwargs* when the caller omitted them.

    Mutates *kwargs* in place; existing values are never overwritten.
    """
    kwargs.setdefault('slug', slug)
    kwargs.setdefault('name', name)
class RecruitmentEvent(Event):
    """An Event that PNMs also attend; defaults slug/name for recruitment."""
    attendees_pnms = models.ManyToManyField(PotentialNewMember, blank=True)
    rush = models.BooleanField(default=True)
    picture = models.ImageField(upload_to='recruitment', null=True)
    location = models.TextField(blank=True, null=True)

    def __str__(self):
        return "Recruitment Event - " + str(self.date)

    def __init__(self, *args, **kwargs):
        # Pre-fill slug/name unless the caller supplied them.
        set_event_kwarg_defaults(kwargs=kwargs, slug=Position.PositionChoices.RECRUITMENT_CHAIR, name='Recruitment Event')
        super(RecruitmentEvent, self).__init__(*args, **kwargs)
class SecretaryEvent(Event):
    """Base for secretary-tracked events; defaults slug/name accordingly."""
    def __str__(self):
        return "Secretary Event - " + str(self.date)

    def __init__(self, *args, **kwargs):
        set_event_kwarg_defaults(kwargs=kwargs, slug=Position.PositionChoices.SECRETARY, name='Secretary Event')
        super(SecretaryEvent, self).__init__(*args, **kwargs)
class ChapterEvent(SecretaryEvent):
    """A chapter meeting (secretary slug, 'Chapter Event' default name)."""
    def __str__(self):
        return "Chapter Event - " + str(self.date)

    def __init__(self, *args, **kwargs):
        set_event_kwarg_defaults(kwargs=kwargs, slug=Position.PositionChoices.SECRETARY, name='Chapter Event')
        super(ChapterEvent, self).__init__(*args, **kwargs)
class PhilanthropyEvent(SecretaryEvent):
    """A philanthropy event; slug redirects to the philanthropy chair page."""
    def __str__(self):
        return "Philanthropy Event - " + str(self.date)

    def __init__(self, *args, **kwargs):
        set_event_kwarg_defaults(kwargs=kwargs, slug=Position.PositionChoices.PHILANTHROPY_CHAIR, name='Philanthropy Event')
        super(PhilanthropyEvent, self).__init__(*args, **kwargs)
class ServiceEvent(SecretaryEvent):
    """A service event; slug redirects to the service chair page."""
    def __str__(self):
        return "Service Event - " + str(self.date)

    def __init__(self, *args, **kwargs):
        set_event_kwarg_defaults(kwargs=kwargs, slug=Position.PositionChoices.SERVICE_CHAIR, name='Service Event')
        super(ServiceEvent, self).__init__(*args, **kwargs)
class HealthAndSafetyEvent(SecretaryEvent):
    """A Health & Safety ("Sacred Purpose") event; slug is the VPHS page."""
    def __str__(self):
        return "Health and Safety Event - " + str(self.date)

    def __init__(self, *args, **kwargs):
        set_event_kwarg_defaults(kwargs=kwargs, slug=Position.PositionChoices.VICE_PRESIDENT_OF_HEALTH_AND_SAFETY, name='Sacred Purpose Event')
        super(HealthAndSafetyEvent, self).__init__(*args, **kwargs)
class ScholarshipEvent(SecretaryEvent):
    """A scholarship event; slug redirects to the scholarship chair page."""
    def __str__(self):
        return "Scholarship Event - " + str(self.date)

    def __init__(self, *args, **kwargs):
        set_event_kwarg_defaults(kwargs=kwargs, slug=Position.PositionChoices.SCHOLARSHIP_CHAIR, name='Scholarship Event')
        super(ScholarshipEvent, self).__init__(*args, **kwargs)
def get_standing_committees(brother):
    """Return the committee codes of *brother*'s standing committees."""
    return [
        membership.committee
        for membership in brother.committee_set.all()
        if membership.in_standing()
    ]
def get_operational_committees(brother):
    """Return the committee codes of *brother*'s operational committees."""
    return [
        membership.committee
        for membership in brother.committee_set.all()
        if membership.in_operational()
    ]
class Committee(models.Model):
    """A chapter committee: chair position, members, and meeting cadence."""
    class CommitteeChoices(models.TextChoices):
        ALUMNI_RELATIONS = 'AR'
        MEMBERSHIP_DEVELOPMENT = 'MD'
        PHILANTHROPY = 'PH'
        PUBLIC_RELATIONS = 'PR'
        RECRUITMENT = 'RE'
        SCHOLARSHIP = 'SC'
        SOCIAL = 'SO'
        HEALTH_AND_SAFETY = 'HS'

    # NOTE(review): these two lists duplicate codes from CommitteeChoices;
    # keep them in sync when adding a committee.
    STANDING_COMMITTEE_CHOICES = [
        ('PR', 'Public Relations'),
        ('RE', 'Recruitment'),
        ('SO', 'Social'),
        ('HS', 'Health and Safety'),
    ]
    OPERATIONAL_COMMITTEE_CHOICES = [
        ('AR', 'Alumni Relations'),
        ('MD', 'Membership Development'),
        ('PH', 'Philanthropy'),
        ('SC', 'Scholarship'),
    ]
    committee = models.CharField(max_length=2, choices=CommitteeChoices.choices, unique=True, blank=False)

    def in_standing(self):
        """True when this committee's code is a standing-committee code."""
        return self.committee in (x[0] for x in self.STANDING_COMMITTEE_CHOICES)

    def in_operational(self):
        """True when this committee's code is an operational-committee code."""
        return self.committee in (x[0] for x in self.OPERATIONAL_COMMITTEE_CHOICES)

    members = models.ManyToManyField(Brother, blank=True)
    # Chair is restricted to positions listed in COMMITTEE_CHAIRS.
    chair = models.OneToOneField(Position, on_delete=models.PROTECT, limit_choices_to=query_positions_with_committee())

    class MeetingIntervals(models.IntegerChoices):
        # Values are the interval length in days.
        WEEKLY = 7, 'Weekly'
        BIWEEKLY = 14, 'Biweekly'
        MONTHLY = 28, 'Monthly'

    meeting_interval = models.IntegerField(choices=MeetingIntervals.choices)
    MEETING_DAY = [
        (0, 'Monday'),
        (1, 'Tuesday'),
        (2, 'Wednesday'),
        (3, 'Thursday'),
        (4, 'Friday'),
        (5, 'Saturday'),
        (6, 'Sunday'),
    ]
    meeting_day = models.IntegerField(choices=MEETING_DAY)
    meeting_time = models.TimeField(choices=TimeChoices.choices)

    def __str__(self):
        return self.CommitteeChoices(self.committee).label
class CommitteeMeetingEvent(Event):
    """A meeting of one Committee; may be part of a recurring series."""
    committee = models.ForeignKey(Committee, on_delete=models.PROTECT, related_name='meetings')
    recurring = models.BooleanField(default=False)

    def __str__(self):
        return str(self.committee) + " - " + str(self.date)
class Excuse(models.Model):
    """A brother's request to be excused from an event, with its review state."""
    event = models.ForeignKey(Event, on_delete=models.CASCADE)
    brother = models.ForeignKey(Brother, on_delete=models.CASCADE)
    date_submitted = models.DateField(default=django.utils.timezone.now)
    description = models.TextField(
        "Reasoning", default="I will not be attending because"
    )
    # Optional reviewer response shown back to the brother.
    response_message = models.TextField(blank=True, null=True)
    STATUS_CHOICES = (
        ('0', 'Pending'),
        ('1', 'Approved'),
        ('2', 'Denied'),
        ('3', 'Non-Mandatory'),
    )
    status = models.CharField(
        max_length=1,
        choices=STATUS_CHOICES,
        default='0',
    )

    def __str__(self):
        return self.brother.first_name \
            + " " + self.brother.last_name + " - " + str(self.event.name)
class Supplies(models.Model):
    """A requested house supply item and whether it has been bought."""
    what = models.CharField(max_length=256)
    done = models.BooleanField(default=False)
    # Set automatically when the request row is created.
    when = models.DateField(auto_now_add=True)

    class Meta:
        verbose_name_plural = "Supplies"

    def __str__(self):
        return self.what
class DetailGroup(models.Model):
    """A detail group. Contains brothers and a semester"""
    brothers = models.ManyToManyField(Brother)
    semester = models.ForeignKey(Semester, on_delete=models.CASCADE)

    def size(self):
        """Number of brothers in the group.

        Uses a database COUNT instead of the previous len(self.brothers.all()),
        which fetched every row just to take its length.
        """
        return self.brothers.count()

    def __str__(self):
        return ", ".join([str(b) for b in self.brothers.all()])
class Detail(models.Model):
    """Abstract class for details"""
    short_description = models.CharField(max_length=64)
    long_description = models.TextField(null=False)
    done = models.BooleanField(default=False)
    due_date = models.DateField(null=False)
    # Null until the detail is marked finished.
    finished_time = models.DateTimeField(null=True)

    def full_text(self):
        """Plain-text rendering: both descriptions plus the due date."""
        text = "%s\n----------\n" % self.short_description
        text += "%s\n----------\n" % self.long_description
        text += "Due: %s\n\n" % str(self.due_date)
        return text

    class Meta:
        abstract = True

    def __str__(self):
        return self.short_description
class ThursdayDetail(Detail):
    """A thursday detail. Adds the brother who it's assigned to"""
    brother = models.ForeignKey(Brother, on_delete=models.CASCADE, null=False)

    def finish_link(self):
        """URL for marking this detail finished."""
        return reverse(
            'dashboard:finish_thursday', args=[self.pk]
        )

    def __str__(self):
        return str(self.due_date) + ": " +\
            super(ThursdayDetail, self).__str__()
class SundayDetail(Detail):
    """A single Sunday detail. Keeps track of who marks it done"""
    finished_by = models.ForeignKey(Brother, on_delete=models.CASCADE, null=True)

    def __str__(self):
        return str(self.due_date) + ": " +\
            super(SundayDetail, self).__str__()
class SundayGroupDetail(models.Model):
    """A group detail. Contains a group and a number of SundayDetails"""
    group = models.ForeignKey(DetailGroup, on_delete=models.CASCADE)
    details = models.ManyToManyField(SundayDetail)
    due_date = models.DateField()

    def finish_link(self):
        """URL for marking this group detail finished."""
        return reverse(
            'dashboard:finish_sunday', args=[self.pk]
        )

    def done(self):
        """True once every contained SundayDetail is done.

        Replaces a manual boolean fold with all(); vacuously True when the
        group has no details, matching the original loop's behavior.
        """
        return all(detail.done for detail in self.details.all())

    def __str__(self):
        return "%s: %s" % (
            self.due_date, ", ".join([str(d) for d in self.details.all()])
        )
class Photo(models.Model):
    """An uploaded gallery photo; displayed by its file basename."""
    photo = models.ImageField(upload_to='photos')

    def __str__(self):
        return os.path.basename(str(self.photo))
class MinecraftPhoto(models.Model):
    """An uploaded Minecraft screenshot; displayed by its file basename."""
    photo = models.ImageField(upload_to='minecraft')

    def __str__(self):
        return os.path.basename(str(self.photo))
class PhoneTreeNode(models.Model):
    """One node of the emergency phone tree: who notifies whom."""
    brother = models.ForeignKey(Brother, on_delete=models.PROTECT, related_name='phone_tree_brother')
    notified_by = models.ForeignKey(Brother, on_delete=models.PROTECT, null=True, related_name='phone_tree_notified_by')  # null is the root (ie president)

    def __str__(self):
        # BUG FIX: the root test previously queried whether the brother holds
        # the president position; a root node (notified_by NULL) whose brother
        # was not currently president crashed with AttributeError on
        # self.notified_by.first_name.  The model's own contract says NULL
        # notified_by marks the root, so test that directly (also saves a query).
        if self.notified_by is None:
            return self.brother.first_name + " " + self.brother.last_name
        return self.brother.first_name + " " + self.brother.last_name + " notified by " + self.notified_by.first_name + " " + self.notified_by.last_name

# NOTE(review): the original last line of this class carried dataset-row
# residue ("| dashboard/models.py | import datetime") that broke the syntax;
# the fused import is restored here for the duplicated module text below.
import datetime
import os
import django.utils.timezone
from django.contrib.auth.models import User
from django.core.validators import RegexValidator
from django.db import models
from django.db.models import Q
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
class Semester(models.Model):
    """An academic term identified by (season, year)."""
    SEASON_CHOICES = (
        ('0', 'Spring'),
        ('1', 'Summer'),
        ('2', 'Fall'),
    )
    # Selectable years: 2010 through five years past the current year
    # (computed once at import time).
    YEAR_CHOICES = []
    for r in range(2010, (datetime.datetime.now().year + 6)):
        YEAR_CHOICES.append((r, r))
    season = models.CharField(
        max_length=1,
        choices=SEASON_CHOICES,
        default='0',
    )
    # NOTE(review): this default is evaluated once at import time, not per
    # row -- a long-running process started before New Year keeps the stale
    # year.  A callable default would track the clock; confirm before changing
    # (it alters migrations).
    year = models.IntegerField(
        choices=YEAR_CHOICES,
        default=datetime.datetime.now().year,
    )

    def __str__(self):
        return "%s - %s" % (self.year, self.get_season_display())
class Brother(models.Model):
    """A chapter member: profile data plus per-officer bookkeeping fields."""
    user = models.OneToOneField(User, on_delete=models.CASCADE, unique=True)

    # General profile information
    first_name = models.CharField(max_length=45)
    last_name = models.CharField(max_length=45)

    class PronounChoices(models.TextChoices):
        FEMININE = "FEM", _("she/her/hers")
        MASCULINE = "MASC", _("he/him/his")
        NONBINARY = "NON", _("they/them/theirs")

    pronouns = models.CharField(max_length=4, choices=PronounChoices.choices, blank=True)
    roster_number = models.IntegerField(blank=True, null=True)
    semester_joined = models.ForeignKey(
        Semester, on_delete=models.CASCADE, blank=True, null=True
    )
    semester_graduating = models.ForeignKey(
        Semester, on_delete=models.CASCADE, blank=True, null=True, related_name='brother_graduating'
    )
    date_pledged = models.DateField(blank=True, null=True)

    FRESHMAN = 'FR'
    SOPHOMORE = 'SO'
    JUNIOR = 'JR'
    SENIOR = 'SR'
    FIFTH_YEAR = 'FY'
    ALUMNI = 'AL'
    SCHOOL_STATUS_CHOICES = (
        (FRESHMAN, 'Freshman'),
        (SOPHOMORE, 'Sophomore'),
        (JUNIOR, 'Junior'),
        (SENIOR, 'Senior'),
        (FIFTH_YEAR, 'Fifth Year'),
        (ALUMNI, 'Alumni'),
    )
    school_status = models.CharField(
        max_length=2,
        choices=SCHOOL_STATUS_CHOICES,
        default=FRESHMAN,
    )
    BROTHER_STATUS_CHOICES = (
        ('0', 'Candidate'),
        ('1', 'Brother'),
        ('2', 'Alumni'),
    )
    brother_status = models.CharField(
        max_length=1,
        choices=BROTHER_STATUS_CHOICES,
        default='0',
    )

    # Secretary Information
    major = models.CharField(max_length=200, default="Undecided")
    minor = models.CharField(max_length=200, blank=True, null=True)
    case_ID = models.CharField(max_length=10)
    birthday = models.DateField()
    hometown = models.CharField(max_length=200, default="Cleveland, OH")
    t_shirt_size = models.CharField(max_length=5, default="M")
    # regex for proper phone number entry
    phone_regex = RegexValidator(
        regex=r'^\+?1?\d{9,15}$',
        message="Phone number must be entered in the format: "
                "'+999999999'. Up to 15 digits allowed.")
    # validators should be a list
    phone_number = models.CharField(
        validators=[phone_regex], blank=True, max_length=15
    )

    # President Information
    emergency_contact = models.CharField(
        max_length=200, default="Chapter President"
    )
    emergency_contact_phone_number = models.CharField(
        validators=[phone_regex], blank=True, max_length=15
    )

    # Vice President Information
    room_number = models.CharField(max_length=3, default="NA")
    address = models.CharField(max_length=200, default="Theta Chi House")

    # Treasurer Information
    # TODO: Add treasury models
    # Recruitment Information
    # TODO: determine if there are any recruitment models
    # Service Chair Information
    # TODO: determine if there are any service models
    # Philanthropy Chair Information
    # TODO: determine if there are any philanthropy models
    # Detail Manager Chair Information
    # TODO: determine if there are any detail manager models
    does_house_details = models.BooleanField(default=False)
    does_kitchen_details = models.BooleanField(default=False)
    in_house = models.BooleanField(default=True)

    def __str__(self):
        return self.first_name + " " + self.last_name

    # returns the brother's attendance fraction for the associated event
    def get_attendance(self, event_type):
        """Return "attended / eligible" for this brother in the current semester.

        ``event_type`` is an Event model class (ChapterEvent, ServiceEvent, ...).
        """
        month = datetime.datetime.now().month
        year = datetime.datetime.now().year
        # Season codes: '0' Spring (<= May), '1' Summer (<= July), '2' Fall.
        if month <= 5:
            season = '0'
        elif month <= 7:
            season = '1'
        else:
            season = '2'
        semester, _ = Semester.objects.get_or_create(season=season, year=year)
        # NOTE(review): the excused-events count below is not filtered by
        # brother (no excuse__brother=self), so any approved excuse on a
        # mandatory event appears to credit every brother -- confirm intent.
        return "%s / %s" % (
            event_type.objects.filter(semester=semester, mandatory=True, attendees_brothers=self).count() + event_type.objects.filter(
                semester=semester, mandatory=True, excuse__status=1).count(),
            event_type.objects.filter(semester=semester, mandatory=True, eligible_attendees=self, date__lt=datetime.datetime.now()).count()
        )

    # Convenience wrappers: attendance fraction per event category.
    def get_chapter_attendance(self):
        return self.get_attendance(ChapterEvent)

    def get_recruitment_attendance(self):
        return self.get_attendance(RecruitmentEvent)

    def get_hs_attendance(self):
        return self.get_attendance(HealthAndSafetyEvent)

    def get_philanthropy_attendance(self):
        return self.get_attendance(PhilanthropyEvent)

    def get_service_attendance(self):
        return self.get_attendance(ServiceEvent)
class MeetABrother(models.Model):
brother = models.ForeignKey(Brother, on_delete=models.CASCADE, related_name='brother_mab')
candidate = models.ForeignKey(Brother, on_delete=models.CASCADE, related_name='candidate_mab')
completed = models.BooleanField(default=False)
week = models.DateField(default=django.utils.timezone.now)
def __str__(self):
return self.candidate.first_name + " " + self.candidate.last_name + " meeting with " + self.brother.first_name + " " + self.brother.last_name
class Meta:
constraints = [
models.UniqueConstraint(fields=['brother', 'candidate'], name='unique_meet_a_brother')
]
class OnlineMedia(models.Model):
name = models.CharField(max_length=45, unique=True)
icon = models.ImageField(upload_to='media_icons')
def __str__(self):
return "%s" % self.name
class MediaAccount(models.Model):
brother = models.ForeignKey(Brother, on_delete=models.CASCADE, related_name='media_accounts')
media = models.ForeignKey(OnlineMedia, on_delete=models.CASCADE, related_name='media')
username = models.CharField(max_length=45)
profile_link = models.URLField(blank=True, null=True)
def __str__(self):
return str(self.brother) + "'s " + str(self.media) + " Account"
class CampusGroup(models.Model):
name = models.CharField(max_length=45)
brothers = models.ManyToManyField(Brother, related_name='groups')
def __str__(self):
return "%s" % self.name
class Classes(models.Model):
department = models.CharField(max_length=4)
number = models.CharField(max_length=4)
brothers = models.ManyToManyField(Brother, related_name='classes')
def ordered_brother_set(self):
return self.brothers.order_by('last_name', 'first_name')
class Meta:
verbose_name_plural = "Classes"
def __str__(self):
return "%s" % self.department + " " + str(self.number)
class Grade(models.Model):
class GradeChoices(models.TextChoices):
A = 'A'
B = 'B'
C = 'C'
D = 'D'
F = 'F'
AP = 'P', "AP"
grade = models.CharField(max_length=1, choices=GradeChoices.choices)
class_taken = models.ForeignKey(Classes, on_delete=models.CASCADE)
brother = models.ForeignKey(Brother, related_name='grades', on_delete=models.CASCADE)
class Meta:
constraints = [
models.UniqueConstraint(fields=['class_taken', 'brother'], name='unique_grade')
]
def query_positions_with_committee():
choices = Q()
for position in COMMITTEE_CHAIRS:
choices = choices | Q(title=position)
return choices
class Position(models.Model):
# This is a list of possible options for each position. The first term is the name of the choice object. A list of
# these can be called using PositionChoices.names. The second term is the value of the choices object, a list of
# which you can get via PositionChoices.values. The values are written as slugs that serve as the url for each
# position's main page. Since the title field gets set to the values options, you can use the title to redirect to
# the main page using HTTPResponseRedirect('/' + title). Lastly the values in _() at the end are the labels or human
# readable names of the choices. If nothing is set there, the label is automatically set from the name, in title
# case with words separated by _. A list of labels can be found using PositionChoices.labels
class PositionChoices(models.TextChoices):
PRESIDENT = 'president',
VICE_PRESIDENT = 'vice-president'
VICE_PRESIDENT_OF_HEALTH_AND_SAFETY = 'vphs', _('Vice President of Health and Safety')
SECRETARY = 'secretary'
TREASURER = 'treasurer'
MARSHAL = 'marshal'
RECRUITMENT_CHAIR = 'recruitment-chair'
SCHOLARSHIP_CHAIR = 'scholarship-chair'
DETAIL_MANAGER = 'detail-manager'
PHILANTHROPY_CHAIR = 'philanthropy-chair'
PUBLIC_RELATIONS_CHAIR = 'pr-chair'
SERVICE_CHAIR = 'service-chair'
ALUMNI_RELATIONS_CHAIR = 'alumni-relations-chair'
MEMBERSHIP_DEVELOPMENT_CHAIR = 'memdev-chair'
SOCIAL_CHAIR = 'social-chair'
COMMUNITY_STANDARDS_CHAIR = 'community-standards-chair'
OX_ROAST_CHAIR = 'ox-roast-chair', _('OX Roast Chair')
DAMAGE_CHAIR = 'damage-chair'
GREEK_GAMES_CHAIR = 'greek-games-chair'
HISTORIAN = 'historian'
FIRST_GUARD = 'first-guard'
SECOND_GUARD = 'second-guard'
INTERNAL_CHANGE_CHAIR = 'internal-change-chair'
STANDARDS_BOARD_JUSTICE = 'standards-board-justice'
EXECUTIVE_COUNCIL_MEMBER_AT_LARGE = 'executive-council-member-at-large'
HOUSE_MANAGER = 'house-manager'
RISK_MANAGER = 'risk-manager'
IFC_REP = 'ifc-rep', _('IFC Rep')
AWARDS_CHAIR = 'awards-chair'
FOOD_STEWARD = 'food-steward'
ATHLETICS_CHAIR = 'athletics-chair'
DASHBOARD_CHAIR = 'dashboard-chair'
ADVISER = 'adviser'
title = models.CharField(max_length=45, choices=PositionChoices.choices, unique=True, blank=False)
@property
def in_ec(self):
return self.title in (
EC_POSITIONS
)
brothers = models.ManyToManyField(Brother)
def __str__(self):
return str(self.PositionChoices(self.title).label)
EC_POSITIONS = (
Position.PositionChoices.PRESIDENT,
Position.PositionChoices.VICE_PRESIDENT,
Position.PositionChoices.VICE_PRESIDENT_OF_HEALTH_AND_SAFETY,
Position.PositionChoices.SECRETARY,
Position.PositionChoices.TREASURER,
Position.PositionChoices.MARSHAL,
Position.PositionChoices.RECRUITMENT_CHAIR,
Position.PositionChoices.SCHOLARSHIP_CHAIR,
)
COMMITTEE_CHAIRS = (
Position.PositionChoices.VICE_PRESIDENT_OF_HEALTH_AND_SAFETY,
Position.PositionChoices.RECRUITMENT_CHAIR,
Position.PositionChoices.SCHOLARSHIP_CHAIR,
Position.PositionChoices.PHILANTHROPY_CHAIR,
Position.PositionChoices.PUBLIC_RELATIONS_CHAIR,
Position.PositionChoices.ALUMNI_RELATIONS_CHAIR,
Position.PositionChoices.MEMBERSHIP_DEVELOPMENT_CHAIR,
Position.PositionChoices.SOCIAL_CHAIR
)
EVENT_CHAIRS = (
Position.PositionChoices.VICE_PRESIDENT_OF_HEALTH_AND_SAFETY,
Position.PositionChoices.SECRETARY,
Position.PositionChoices.RECRUITMENT_CHAIR,
Position.PositionChoices.PHILANTHROPY_CHAIR,
Position.PositionChoices.SERVICE_CHAIR,
)
class Report(models.Model):
is_officer = models.BooleanField(default=True)
position = models.ForeignKey(Position, on_delete=models.CASCADE, blank=True, null=True, related_name="reports")
brother = models.ForeignKey(Brother, on_delete=models.CASCADE, related_name="reports")
information = models.TextField()
class PotentialNewMember(models.Model):
first_name = models.CharField(max_length=45)
last_name = models.CharField(max_length=45, blank=True, null=False)
case_ID = models.CharField(max_length=10, blank=True, null=True)
# regex for proper phone number entry
phone_regex = RegexValidator(
regex=r'^\+?1?\d{9,15}$',
message="Phone number must be entered in the format: "
"'+999999999'. Up to 15 digits allowed.")
phone_number = models.CharField(
validators=[phone_regex], blank=True, null=True, max_length=15
)
# validators should be a list
primary_contact = models.ForeignKey(
Brother, on_delete=models.CASCADE, related_name="primary"
)
secondary_contact = models.ForeignKey(
Brother, on_delete=models.CASCADE, blank=True, null=True,
related_name="secondary"
)
tertiary_contact = models.ForeignKey(
Brother, on_delete=models.CASCADE, blank=True, null=True,
related_name="tertiary"
)
notes = models.TextField(blank=True, null=True)
def __str__(self):
return self.first_name + " " + self.last_name
class ServiceSubmission(models.Model):
name = models.CharField(max_length=200, default="Service Event")
description = models.TextField(default="I did the service thing")
hours = models.IntegerField(default=0)
date_applied = models.DateTimeField(default=django.utils.timezone.now)
STATUS_CHOICES = (
('0', 'Pending'),
('1', 'Awaiting Approval'),
('2', 'Approved'),
('3', 'Denied'),
)
status = models.CharField(
max_length=1,
choices=STATUS_CHOICES,
default='0',
)
date = models.DateField()
semester = models.ForeignKey(Semester, on_delete=models.CASCADE)
brother = models.ForeignKey(Brother, on_delete=models.CASCADE)
def __str__(self):
return self.name
# Given separate section to prevent accidental viewing while in admin views
class ScholarshipReport(models.Model):
brother = models.ForeignKey(Brother, on_delete=models.CASCADE)
semester = models.ForeignKey(Semester, on_delete=models.CASCADE)
active = models.BooleanField(default=False)
past_semester_gpa = models.DecimalField(
max_digits=5, decimal_places=2, default=4.0
)
cumulative_gpa = models.DecimalField(
max_digits=5, decimal_places=2, default=4.0
)
scholarship_plan = models.TextField(
default="Scholarship plan has not been setup yet if you past semester "
"GPA or cum GPA are below 3.0 you should "
"setup a meeting to have this corrected"
)
def __str__(self):
return "%s %s - %s %s" % (self.brother.first_name,
self.brother.last_name,
self.semester.get_season_display(),
self.semester.year)
# method used to set the default for event.eligible_brothers
def all_actives_and_candidates():
return Brother.objects.exclude(brother_status='2')
class TimeChoices(datetime.time, models.Choices):
T_9 = 9, '9:00 A.M.'
T_9_30 = 9,30, '9:30 A.M.'
T_10 = 10, '10:00 A.M.'
T_10_30 = 10, 30, '10:30 A.M.'
T_11 = 11, '11:00 A.M.'
T_11_30 = 11, 30, '11:30 A.M.'
T_12 = 12, '12:00 P.M.'
T_12_30 = 12, 30, '12:30 P.M.'
T_13 = 13, '1:00 P.M.'
T_13_30 = 13, 30, '1:30 P.M.'
T_14 = 14, '2:00 P.M.'
T_14_30 = 14, 30, '2:30 P.M.'
T_15 = 15, '3:00 P.M.'
T_15_30 = 15, 30, '3:30 P.M.'
T_16 = 16, '4:00 P.M.'
T_16_30 = 16, 30, '4:30 P.M.'
T_17 = 17, '5:00 P.M.'
T_17_30 = 17, 30, '5:30 P.M.'
T_18 = 18, '6:00 P.M.'
T_18_30 = 18, 30, '6:30 P.M.'
T_19 = 19, '7:00 P.M.'
T_19_30 = 19, 30, '7:30 P.M.'
T_20 = 20, '8:00 P.M.'
T_20_30 = 20, 30, '8:30 P.M.'
T_21 = 21, '9:00 P.M.'
T_21_30 = 21, 30, '9:30 P.M.'
T_22 = 22, '10:00 P.M.'
T_22_30 = 22, 30, '10:30 P.M.'
T_23 = 23, '11:00 P.M.'
T_23_30 = 23, 30, '11:30 P.M.'
class Event(models.Model):
name = models.CharField(max_length=200, default="Event")
date = models.DateField(default=django.utils.timezone.now)
all_day = models.BooleanField(default=False)
start_time = models.TimeField(default=datetime.time(hour=0, minute=0), choices=TimeChoices.choices)
end_time = models.TimeField(blank=True, null=True, choices=TimeChoices.choices)
attendees_brothers = models.ManyToManyField(Brother, blank=True)
eligible_attendees = models.ManyToManyField(Brother, blank=False, related_name='+', default=all_actives_and_candidates)
semester = models.ForeignKey(
Semester, on_delete=models.CASCADE, blank=True, null=True
)
description = models.TextField(blank=True, null=True)
minutes = models.URLField(blank=True, null=True)
mandatory = models.BooleanField(default=True)
slug = models.SlugField(blank=True) # a field which stores the url to redirect to after running operations on the event
def __str__(self):
return self.name + " " + str(self.date)
def set_event_kwarg_defaults(kwargs, slug, name):
if 'slug' not in kwargs:
kwargs['slug'] = slug
if 'name' not in kwargs:
kwargs['name'] = name
class RecruitmentEvent(Event):
attendees_pnms = models.ManyToManyField(PotentialNewMember, blank=True)
rush = models.BooleanField(default=True)
picture = models.ImageField(upload_to='recruitment', null=True)
location = models.TextField(blank=True, null=True)
def __str__(self):
return "Recruitment Event - " + str(self.date)
def __init__(self, *args, **kwargs):
set_event_kwarg_defaults(kwargs=kwargs, slug=Position.PositionChoices.RECRUITMENT_CHAIR, name='Recruitment Event')
super(RecruitmentEvent, self).__init__(*args, **kwargs)
class SecretaryEvent(Event):
def __str__(self):
return "Secretary Event - " + str(self.date)
def __init__(self, *args, **kwargs):
set_event_kwarg_defaults(kwargs=kwargs, slug=Position.PositionChoices.SECRETARY, name='Secretary Event')
super(SecretaryEvent, self).__init__(*args, **kwargs)
class ChapterEvent(SecretaryEvent):
def __str__(self):
return "Chapter Event - " + str(self.date)
def __init__(self, *args, **kwargs):
set_event_kwarg_defaults(kwargs=kwargs, slug=Position.PositionChoices.SECRETARY, name='Chapter Event')
super(ChapterEvent, self).__init__(*args, **kwargs)
class PhilanthropyEvent(SecretaryEvent):
def __str__(self):
return "Philanthropy Event - " + str(self.date)
def __init__(self, *args, **kwargs):
set_event_kwarg_defaults(kwargs=kwargs, slug=Position.PositionChoices.PHILANTHROPY_CHAIR, name='Philanthropy Event')
super(PhilanthropyEvent, self).__init__(*args, **kwargs)
class ServiceEvent(SecretaryEvent):
def __str__(self):
return "Service Event - " + str(self.date)
def __init__(self, *args, **kwargs):
set_event_kwarg_defaults(kwargs=kwargs, slug=Position.PositionChoices.SERVICE_CHAIR, name='Service Event')
super(ServiceEvent, self).__init__(*args, **kwargs)
class HealthAndSafetyEvent(SecretaryEvent):
def __str__(self):
return "Health and Safety Event - " + str(self.date)
def __init__(self, *args, **kwargs):
set_event_kwarg_defaults(kwargs=kwargs, slug=Position.PositionChoices.VICE_PRESIDENT_OF_HEALTH_AND_SAFETY, name='Sacred Purpose Event')
super(HealthAndSafetyEvent, self).__init__(*args, **kwargs)
class ScholarshipEvent(SecretaryEvent):
def __str__(self):
return "Scholarship Event - " + str(self.date)
def __init__(self, *args, **kwargs):
set_event_kwarg_defaults(kwargs=kwargs, slug=Position.PositionChoices.SCHOLARSHIP_CHAIR, name='Scholarship Event')
super(ScholarshipEvent, self).__init__(*args, **kwargs)
def get_standing_committees(brother):
committees = []
for committee in brother.committee_set.all():
if committee.in_standing():
committees.append(committee.committee)
return committees
def get_operational_committees(brother):
committees = []
for committee in brother.committee_set.all():
if committee.in_operational():
committees.append(committee.committee)
return committees
class Committee(models.Model):
class CommitteeChoices(models.TextChoices):
ALUMNI_RELATIONS = 'AR'
MEMBERSHIP_DEVELOPMENT = 'MD'
PHILANTHROPY = 'PH'
PUBLIC_RELATIONS = 'PR'
RECRUITMENT = 'RE'
SCHOLARSHIP = 'SC'
SOCIAL = 'SO'
HEALTH_AND_SAFETY = 'HS'
STANDING_COMMITTEE_CHOICES = [
('PR', 'Public Relations'),
('RE', 'Recruitment'),
('SO', 'Social'),
('HS', 'Health and Safety'),
]
OPERATIONAL_COMMITTEE_CHOICES = [
('AR', 'Alumni Relations'),
('MD', 'Membership Development'),
('PH', 'Philanthropy'),
('SC', 'Scholarship'),
]
committee = models.CharField(max_length=2, choices=CommitteeChoices.choices, unique=True, blank=False)
def in_standing(self):
return self.committee in (x[0] for x in self.STANDING_COMMITTEE_CHOICES)
def in_operational(self):
return self.committee in (x[0] for x in self.OPERATIONAL_COMMITTEE_CHOICES)
members = models.ManyToManyField(Brother, blank=True)
chair = models.OneToOneField(Position, on_delete=models.PROTECT, limit_choices_to=query_positions_with_committee())
class MeetingIntervals(models.IntegerChoices):
WEEKLY = 7, 'Weekly'
BIWEEKLY = 14, 'Biweekly'
MONTHLY = 28, 'Monthly'
meeting_interval = models.IntegerField(choices=MeetingIntervals.choices)
MEETING_DAY = [
(0, 'Monday'),
(1, 'Tuesday'),
(2, 'Wednesday'),
(3, 'Thursday'),
(4, 'Friday'),
(5, 'Saturday'),
(6, 'Sunday'),
]
meeting_day = models.IntegerField(choices=MEETING_DAY)
meeting_time = models.TimeField(choices=TimeChoices.choices)
def __str__(self):
return self.CommitteeChoices(self.committee).label
class CommitteeMeetingEvent(Event):
committee = models.ForeignKey(Committee, on_delete=models.PROTECT, related_name='meetings')
recurring = models.BooleanField(default=False)
def __str__(self):
return str(self.committee) + " - " + str(self.date)
class Excuse(models.Model):
event = models.ForeignKey(Event, on_delete=models.CASCADE)
brother = models.ForeignKey(Brother, on_delete=models.CASCADE)
date_submitted = models.DateField(default=django.utils.timezone.now)
description = models.TextField(
"Reasoning", default="I will not be attending because"
)
response_message = models.TextField(blank=True, null=True)
STATUS_CHOICES = (
('0', 'Pending'),
('1', 'Approved'),
('2', 'Denied'),
('3', 'Non-Mandatory'),
)
status = models.CharField(
max_length=1,
choices=STATUS_CHOICES,
default='0',
)
def __str__(self):
return self.brother.first_name \
+ " " + self.brother.last_name + " - " + str(self.event.name)
class Supplies(models.Model):
what = models.CharField(max_length=256)
done = models.BooleanField(default=False)
when = models.DateField(auto_now_add=True)
class Meta:
verbose_name_plural = "Supplies"
def __str__(self):
return self.what
class DetailGroup(models.Model):
"""A detail group. Contains brothers and a semester"""
brothers = models.ManyToManyField(Brother)
semester = models.ForeignKey(Semester, on_delete=models.CASCADE)
def size(self):
return len(self.brothers.all())
def __str__(self):
return ", ".join([str(b) for b in self.brothers.all()])
class Detail(models.Model):
"""Abstract class for details"""
short_description = models.CharField(max_length=64)
long_description = models.TextField(null=False)
done = models.BooleanField(default=False)
due_date = models.DateField(null=False)
finished_time = models.DateTimeField(null=True)
def full_text(self):
text = "%s\n----------\n" % self.short_description
text += "%s\n----------\n" % self.long_description
text += "Due: %s\n\n" % str(self.due_date)
return text
class Meta:
abstract = True
def __str__(self):
return self.short_description
class ThursdayDetail(Detail):
"""A thursday detail. Adds the brother who it's assigned to"""
brother = models.ForeignKey(Brother, on_delete=models.CASCADE, null=False)
def finish_link(self):
return reverse(
'dashboard:finish_thursday', args=[self.pk]
)
def __str__(self):
return str(self.due_date) + ": " +\
super(ThursdayDetail, self).__str__()
class SundayDetail(Detail):
"""A single Sunday detail. Keeps track of who marks it done"""
finished_by = models.ForeignKey(Brother, on_delete=models.CASCADE, null=True)
def __str__(self):
return str(self.due_date) + ": " +\
super(SundayDetail, self).__str__()
class SundayGroupDetail(models.Model):
"""A group detail. Contains a group and a number of SundayDetails"""
group = models.ForeignKey(DetailGroup, on_delete=models.CASCADE)
details = models.ManyToManyField(SundayDetail)
due_date = models.DateField()
def finish_link(self):
return reverse(
'dashboard:finish_sunday', args=[self.pk]
)
def done(self):
done = True
for detail in self.details.all():
done = done and detail.done
return done
def __str__(self):
return "%s: %s" % (
self.due_date, ", ".join([str(d) for d in self.details.all()])
)
class Photo(models.Model):
photo = models.ImageField(upload_to='photos')
def __str__(self):
return os.path.basename(str(self.photo))
class MinecraftPhoto(models.Model):
photo = models.ImageField(upload_to='minecraft')
def __str__(self):
return os.path.basename(str(self.photo))
class PhoneTreeNode(models.Model):
brother = models.ForeignKey(Brother, on_delete=models.PROTECT, related_name='phone_tree_brother')
notified_by = models.ForeignKey(Brother, on_delete=models.PROTECT, null=True, related_name='phone_tree_notified_by') # null is the root (ie president)
def __str__(self):
if self.brother.position_set.filter(title=Position.PositionChoices.PRESIDENT):
return self.brother.first_name + " " + self.brother.last_name
return self.brother.first_name + " " + self.brother.last_name + " notified by " + self.notified_by.first_name + " " + self.notified_by.last_name | 0.235988 | 0.10217 |
import sys
sys.path.append('./py')
from tests.test import *
from tests.random import *
from ga import GA
from multivector import MultiVector
ga = GA(3)
assert ga.n == 3
x = 4 + 3*ga[1] + 4*ga[2] + 5*ga[1,2]
y = 3 + 2*ga[1] + 3*ga[2] + 4*ga[1,2]
z = 10 + 16*ga[1] + 26*ga[2] + 32*ga[1,2]
assert x*y == z
ga2 = GA(2)
xx =ga2.coerce_multivector(x)
assert x != xx
assert xx == ga2.scalar(4) + ga2.blade(3, 1) + ga2.blade(4, 2) + ga2.blade(5, 1, 2)
e1 = ga[1]
e2 = ga[2]
e3 = ga[3]
assert e1.cross_product(e2) == e3
assert e2.cross_product(e3) == e1
assert e3.cross_product(e1) == e2
i = ga.blade(1,2,3)
j = ga.blade(1,1,3)
k = ga.blade(1,1,2)
asserteq(i*i,-1)
asserteq(j*j,-1)
asserteq(k*k,-1)
asserteq(i*j*k,-1)
one = ga.scalar(1)
asserteq(ga.I.left_inv(), ga.I*ga.I*ga.I)
asserteq(ga.I.right_inv(), ga.I*ga.I*ga.I)
asserteq(i.left_inv(), -i)
asserteq(j.left_inv(), -j)
asserteq(k.left_inv(), -k)
asserteq(i.right_inv(), -i)
asserteq(j.right_inv(), -j)
asserteq(k.right_inv(), -k)
asserteq(i*j, k)
asserteq(j*k, i)
asserteq(k*i, j)
for iter in range(100):
x = random_multivector()
y = random_multivector()
z = random_multivector()
asserteq((x*y)*z, x*(y*z))
asserteq(x*y, x @ y + (x & y))
asserteq(x+y, y+x)
asserteq(x+(y+z), (x+y)+z)
asserteq(x+y - y, x)
asserteq(2*x, x+x)
asserteq(0*x, ga.scalar(0))
asserteq(1*x, x)
asserteq(-1*x, -x)
asserteq(x-y, -(y-x))
asserteq(x+0, x)
asserteq(0+x, x)
a = random_scalar()
asserteq(a*x, x*a)
b = random_scalar()
c = random_scalar()
d = random_scalar()
q = a+b*i+c*j+d*k
r = a-b*i-c*j-d*k
asserteq(q*r, abs(q)**2)
try:
xx = x.left_inv()
if xx is not NotImplemented:
asserteq(xx*x, one)
except(TypeError, ZeroDivisionError):
pass
try:
xx = x.right_inv()
if xx is not NotImplemented:
asserteq(x*xx, one)
except(TypeError, ZeroDivisionError):
pass
def signum(l):
s = 1
for i in range(len(l)):
for j in range(len(l)-1, i, -1):
if l[j] < l[j-1]:
x = l[j-1]
l[j-1] = l[j]
l[j] = x
s = -s
return s
assert signum([1,2,3,6,12]) == 1
assert signum([1,2,6,3,12]) == -1
def dedup(l):
x = []
for v in l:
if x and x[-1] == v:
x.pop()
else:
x.append(v)
return x
def blade_to_list(x):
if not x:
return []
if len(x) > 1:
return NotImplemented
j = -1
for i in x:
# print('btol:',i, x[i])
j = bin(i)[2:]
l = [len(j) - i for i in range(len(j)) if j[i] == '1']
l.reverse()
return l
for iter in range(100):
x = random_blade()
y = random_blade()
l = blade_to_list(x) + blade_to_list(y)
s = signum(l)
l = dedup(l)
z = x*y
assert blade_to_list(z) == l
for i in z:
assert z[i] == s
a = random_blade()
b = random_blade()
c = random_blade()
a1 = random_blade()
b1 = random_blade()
c1 = random_blade()
x = a+2*b-c + 3
y = 2*a -b - 2*c
z = x*y
zz = a*2*a - a*b - a*2*c +2*b*2*a - 2*b*b -2*b*2*c - c*2*a + c*b + c*2*c + 3*2*a - 3*b - 3*2*c
assert z == zz
#test MultiVector.__init__
a = random_rank()
x = MultiVector(a)
assert x.dim == a
assert not x
assert len(x._data) == 2**a
for i in x._data:
assert not i
#test MultiVector.__getitem__
x = random_multivector(a)
y = MultiVector(a)
# print(x, a)
for i in x:
j = bin(i)[2:]
l = [len(j) - i for i in range(len(j)) if j[i] == '1']
l.reverse()
# print(i, j, l, a)
b = GA(a)[tuple(l)]
# print(x[i], b)
y += x[i] * b
# print( )
asserteq(x, y)
#test MultiVector.__setitem__
x = random_multivector(a)
y = MultiVector(a)
for i in x:
y[i] = x[i]
asserteq(x, y)
#test MultiVector.__delitem__
x = random_multivector(a)
y = MultiVector(a)
j = -1
for i in x:
if j == -1:
j = i
else:
y[i] = x[i]
del x[j]
asserteq(x, y)
#test MultiVector.__contains__
x = random_multivector(a)
s = set(range(2**a))
for i in x:
assert i in x
s.discard(i)
for i in s:
assert i not in x
#test MultiVector.__iter__
x = random_multivector(a)
l = [i for i in x]
for i in range(2**a):
if x[i] != 0:
assert l[0] == i
l.pop(0)
else:
assert i not in l
#test MultiVector.__len__
x = random_multivector(a)
l = [i for i in x]
assert len(x) == len(l)
#test MultiVector.__eq__
x = random_multivector(a)
y = MultiVector(a)
for i in x:
y[i] = x[i]
asserteq(x, y)
z = random_blade(a)
if not z:
z = 1
y += z
assert x != y
#test MultiVector.__pos__
x = random_multivector(a)
assert x == +x
assert x is not +x
#test MultiVector.__neg__
x = random_multivector(a)
assert x == --x
assert not (x + -x)
#test MultiVector.__float__
x = random_scalar(a)
assert float(x) == x[0]
#test MultiVector.__add__
x = random_multivector(a)
y = random_multivector(a)
z = MultiVector(a)
for i in range(0, 2**a):
z[i] = x[i] + y[i]
asserteq(x+y, z)
y = random_scalar(a)
z = +x
z[0] += y[0]
asserteq(x+y[0], z)
asserteq(x+y, z)
#test MultiVector.__radd__
x = random_multivector(a)
y = random_multivector(a)
z = MultiVector(a)
for i in range(0, 2**a):
z[i] = x[i] + y[i]
asserteq(x+y, z)
y = random_scalar(a)
z = +x
z[0] += y[0]
asserteq(y[0]+x, z)
asserteq(y+x, z)
#test MultiVector.__sub__
x = random_multivector(a)
y = random_multivector(a)
z = MultiVector(a)
for i in range(0, 2**a):
z[i] = x[i] - y[i]
asserteq(x-y, z)
y = random_scalar(a)
z = +x
z[0] -= y[0]
asserteq(x-y[0], z)
asserteq(x-y, z)
#test MultiVector.__rsub__
x = random_multivector(a)
y = random_multivector(a)
z = MultiVector(a)
for i in range(0, 2**a):
z[i] = x[i] - y[i]
asserteq(x-y, z)
y = random_scalar(a)
z = -x
z[0] = y[0] + z[0]
asserteq(y[0] - x, z)
asserteq(y - x, z)
#test MultiVector.__mul__
x = random_multivector(a)
y = random_multivector(a)
z = MultiVector(a)
for i in x:
ii = []
bit=1
k=1
while bit <= i:
if bit & i:
ii.append(k)
k += 1
bit *=2
for j in y:
jj = []
bit = 1
k=1
while bit <= j:
if bit & j:
jj.append(k)
k += 1
bit *=2
l = ii + jj
s = signum(l)
l = dedup(l)
k = 0
for kk in l:
k += 2**(kk-1)
z[k] += s * x[i] * y[j]
asserteq(x*y, z)
y = random_scalar(a)
z = x*y
assert(x*y[0] == z)
#test MultiVector.__rmul__
x = random_multivector(a)
y = random_scalar(a)
z = x*y
assert(y[0]*x == z)
#test MultiVector.__and__
x = random_multivector(a)
y = random_multivector(a)
asserteq((x&y) + (x@y), x*y)
asserteq(x&y, -(y&x))
#test MultiVector.__matmul__
x = random_multivector(a)
y = random_scalar(a)
asserteq(x*y, x @ y)
y = random_multivector(a)
asserteq(x@y, y@x)
#test MultiVector.__or__ TODO
#test MultiVector.__abs__
x = random_vector(a)
asserteq(abs(x)*abs(x), x @ x)
#test MultiVector.__invert__ TODO
#test MultiVector.rank
x = random_vector(a)
assert not x or x.rank() == 1
assert not x or (x*x).rank() == 2
assert not (x+x*x) or (x+x*x).rank() == 2
assert not x or (x*x*x).rank() == 3
#test MultiVector.cross_product
x = random_vector(a)
y = random_vector(a)
z = x.cross_product(y)
asserteq(z @ x, 0)
asserteq(z @ y, 0)
assert not z or z.rank() == 1
#test MultiVector.left_inv
x = random_multivector(a)
try:
z = x.left_inv()
if z is not NotImplemented:
asserteq(x*z, one)
except(TypeError, ZeroDivisionError):
pass
#test MultiVector.right_inv
x = random_multivector(a)
try:
z = x.right_inv()
if z is not NotImplemented:
asserteq(z*x, one)
except(TypeError, ZeroDivisionError):
pass
#test MultiVector.__truediv__
x = random_multivector(a)
y = random_multivector(a)
try:
z = x/y
if z is not NotImplemented:
asserteq(z*y, x)
except(TypeError, ZeroDivisionError):
pass
#test MultiVector.dual TODO
#test MultiVector.I
x = random_multivector(a)
assert x.I == GA(a).I
assert x.I*x.I*x.I*x.I == GA(a).scalar(1)
#test MultiVector.__str__ TODO
#test MultiVector.__repr__ TODO | test-ga.py |
import sys
sys.path.append('./py')
from tests.test import *
from tests.random import *
from ga import GA
from multivector import MultiVector
ga = GA(3)
assert ga.n == 3
x = 4 + 3*ga[1] + 4*ga[2] + 5*ga[1,2]
y = 3 + 2*ga[1] + 3*ga[2] + 4*ga[1,2]
z = 10 + 16*ga[1] + 26*ga[2] + 32*ga[1,2]
assert x*y == z
ga2 = GA(2)
xx =ga2.coerce_multivector(x)
assert x != xx
assert xx == ga2.scalar(4) + ga2.blade(3, 1) + ga2.blade(4, 2) + ga2.blade(5, 1, 2)
e1 = ga[1]
e2 = ga[2]
e3 = ga[3]
assert e1.cross_product(e2) == e3
assert e2.cross_product(e3) == e1
assert e3.cross_product(e1) == e2
i = ga.blade(1,2,3)
j = ga.blade(1,1,3)
k = ga.blade(1,1,2)
asserteq(i*i,-1)
asserteq(j*j,-1)
asserteq(k*k,-1)
asserteq(i*j*k,-1)
one = ga.scalar(1)
asserteq(ga.I.left_inv(), ga.I*ga.I*ga.I)
asserteq(ga.I.right_inv(), ga.I*ga.I*ga.I)
asserteq(i.left_inv(), -i)
asserteq(j.left_inv(), -j)
asserteq(k.left_inv(), -k)
asserteq(i.right_inv(), -i)
asserteq(j.right_inv(), -j)
asserteq(k.right_inv(), -k)
asserteq(i*j, k)
asserteq(j*k, i)
asserteq(k*i, j)
for iter in range(100):
x = random_multivector()
y = random_multivector()
z = random_multivector()
asserteq((x*y)*z, x*(y*z))
asserteq(x*y, x @ y + (x & y))
asserteq(x+y, y+x)
asserteq(x+(y+z), (x+y)+z)
asserteq(x+y - y, x)
asserteq(2*x, x+x)
asserteq(0*x, ga.scalar(0))
asserteq(1*x, x)
asserteq(-1*x, -x)
asserteq(x-y, -(y-x))
asserteq(x+0, x)
asserteq(0+x, x)
a = random_scalar()
asserteq(a*x, x*a)
b = random_scalar()
c = random_scalar()
d = random_scalar()
q = a+b*i+c*j+d*k
r = a-b*i-c*j-d*k
asserteq(q*r, abs(q)**2)
try:
xx = x.left_inv()
if xx is not NotImplemented:
asserteq(xx*x, one)
except(TypeError, ZeroDivisionError):
pass
try:
xx = x.right_inv()
if xx is not NotImplemented:
asserteq(x*xx, one)
except(TypeError, ZeroDivisionError):
pass
def signum(l):
s = 1
for i in range(len(l)):
for j in range(len(l)-1, i, -1):
if l[j] < l[j-1]:
x = l[j-1]
l[j-1] = l[j]
l[j] = x
s = -s
return s
assert signum([1,2,3,6,12]) == 1
assert signum([1,2,6,3,12]) == -1
def dedup(l):
x = []
for v in l:
if x and x[-1] == v:
x.pop()
else:
x.append(v)
return x
def blade_to_list(x):
if not x:
return []
if len(x) > 1:
return NotImplemented
j = -1
for i in x:
# print('btol:',i, x[i])
j = bin(i)[2:]
l = [len(j) - i for i in range(len(j)) if j[i] == '1']
l.reverse()
return l
for iter in range(100):
x = random_blade()
y = random_blade()
l = blade_to_list(x) + blade_to_list(y)
s = signum(l)
l = dedup(l)
z = x*y
assert blade_to_list(z) == l
for i in z:
assert z[i] == s
a = random_blade()
b = random_blade()
c = random_blade()
a1 = random_blade()
b1 = random_blade()
c1 = random_blade()
x = a+2*b-c + 3
y = 2*a -b - 2*c
z = x*y
zz = a*2*a - a*b - a*2*c +2*b*2*a - 2*b*b -2*b*2*c - c*2*a + c*b + c*2*c + 3*2*a - 3*b - 3*2*c
assert z == zz
#test MultiVector.__init__
a = random_rank()
x = MultiVector(a)
assert x.dim == a
assert not x
assert len(x._data) == 2**a
for i in x._data:
assert not i
#test MultiVector.__getitem__
x = random_multivector(a)
y = MultiVector(a)
# print(x, a)
for i in x:
j = bin(i)[2:]
l = [len(j) - i for i in range(len(j)) if j[i] == '1']
l.reverse()
# print(i, j, l, a)
b = GA(a)[tuple(l)]
# print(x[i], b)
y += x[i] * b
# print( )
asserteq(x, y)
#test MultiVector.__setitem__
x = random_multivector(a)
y = MultiVector(a)
for i in x:
y[i] = x[i]
asserteq(x, y)
#test MultiVector.__delitem__
x = random_multivector(a)
y = MultiVector(a)
j = -1
for i in x:
if j == -1:
j = i
else:
y[i] = x[i]
del x[j]
asserteq(x, y)
#test MultiVector.__contains__
x = random_multivector(a)
s = set(range(2**a))
for i in x:
assert i in x
s.discard(i)
for i in s:
assert i not in x
#test MultiVector.__iter__
x = random_multivector(a)
l = [i for i in x]
for i in range(2**a):
if x[i] != 0:
assert l[0] == i
l.pop(0)
else:
assert i not in l
#test MultiVector.__len__
x = random_multivector(a)
l = [i for i in x]
assert len(x) == len(l)
#test MultiVector.__eq__
x = random_multivector(a)
y = MultiVector(a)
for i in x:
y[i] = x[i]
asserteq(x, y)
z = random_blade(a)
if not z:
z = 1
y += z
assert x != y
#test MultiVector.__pos__
x = random_multivector(a)
assert x == +x
assert x is not +x
#test MultiVector.__neg__
x = random_multivector(a)
assert x == --x
assert not (x + -x)
#test MultiVector.__float__
x = random_scalar(a)
assert float(x) == x[0]
#test MultiVector.__add__
x = random_multivector(a)
y = random_multivector(a)
z = MultiVector(a)
for i in range(0, 2**a):
z[i] = x[i] + y[i]
asserteq(x+y, z)
y = random_scalar(a)
z = +x
z[0] += y[0]
asserteq(x+y[0], z)
asserteq(x+y, z)
#test MultiVector.__radd__
x = random_multivector(a)
y = random_multivector(a)
z = MultiVector(a)
for i in range(0, 2**a):
z[i] = x[i] + y[i]
asserteq(x+y, z)
y = random_scalar(a)
z = +x
z[0] += y[0]
asserteq(y[0]+x, z)
asserteq(y+x, z)
#test MultiVector.__sub__
x = random_multivector(a)
y = random_multivector(a)
z = MultiVector(a)
for i in range(0, 2**a):
z[i] = x[i] - y[i]
asserteq(x-y, z)
y = random_scalar(a)
z = +x
z[0] -= y[0]
asserteq(x-y[0], z)
asserteq(x-y, z)
#test MultiVector.__rsub__
x = random_multivector(a)
y = random_multivector(a)
z = MultiVector(a)
for i in range(0, 2**a):
z[i] = x[i] - y[i]
asserteq(x-y, z)
y = random_scalar(a)
z = -x
z[0] = y[0] + z[0]
asserteq(y[0] - x, z)
asserteq(y - x, z)
#test MultiVector.__mul__
x = random_multivector(a)
y = random_multivector(a)
z = MultiVector(a)
for i in x:
ii = []
bit=1
k=1
while bit <= i:
if bit & i:
ii.append(k)
k += 1
bit *=2
for j in y:
jj = []
bit = 1
k=1
while bit <= j:
if bit & j:
jj.append(k)
k += 1
bit *=2
l = ii + jj
s = signum(l)
l = dedup(l)
k = 0
for kk in l:
k += 2**(kk-1)
z[k] += s * x[i] * y[j]
asserteq(x*y, z)
y = random_scalar(a)
z = x*y
assert(x*y[0] == z)
#test MultiVector.__rmul__
x = random_multivector(a)
y = random_scalar(a)
z = x*y
assert(y[0]*x == z)
#test MultiVector.__and__
x = random_multivector(a)
y = random_multivector(a)
asserteq((x&y) + (x@y), x*y)
asserteq(x&y, -(y&x))
#test MultiVector.__matmul__
x = random_multivector(a)
y = random_scalar(a)
asserteq(x*y, x @ y)
y = random_multivector(a)
asserteq(x@y, y@x)
#test MultiVector.__or__ TODO
#test MultiVector.__abs__
x = random_vector(a)
asserteq(abs(x)*abs(x), x @ x)
#test MultiVector.__invert__ TODO
#test MultiVector.rank
x = random_vector(a)
assert not x or x.rank() == 1
assert not x or (x*x).rank() == 2
assert not (x+x*x) or (x+x*x).rank() == 2
assert not x or (x*x*x).rank() == 3
#test MultiVector.cross_product
x = random_vector(a)
y = random_vector(a)
z = x.cross_product(y)
asserteq(z @ x, 0)
asserteq(z @ y, 0)
assert not z or z.rank() == 1
#test MultiVector.left_inv
x = random_multivector(a)
try:
z = x.left_inv()
if z is not NotImplemented:
asserteq(x*z, one)
except(TypeError, ZeroDivisionError):
pass
#test MultiVector.right_inv
x = random_multivector(a)
try:
z = x.right_inv()
if z is not NotImplemented:
asserteq(z*x, one)
except(TypeError, ZeroDivisionError):
pass
#test MultiVector.__truediv__
x = random_multivector(a)
y = random_multivector(a)
try:
z = x/y
if z is not NotImplemented:
asserteq(z*y, x)
except(TypeError, ZeroDivisionError):
pass
#test MultiVector.dual TODO
#test MultiVector.I
x = random_multivector(a)
assert x.I == GA(a).I
assert x.I*x.I*x.I*x.I == GA(a).scalar(1)
#test MultiVector.__str__ TODO
#test MultiVector.__repr__ TODO | 0.177312 | 0.511412 |
import weakref
class Message:
def __init__(self, message_id, content, metadata):
self.message_id = message_id
self.content = content
self.metadata = metadata
class MessagingDriver:
def __init__(self):
self._finalizer = weakref.finalize(self, self.close_connection)
def declare_topic(self, topic_name):
"""
Declares a topic exchange with the name "topic name" and
returns an object that represent the topic
:param topic_name: The name of the topic to create
:return: An object that represents a topic. The type of the object
is only relevant inside the context of the driver, so what you
return as a topic will be passed in next calls to the driver
where a topic is required
"""
raise NotImplementedError
def get_queue(self, queue_name):
raise NotImplementedError
def declare_queue(self, queue_name, *topics_to_bind, dead_letter_queue_name=None, **kwargs):
"""
Declares a queue with the name "queue_name". Optionally, this
queue may be binded to the topic "topic_to_bind" and associated
to a dead_letter_queue "dead_letter_queue_name" where messages that
were unable to deliver will be placed.
:param queue_name: The name of the queue to create
:param topic_to_bind: The topic object where you will bind your queue
:param dead_letter_queue_name: The name of the dead letter queue to
create and associate to the queue "queue_name"
:return: A tuple, with the first element being the object queue
created, and the second element is the dead letter queue object.
The type of the queue object is only relevant inside the context of the driver, so what you
return as a queue will be passed in next calls to the driver
where a queue is required
"""
raise NotImplementedError
def retrieve_messages(self, queue, attempt_id=None):
"""
Returns a list of messages (instances of Message type) that have
been received from the queue.
:param queue: queue to poll
:return: a list of messages to process
"""
raise NotImplementedError
def publish(self, content, topic, event_type_name):
"""
Publishes the content to the topic. The content must be a
string (which is the json representation of an event)
"""
raise NotImplementedError
def queue_publish(
self, content, queue, event_type_name,
message_group_id=None, message_deduplication_id=None):
raise NotImplementedError
def acknowledge(self, message):
"""
Acknowledges a message so that it won't be redelivered by
the messaging infrastructure in the future
"""
raise NotImplementedError
def close_connection(self):
"""
Override this function if you want to use some finalizer code
to shutdown your driver in a clean way
"""
pass
def delete_queue(self, queue):
"""
Deletes the queue
"""
raise NotImplementedError
def delete_topic(self, topic):
"""
Deletes the topic
"""
raise NotImplementedError | melange/messaging/messaging_driver.py | import weakref
class Message:
def __init__(self, message_id, content, metadata):
self.message_id = message_id
self.content = content
self.metadata = metadata
class MessagingDriver:
def __init__(self):
self._finalizer = weakref.finalize(self, self.close_connection)
def declare_topic(self, topic_name):
"""
Declares a topic exchange with the name "topic name" and
returns an object that represent the topic
:param topic_name: The name of the topic to create
:return: An object that represents a topic. The type of the object
is only relevant inside the context of the driver, so what you
return as a topic will be passed in next calls to the driver
where a topic is required
"""
raise NotImplementedError
def get_queue(self, queue_name):
raise NotImplementedError
def declare_queue(self, queue_name, *topics_to_bind, dead_letter_queue_name=None, **kwargs):
"""
Declares a queue with the name "queue_name". Optionally, this
queue may be binded to the topic "topic_to_bind" and associated
to a dead_letter_queue "dead_letter_queue_name" where messages that
were unable to deliver will be placed.
:param queue_name: The name of the queue to create
:param topic_to_bind: The topic object where you will bind your queue
:param dead_letter_queue_name: The name of the dead letter queue to
create and associate to the queue "queue_name"
:return: A tuple, with the first element being the object queue
created, and the second element is the dead letter queue object.
The type of the queue object is only relevant inside the context of the driver, so what you
return as a queue will be passed in next calls to the driver
where a queue is required
"""
raise NotImplementedError
def retrieve_messages(self, queue, attempt_id=None):
"""
Returns a list of messages (instances of Message type) that have
been received from the queue.
:param queue: queue to poll
:return: a list of messages to process
"""
raise NotImplementedError
def publish(self, content, topic, event_type_name):
"""
Publishes the content to the topic. The content must be a
string (which is the json representation of an event)
"""
raise NotImplementedError
def queue_publish(
self, content, queue, event_type_name,
message_group_id=None, message_deduplication_id=None):
raise NotImplementedError
def acknowledge(self, message):
"""
Acknowledges a message so that it won't be redelivered by
the messaging infrastructure in the future
"""
raise NotImplementedError
def close_connection(self):
"""
Override this function if you want to use some finalizer code
to shutdown your driver in a clean way
"""
pass
def delete_queue(self, queue):
"""
Deletes the queue
"""
raise NotImplementedError
def delete_topic(self, topic):
"""
Deletes the topic
"""
raise NotImplementedError | 0.791821 | 0.301285 |
import cv2
import albumentations as A
from typing import Any
from typing import Tuple
from typing import Union
from typing import Optional
from albumentations.pytorch import ToTensorV2
from .general import ToRGB
from .general import ToGray
from .general import BatchWrapper
from .....data import Transforms
from .....constants import INPUT_KEY
from .....constants import LABEL_KEY
class ATransforms(Transforms):
input_alias = "image"
def __init__(self, *, label_alias: Optional[str] = None):
super().__init__()
self.label_alias = label_alias
def __call__(self, inp: Any, **kwargs: Any) -> Any: # type: ignore
if not self.need_batch_process:
kwargs[self.input_alias] = inp
return self.fn(**kwargs)[self.input_alias]
inp_keys_mapping = {
self.input_alias
if k == INPUT_KEY
else self.label_alias
if k == LABEL_KEY
else k: k
for k in inp
}
inp = {k: inp[v] for k, v in inp_keys_mapping.items()}
return {inp_keys_mapping[k]: v for k, v in self.fn(**inp).items()}
@property
def need_batch_process(self) -> bool:
return self.label_alias is not None
@property
def need_numpy(self) -> bool:
return True
AToRGB = lambda: BatchWrapper(ToRGB(), ATransforms.input_alias)
AToGray = lambda: BatchWrapper(ToGray(), ATransforms.input_alias)
@Transforms.register("a_resize")
class AResize(ATransforms):
def __init__(
self,
size: Union[int, tuple] = 224,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
if isinstance(size, int):
size = size, size
self.fn = A.Resize(*size)
@Transforms.register("a_random_crop")
class ARandomCrop(ATransforms):
def __init__(self, size: Union[int, tuple], *, label_alias: Optional[str] = None):
super().__init__(label_alias=label_alias)
if isinstance(size, int):
size = size, size
self.fn = A.RandomCrop(*size)
@Transforms.register("a_shift_scale_rotate")
class AShiftScaleRotate(ATransforms):
def __init__(
self,
p: float = 0.5,
border_mode: int = cv2.BORDER_REFLECT_101,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
self.fn = A.ShiftScaleRotate(border_mode=border_mode, p=p)
@Transforms.register("a_hflip")
class AHFlip(ATransforms):
def __init__(self, p: float = 0.5, *, label_alias: Optional[str] = None):
super().__init__(label_alias=label_alias)
self.fn = A.HorizontalFlip(p=p)
@Transforms.register("a_vflip")
class AVFlip(ATransforms):
def __init__(self, p: float = 0.5, *, label_alias: Optional[str] = None):
super().__init__(label_alias=label_alias)
self.fn = A.VerticalFlip(p=p)
@Transforms.register("a_normalize")
class ANormalize(ATransforms):
def __init__(
self,
mean: Tuple[float, float, float] = (0.485, 0.456, 0.406),
std: Tuple[float, float, float] = (0.229, 0.224, 0.225),
max_pixel_value: float = 1.0,
p: float = 1.0,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
self.fn = A.Normalize(mean, std, max_pixel_value, p=p)
@Transforms.register("a_rgb_shift")
class ARGBShift(ATransforms):
def __init__(
self,
r_shift_limit: float = 0.08,
g_shift_limit: float = 0.08,
b_shift_limit: float = 0.08,
p: float = 0.5,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
self.fn = A.RGBShift(r_shift_limit, g_shift_limit, b_shift_limit, p=p)
@Transforms.register("a_solarize")
class ASolarize(ATransforms):
def __init__(
self,
threshold: float = 0.5,
p: float = 0.5,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
self.fn = A.Solarize(threshold, p=p)
@Transforms.register("a_gaussian_blur")
class AGaussianBlur(ATransforms):
def __init__(
self,
blur_limit: Tuple[int, int] = (3, 7),
sigma_limit: int = 0,
p: float = 0.5,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
self.fn = A.GaussianBlur(blur_limit, sigma_limit, p=p)
@Transforms.register("a_hue_saturation")
class AHueSaturationValue(ATransforms):
def __init__(
self,
hue_shift_limit: float = 0.08,
sat_shift_limit: float = 0.12,
val_shift_limit: float = 0.08,
p: float = 0.5,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
self.fn = A.HueSaturationValue(
hue_shift_limit,
sat_shift_limit,
val_shift_limit,
p,
)
@Transforms.register("a_brightness_contrast")
class ARandomBrightnessContrast(ATransforms):
def __init__(
self,
brightness_limit: float = 0.2,
contrast_limit: float = 0.2,
brightness_by_max: bool = True,
p: float = 0.5,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
self.fn = A.RandomBrightnessContrast(
brightness_limit,
contrast_limit,
brightness_by_max,
p,
)
@Transforms.register("a_to_tensor")
class AToTensor(ATransforms):
def __init__(
self,
transpose_mask: bool = True,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
self.fn = ToTensorV2(transpose_mask)
__all__ = [
"AToRGB",
"AToGray",
"AResize",
"ARandomCrop",
"AShiftScaleRotate",
"AHFlip",
"AVFlip",
"ANormalize",
"ARGBShift",
"ASolarize",
"AGaussianBlur",
"AHueSaturationValue",
"ARandomBrightnessContrast",
"AToTensor",
"ATransforms",
] | cflearn/api/cv/data/transforms/A.py | import cv2
import albumentations as A
from typing import Any
from typing import Tuple
from typing import Union
from typing import Optional
from albumentations.pytorch import ToTensorV2
from .general import ToRGB
from .general import ToGray
from .general import BatchWrapper
from .....data import Transforms
from .....constants import INPUT_KEY
from .....constants import LABEL_KEY
class ATransforms(Transforms):
input_alias = "image"
def __init__(self, *, label_alias: Optional[str] = None):
super().__init__()
self.label_alias = label_alias
def __call__(self, inp: Any, **kwargs: Any) -> Any: # type: ignore
if not self.need_batch_process:
kwargs[self.input_alias] = inp
return self.fn(**kwargs)[self.input_alias]
inp_keys_mapping = {
self.input_alias
if k == INPUT_KEY
else self.label_alias
if k == LABEL_KEY
else k: k
for k in inp
}
inp = {k: inp[v] for k, v in inp_keys_mapping.items()}
return {inp_keys_mapping[k]: v for k, v in self.fn(**inp).items()}
@property
def need_batch_process(self) -> bool:
return self.label_alias is not None
@property
def need_numpy(self) -> bool:
return True
AToRGB = lambda: BatchWrapper(ToRGB(), ATransforms.input_alias)
AToGray = lambda: BatchWrapper(ToGray(), ATransforms.input_alias)
@Transforms.register("a_resize")
class AResize(ATransforms):
def __init__(
self,
size: Union[int, tuple] = 224,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
if isinstance(size, int):
size = size, size
self.fn = A.Resize(*size)
@Transforms.register("a_random_crop")
class ARandomCrop(ATransforms):
def __init__(self, size: Union[int, tuple], *, label_alias: Optional[str] = None):
super().__init__(label_alias=label_alias)
if isinstance(size, int):
size = size, size
self.fn = A.RandomCrop(*size)
@Transforms.register("a_shift_scale_rotate")
class AShiftScaleRotate(ATransforms):
def __init__(
self,
p: float = 0.5,
border_mode: int = cv2.BORDER_REFLECT_101,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
self.fn = A.ShiftScaleRotate(border_mode=border_mode, p=p)
@Transforms.register("a_hflip")
class AHFlip(ATransforms):
def __init__(self, p: float = 0.5, *, label_alias: Optional[str] = None):
super().__init__(label_alias=label_alias)
self.fn = A.HorizontalFlip(p=p)
@Transforms.register("a_vflip")
class AVFlip(ATransforms):
def __init__(self, p: float = 0.5, *, label_alias: Optional[str] = None):
super().__init__(label_alias=label_alias)
self.fn = A.VerticalFlip(p=p)
@Transforms.register("a_normalize")
class ANormalize(ATransforms):
def __init__(
self,
mean: Tuple[float, float, float] = (0.485, 0.456, 0.406),
std: Tuple[float, float, float] = (0.229, 0.224, 0.225),
max_pixel_value: float = 1.0,
p: float = 1.0,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
self.fn = A.Normalize(mean, std, max_pixel_value, p=p)
@Transforms.register("a_rgb_shift")
class ARGBShift(ATransforms):
def __init__(
self,
r_shift_limit: float = 0.08,
g_shift_limit: float = 0.08,
b_shift_limit: float = 0.08,
p: float = 0.5,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
self.fn = A.RGBShift(r_shift_limit, g_shift_limit, b_shift_limit, p=p)
@Transforms.register("a_solarize")
class ASolarize(ATransforms):
def __init__(
self,
threshold: float = 0.5,
p: float = 0.5,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
self.fn = A.Solarize(threshold, p=p)
@Transforms.register("a_gaussian_blur")
class AGaussianBlur(ATransforms):
def __init__(
self,
blur_limit: Tuple[int, int] = (3, 7),
sigma_limit: int = 0,
p: float = 0.5,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
self.fn = A.GaussianBlur(blur_limit, sigma_limit, p=p)
@Transforms.register("a_hue_saturation")
class AHueSaturationValue(ATransforms):
def __init__(
self,
hue_shift_limit: float = 0.08,
sat_shift_limit: float = 0.12,
val_shift_limit: float = 0.08,
p: float = 0.5,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
self.fn = A.HueSaturationValue(
hue_shift_limit,
sat_shift_limit,
val_shift_limit,
p,
)
@Transforms.register("a_brightness_contrast")
class ARandomBrightnessContrast(ATransforms):
def __init__(
self,
brightness_limit: float = 0.2,
contrast_limit: float = 0.2,
brightness_by_max: bool = True,
p: float = 0.5,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
self.fn = A.RandomBrightnessContrast(
brightness_limit,
contrast_limit,
brightness_by_max,
p,
)
@Transforms.register("a_to_tensor")
class AToTensor(ATransforms):
def __init__(
self,
transpose_mask: bool = True,
*,
label_alias: Optional[str] = None,
):
super().__init__(label_alias=label_alias)
self.fn = ToTensorV2(transpose_mask)
__all__ = [
"AToRGB",
"AToGray",
"AResize",
"ARandomCrop",
"AShiftScaleRotate",
"AHFlip",
"AVFlip",
"ANormalize",
"ARGBShift",
"ASolarize",
"AGaussianBlur",
"AHueSaturationValue",
"ARandomBrightnessContrast",
"AToTensor",
"ATransforms",
] | 0.903796 | 0.216053 |
# debug
import hTools2
reload(hTools2)
if hTools2.DEBUG:
import hTools2.modules.fontutils
reload(hTools2.modules.fontutils)
# imports
from vanilla import *
try:
from mojo.roboFont import AllFonts, CurrentFont, CurrentGlyph
except:
from robofab.world import AllFonts, CurrentFont, CurrentGlyph
from mojo.UI import CurrentGlyphWindow, OpenGlyphWindow
from hTools2.modules.fontutils import get_full_name, get_glyphs
# functions
def next_glyph(font, index):
try:
next = font.glyphOrder[index+1]
except IndexError:
next = font.glyphOrder[0]
return next
def previous_glyph(font, index):
try:
prev = font.glyphOrder[index-1]
except IndexError:
prev = font.glyphOrder[-1]
return prev
# objects
class switchGlyphDialog(object):
_title = "switch"
_padding_top = 8
_padding = 10
_button_1 = 30
_button_2 = 18
_line_height = 18
_box_height = 23
_width = 320
_height = (_button_1 * 3) + (_padding_top * 2)
_move_default = 70
def __init__(self):
# get fonts
self.all_fonts = AllFonts()
if len(self.all_fonts) > 0:
self.w = FloatingWindow(
(self._width,
self._height),
self._title)
# move buttons
p = self._padding
b1 = self._button_1
b2 = self._button_2
box = self._box_height
x = self._padding
y = self._padding_top
x1 = x + b1 - 1
x2 = x + (b1 * 2) - 2
# buttons
self.w._up = SquareButton(
(x1, y,
b1, b1),
unichr(8673),
callback=self._up_callback)
self.w._up_right = SquareButton(
(x2 + 8, y,
b1 - 8, b1 - 8),
unichr(8599),
callback=self._up_right_callback,
sizeStyle='small')
y += b1 - 1
self.w._left = SquareButton(
(x, y,
b1, b1),
unichr(8672),
callback=self._left_callback)
self.w._right = SquareButton(
(x2, y,
b1, b1),
unichr(8674),
callback=self._right_callback)
y += b1 - 1
self.w._down = SquareButton(
(x1, y,
b1, b1),
unichr(8675),
callback=self._down_callback)
self.w._down_left = SquareButton(
(x, y + 8,
b1 - 8, b1 - 8),
unichr(8601),
callback=self._down_left_callback,
sizeStyle='small')
# location
y = p
x3 = x2 + b1 + 16
self.w.box_font = Box(
(x3, y,
-self._padding,
self._box_height))
self.w.box_font.text = TextBox(
(5, 0,
-self._padding,
-0),
'',
sizeStyle='small')
y += self._box_height + self._padding_top
self.w.box_glyph = Box(
(x3, y,
-self._padding,
self._box_height))
self.w.box_glyph.text = TextBox(
(5, 0,
-self._padding,
-0),
'',
sizeStyle='small')
y += self._box_height + self._padding_top
self.w.box_layer = Box(
(x3, y,
-self._padding,
self._box_height))
self.w.box_layer.text = TextBox(
(5, 0,
-self._padding,
-0),
'',
sizeStyle='small')
# open
if self.update():
self.w.open()
else:
print 'please open at least one font first.\n'
# methods
def next_glyph(self):
next = next_glyph(self.font, self.glyph_index)
try:
self.glyph_window.setGlyphByName(next)
except AttributeError:
self.glyph_window = CurrentGlyphWindow()
self.glyph_window.setGlyphByName(next)
self.update()
def previous_glyph(self):
prev = previous_glyph(self.font, self.glyph_index)
try:
self.glyph_window.setGlyphByName(prev)
except AttributeError:
self.glyph_window = CurrentGlyphWindow()
self.glyph_window.setGlyphByName(prev)
self.update()
def layer_down(self):
try:
self.glyph_window.layerDown()
except AttributeError:
self.glyph_window = CurrentGlyphWindow()
self.glyph_window.layerDown()
self.update()
def layer_up(self):
try:
self.glyph_window.layerUp()
except AttributeError:
self.glyph_window = CurrentGlyphWindow()
self.glyph_window.layerUp()
self.update()
def _update_text_box(self):
self.w.box_font.text.set('%s [%s]' % (get_full_name(self.font), self.font_index))
self.w.box_glyph.text.set('%s [%s]' % (self.glyph.name, self.glyph_index))
self.w.box_layer.text.set(self.glyph.layerName)
def update(self):
self.glyph_window = CurrentGlyphWindow()
if self.glyph_window is not None:
self.glyph = CurrentGlyph()
self.font = self.glyph.getParent()
self.glyph_index = self.font.glyphOrder.index(self.glyph.name)
self.font_index = self.all_fonts.index(self.font)
self._update_text_box()
return True
else:
f = CurrentFont()
if f is not None:
self.font = f
self.font_index = self.all_fonts.index(self.font)
glyph_names = get_glyphs(f)
if len(glyph_names) > 0:
self.glyph = self.font[glyph_names[0]]
self.glyph_index = self.font.glyphOrder.index(self.glyph.name)
self.glyph_window = OpenGlyphWindow(self.glyph, newWindow=False)
self._update_text_box()
return True
else:
print 'please select a glyph first.\n'
return False
else:
print 'please open a font first.\n'
return False
# callbacks
def _left_callback(self, sender):
self.previous_glyph()
def _right_callback(self, sender):
self.next_glyph()
def _up_callback(self, sender):
self.layer_up()
def _down_callback(self, sender):
self.layer_down()
def _up_right_callback(self, sender):
if len(self.all_fonts) > 1:
# get next font
f = CurrentFont()
i = self.all_fonts.index(f)
try:
next_i = i + 1
next_font = self.all_fonts[next_i]
except IndexError:
next_i = 0
next_font = self.all_fonts[next_i]
# get glyph
g_current = CurrentGlyph()
if g_current is not None:
if next_font.has_key(g_current.name):
next_glyph = next_font[g_current.name]
else:
next_glyph = next_font[next_font.glyphOrder[0]]
# switch to glyph window
G = OpenGlyphWindow(next_glyph, newWindow=False)
# update UI
self.update()
def _down_left_callback(self, sender):
if len(self.all_fonts) > 1:
# get next font
f = CurrentFont()
i = self.all_fonts.index(f)
try:
prev_i = i - 1
prev_font = self.all_fonts[prev_i]
except IndexError:
prev_i = -1
prev_font = self.all_fonts[prev_i]
# get glyph
g_current = CurrentGlyph()
if g_current is not None:
if prev_font.has_key(g_current.name):
prev_glyph = prev_font[g_current.name]
else:
prev_glyph = prev_font[prev_font.glyphOrder[0]]
# switch to glyph window
G = OpenGlyphWindow(prev_glyph, newWindow=False)
# update UI
self.update() | Lib/hTools2/dialogs/glyph/switch_glyph.py |
# debug
import hTools2
reload(hTools2)
if hTools2.DEBUG:
import hTools2.modules.fontutils
reload(hTools2.modules.fontutils)
# imports
from vanilla import *
try:
from mojo.roboFont import AllFonts, CurrentFont, CurrentGlyph
except:
from robofab.world import AllFonts, CurrentFont, CurrentGlyph
from mojo.UI import CurrentGlyphWindow, OpenGlyphWindow
from hTools2.modules.fontutils import get_full_name, get_glyphs
# functions
def next_glyph(font, index):
try:
next = font.glyphOrder[index+1]
except IndexError:
next = font.glyphOrder[0]
return next
def previous_glyph(font, index):
try:
prev = font.glyphOrder[index-1]
except IndexError:
prev = font.glyphOrder[-1]
return prev
# objects
class switchGlyphDialog(object):
_title = "switch"
_padding_top = 8
_padding = 10
_button_1 = 30
_button_2 = 18
_line_height = 18
_box_height = 23
_width = 320
_height = (_button_1 * 3) + (_padding_top * 2)
_move_default = 70
def __init__(self):
# get fonts
self.all_fonts = AllFonts()
if len(self.all_fonts) > 0:
self.w = FloatingWindow(
(self._width,
self._height),
self._title)
# move buttons
p = self._padding
b1 = self._button_1
b2 = self._button_2
box = self._box_height
x = self._padding
y = self._padding_top
x1 = x + b1 - 1
x2 = x + (b1 * 2) - 2
# buttons
self.w._up = SquareButton(
(x1, y,
b1, b1),
unichr(8673),
callback=self._up_callback)
self.w._up_right = SquareButton(
(x2 + 8, y,
b1 - 8, b1 - 8),
unichr(8599),
callback=self._up_right_callback,
sizeStyle='small')
y += b1 - 1
self.w._left = SquareButton(
(x, y,
b1, b1),
unichr(8672),
callback=self._left_callback)
self.w._right = SquareButton(
(x2, y,
b1, b1),
unichr(8674),
callback=self._right_callback)
y += b1 - 1
self.w._down = SquareButton(
(x1, y,
b1, b1),
unichr(8675),
callback=self._down_callback)
self.w._down_left = SquareButton(
(x, y + 8,
b1 - 8, b1 - 8),
unichr(8601),
callback=self._down_left_callback,
sizeStyle='small')
# location
y = p
x3 = x2 + b1 + 16
self.w.box_font = Box(
(x3, y,
-self._padding,
self._box_height))
self.w.box_font.text = TextBox(
(5, 0,
-self._padding,
-0),
'',
sizeStyle='small')
y += self._box_height + self._padding_top
self.w.box_glyph = Box(
(x3, y,
-self._padding,
self._box_height))
self.w.box_glyph.text = TextBox(
(5, 0,
-self._padding,
-0),
'',
sizeStyle='small')
y += self._box_height + self._padding_top
self.w.box_layer = Box(
(x3, y,
-self._padding,
self._box_height))
self.w.box_layer.text = TextBox(
(5, 0,
-self._padding,
-0),
'',
sizeStyle='small')
# open
if self.update():
self.w.open()
else:
print 'please open at least one font first.\n'
# methods
def next_glyph(self):
next = next_glyph(self.font, self.glyph_index)
try:
self.glyph_window.setGlyphByName(next)
except AttributeError:
self.glyph_window = CurrentGlyphWindow()
self.glyph_window.setGlyphByName(next)
self.update()
def previous_glyph(self):
prev = previous_glyph(self.font, self.glyph_index)
try:
self.glyph_window.setGlyphByName(prev)
except AttributeError:
self.glyph_window = CurrentGlyphWindow()
self.glyph_window.setGlyphByName(prev)
self.update()
def layer_down(self):
try:
self.glyph_window.layerDown()
except AttributeError:
self.glyph_window = CurrentGlyphWindow()
self.glyph_window.layerDown()
self.update()
def layer_up(self):
try:
self.glyph_window.layerUp()
except AttributeError:
self.glyph_window = CurrentGlyphWindow()
self.glyph_window.layerUp()
self.update()
def _update_text_box(self):
self.w.box_font.text.set('%s [%s]' % (get_full_name(self.font), self.font_index))
self.w.box_glyph.text.set('%s [%s]' % (self.glyph.name, self.glyph_index))
self.w.box_layer.text.set(self.glyph.layerName)
def update(self):
self.glyph_window = CurrentGlyphWindow()
if self.glyph_window is not None:
self.glyph = CurrentGlyph()
self.font = self.glyph.getParent()
self.glyph_index = self.font.glyphOrder.index(self.glyph.name)
self.font_index = self.all_fonts.index(self.font)
self._update_text_box()
return True
else:
f = CurrentFont()
if f is not None:
self.font = f
self.font_index = self.all_fonts.index(self.font)
glyph_names = get_glyphs(f)
if len(glyph_names) > 0:
self.glyph = self.font[glyph_names[0]]
self.glyph_index = self.font.glyphOrder.index(self.glyph.name)
self.glyph_window = OpenGlyphWindow(self.glyph, newWindow=False)
self._update_text_box()
return True
else:
print 'please select a glyph first.\n'
return False
else:
print 'please open a font first.\n'
return False
# callbacks
def _left_callback(self, sender):
self.previous_glyph()
def _right_callback(self, sender):
self.next_glyph()
def _up_callback(self, sender):
self.layer_up()
def _down_callback(self, sender):
self.layer_down()
def _up_right_callback(self, sender):
if len(self.all_fonts) > 1:
# get next font
f = CurrentFont()
i = self.all_fonts.index(f)
try:
next_i = i + 1
next_font = self.all_fonts[next_i]
except IndexError:
next_i = 0
next_font = self.all_fonts[next_i]
# get glyph
g_current = CurrentGlyph()
if g_current is not None:
if next_font.has_key(g_current.name):
next_glyph = next_font[g_current.name]
else:
next_glyph = next_font[next_font.glyphOrder[0]]
# switch to glyph window
G = OpenGlyphWindow(next_glyph, newWindow=False)
# update UI
self.update()
def _down_left_callback(self, sender):
if len(self.all_fonts) > 1:
# get next font
f = CurrentFont()
i = self.all_fonts.index(f)
try:
prev_i = i - 1
prev_font = self.all_fonts[prev_i]
except IndexError:
prev_i = -1
prev_font = self.all_fonts[prev_i]
# get glyph
g_current = CurrentGlyph()
if g_current is not None:
if prev_font.has_key(g_current.name):
prev_glyph = prev_font[g_current.name]
else:
prev_glyph = prev_font[prev_font.glyphOrder[0]]
# switch to glyph window
G = OpenGlyphWindow(prev_glyph, newWindow=False)
# update UI
self.update() | 0.360602 | 0.078254 |
# Import all packages
import tensorflow as tf
class QmixNet(tf.keras.Model):
def __init__(self, matrix_dims, name='Qmix', **kwargs):
super(QmixNet, self).__init__(name=name, **kwargs)
q_init = tf.zeros_initializer()
self.q_1 = tf.Variable(initial_value=q_init(shape=(matrix_dims[0],), dtype='float32'), trainable=True)
self.q_2 = tf.Variable(initial_value=q_init(shape=(matrix_dims[1],), dtype='float32'), trainable=True)
nmbr_units = 5
b_init = tf.zeros_initializer()
self.b_0 = tf.Variable(initial_value=b_init(shape=(nmbr_units,), dtype='float32'), trainable=True)
self.b_1 = tf.Variable(initial_value=b_init(shape=(1,), dtype='float32'), trainable=True)
w_init = tf.random_normal_initializer()
self.w_0 = tf.Variable(initial_value=w_init(shape=(2, nmbr_units), dtype='float32'), trainable=True)
self.w_1 = tf.Variable(initial_value=w_init(shape=(nmbr_units, 1), dtype='float32'), trainable=True)
@tf.function
def call(self, actions):
x = tf.expand_dims(tf.stack([self.q_1[actions[0]], self.q_2[actions[1]]]), axis=0)
x = tf.matmul(x, tf.math.exp(self.w_0)) + self.b_0
x = tf.nn.elu(x)
output = tf.matmul(x, tf.math.exp(self.w_1)) + self.b_1
return self.q_1[actions[0]], self.q_2[actions[1]], output
class Qmix(object):
"""Qmix for matrix game."""
def __init__(self, matrix_dims, step_size):
self._optimizer = tf.keras.optimizers.SGD(learning_rate=step_size)
self._q_mixer = QmixNet(matrix_dims)
@tf.function
def learn(self, actions, r):
with tf.GradientTape(persistent=True, watch_accessed_variables=False) as tape:
tape.watch(self._q_mixer.trainable_weights)
q1, q2, q_out = self._q_mixer(actions, training=True)
loss = 0.5 * tf.square(q_out - r, name='loss')
grads = tape.gradient(loss, self._q_mixer.trainable_weights)
self._optimizer.apply_gradients(list(zip(grads, self._q_mixer.trainable_weights)))
return q1, q2, q_out
@tf.function
def obtain_q(self, actions):
"""Obtain q's."""
return self._q_mixer(actions, training=False) | matrix_game/q_mix.py |
# Import all packages
import tensorflow as tf
class QmixNet(tf.keras.Model):
def __init__(self, matrix_dims, name='Qmix', **kwargs):
super(QmixNet, self).__init__(name=name, **kwargs)
q_init = tf.zeros_initializer()
self.q_1 = tf.Variable(initial_value=q_init(shape=(matrix_dims[0],), dtype='float32'), trainable=True)
self.q_2 = tf.Variable(initial_value=q_init(shape=(matrix_dims[1],), dtype='float32'), trainable=True)
nmbr_units = 5
b_init = tf.zeros_initializer()
self.b_0 = tf.Variable(initial_value=b_init(shape=(nmbr_units,), dtype='float32'), trainable=True)
self.b_1 = tf.Variable(initial_value=b_init(shape=(1,), dtype='float32'), trainable=True)
w_init = tf.random_normal_initializer()
self.w_0 = tf.Variable(initial_value=w_init(shape=(2, nmbr_units), dtype='float32'), trainable=True)
self.w_1 = tf.Variable(initial_value=w_init(shape=(nmbr_units, 1), dtype='float32'), trainable=True)
@tf.function
def call(self, actions):
x = tf.expand_dims(tf.stack([self.q_1[actions[0]], self.q_2[actions[1]]]), axis=0)
x = tf.matmul(x, tf.math.exp(self.w_0)) + self.b_0
x = tf.nn.elu(x)
output = tf.matmul(x, tf.math.exp(self.w_1)) + self.b_1
return self.q_1[actions[0]], self.q_2[actions[1]], output
class Qmix(object):
"""Qmix for matrix game."""
def __init__(self, matrix_dims, step_size):
self._optimizer = tf.keras.optimizers.SGD(learning_rate=step_size)
self._q_mixer = QmixNet(matrix_dims)
@tf.function
def learn(self, actions, r):
with tf.GradientTape(persistent=True, watch_accessed_variables=False) as tape:
tape.watch(self._q_mixer.trainable_weights)
q1, q2, q_out = self._q_mixer(actions, training=True)
loss = 0.5 * tf.square(q_out - r, name='loss')
grads = tape.gradient(loss, self._q_mixer.trainable_weights)
self._optimizer.apply_gradients(list(zip(grads, self._q_mixer.trainable_weights)))
return q1, q2, q_out
@tf.function
def obtain_q(self, actions):
"""Obtain q's."""
return self._q_mixer(actions, training=False) | 0.885186 | 0.451387 |
import argparse
import os
import scipy.misc
import numpy as np
from ada_rendering import pose2image
import tensorflow as tf
from pdb import set_trace as st
parser = argparse.ArgumentParser(description='')
parser.add_argument('--dataset_name', dest='dataset_name', default='facades', help='name of the dataset')
parser.add_argument('--epoch', dest='epoch', type=int, default=350, help='# of epoch')
parser.add_argument('--batch_size', dest='batch_size', type=int, default=1, help='# images in batch')
parser.add_argument('--train_size', dest='train_size', type=int, default=1e8, help='# images used to train')
parser.add_argument('--load_size', dest='load_size', type=int, default=134, help='scale images to this size')
parser.add_argument('--fine_size', dest='fine_size', type=int, default=128, help='then crop to this size')
parser.add_argument('--input_nc', dest='input_nc', type=int, default=3, help='# of input image channels')
parser.add_argument('--output_nc', dest='output_nc', type=int, default=3, help='# of output image channels')
parser.add_argument('--niter', dest='niter', type=int, default=200, help='# of iter at starting learning rate')
parser.add_argument('--lr', dest='lr', type=float, default=1e-3, help='initial learning rate for adam')
parser.add_argument('--beta1', dest='beta1', type=float, default=0.5, help='momentum term of adam')
parser.add_argument('--flip', dest='flip', type=bool, default=False, help='if flip the images for data argumentation')
parser.add_argument('--which_direction', dest='which_direction', default='AtoB', help='AtoB or BtoA')
parser.add_argument('--phase', dest='phase', default='test', help='train, test')
parser.add_argument('--save_epoch_freq', dest='save_epoch_freq', type=int, default=50,
help='save a model every save_epoch_freq epochs (does not overwrite previously saved models)')
parser.add_argument('--print_freq', dest='print_freq', type=int, default=50,
help='print the debug information every print_freq iterations')
parser.add_argument('--save_latest_freq', dest='save_latest_freq', type=int, default=5000,
help='save the latest model every latest_freq sgd iterations (overwrites the previous latest model)')
parser.add_argument('--continue_train', dest='continue_train', type=bool, default=False,
help='if continue training, load the latest model: 1: true, 0: false')
parser.add_argument('--serial_batches', dest='serial_batches', type=bool, default=False,
help='f 1, takes images in order to make batches, otherwise takes them randomly')
parser.add_argument('--serial_batch_iter', dest='serial_batch_iter', type=bool, default=True,
help='iter into serial image list')
# parser.add_argument('--checkpoint_dir', dest='checkpoint_dir', default='/local-scratch/cvpr18/dataset/checkpoint/', help='models are saved here')
parser.add_argument('--checkpoint_dir', dest='checkpoint_dir', default='./pretrained_ckpt/fashion/checkpoint-debug-352epoch',
help='models are saved here')
parser.add_argument('--dataset', default='fashion')
parser.add_argument('--dataset_dir', default='./dataset')
# parser.add_argument('--root_dir', dest='root_dir',
# default='/local-scratch2/mzhai/cvpr18/fashion-pose2image-batchsize1/', help='root_dir')
parser.add_argument('--sample_dir', dest='sample_dir', default='./sample', help='sample are saved here')
# parser.add_argument('--test_dir', dest='test_dir', default='/local-scratch2/mzhai/ComputeCanada/final_models/final_models/pose2image-batchsize1/test', help='test sample are saved here')
parser.add_argument('--test_dir', dest='test_dir', default='./test-result', help='test sample are saved here')
parser.add_argument('--vgg_path', dest='vgg_path',
default='./pretrained_vgg/imagenet-vgg-verydeep-19.mat',
help='path of the pretrained vgg model')
args = parser.parse_args()
def main(_):
if not os.path.exists(args.checkpoint_dir):
os.makedirs(args.checkpoint_dir)
if not os.path.exists(args.sample_dir):
os.makedirs(args.sample_dir)
if not os.path.exists(args.test_dir):
os.makedirs(args.test_dir)
with tf.Session(config=tf.ConfigProto(device_count={'GPU': 1})) as sess:
print("Creating Model...")
model = pose2image(sess, image_size=args.fine_size, batch_size=args.batch_size,
output_size=args.fine_size, dataset_name=args.dataset_name,
checkpoint_dir=args.checkpoint_dir, sample_dir=args.sample_dir,
dataset = args.dataset, dataset_dir=args.dataset_dir, vgg_path=args.vgg_path)
print("Model Created...")
# st()
if args.phase == 'train':
print("Start to train model...")
model.train(args)
else:
print("Start to test model...")
model.test(args)
if __name__ == '__main__':
tf.app.run() | main.py | import argparse
import os
import scipy.misc
import numpy as np
from ada_rendering import pose2image
import tensorflow as tf
from pdb import set_trace as st
parser = argparse.ArgumentParser(description='')
parser.add_argument('--dataset_name', dest='dataset_name', default='facades', help='name of the dataset')
parser.add_argument('--epoch', dest='epoch', type=int, default=350, help='# of epoch')
parser.add_argument('--batch_size', dest='batch_size', type=int, default=1, help='# images in batch')
parser.add_argument('--train_size', dest='train_size', type=int, default=1e8, help='# images used to train')
parser.add_argument('--load_size', dest='load_size', type=int, default=134, help='scale images to this size')
parser.add_argument('--fine_size', dest='fine_size', type=int, default=128, help='then crop to this size')
parser.add_argument('--input_nc', dest='input_nc', type=int, default=3, help='# of input image channels')
parser.add_argument('--output_nc', dest='output_nc', type=int, default=3, help='# of output image channels')
parser.add_argument('--niter', dest='niter', type=int, default=200, help='# of iter at starting learning rate')
parser.add_argument('--lr', dest='lr', type=float, default=1e-3, help='initial learning rate for adam')
parser.add_argument('--beta1', dest='beta1', type=float, default=0.5, help='momentum term of adam')
parser.add_argument('--flip', dest='flip', type=bool, default=False, help='if flip the images for data argumentation')
parser.add_argument('--which_direction', dest='which_direction', default='AtoB', help='AtoB or BtoA')
parser.add_argument('--phase', dest='phase', default='test', help='train, test')
parser.add_argument('--save_epoch_freq', dest='save_epoch_freq', type=int, default=50,
help='save a model every save_epoch_freq epochs (does not overwrite previously saved models)')
parser.add_argument('--print_freq', dest='print_freq', type=int, default=50,
help='print the debug information every print_freq iterations')
parser.add_argument('--save_latest_freq', dest='save_latest_freq', type=int, default=5000,
help='save the latest model every latest_freq sgd iterations (overwrites the previous latest model)')
parser.add_argument('--continue_train', dest='continue_train', type=bool, default=False,
help='if continue training, load the latest model: 1: true, 0: false')
parser.add_argument('--serial_batches', dest='serial_batches', type=bool, default=False,
help='f 1, takes images in order to make batches, otherwise takes them randomly')
parser.add_argument('--serial_batch_iter', dest='serial_batch_iter', type=bool, default=True,
help='iter into serial image list')
# parser.add_argument('--checkpoint_dir', dest='checkpoint_dir', default='/local-scratch/cvpr18/dataset/checkpoint/', help='models are saved here')
parser.add_argument('--checkpoint_dir', dest='checkpoint_dir', default='./pretrained_ckpt/fashion/checkpoint-debug-352epoch',
help='models are saved here')
parser.add_argument('--dataset', default='fashion')
parser.add_argument('--dataset_dir', default='./dataset')
# parser.add_argument('--root_dir', dest='root_dir',
# default='/local-scratch2/mzhai/cvpr18/fashion-pose2image-batchsize1/', help='root_dir')
parser.add_argument('--sample_dir', dest='sample_dir', default='./sample', help='sample are saved here')
# parser.add_argument('--test_dir', dest='test_dir', default='/local-scratch2/mzhai/ComputeCanada/final_models/final_models/pose2image-batchsize1/test', help='test sample are saved here')
parser.add_argument('--test_dir', dest='test_dir', default='./test-result', help='test sample are saved here')
parser.add_argument('--vgg_path', dest='vgg_path',
default='./pretrained_vgg/imagenet-vgg-verydeep-19.mat',
help='path of the pretrained vgg model')
args = parser.parse_args()
def main(_):
    """Entry point: ensure the output directories exist, build the model, then train or test."""
    # Create any missing output directories up front.
    for directory in (args.checkpoint_dir, args.sample_dir, args.test_dir):
        if not os.path.exists(directory):
            os.makedirs(directory)
    with tf.Session(config=tf.ConfigProto(device_count={'GPU': 1})) as sess:
        print("Creating Model...")
        model = pose2image(sess, image_size=args.fine_size, batch_size=args.batch_size,
                           output_size=args.fine_size, dataset_name=args.dataset_name,
                           checkpoint_dir=args.checkpoint_dir, sample_dir=args.sample_dir,
                           dataset=args.dataset, dataset_dir=args.dataset_dir, vgg_path=args.vgg_path)
        print("Model Created...")
        if args.phase == 'train':
            print("Start to train model...")
            model.train(args)
        else:
            print("Start to test model...")
            model.test(args)
# TF1-style entry point: tf.app.run() parses flags and invokes main(_).
if __name__ == '__main__':
    tf.app.run() | 0.498291 | 0.085327  # NOTE(review): trailing "| ..." values look like dataset-dump residue, not Python
from .common import *
# Blueprint collecting the submission-related routes; registered by the app factory.
mod = Blueprint('submission', __name__)
class SubmitForm(FlaskForm):
    """Form for submitting a solution to a problem."""
    problem_id = IntegerField('Problem ID', [InputRequired('This field is required.')])
    # Choices are populated per-request from the Language table (see get_submit/post_submit).
    language_id = SelectField('Language', [InputRequired('This field is required.')], coerce=int)
    # Label fixed: it previously read 'Compile Command', a copy-paste leftover for this code field.
    code = TextAreaField('Code', [InputRequired('This field is required.')])
@mod.route('/submit/')
@mod.route('/submit/<int:problem_id>')
@login_required
def get_submit(problem_id=None):
    """Render the submission form, optionally pre-filled with a problem id."""
    form = SubmitForm()
    if problem_id:
        form.problem_id.data = problem_id
    available = db.session.query(Language).order_by(Language.id).all()
    form.language_id.choices = [(lang.id, lang.name) for lang in available]
    return render_template('submission/submit.html', form=form)
@mod.route('/submit/', methods=['POST'])
@login_required
def post_submit():
    """Validate a submission, persist it, and redirect to the status page."""
    form = SubmitForm()
    available = db.session.query(Language).order_by(Language.id).all()
    form.language_id.choices = [(lang.id, lang.name) for lang in available]
    if not form.validate():
        return render_template('submission/submit.html', form=form)
    problem = (db.session.query(Problem)
               .filter(Problem.id == form.problem_id.data)
               .first())
    if problem is None:
        flash('Cannot find the problem {}'.format(form.problem_id.data), 'danger')
        return render_template('submission/submit.html', form=form)
    new_submission = Submission(user_id=session['user_id'],
                                problem_id=form.problem_id.data,
                                language_id=form.language_id.data,
                                status=constants.SUBMISSION_PENDING,
                                code=form.code.data,
                                created_at=datetime.utcnow())
    db.session.add(new_submission)
    db.session.commit()
    return redirect(url_for('.get_status'))
@mod.route('/status/')
def get_status():
    """List all submissions, newest first, with their users and languages."""
    # TODO: paginate
    # TODO: filter
    submissions = db.session.query(Submission).order_by(Submission.id.desc()).all()
    user_ids = [s.user_id for s in submissions]
    # Bulk-load referenced users/languages so the template avoids per-row queries.
    users = {u.id: u for u in db.session.query(User).filter(User.id.in_(user_ids))}
    languages = {l.id: l for l in db.session.query(Language)}
    return render_template('submission/status.html', submissions=submissions, users=users, languages=languages) | web/codepass_web/views/submission.py | from .common import *
# NOTE(review): everything below duplicates the submission module defined above;
# this file looks like a dataset dump with repeated rows — kept byte-identical.
mod = Blueprint('submission', __name__)
class SubmitForm(FlaskForm):
    # Submission form; language choices are filled per-request from the DB.
    problem_id = IntegerField('Problem ID', [InputRequired('This field is required.')])
    language_id = SelectField('Language', [InputRequired('This field is required.')], coerce=int)
    # NOTE(review): label 'Compile Command' looks like a copy-paste leftover for a code field.
    code = TextAreaField('Compile Command', [InputRequired('This field is required.')])
@mod.route('/submit/')
@mod.route('/submit/<int:problem_id>')
@login_required
def get_submit(problem_id=None):
    """Render the submission form, optionally pre-filled with a problem id."""
    form = SubmitForm()
    if problem_id:
        form.problem_id.data = problem_id
    languages = db.session.query(Language).order_by(Language.id).all()
    form.language_id.choices = [(l.id, l.name) for l in languages]
    return render_template('submission/submit.html', form=form)
@mod.route('/submit/', methods=['POST'])
@login_required
def post_submit():
    """Validate the submitted form, store the submission, redirect to status."""
    form = SubmitForm()
    languages = db.session.query(Language).order_by(Language.id).all()
    form.language_id.choices = [(l.id, l.name) for l in languages]
    if not form.validate():
        return render_template('submission/submit.html', form=form)
    problem = db.session.query(Problem).filter(Problem.id == form.problem_id.data).first()
    if not problem:
        flash('Cannot find the problem {}'.format(form.problem_id.data), 'danger')
        return render_template('submission/submit.html', form=form)
    submission = Submission(user_id=session['user_id'],
                            problem_id=form.problem_id.data,
                            language_id=form.language_id.data,
                            status=constants.SUBMISSION_PENDING,
                            code=form.code.data,
                            created_at=datetime.utcnow())
    db.session.add(submission)
    db.session.commit()
    return redirect(url_for('.get_status'))
@mod.route('/status/')
def get_status():
    """List all submissions (newest first) with their users and languages."""
    # TODO: paginate
    # TODO: filter
    submissions = db.session.query(Submission).order_by(Submission.id.desc()).all()
    user_ids = [s.user_id for s in submissions]
    users = {u.id: u for u in db.session.query(User).filter(User.id.in_(user_ids))}
    languages = {l.id: l for l in db.session.query(Language)}
    return render_template('submission/status.html', submissions=submissions, users=users, languages=languages) | 0.322099 | 0.083404
import argparse
import datetime
import hashlib
import os
import os.path as osp
import uuid
import torch
import yaml
from torch.optim.lr_scheduler import MultiStepLR
from torch.utils.data import DataLoader
import torchfcn
from cmu_airlab.datasets.dataset_air_lab import AirLabClassSegBase
from torchfcn.models.fcn_utils import get_parameters
from torchfcn.utils import git_hash
# This is used to differentiate a kind of 'debug' mode on my notebook, which does not have enough graphics memory.
# SHA-256 digests of str(uuid.getnode()) — i.e. of the machine id/MAC — for the author's machines (see main()).
nb_hashs = [b'\x88\x95\xe23\x9b\xff_RN8\xfe\xd0\x08\xe6r\x05m1\x9e\x94\xac!\xef\xb2\xc2\xc9k\x18\x0f\xc6\xda\xbf',
            b'YTZ\x13J4f\xda;)E\xb1\x82i\xbe\x87\xc3\xf2=\x90"\x1c\xa3\xfb\t>9\xb5\xb8\x89\x1au']
# Directory of this script; log folders are created beneath it in settings_to_logfile().
here = osp.dirname(osp.abspath(__file__))
def main():
    """Train FCN8s on the AirLab dataset with k-fold cross validation.

    For each fold: build train/val loaders, construct the (partially frozen)
    model, an SGD optimizer with doubled LR / no weight decay for biases, a
    step LR scheduler, and run the torchfcn Trainer. Supports resuming from
    a checkpoint given via --resume.
    """
    # Detect the author's low-memory notebook via a hash of the machine id.
    m = hashlib.sha256()
    m.update(str(uuid.getnode()).encode('utf-8'))
    on_my_notebook = m.digest() in nb_hashs
    args = argument_parsing()
    args.model = 'FCN8s'
    args.git_hash = git_hash()  # This is a nice idea: Makes results reproducible by logging current git commit.
    args.use_cuda = prepare_cuda(args, torch_seed=42)
    args.use_cuda = False if on_my_notebook else args.use_cuda
    settings_to_logfile(args)
    print("Output folder:\n{}".format(args.out))
    for k in range(args.k_fold):
        print("Training fold {}/{}".format(k, args.k_fold))
        out = osp.join(args.out, "fold_{}".format(k))
        # Prepare Dataset
        root = osp.expanduser('~/Daten/datasets/cmu-airlab/assignment-task-5/data')
        if on_my_notebook:
            root = "../data"
        kwargs = {'num_workers': 8, 'pin_memory': True} if args.use_cuda else {}
        train_dst = AirLabClassSegBase(root, transform=True, max_len=3 if on_my_notebook else None,
                                       k_fold=args.k_fold, k_fold_val=k, use_augmented=False)
        test_dst = AirLabClassSegBase(root, val=True, transform=True, max_len=3 if on_my_notebook else None,
                                      k_fold=args.k_fold, k_fold_val=k, use_augmented=False)
        train_loader = DataLoader(train_dst, batch_size=5, shuffle=False, **kwargs)
        val_loader = DataLoader(test_dst, batch_size=1, shuffle=False, **kwargs)
        # Check for checkpoint.
        start_epoch = 0
        start_iteration = 0
        checkpoint = None
        if args.resume:
            checkpoint = torch.load(args.resume)
            start_epoch = checkpoint['epoch']
            start_iteration = checkpoint['iteration']
        # Prepare model. Load weights from checkpoint if available.
        fcn_model = prepare_model(args, freeze_cnn_weights=True, checkpoint=checkpoint)
        # Prepare optimizer and learning rate scheduler.
        optim = torch.optim.SGD(
            [
                {'params': get_parameters(fcn_model, bias=False)},
                # Biases get twice the learning rate and no weight decay (FCN convention).
                {'params': get_parameters(fcn_model, bias=True),
                 'lr': args.lr * 2, 'weight_decay': 0},
            ],
            lr=args.lr,
            momentum=args.momentum,
            weight_decay=args.weight_decay)
        if checkpoint is not None:
            # Reuse the checkpoint loaded above instead of a redundant second torch.load.
            optim.load_state_dict(checkpoint['optim_state_dict'])
        scheduler = MultiStepLR(optim, milestones=[64, 67, 70], gamma=0.1, last_epoch=start_epoch - 1)
        weight_unfreezer = prepare_weight_unfreezer(optim, fcn_model, cnn_weights_frozen=True)
        # NOTE(review): model_refiner is created but never registered as a callback below — confirm intent.
        model_refiner = prepare_model_refinement(fcn_model)
        trainer = torchfcn.Trainer(
            cuda=args.use_cuda,
            model=fcn_model,
            optimizer=optim,
            lr_scheduler=scheduler,
            train_loader=train_loader,
            val_loader=val_loader,
            out=out,
            max_epoch=args.max_epoch,
            interval_val_viz=5,
            epoch_callback_tuples=[(70, weight_unfreezer)]
        )
        trainer.epoch = start_epoch
        trainer.iteration = start_iteration
        trainer.train()
def prepare_model_refinement(fcn_model):
    """Return a callback that enables the refinement layer and freezes all other layers.

    Bug fix: the original compared the child name with ``is not`` — object
    identity against a string literal — which is implementation-dependent
    (and froze refinement_1 as well on CPython); ``!=`` is the intended test.
    """
    def set_model_refinement():
        fcn_model.use_refinement = True
        for name, layer in fcn_model.named_children():
            if name != "refinement_1":
                for param in layer.parameters():
                    param.requires_grad = False
        print("Model is using refinement layer, now, other layers frozen.")
    return set_model_refinement
def prepare_weight_unfreezer(optim, fcn_model, cnn_weights_frozen):
    """Return a callback that re-enables gradients for every parameter of *fcn_model*.

    *optim* is accepted for interface compatibility with sibling callbacks but
    is not used; *cnn_weights_frozen* guards whether the unfreeze happens.
    """
    def weight_unfreezer():
        if not cnn_weights_frozen:
            return
        for _, child in fcn_model.named_children():
            for p in child.parameters():
                p.requires_grad = True
        print("All weights unfrozen.")
    return weight_unfreezer
def prepare_model(args, freeze_cnn_weights=True, checkpoint=None):
    """Build an 11-class FCN8s, initialised from a checkpoint or a pretrained FCN16s.

    When *checkpoint* is None, FCN16s weights are loaded from
    args.pretrained_model (either a raw state dict or one wrapped under
    'model_state_dict') and copied into the FCN8s. With *freeze_cnn_weights*
    every child module not listed in fcn_model.class_dependent_layers stops
    requiring gradients.
    """
    fcn_model = torchfcn.models.FCN8s(n_class=11)
    if checkpoint is not None:
        fcn_model.load_state_dict(checkpoint['model_state_dict'])
    else:
        # It seems tedious to load the pretrained model into FCN16s first and then copy the params from there.
        # I assume this is due to the available pretrained models.
        fcn16s = torchfcn.models.FCN16s()
        state_dict = torch.load(args.pretrained_model)
        try:
            fcn16s.load_state_dict(state_dict)
        except RuntimeError:
            # Trainer checkpoints wrap the weights under 'model_state_dict'.
            fcn16s.load_state_dict(state_dict['model_state_dict'])
        fcn_model.copy_params_from_fcn16s(fcn16s, n_class_changed=True)
    if args.use_cuda:
        print("Using CUDA.")
        fcn_model = fcn_model.cuda()
    if freeze_cnn_weights:  # Freeze all layers except the class-dependent ones.
        for name, layer in fcn_model.named_children():
            if name not in fcn_model.class_dependent_layers:
                for param in layer.parameters():
                    param.requires_grad = False
    return fcn_model
def settings_to_logfile(args):
    """Create a timestamped log directory under ``here`` and dump *args* to config.yaml.

    Side effect: sets ``args.out`` to the newly created directory.
    """
    timestamp = datetime.datetime.now().strftime('%Y%m%d_%H%M%S.%f')
    args.out = osp.join(here, 'logs', timestamp)
    os.makedirs(args.out)
    config_path = osp.join(args.out, 'config.yaml')
    with open(config_path, 'w') as f:
        yaml.safe_dump(args.__dict__, f, default_flow_style=False)
def prepare_cuda(args, torch_seed=42):
    """Pin the visible GPU, seed torch (and CUDA when present), and report CUDA availability."""
    os.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpu)
    cuda_available = torch.cuda.is_available()
    torch.manual_seed(torch_seed)
    if cuda_available:
        torch.cuda.manual_seed(torch_seed)
    return cuda_available
def argument_parsing():
    """Parse command-line options for training; defaults follow the original FCN work."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument('-g', '--gpu', type=int, default=0, help='gpu img_id')
    parser.add_argument('--resume', help='checkpoint path')
    # configurations (same configuration as original work)
    # https://github.com/shelhamer/fcn.berkeleyvision.org
    parser.add_argument(
        '--max-epoch', type=int, default=101, help='max epoch'
    )
    parser.add_argument(
        '--k-fold', type=int, default=4, help='k for k-fold validation'
    )
    parser.add_argument(
        '--lr', type=float, default=1.0e-7, help='learning rate',
    )
    parser.add_argument(
        '--weight-decay', type=float, default=0.0005, help='weight decay',
    )
    parser.add_argument(
        '--momentum', type=float, default=0.99, help='momentum',
    )
    parser.add_argument(
        '--pretrained-model',
        # NOTE(review): this default is evaluated every call, so FCN16s.download()
        # runs even when --pretrained-model is supplied — confirm that is intended.
        default=torchfcn.models.FCN16s.download(),
        help='pretrained model of FCN16s',
    )
    return parser.parse_args()
# Script entry point.
if __name__ == '__main__':
    main() | train_airlab_data.py |
# NOTE(review): from here the file repeats train_airlab_data.py above
# (dataset-dump duplication) — kept byte-identical, comments only.
import argparse
import datetime
import hashlib
import os
import os.path as osp
import uuid
import torch
import yaml
from torch.optim.lr_scheduler import MultiStepLR
from torch.utils.data import DataLoader
import torchfcn
from cmu_airlab.datasets.dataset_air_lab import AirLabClassSegBase
from torchfcn.models.fcn_utils import get_parameters
from torchfcn.utils import git_hash
# This is used to differentiate a kind of 'debug' mode on my notebook, which does not have enough graphics memory.
nb_hashs = [b'\x88\x95\xe23\x9b\xff_RN8\xfe\xd0\x08\xe6r\x05m1\x9e\x94\xac!\xef\xb2\xc2\xc9k\x18\x0f\xc6\xda\xbf',
            b'YTZ\x13J4f\xda;)E\xb1\x82i\xbe\x87\xc3\xf2=\x90"\x1c\xa3\xfb\t>9\xb5\xb8\x89\x1au']
here = osp.dirname(osp.abspath(__file__))
def main():
    """Train FCN8s on the AirLab data with k-fold cross validation (duplicate copy)."""
    m = hashlib.sha256()
    m.update(str(uuid.getnode()).encode('utf-8'))
    on_my_notebook = m.digest() in nb_hashs
    args = argument_parsing()
    args.model = 'FCN8s'
    args.git_hash = git_hash()  # This is a nice idea: Makes results reproducible by logging current git commit.
    args.use_cuda = prepare_cuda(args, torch_seed=42)
    args.use_cuda = False if on_my_notebook else args.use_cuda
    settings_to_logfile(args)
    print("Output folder:\n{}".format(args.out))
    for k in range(args.k_fold):
        print("Training fold {}/{}".format(k, args.k_fold))
        out = osp.join(args.out, "fold_{}".format(k))
        # Prepare Dataset
        root = osp.expanduser('~/Daten/datasets/cmu-airlab/assignment-task-5/data')
        if on_my_notebook:
            root = "../data"
        kwargs = {'num_workers': 8, 'pin_memory': True} if args.use_cuda else {}
        train_dst = AirLabClassSegBase(root, transform=True, max_len=3 if on_my_notebook else None,
                                       k_fold=args.k_fold, k_fold_val=k, use_augmented=False)
        test_dst = AirLabClassSegBase(root, val=True, transform=True, max_len=3 if on_my_notebook else None,
                                      k_fold=args.k_fold, k_fold_val=k, use_augmented=False)
        train_loader = DataLoader(train_dst, batch_size=5, shuffle=False, **kwargs)
        val_loader = DataLoader(test_dst, batch_size=1, shuffle=False, **kwargs)
        # Check for checkpoint.
        start_epoch = 0
        start_iteration = 0
        checkpoint = None
        if args.resume:
            checkpoint = torch.load(args.resume)
            start_epoch = checkpoint['epoch']
            start_iteration = checkpoint['iteration']
        # Prepare model. Load weights from checkpoint if available.
        fcn_model = prepare_model(args, freeze_cnn_weights=True, checkpoint=checkpoint)
        # Prepare optimizer and learning rate scheduler-
        optim = torch.optim.SGD(
            [
                {'params': get_parameters(fcn_model, bias=False)},
                {'params': get_parameters(fcn_model, bias=True),
                 'lr': args.lr * 2, 'weight_decay': 0},
            ],
            lr=args.lr,
            momentum=args.momentum,
            weight_decay=args.weight_decay)
        if args.resume:
            # NOTE(review): the checkpoint was already loaded above; this second
            # torch.load is redundant work.
            checkpoint = torch.load(args.resume)
            optim.load_state_dict(checkpoint['optim_state_dict'])
        scheduler = MultiStepLR(optim, milestones=[64, 67, 70], gamma=0.1, last_epoch=start_epoch - 1)
        weight_unfreezer = prepare_weight_unfreezer(optim, fcn_model, cnn_weights_frozen=True)
        # NOTE(review): model_refiner is never passed to the trainer below — confirm intent.
        model_refiner = prepare_model_refinement(fcn_model)
        trainer = torchfcn.Trainer(
            cuda=args.use_cuda,
            model=fcn_model,
            optimizer=optim,
            lr_scheduler=scheduler,
            train_loader=train_loader,
            val_loader=val_loader,
            out=out,
            max_epoch=args.max_epoch,
            interval_val_viz=5,
            epoch_callback_tuples=[(70, weight_unfreezer)]
        )
        trainer.epoch = start_epoch
        trainer.iteration = start_iteration
        trainer.train()
def prepare_model_refinement(fcn_model):
    """Return a callback that enables the refinement layer and freezes all other layers.

    Bug fix: the original used ``is not`` to compare the child name with a
    string literal (object identity, implementation-dependent); ``!=`` is the
    intended comparison.
    """
    def set_model_refinement():
        fcn_model.use_refinement = True
        for name, layer in fcn_model.named_children():
            if name != "refinement_1":
                for param in layer.parameters():
                    param.requires_grad = False
        print("Model is using refinement layer, now, other layers frozen.")
    return set_model_refinement
def prepare_weight_unfreezer(optim, fcn_model, cnn_weights_frozen):
    """Return a callback re-enabling gradients on all parameters (duplicate copy; *optim* unused)."""
    def weight_unfreezer():
        if cnn_weights_frozen:  # only act when the weights were frozen
            for name, layer in fcn_model.named_children():
                for param in layer.parameters():
                    param.requires_grad = True
            print("All weights unfrozen.")
    return weight_unfreezer
def prepare_model(args, freeze_cnn_weights=True, checkpoint=None):
    """Build an 11-class FCN8s from a checkpoint or pretrained FCN16s (duplicate copy)."""
    fcn_model = torchfcn.models.FCN8s(n_class=11)
    if checkpoint is not None:
        fcn_model.load_state_dict(checkpoint['model_state_dict'])
    else:
        # It seems tedious to load the pretrained model into FCN16s first and then copy the params from there.
        # I assume this is due to the available pretrained models.
        fcn16s = torchfcn.models.FCN16s()
        state_dict = torch.load(args.pretrained_model)
        try:
            fcn16s.load_state_dict(state_dict)
        except RuntimeError:
            fcn16s.load_state_dict(state_dict['model_state_dict'])
        fcn_model.copy_params_from_fcn16s(fcn16s, n_class_changed=True)
    if args.use_cuda:
        print("Using CUDA.")
        fcn_model = fcn_model.cuda()
    if freeze_cnn_weights:  # Freezing cnn weights.
        for name, layer in fcn_model.named_children():
            if name not in fcn_model.class_dependent_layers:
                for param in layer.parameters():
                    param.requires_grad = False
    return fcn_model
def settings_to_logfile(args):
    """Create a timestamped log dir under ``here`` and dump args to config.yaml (sets args.out)."""
    now = datetime.datetime.now()
    args.out = osp.join(here, 'logs', now.strftime('%Y%m%d_%H%M%S.%f'))
    os.makedirs(args.out)
    with open(osp.join(args.out, 'config.yaml'), 'w') as f:
        yaml.safe_dump(args.__dict__, f, default_flow_style=False)
def prepare_cuda(args, torch_seed=42):
    """Pin the visible GPU, seed torch/CUDA, and return CUDA availability."""
    os.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpu)
    use_cuda = torch.cuda.is_available()
    torch.manual_seed(torch_seed)
    if use_cuda:
        torch.cuda.manual_seed(torch_seed)
    return use_cuda
def argument_parsing():
    """Parse command-line options; defaults follow the original FCN configuration."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument('-g', '--gpu', type=int, default=0, help='gpu img_id')
    parser.add_argument('--resume', help='checkpoint path')
    # configurations (same configuration as original work)
    # https://github.com/shelhamer/fcn.berkeleyvision.org
    parser.add_argument(
        '--max-epoch', type=int, default=101, help='max epoch'
    )
    parser.add_argument(
        '--k-fold', type=int, default=4, help='k for k-fold validation'
    )
    parser.add_argument(
        '--lr', type=float, default=1.0e-7, help='learning rate',
    )
    parser.add_argument(
        '--weight-decay', type=float, default=0.0005, help='weight decay',
    )
    parser.add_argument(
        '--momentum', type=float, default=0.99, help='momentum',
    )
    parser.add_argument(
        '--pretrained-model',
        # NOTE(review): evaluated each call — triggers FCN16s.download() unconditionally.
        default=torchfcn.models.FCN16s.download(),
        help='pretrained model of FCN16s',
    )
    return parser.parse_args()
if __name__ == '__main__':
    main() | 0.667906 | 0.225246
import os
import time
from mindspore.common import set_seed
from src.dataset import data_to_mindrecord_byte_image
from src.model_utils.config import config
set_seed(1)  # fix MindSpore's global RNG seed for reproducible record generation
# Single-process defaults; a distributed launcher would set these differently.
rank = 0
device_num = 1
def generate_coco_mindrecord():
    """Create the FasterRcnn MindRecord files in config.mindrecord_dir if missing.

    Rank 0 converts the raw dataset (COCO or "other") to MindRecord; every
    process then blocks until the index file (*.db) exists.
    """
    # It will generate mindrecord file in config.mindrecord_dir,
    # and the file name is FasterRcnn.mindrecord0, 1, ... file_num.
    prefix = "FasterRcnn.mindrecord"
    mindrecord_dir = config.mindrecord_dir
    mindrecord_file = os.path.join(mindrecord_dir, prefix)
    print("CHECKING MINDRECORD FILES ...")
    if rank == 0 and not os.path.exists(mindrecord_file):
        if not os.path.isdir(mindrecord_dir):
            os.makedirs(mindrecord_dir)
        if config.dataset == "coco":
            if os.path.isdir(config.coco_root):
                # os.path.isdir already guarantees existence, so the original
                # inner os.path.exists re-check (and its ValueError) was unreachable.
                print("Create Mindrecord. It may take some time.")
                data_to_mindrecord_byte_image(config, "coco", True, prefix, 1)
                print("Create Mindrecord Done, at {}".format(mindrecord_dir))
            else:
                print("coco_root not exists.")
        else:
            if os.path.isdir(config.image_dir) and os.path.exists(config.anno_path):
                print("Create Mindrecord. It may take some time.")
                data_to_mindrecord_byte_image(config, "other", True, prefix)
                print("Create Mindrecord Done, at {}".format(mindrecord_dir))
            else:
                print("image_dir or anno_path not exists.")
    # Wait for the index file written at the end of conversion (by rank 0).
    while not os.path.exists(mindrecord_file + ".db"):
        time.sleep(5)
    print("CHECKING MINDRECORD FILES DONE!")
# Script entry point.
if __name__ == '__main__':
    generate_coco_mindrecord() | tests/st/fl/cross_silo_faster_rcnn/generate_mindrecord.py | import os
import time
from mindspore.common import set_seed
from src.dataset import data_to_mindrecord_byte_image
from src.model_utils.config import config
# NOTE(review): this region duplicates generate_mindrecord.py above (dataset-dump repetition).
set_seed(1)
rank = 0
device_num = 1
def generate_coco_mindrecord():
    """Create the FasterRcnn MindRecord files in config.mindrecord_dir if missing."""
    # It will generate mindrecord file in config.mindrecord_dir,
    # and the file name is FasterRcnn.mindrecord0, 1, ... file_num.
    prefix = "FasterRcnn.mindrecord"
    mindrecord_dir = config.mindrecord_dir
    mindrecord_file = os.path.join(mindrecord_dir, prefix)
    print("CHECKING MINDRECORD FILES ...")
    if rank == 0 and not os.path.exists(mindrecord_file):
        if not os.path.isdir(mindrecord_dir):
            os.makedirs(mindrecord_dir)
        if config.dataset == "coco":
            if os.path.isdir(config.coco_root):
                # NOTE(review): unreachable — isdir() above already implies exists().
                if not os.path.exists(config.coco_root):
                    print("Please make sure config:coco_root is valid.")
                    raise ValueError(config.coco_root)
                print("Create Mindrecord. It may take some time.")
                data_to_mindrecord_byte_image(config, "coco", True, prefix, 1)
                # data_to_mindrecord_byte_image(config, "coco", True, prefix)
                print("Create Mindrecord Done, at {}".format(mindrecord_dir))
            else:
                print("coco_root not exits.")
        else:
            if os.path.isdir(config.image_dir) and os.path.exists(config.anno_path):
                # NOTE(review): unreachable — the isdir() in the condition implies exists().
                if not os.path.exists(config.image_dir):
                    print("Please make sure config:image_dir is valid.")
                    raise ValueError(config.image_dir)
                print("Create Mindrecord. It may take some time.")
                data_to_mindrecord_byte_image(config, "other", True, prefix)
                print("Create Mindrecord Done, at {}".format(mindrecord_dir))
            else:
                print("image_dir or anno_path not exits.")
    # Block until the index file appears (written by rank 0).
    while not os.path.exists(mindrecord_file + ".db"):
        time.sleep(5)
    print("CHECKING MINDRECORD FILES DONE!")
if __name__ == '__main__':
    generate_coco_mindrecord() | 0.181263 | 0.117699
import cv2
import glob
import keras
import numpy as np
import pandas as pd
from keras import backend as K
from keras.layers import MaxPooling2D, Conv2D, Flatten, Dense, Input, AlphaDropout, Dropout
from keras.models import Model
from settings import IMAGE_SIZE
# Load all storm observations and build a per-storm key (year-basin-storm).
df = pd.read_pickle('../data/all_obs.pkl', compression='gzip')
df['key'] = df.year + '-' + df.basin + '-' + df.storm
def split_df(train=0.7):
    """Split the global ``df`` into train/test frames by whole storms.

    Storm keys are shuffled, then assigned to the training set until at least
    the *train* fraction of all observations is covered; the remaining storms
    form the test set. Splitting by storm keeps one storm's images out of
    both sets at once.
    """
    # np.random.seed(1234)
    per_storm = (df.groupby('key', as_index=False)
                 .agg({'basin': 'count'})
                 .sample(frac=1)
                 .reset_index(drop=True))
    per_storm['cum_sum'] = np.cumsum(per_storm.basin)
    n_total = per_storm.cum_sum.iloc[-1]
    cutoff = train * n_total
    split_idx = (per_storm.cum_sum >= cutoff).idxmax()
    train_keys = per_storm.key[0:split_idx + 1]
    test_keys = per_storm.key[split_idx + 1:]
    print('Storms Train', len(train_keys))
    print('Storms Test', len(test_keys))
    train_frame = df[df.key.isin(train_keys)]
    test_frame = df[df.key.isin(test_keys)]
    print('Total Train', len(train_frame))
    print('Total Test', len(test_frame))
    return train_frame, test_frame
# 95% of observations (by storm) for training, the rest for testing.
df_train, df_test = split_df(0.95)
def read(file):
    """Load an image as float32 scaled to [0, 1]; return None when the file can't be read."""
    raw = cv2.imread(file)
    return None if raw is None else raw.astype('float32') / 255.0
class Encoder:
    """
    Given a large-scale regression problem with a neural network it can be helpful to instead use output bins.
    E.g. when predicting the age of a person from an image, possible output bins might be np.arange(0,101,1).
    This encoder transforms numerical values (i.e. the age) into a normal probability distribution over these bins
    which then can be used as a target for multi-label classification.
    This means that networks which use this encoder should use "binary_crossentropy" loss together with "sigmoid"
    activation in the last layer.
    Example:
    enc = RegressionToClassificationEncoder(classes=np.arange(0,101,1))
    y = [[35],[28],[16]]
    y_transformed = enc.transform(y) # gives a shape (3 x 100) array
    model = keras.models.Sequential()
    ...
    model.add(enc.get_last_layer()) # Dense(100, activation='sigmoid')
    model.compile(loss=keras.losses.binary_crossentropy, optimizer='Adam',
    metrics=[enc.mean_absolute_error, enc.mean_squared_error])
    model.fit(x_train, y_transformed)
    y_test_transformed = model.predict(x_test)
    y_test = enc.inverse_transform(y_test_transformed)
    """
    def __init__(self, classes):
        self.classes = classes
        self.n_classes = len(self.classes)
        # Fixed Gaussian width for the soft bins; np.std(self.classes) was tried instead.
        self.std = 3  # np.std(self.classes)
        self.mean = np.mean(self.classes)
        # Column vector of bin centers as a backend tensor, used by _inv_tensor.
        self._class_tensor = K.constant(value=self.classes.reshape(-1, 1), dtype='float32')
        print(self.classes)
    def transform(self, vals):
        """Encode each scalar in *vals* as unnormalised Gaussian bumps over the class bins."""
        vals = np.asarray(vals, dtype='float32')
        n_vals = vals.shape[0]
        e = np.zeros((n_vals, self.n_classes))
        c2 = 2 * self.std * self.std
        # c = 1.0 / np.sqrt(np.pi * c2)
        for i, val in enumerate(vals):
            r = np.exp(-1 * np.square(val - self.classes) / c2)
            # r[r < K.epsilon()] = 0
            e[i, :] = r
        return e
    def inverse_transform(self, vals):
        """Decode bin activations back to scalars: expected value of the normalised distribution."""
        return (vals / np.sum(vals, axis=1, keepdims=True)).dot(self.classes)
    def _inv_tensor(self, y):
        """Backend-tensor version of inverse_transform, for use inside metrics."""
        # y (n_images x 20)
        # sum (n_images x 1)
        # div (n_images x 20)
        # dot (n_images x 20) x (20 x 1) -> (n_images x 1)
        d = (y / K.sum(y, axis=1, keepdims=True))
        z = K.dot(d, self._class_tensor)
        e = K.reshape(z, (-1,))
        return e
    def mean_squared_error(self, y_true, y_pred):
        """MSE measured in the original (decoded) units, usable as a Keras metric."""
        return keras.losses.mean_squared_error(self._inv_tensor(y_true), self._inv_tensor(y_pred))
    def mean_absolute_error(self, y_true, y_pred):
        """MAE measured in the original (decoded) units, usable as a Keras metric."""
        return keras.losses.mean_absolute_error(self._inv_tensor(y_true), self._inv_tensor(y_pred))
    def get_last_layer(self):
        """Output layer matching this encoding: one sigmoid unit per bin."""
        return keras.layers.Dense(len(self.classes), activation='sigmoid')
# Wind-speed bins 0..200 in steps of 5; regression targets become soft class labels.
ENC = Encoder(classes=np.arange(0, 201, 5))
# generator function for batches. randomly loads pairs of images from the full dataset
def gen(my_df, batch_size=128, which='both'):
    """Infinite batch generator over random rows of *my_df*.

    which='both' yields ([IR, WV], Y); 'ir' yields ([IR], Y); 'wv' yields
    ([WV], Y). Rows whose image file(s) cannot be read are silently dropped,
    so a batch may contain fewer than *batch_size* samples. Y is the wind
    speed soft-encoded into class bins by ENC.
    """
    while True:
        all_img_ir = []
        all_img_wv = []
        all_labels = []
        # Sample row indices with replacement.
        x = np.random.choice(np.arange(len(my_df)), batch_size)
        for i in x:
            f = my_df.iloc[i]
            label = f['wind']
            if which == 'both':
                img_ir = read(f['file_ir'])
                img_wv = read(f['file_wv'])
                if img_wv is None or img_ir is None:
                    continue
                all_img_ir.append(img_ir)
                all_img_wv.append(img_wv)
                all_labels.append(label)
            elif which == 'ir':
                img_ir = read(f['file_ir'])
                if img_ir is None:
                    continue
                all_img_ir.append(img_ir)
                all_labels.append(label)
            elif which == 'wv':
                img_wv = read(f['file_wv'])
                if img_wv is None:
                    continue
                all_img_wv.append(img_wv)
                all_labels.append(label)
        IR, WV, Y = np.asarray(all_img_ir, dtype='float32'), np.asarray(all_img_wv, dtype='float32'), ENC.transform(
            all_labels)
        if which == 'both':
            yield [IR, WV], Y
        elif which == 'ir':
            yield [IR], Y
        elif which == 'wv':
            yield [WV], Y
def base_cnn2():
    """InceptionV3 backbone (no top, random init, global max pooling).

    Returns (input_tensor, output_tensor).
    """
    backbone = keras.applications.InceptionV3(include_top=False, weights=None,
                                              input_shape=(IMAGE_SIZE, IMAGE_SIZE, 3),
                                              pooling='max')
    return backbone.inputs[0], backbone.outputs[0]
def base_cnn():
    """SELU CNN backbone: six conv stages (pooling after the first five),
    then AlphaDropout and Flatten.

    Returns (input_tensor, flattened_feature_tensor).
    """
    inp = Input(shape=(IMAGE_SIZE, IMAGE_SIZE, 3))
    net = inp
    stages = [(64, 'conv1'), (64, 'conv2'), (128, 'conv3'),
              (128, 'conv4'), (256, 'conv5'), (256, 'conv6')]
    for filters, layer_name in stages:
        net = Conv2D(filters, (3, 3), activation='selu',
                     kernel_initializer='lecun_normal', name=layer_name)(net)
        if layer_name != 'conv6':  # every stage but the last is followed by max pooling
            net = MaxPooling2D()(net)
    net = AlphaDropout(0.3)(net)
    net = Flatten()(net)
    return inp, net
# Branch toggle: True trains single-channel models per modality; the else-branch
# (currently dead) would fuse pretrained IR and WV models into a combined net.
if True:
    for m in [ 'wv']:
        gen_train = gen(df_train, which=m)
        gen_test = gen(df_test, which=m)
        # architecture_combined = keras.layers.concatenate([model_ir, model_water_vapor])
        inp, model = base_cnn2()
        model = Dropout(0.4)(model)
        architecture_combined = Dense(256, activation='relu')(model)
        architecture_combined = Dense(ENC.n_classes, activation='sigmoid')(architecture_combined)
        model_combined = Model(inputs=[inp], outputs=[architecture_combined])
        # Resume from the newest matching weight file, if any.
        file = glob.glob('../data/' + m + '*h5')
        if len(file) > 0:
            print('Loading %s' % file[0])
            model_combined.load_weights(file[0], by_name=True)
        # Prefix layer names with the modality so later fusion avoids name clashes.
        for l in model_combined.layers:
            if m not in l.name:
                l.name = m + '_' + l.name
        model_combined.compile(loss=keras.losses.binary_crossentropy,
                               optimizer=keras.optimizers.SGD(momentum=0.9, decay=1e-6),
                               metrics=[ENC.mean_squared_error, ENC.mean_absolute_error])
        print('Parameters', model_combined.count_params())
        cb = [
            # keras.callbacks.EarlyStopping(min_delta=0.5, patience=3, monitor='mean_squared_error'),
            keras.callbacks.ModelCheckpoint(save_weights_only=True, save_best_only=True,
                                            filepath='../data/' + m + '_EPOCH={epoch:02d}_MAE={val_mean_absolute_error:.2f}.h5',
                                            monitor='val_mean_absolute_error', mode='min'),
        ]
        model_combined.fit_generator(gen_train, steps_per_epoch=15, epochs=3, callbacks=cb, validation_data=gen_test,
                                     validation_steps=5)
else:
    gen_train = gen(df_train, which='both')
    gen_test = gen(df_test, which='both')
    file = glob.glob('../data/ir*hdf5')
    model_ir = keras.models.load_model(file[0], compile=False)
    model_ir.save_weights('weights_ir')
    for l in model_ir.layers:
        l.name += 'IR'
    inp_ir = model_ir.inputs[0]
    out_ir = model_ir.layers[-2].output
    file = glob.glob('../data/wv*hdf5')
    model_wv = keras.models.load_model(file[0], compile=False)
    for l in model_wv.layers:
        l.name += 'WV'
    inp_wv = model_wv.inputs[0]
    out_wv = model_wv.layers[-2].output
    # Fuse the penultimate features of both modality models.
    architecture_combined = keras.layers.concatenate([out_ir, out_wv])
    architecture_combined = Dense(ENC.n_classes, activation='sigmoid')(architecture_combined)
    model_combined = Model(inputs=[inp_ir, inp_wv], outputs=[architecture_combined])
    model_combined.compile(loss=keras.losses.binary_crossentropy,
                           optimizer=keras.optimizers.Adam(),
                           metrics=[ENC.mean_squared_error])
    model_combined.summary()
    cb = [
        # keras.callbacks.EarlyStopping(min_delta=0.5, patience=3, monitor='mean_squared_error'),
        keras.callbacks.ModelCheckpoint(
            filepath='../data/combined_weights.{epoch:02d}-{val_mean_squared_error:.2f}.hdf5'),
    ]
    model_combined.fit_generator(gen_train, steps_per_epoch=40, epochs=5, callbacks=cb, validation_data=gen_test,
                                 validation_steps=5) | process/3_train_nn.py | import cv2
import glob
import keras
import numpy as np
import pandas as pd
from keras import backend as K
from keras.layers import MaxPooling2D, Conv2D, Flatten, Dense, Input, AlphaDropout, Dropout
from keras.models import Model
from settings import IMAGE_SIZE
df = pd.read_pickle('../data/all_obs.pkl', compression='gzip')
df['key'] = df.year + '-' + df.basin + '-' + df.storm
def split_df(train=0.7):
# np.random.seed(1234)
key_count = df.groupby('key', as_index=False).agg({'basin': 'count'}).sample(frac=1).reset_index(drop=True)
key_count['cum_sum'] = np.cumsum(key_count.basin)
total = key_count.cum_sum.iloc[-1]
total_train = train * total
first_bigger = (key_count.cum_sum >= total_train).idxmax()
mask_train = key_count.key[0:first_bigger + 1]
mask_test = key_count.key[first_bigger + 1:]
print('Storms Train', len(mask_train))
print('Storms Test', len(mask_test))
df_train = df[df.key.isin(mask_train)]
df_test = df[df.key.isin(mask_test)]
print('Total Train', len(df_train))
print('Total Test', len(df_test))
return df_train, df_test
df_train, df_test = split_df(0.95)
def read(file):
img = cv2.imread(file)
if img is None:
return None
return img.astype('float32') / 255.0
class Encoder:
    """
    Given a large-scale regression problem with a neural network it can be helpful to instead use output bins.
    E.g. when predicting the age of a person from an image, possible output bins might be np.arange(0,101,1).
    This encoder transforms numerical values (i.e. the age) into a normal probability distribution over these bins
    which then can be used as a target for multi-label classification.
    This means that networks which use this encoder should use "binary_crossentropy" loss together with "sigmoid"
    activation in the last layer.
    Example:
        enc = Encoder(classes=np.arange(0, 101, 1))
        y = [[35], [28], [16]]
        y_transformed = enc.transform(y)  # gives a shape (3 x 101) array
        model = keras.models.Sequential()
        ...
        model.add(enc.get_last_layer())  # Dense(101, activation='sigmoid')
        model.compile(loss=keras.losses.binary_crossentropy, optimizer='Adam',
                      metrics=[enc.mean_absolute_error, enc.mean_squared_error])
        model.fit(x_train, y_transformed)
        y_test_transformed = model.predict(x_test)
        y_test = enc.inverse_transform(y_test_transformed)
    """
    def __init__(self, classes):
        self.classes = classes
        self.n_classes = len(self.classes)
        # Fixed Gaussian width for the soft targets; np.std(self.classes) would
        # scale with the bin range, which is not wanted here.
        self.std = 3
        self.mean = np.mean(self.classes)
        # Column vector of bin centers as a backend tensor, for in-graph decoding.
        self._class_tensor = K.constant(value=self.classes.reshape(-1, 1), dtype='float32')
    def transform(self, vals):
        """Encode numeric targets as unnormalized Gaussian bumps over the bins.

        vals: array-like of shape (n,) or (n, 1); returns an (n, n_classes) array.
        Vectorized over the whole batch instead of the original per-sample loop.
        """
        vals = np.asarray(vals, dtype='float32').reshape(-1, 1)
        c2 = 2 * self.std * self.std
        # (n, 1) - (n_classes,) broadcasts to (n, n_classes); cast keeps the
        # original float64 output dtype.
        return np.exp(-np.square(vals - self.classes) / c2).astype('float64')
    def inverse_transform(self, vals):
        """Decode bin activations to numeric values via the normalized expectation."""
        return (vals / np.sum(vals, axis=1, keepdims=True)).dot(self.classes)
    def _inv_tensor(self, y):
        """Backend-graph version of inverse_transform; returns a shape (n,) tensor."""
        # y (n_images x n_classes) -> normalize -> expectation over bin centers.
        d = (y / K.sum(y, axis=1, keepdims=True))
        z = K.dot(d, self._class_tensor)
        e = K.reshape(z, (-1,))
        return e
    def mean_squared_error(self, y_true, y_pred):
        """MSE measured in the original (decoded) target units."""
        return keras.losses.mean_squared_error(self._inv_tensor(y_true), self._inv_tensor(y_pred))
    def mean_absolute_error(self, y_true, y_pred):
        """MAE measured in the original (decoded) target units."""
        return keras.losses.mean_absolute_error(self._inv_tensor(y_true), self._inv_tensor(y_pred))
    def get_last_layer(self):
        """Output layer matching this encoding: one sigmoid unit per bin."""
        return keras.layers.Dense(len(self.classes), activation='sigmoid')
# Wind speeds are binned in steps of 5 from 0 to 200 for the soft-label encoding.
ENC = Encoder(classes=np.arange(0, 201, 5))
# generator function for batches. randomly loads pairs of images from the full dataset
def gen(my_df, batch_size=128, which='both'):
    """Infinite Keras-style batch generator.

    my_df: DataFrame with columns 'file_ir', 'file_wv' and 'wind'.
    which: 'both', 'ir' or 'wv' -- selects which image channel(s) to yield.
    Yields ([inputs...], encoded_labels). Unreadable images are skipped, so a
    batch may contain fewer than batch_size samples.
    """
    while True:
        all_img_ir = []
        all_img_wv = []
        all_labels = []
        # Sample row indices with replacement.
        x = np.random.choice(np.arange(len(my_df)), batch_size)
        for i in x:
            f = my_df.iloc[i]
            label = f['wind']
            if which == 'both':
                img_ir = read(f['file_ir'])
                img_wv = read(f['file_wv'])
                # Keep the pair only if both channels loaded successfully.
                if img_wv is None or img_ir is None:
                    continue
                all_img_ir.append(img_ir)
                all_img_wv.append(img_wv)
                all_labels.append(label)
            elif which == 'ir':
                img_ir = read(f['file_ir'])
                if img_ir is None:
                    continue
                all_img_ir.append(img_ir)
                all_labels.append(label)
            elif which == 'wv':
                img_wv = read(f['file_wv'])
                if img_wv is None:
                    continue
                all_img_wv.append(img_wv)
                all_labels.append(label)
        # Labels become soft class targets; the unused channel array stays empty.
        IR, WV, Y = np.asarray(all_img_ir, dtype='float32'), np.asarray(all_img_wv, dtype='float32'), ENC.transform(
            all_labels)
        if which == 'both':
            yield [IR, WV], Y
        elif which == 'ir':
            yield [IR], Y
        elif which == 'wv':
            yield [WV], Y
def base_cnn2():
    """Build an untrained InceptionV3 backbone; return its (input, output) tensors."""
    backbone = keras.applications.InceptionV3(
        include_top=False,
        weights=None,
        input_shape=(IMAGE_SIZE, IMAGE_SIZE, 3),
        pooling='max',
    )
    return backbone.inputs[0], backbone.outputs[0]
def base_cnn():
    """Small SELU CNN backbone; returns (input_tensor, flattened_feature_tensor)."""
    inp = Input(shape=(IMAGE_SIZE, IMAGE_SIZE, 3))
    net = inp
    filter_sizes = (64, 64, 128, 128, 256, 256)
    for idx, n_filters in enumerate(filter_sizes, start=1):
        net = Conv2D(n_filters, (3, 3), activation='selu',
                     kernel_initializer='lecun_normal', name='conv%d' % idx)(net)
        # No pooling after the last conv block, matching the original topology.
        if idx < len(filter_sizes):
            net = MaxPooling2D()(net)
    net = AlphaDropout(0.3)(net)
    net = Flatten()(net)
    return inp, net
# Training driver. The `if True` branch trains single-channel models (here only
# the water-vapour channel); the currently-dead `else` branch documents how the
# per-channel models were meant to be merged into one two-input model.
if True:
    for m in [ 'wv']:
        gen_train = gen(df_train, which=m)
        gen_test = gen(df_test, which=m)
        # architecture_combined = keras.layers.concatenate([model_ir, model_water_vapor])
        inp, model = base_cnn2()
        model = Dropout(0.4)(model)
        architecture_combined = Dense(256, activation='relu')(model)
        architecture_combined = Dense(ENC.n_classes, activation='sigmoid')(architecture_combined)
        model_combined = Model(inputs=[inp], outputs=[architecture_combined])
        # Resume from the latest channel-specific checkpoint if one exists.
        file = glob.glob('../data/' + m + '*h5')
        if len(file) > 0:
            print('Loading %s' % file[0])
            model_combined.load_weights(file[0], by_name=True)
        # Prefix layer names with the channel tag so several single-channel
        # models can later be merged by name without clashes.
        # NOTE(review): the rename happens AFTER load_weights(by_name=True), so
        # it does not affect the restore above -- confirm this is intended.
        for l in model_combined.layers:
            if m not in l.name:
                l.name = m + '_' + l.name
        model_combined.compile(loss=keras.losses.binary_crossentropy,
                               optimizer=keras.optimizers.SGD(momentum=0.9, decay=1e-6),
                               metrics=[ENC.mean_squared_error, ENC.mean_absolute_error])
        print('Parameters', model_combined.count_params())
        cb = [
            # keras.callbacks.EarlyStopping(min_delta=0.5, patience=3, monitor='mean_squared_error'),
            keras.callbacks.ModelCheckpoint(save_weights_only=True, save_best_only=True,
                                            filepath='../data/' + m + '_EPOCH={epoch:02d}_MAE={val_mean_absolute_error:.2f}.h5',
                                            monitor='val_mean_absolute_error', mode='min'),
        ]
        model_combined.fit_generator(gen_train, steps_per_epoch=15, epochs=3, callbacks=cb, validation_data=gen_test,
                                     validation_steps=5)
else:
    # Two-input variant: load the trained IR and WV models, rename their layers,
    # concatenate the penultimate features of both and train a joint head.
    gen_train = gen(df_train, which='both')
    gen_test = gen(df_test, which='both')
    file = glob.glob('../data/ir*hdf5')
    model_ir = keras.models.load_model(file[0], compile=False)
    model_ir.save_weights('weights_ir')
    for l in model_ir.layers:
        l.name += 'IR'
    inp_ir = model_ir.inputs[0]
    # layers[-2].output = features just before the channel-specific head.
    out_ir = model_ir.layers[-2].output
    file = glob.glob('../data/wv*hdf5')
    model_wv = keras.models.load_model(file[0], compile=False)
    for l in model_wv.layers:
        l.name += 'WV'
    inp_wv = model_wv.inputs[0]
    out_wv = model_wv.layers[-2].output
    architecture_combined = keras.layers.concatenate([out_ir, out_wv])
    architecture_combined = Dense(ENC.n_classes, activation='sigmoid')(architecture_combined)
    model_combined = Model(inputs=[inp_ir, inp_wv], outputs=[architecture_combined])
    model_combined.compile(loss=keras.losses.binary_crossentropy,
                           optimizer=keras.optimizers.Adam(),
                           metrics=[ENC.mean_squared_error])
    model_combined.summary()
    cb = [
        # keras.callbacks.EarlyStopping(min_delta=0.5, patience=3, monitor='mean_squared_error'),
        keras.callbacks.ModelCheckpoint(
            filepath='../data/combined_weights.{epoch:02d}-{val_mean_squared_error:.2f}.hdf5'),
    ]
    model_combined.fit_generator(gen_train, steps_per_epoch=40, epochs=5, callbacks=cb, validation_data=gen_test,
                                 validation_steps=5)
import json
from invenio_indexer.api import RecordIndexer
from invenio_pidstore.models import PersistentIdentifier
from oarepo_records_draft import current_drafts
from sample.config import SAMPLE_DRAFT_PID_TYPE
from sample.record import SampleDraftRecord
def test_search_records(app, db, client, community):
assert len(current_drafts.managed_records) == 1
response = client.post('/cesnet/records/draft/',
data=json.dumps({"title": "necooo", "_primary_community": "cesnet", "state": "published"}),
content_type='application/json')
assert response.status_code == 201
print(response.data)
response = client.post('/cesnet/records/draft/',
data=json.dumps({"title": "xyyyyyyyyyyyyy", "_primary_community": "cesnet", "state": "published"}),
content_type='application/json')
assert response.status_code == 201
response = client.get('/cesnet/records/draft/1', content_type='application/json')
assert response.status_code == 200
response = client.get('/cesnet/records/draft/2', content_type='application/json')
assert response.status_code == 200
record_pid = PersistentIdentifier.query. \
filter_by(pid_type=SAMPLE_DRAFT_PID_TYPE, pid_value='1').one()
record = SampleDraftRecord.get_record(id_=record_pid.object_uuid)
current_drafts.publish(record=record, record_pid=record_pid, require_valid=False)
record_pid = PersistentIdentifier.query. \
filter_by(pid_type=SAMPLE_DRAFT_PID_TYPE, pid_value='2').one()
record = SampleDraftRecord.get_record(id_=record_pid.object_uuid)
current_drafts.publish(record=record, record_pid=record_pid, require_valid=False)
indexer = RecordIndexer()
indexer.client.indices.refresh()
response = client.get('/cesnet/records/1', content_type='application/json')
assert response.status_code == 200
response = client.get('/cesnet/records/2', content_type='application/json')
assert response.status_code == 200
search_class = ''
for x in current_drafts.managed_records.records:
search_class = x.published.resolve('search_class')
ids = []
primary_keys = []
for x in search_class(index="sample-sample-v1.0.0").source(includes=['id', '_primary_community']):
ids.append(x.id)
primary_keys.append(x._primary_community)
assert ids == ['1', '2']
assert primary_keys == ['cesnet', 'cesnet']
url = "https://localhost:5000/sitemap.xml"
response = client.get(url)
print(response.data)
assert response.status_code == 200
assert 'http://localhost:5000/cesnet/records/1' in str(response.data)
assert 'http://localhost:5000/cesnet/records/2' in str(response.data)
assert 'http://localhost:5000/records/1' not in str(response.data) | tests/test_search.py | import json
from invenio_indexer.api import RecordIndexer
from invenio_pidstore.models import PersistentIdentifier
from oarepo_records_draft import current_drafts
from sample.config import SAMPLE_DRAFT_PID_TYPE
from sample.record import SampleDraftRecord
def test_search_records(app, db, client, community):
    """End-to-end draft workflow: create two drafts, publish both, then verify
    the published records are indexed, searchable and listed in the sitemap."""
    # Exactly one draft-enabled record model is expected to be configured.
    assert len(current_drafts.managed_records) == 1
    response = client.post('/cesnet/records/draft/',
                           data=json.dumps({"title": "necooo", "_primary_community": "cesnet", "state": "published"}),
                           content_type='application/json')
    assert response.status_code == 201
    print(response.data)
    response = client.post('/cesnet/records/draft/',
                           data=json.dumps({"title": "xyyyyyyyyyyyyy", "_primary_community": "cesnet", "state": "published"}),
                           content_type='application/json')
    assert response.status_code == 201
    # Both drafts are retrievable through the draft REST endpoint.
    response = client.get('/cesnet/records/draft/1', content_type='application/json')
    assert response.status_code == 200
    response = client.get('/cesnet/records/draft/2', content_type='application/json')
    assert response.status_code == 200
    # Publish both drafts directly through the drafts API (validation disabled).
    record_pid = PersistentIdentifier.query. \
        filter_by(pid_type=SAMPLE_DRAFT_PID_TYPE, pid_value='1').one()
    record = SampleDraftRecord.get_record(id_=record_pid.object_uuid)
    current_drafts.publish(record=record, record_pid=record_pid, require_valid=False)
    record_pid = PersistentIdentifier.query. \
        filter_by(pid_type=SAMPLE_DRAFT_PID_TYPE, pid_value='2').one()
    record = SampleDraftRecord.get_record(id_=record_pid.object_uuid)
    current_drafts.publish(record=record, record_pid=record_pid, require_valid=False)
    # Force an index refresh so the just-published records are searchable.
    indexer = RecordIndexer()
    indexer.client.indices.refresh()
    response = client.get('/cesnet/records/1', content_type='application/json')
    assert response.status_code == 200
    response = client.get('/cesnet/records/2', content_type='application/json')
    assert response.status_code == 200
    # Resolve the search class of the (single) managed record model.
    search_class = ''
    for x in current_drafts.managed_records.records:
        search_class = x.published.resolve('search_class')
    ids = []
    primary_keys = []
    for x in search_class(index="sample-sample-v1.0.0").source(includes=['id', '_primary_community']):
        ids.append(x.id)
        primary_keys.append(x._primary_community)
    assert ids == ['1', '2']
    assert primary_keys == ['cesnet', 'cesnet']
    # The sitemap must list the published record URLs, not the draft ones.
    url = "https://localhost:5000/sitemap.xml"
    response = client.get(url)
    print(response.data)
    assert response.status_code == 200
    assert 'http://localhost:5000/cesnet/records/1' in str(response.data)
    assert 'http://localhost:5000/cesnet/records/2' in str(response.data)
    assert 'http://localhost:5000/records/1' not in str(response.data)
import os
import numpy as np
import torch
import shutil
import torchvision.transforms as transforms
from torch.autograd import Variable
class AvgrageMeter(object):
def __init__(self):
self.reset()
def reset(self):
self.avg = 0
self.sum = 0
self.cnt = 0
def update(self, val, n=1):
self.sum += val * n
self.cnt += n
self.avg = self.sum / self.cnt
def accuracy(output, target, topk=(1,)):
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
for k in topk:
correct_k = correct[:k].view(-1).float().sum(0)
res.append(correct_k.mul_(100.0/batch_size))
return res
class Cutout(object):
def __init__(self, length):
self.length = length
def __call__(self, img):
h, w = img.size(1), img.size(2)
mask = np.ones((h, w), np.float32)
y = np.random.randint(h)
x = np.random.randint(w)
y1 = np.clip(y - self.length // 2, 0, h)
y2 = np.clip(y + self.length // 2, 0, h)
x1 = np.clip(x - self.length // 2, 0, w)
x2 = np.clip(x + self.length // 2, 0, w)
mask[y1: y2, x1: x2] = 0.
mask = torch.from_numpy(mask)
mask = mask.expand_as(img)
img *= mask
return img
def _data_transforms_cifar10(args):
CIFAR_MEAN = [0.49139968, 0.48215827, 0.44653124]
CIFAR_STD = [0.24703233, 0.24348505, 0.26158768]
train_transform = transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize(CIFAR_MEAN, CIFAR_STD),
])
if args.cutout:
train_transform.transforms.append(Cutout(args.cutout_length))
valid_transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(CIFAR_MEAN, CIFAR_STD),
])
return train_transform, valid_transform
def count_parameters_in_MB(model):
return np.sum(np.prod(v.size()) for v in model.parameters())/1e6
def save_checkpoint(state, is_best, save):
filename = os.path.join(save, 'checkpoint.pth.tar')
torch.save(state, filename)
if is_best:
best_filename = os.path.join(save, 'model_best.pth.tar')
shutil.copyfile(filename, best_filename)
def save(model, model_path):
torch.save(model.state_dict(), model_path)
def load(model, model_path, genotype):
pretrained_dict = torch.load(model_path)
model_dict = model.state_dict()
# keep only the weights for the specified genotype,
# and prune all the other weights from the MixedOps
#pretrained_dict = {k: v for k, v in pretrained_dict.items() if k in model_dict}
edge_dict = {(0,2): 0, (0,3): 2, (0,4): 5, (0,5): 9, (1,2): 1, (1,3): 3, (1,4): 6, (1,5): 10, (2,3): 4, (2,4): 7, (3,4): 8, (2,5): 11, (3,5): 12, (4,5): 13}
for layer in range(8):
first_number = layer
for p in range(2):
if layer in [3, 6] and p == 0:
key = 'cells.{}.preprocess{}.conv_1.weight'.format(layer, p)
key = 'cells.{}.preprocess{}.conv_2.weight'.format(layer, p)
else:
key = 'cells.{}.preprocess{}.op.1.weight'.format(layer, p)
model_dict[key] = pretrained_dict[key]
if layer in [2, 5]:
gene = genotype.reduce
else:
gene = genotype.normal
for i in range(4):
for k in [2*i, 2*i + 1]:
op, j = gene[k]
second_number = edge_dict[(j, i + 2)]
if op == 'sep_conv_3x3':
third_number = 4
for h in [1, 2, 5, 6]:
key_model = 'cells.{}._ops.{}.op.{}.weight'.format(layer, k, h)
key_pretrained = 'cells.{}._ops.{}._ops.{}.op.{}.weight'.format(first_number, second_number, third_number, h)
model_dict[key_model] = pretrained_dict[key_pretrained]
elif op == 'max_pool_3x3':
third_number = 1
elif op == 'avg_pool_3x3':
third_number = 2
model.load_state_dict(model_dict)
def drop_path(x, drop_prob):
if drop_prob > 0.:
keep_prob = 1.-drop_prob
try:
mask = Variable(torch.cuda.FloatTensor(x.size(0), 1, 1, 1).bernoulli_(keep_prob))
except:
mask = Variable(torch.FloatTensor(x.size(0), 1, 1, 1).bernoulli_(keep_prob))
x.div_(keep_prob)
x.mul_(mask)
return x
def create_exp_dir(path, scripts_to_save=None):
import time, random
time.sleep(random.uniform(1, 2))
if not os.path.exists(path):
os.mkdir(path)
print('Experiment dir : {}'.format(path))
if scripts_to_save is not None:
os.mkdir(os.path.join(path, 'scripts'))
for script in scripts_to_save:
dst_file = os.path.join(path, 'scripts', os.path.basename(script))
shutil.copyfile(script, dst_file) | autoPyTorch/components/networks/image/darts/utils.py | import os
import numpy as np
import torch
import shutil
import torchvision.transforms as transforms
from torch.autograd import Variable
class AvgrageMeter(object):
    """Tracks the running (weighted) average of a scalar metric."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.avg = 0
        self.sum = 0
        self.cnt = 0

    def update(self, val, n=1):
        """Fold in `val`, interpreted as the average over `n` samples."""
        self.cnt += n
        self.sum += n * val
        self.avg = self.sum / self.cnt
def accuracy(output, target, topk=(1,)):
    """Compute top-k accuracy (in percent) for each k in `topk`.

    output: (batch, n_classes) scores; target: (batch,) class indices.
    Returns a list of 0-dim tensors, one per entry of `topk`.
    """
    maxk = max(topk)
    batch_size = target.size(0)
    _, pred = output.topk(maxk, 1, True, True)
    pred = pred.t()
    correct = pred.eq(target.view(1, -1).expand_as(pred))
    res = []
    for k in topk:
        # .reshape instead of .view: the slice of the transposed tensor is
        # non-contiguous, and .view on it raises in recent PyTorch versions.
        correct_k = correct[:k].reshape(-1).float().sum(0)
        res.append(correct_k.mul_(100.0 / batch_size))
    return res
class Cutout(object):
    """Randomly zero out one square patch of a CHW image tensor (in place)."""

    def __init__(self, length):
        # Side length of the square patch to cut out.
        self.length = length

    def __call__(self, img):
        height, width = img.size(1), img.size(2)
        # Pick a random centre, then clip the patch to the image bounds.
        cy = np.random.randint(height)
        cx = np.random.randint(width)
        half = self.length // 2
        top = np.clip(cy - half, 0, height)
        bottom = np.clip(cy + half, 0, height)
        left = np.clip(cx - half, 0, width)
        right = np.clip(cx + half, 0, width)
        mask = np.ones((height, width), np.float32)
        mask[top: bottom, left: right] = 0.
        img *= torch.from_numpy(mask).expand_as(img)
        return img
def _data_transforms_cifar10(args):
    """Build (train, valid) torchvision transform pipelines for CIFAR-10.

    Train: random crop + horizontal flip + normalization, plus optional Cutout
    (enabled via args.cutout, sized by args.cutout_length).
    Valid: normalization only.
    """
    # Per-channel CIFAR-10 mean/std used for input normalization.
    CIFAR_MEAN = [0.49139968, 0.48215827, 0.44653124]
    CIFAR_STD = [0.24703233, 0.24348505, 0.26158768]
    train_transform = transforms.Compose([
        transforms.RandomCrop(32, padding=4),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize(CIFAR_MEAN, CIFAR_STD),
    ])
    if args.cutout:
        # Cutout operates on tensors, hence appended after ToTensor/Normalize.
        train_transform.transforms.append(Cutout(args.cutout_length))
    valid_transform = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize(CIFAR_MEAN, CIFAR_STD),
    ])
    return train_transform, valid_transform
def count_parameters_in_MB(model):
    """Return the number of model parameters, in millions.

    Uses the builtin sum(): np.sum() applied to a generator does not reduce it
    element-wise reliably and is deprecated behaviour in NumPy.
    """
    return sum(np.prod(v.size()) for v in model.parameters()) / 1e6
def save_checkpoint(state, is_best, save):
    """Write `state` to checkpoint.pth.tar in `save`; mirror to model_best if best."""
    checkpoint_path = os.path.join(save, 'checkpoint.pth.tar')
    torch.save(state, checkpoint_path)
    if is_best:
        shutil.copyfile(checkpoint_path, os.path.join(save, 'model_best.pth.tar'))
def save(model, model_path):
    """Persist only the model's weights (state_dict) to `model_path`."""
    torch.save(model.state_dict(), model_path)
def load(model, model_path, genotype):
    """Load weights for a discrete `genotype` model from a supernet checkpoint.

    Copies the preprocess weights for every cell and, for each selected
    sep_conv_3x3 edge, the matching MixedOp branch weights from the pretrained
    (search-phase) checkpoint into this model's state dict.
    """
    pretrained_dict = torch.load(model_path)
    model_dict = model.state_dict()
    # keep only the weights for the specified genotype,
    # and prune all the other weights from the MixedOps
    #pretrained_dict = {k: v for k, v in pretrained_dict.items() if k in model_dict}
    # Maps a DAG edge (source node, target node) to its flat MixedOp index.
    edge_dict = {(0,2): 0, (0,3): 2, (0,4): 5, (0,5): 9, (1,2): 1, (1,3): 3, (1,4): 6, (1,5): 10, (2,3): 4, (2,4): 7, (3,4): 8, (2,5): 11, (3,5): 12, (4,5): 13}
    for layer in range(8):
        first_number = layer
        for p in range(2):
            # Layers 3 and 6 follow a reduction cell, so preprocess0 is a
            # FactorizedReduce with two conv weights.
            # NOTE(review): the first `key` assignment below is immediately
            # overwritten, so conv_1.weight is never copied -- confirm intended.
            if layer in [3, 6] and p == 0:
                key = 'cells.{}.preprocess{}.conv_1.weight'.format(layer, p)
                key = 'cells.{}.preprocess{}.conv_2.weight'.format(layer, p)
            else:
                key = 'cells.{}.preprocess{}.op.1.weight'.format(layer, p)
            model_dict[key] = pretrained_dict[key]
        # Layers 2 and 5 are reduction cells; the rest use the normal gene.
        if layer in [2, 5]:
            gene = genotype.reduce
        else:
            gene = genotype.normal
        for i in range(4):
            # Each intermediate node i has two incoming edges in the genotype.
            for k in [2*i, 2*i + 1]:
                op, j = gene[k]
                second_number = edge_dict[(j, i + 2)]
                if op == 'sep_conv_3x3':
                    # Branch 4 of the MixedOp is sep_conv_3x3; indices 1,2,5,6
                    # are its conv weights within the op's Sequential.
                    third_number = 4
                    for h in [1, 2, 5, 6]:
                        key_model = 'cells.{}._ops.{}.op.{}.weight'.format(layer, k, h)
                        key_pretrained = 'cells.{}._ops.{}._ops.{}.op.{}.weight'.format(first_number, second_number, third_number, h)
                        model_dict[key_model] = pretrained_dict[key_pretrained]
                elif op == 'max_pool_3x3':
                    # Pooling ops have no weights; nothing is copied.
                    third_number = 1
                elif op == 'avg_pool_3x3':
                    third_number = 2
    model.load_state_dict(model_dict)
def drop_path(x, drop_prob):
    """Randomly zero whole samples of a batch (stochastic depth), rescaling the
    survivors by 1/keep_prob so the expected activation is unchanged.

    Operates in place on `x`; drop_prob == 0 is a no-op.
    """
    if drop_prob > 0.:
        keep_prob = 1. - drop_prob
        # Allocate the Bernoulli mask with x's own tensor type/device instead of
        # the original try/except on torch.cuda: the bare `except:` there also
        # swallowed unrelated errors (including KeyboardInterrupt).
        mask = Variable(x.data.new(x.size(0), 1, 1, 1).bernoulli_(keep_prob))
        x.div_(keep_prob)
        x.mul_(mask)
    return x
def create_exp_dir(path, scripts_to_save=None):
    """Create the experiment directory `path`, optionally snapshotting scripts.

    Uses os.makedirs(exist_ok=True) instead of the original random-sleep plus
    exists()/mkdir() dance, which was racy when several workers started at once
    and failed when parent directories were missing.
    """
    os.makedirs(path, exist_ok=True)
    print('Experiment dir : {}'.format(path))
    if scripts_to_save is not None:
        os.makedirs(os.path.join(path, 'scripts'), exist_ok=True)
        for script in scripts_to_save:
            dst_file = os.path.join(path, 'scripts', os.path.basename(script))
            shutil.copyfile(script, dst_file)
import argparse
import math
import progressbar
import time
import h5py
import numpy as np
DSET_NAME = 'dset'
def _calc_batches(count, batch_size):
return int(math.ceil(count / float(batch_size)))
def write(filename, batched=False, batch_size=32):
data_count = 10000
data_shape = (3, 256, 256)
data_type = np.uint8
total_shape = (data_count,) + data_shape
if batched:
print 'Writing in batches ...'
with h5py.File(filename, 'w') as f:
dset = f.create_dataset(
DSET_NAME,
shape=(batch_size,) + data_shape,
maxshape=total_shape,
dtype=data_type,
chunks=(batch_size,) + data_shape,
)
batches = _calc_batches(data_count, batch_size)
count = 0
pbar = progressbar.ProgressBar(
widgets=[
progressbar.Percentage(),
progressbar.Bar(),
],
maxval=data_count,
).start()
for i in xrange(batches):
with h5py.File(filename, 'a') as f:
dset = f[DSET_NAME]
start = i * batch_size
stop = (i + 1) * batch_size
if stop > data_count:
stop = data_count
length = stop - start
count += length
dset.resize(stop, axis=0)
for i in xrange(start, stop):
dset[i] = np.ones(data_shape, dtype=data_type) * (i % 255)
pbar.update(count)
pbar.finish()
assert count == data_count
else:
print 'Writing all at once ...'
with h5py.File(filename, 'w') as f:
dset = f.create_dataset(
DSET_NAME,
shape=total_shape,
maxshape=total_shape,
dtype=data_type,
chunks=(batch_size,) + data_shape,
)
for i in xrange(data_count):
dset[i] = np.ones(data_shape, dtype=data_type) * (i % 255)
def read(filename, batched=False, batch_size=32):
with h5py.File(filename, 'r') as f:
dset = f[DSET_NAME]
if batched:
print 'Reading in batches ...'
batches = _calc_batches(len(dset), batch_size)
count = 0
pbar = progressbar.ProgressBar(
widgets=[
progressbar.Percentage(),
progressbar.Bar(),
],
maxval=len(dset),
).start()
for i in xrange(batches):
start = i * batch_size
stop = (i + 1) * batch_size
if stop > len(dset):
stop = len(dset)
for j in xrange(start, stop):
data = dset[j]
assert data[0][0][0] == (j % 255)
count += 1
pbar.update(count)
pbar.finish()
assert count == len(dset), '%d != %d' % (count, len(dset))
else:
print 'Reading all at once ...'
data = dset[...]
for i in xrange(len(data)):
assert data[i][0][0][0] == (i % 255)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('action')
parser.add_argument('--batched', action='store_true')
parser.add_argument('-f', '--filename', default='data.h5')
args = parser.parse_args()
start_time = time.time()
if args.action == 'write':
write(args.filename, batched=args.batched)
elif args.action == 'read':
read(args.filename, batched=args.batched)
else:
raise ValueError('Unknown action "%s"' % args.action)
print 'Done in %f seconds.' % (time.time() - start_time,)
print | main.py |
import argparse
import math
import progressbar
import time
import h5py
import numpy as np
# Name of the single HDF5 dataset used by both write() and read().
DSET_NAME = 'dset'
def _calc_batches(count, batch_size):
return int(math.ceil(count / float(batch_size)))
def write(filename, batched=False, batch_size=32):
    """Write a synthetic uint8 dataset (10000 x 3 x 256 x 256) to `filename`.

    batched=True grows the dataset batch by batch, reopening the file for each
    batch to mimic incremental producers; otherwise everything is written in
    one pass. Element i is a constant array filled with i % 255.
    """
    data_count = 10000
    data_shape = (3, 256, 256)
    data_type = np.uint8
    total_shape = (data_count,) + data_shape
    if batched:
        print 'Writing in batches ...'
        # Create the dataset sized for one batch; it is resized as we go.
        with h5py.File(filename, 'w') as f:
            dset = f.create_dataset(
                DSET_NAME,
                shape=(batch_size,) + data_shape,
                maxshape=total_shape,
                dtype=data_type,
                chunks=(batch_size,) + data_shape,
            )
        batches = _calc_batches(data_count, batch_size)
        count = 0
        pbar = progressbar.ProgressBar(
            widgets=[
                progressbar.Percentage(),
                progressbar.Bar(),
            ],
            maxval=data_count,
        ).start()
        for i in xrange(batches):
            # Reopen in append mode per batch to simulate separate writers.
            with h5py.File(filename, 'a') as f:
                dset = f[DSET_NAME]
                start = i * batch_size
                stop = (i + 1) * batch_size
                if stop > data_count:
                    stop = data_count
                length = stop - start
                count += length
                dset.resize(stop, axis=0)
                # NOTE(review): this inner loop reuses the name `i`, shadowing
                # the batch index; harmless (the outer for rebinds it) but
                # easy to trip over -- the read() twin uses `j` instead.
                for i in xrange(start, stop):
                    dset[i] = np.ones(data_shape, dtype=data_type) * (i % 255)
            pbar.update(count)
        pbar.finish()
        assert count == data_count
    else:
        print 'Writing all at once ...'
        with h5py.File(filename, 'w') as f:
            dset = f.create_dataset(
                DSET_NAME,
                shape=total_shape,
                maxshape=total_shape,
                dtype=data_type,
                chunks=(batch_size,) + data_shape,
            )
            for i in xrange(data_count):
                dset[i] = np.ones(data_shape, dtype=data_type) * (i % 255)
def read(filename, batched=False, batch_size=32):
    """Read the dataset back and verify every element equals its index % 255.

    batched=True reads one element at a time in batch-sized windows; otherwise
    the whole dataset is materialized in memory first.
    """
    with h5py.File(filename, 'r') as f:
        dset = f[DSET_NAME]
        if batched:
            print 'Reading in batches ...'
            batches = _calc_batches(len(dset), batch_size)
            count = 0
            pbar = progressbar.ProgressBar(
                widgets=[
                    progressbar.Percentage(),
                    progressbar.Bar(),
                ],
                maxval=len(dset),
            ).start()
            for i in xrange(batches):
                start = i * batch_size
                stop = (i + 1) * batch_size
                if stop > len(dset):
                    stop = len(dset)
                for j in xrange(start, stop):
                    # Indexing the dataset reads one element from disk.
                    data = dset[j]
                    assert data[0][0][0] == (j % 255)
                    count += 1
                pbar.update(count)
            pbar.finish()
            assert count == len(dset), '%d != %d' % (count, len(dset))
        else:
            print 'Reading all at once ...'
            # Ellipsis indexing pulls the entire dataset into memory.
            data = dset[...]
            for i in xrange(len(data)):
                assert data[i][0][0][0] == (i % 255)
if __name__ == '__main__':
    # CLI: `python main.py write|read [--batched] [-f FILE]`, timing the run.
    parser = argparse.ArgumentParser()
    parser.add_argument('action')
    parser.add_argument('--batched', action='store_true')
    parser.add_argument('-f', '--filename', default='data.h5')
    args = parser.parse_args()
    start_time = time.time()
    if args.action == 'write':
        write(args.filename, batched=args.batched)
    elif args.action == 'read':
        read(args.filename, batched=args.batched)
    else:
        raise ValueError('Unknown action "%s"' % args.action)
    print 'Done in %f seconds.' % (time.time() - start_time,)
    print
import json
import os
from pathlib import Path
import numpy as np
import torch
from logzero import logger
from showcase import constants
_root_dir = os.path.expanduser('~/.config/showcase')
def get_dataset_root(create_directory=True):
if create_directory:
try:
os.makedirs(_root_dir, exist_ok=True)
except OSError:
if not os.path.isdir(_root_dir):
raise
return _root_dir
def get_config_path():
path = Path(constants.CONFIG_PATH)
if path.exists():
return str(path)
else:
logger.warn('config.json does not exist in the showcase root dir: [{}]'.format(get_dataset_root()))
logger.warn('Please follow instructions in README')
raise FileNotFoundError
def get_config():
config = json.load(open(get_config_path()))
return config
def get_word_idx_path():
config = get_config()
path = Path(config['word_index_path'])
if path.exists():
return str(path)
else:
logger.warn('word index file does not exist in {}!'.format(path))
raise FileNotFoundError
def get_pos_idx_path():
config = get_config()
path = Path(config['pos_index_path'])
if path.exists():
return str(path)
else:
logger.warn('pos index file does not exist in {}!'.format(path))
raise FileNotFoundError
def get_model_path(model_type, ensemble=False):
config = get_config()
assert model_type in ('PREDICATE', 'ARGUMENT')
if model_type == 'PREDICATE':
path_dict = config['pred_model_path']
else:
path_dict = config['arg_model_path']
model_paths = sorted(path_dict.items())
if not ensemble:
model_paths = [model_paths[0]]
return model_paths
def load_word_idx(path_to_idx):
word2idx = {}
with open(path_to_idx, 'r') as fi:
for line in fi:
chunks = line.rstrip('\n').split('\t')
assert len(chunks) == 2
word = chunks[0]
idx = int(chunks[1])
word2idx[word] = idx
return word2idx
def load_pretrained_word_vec():
config = get_config()
path = str(Path(config['word2vec_word_path']))
embed_matrix = np.load(path)['arr_0']
return torch.from_numpy(embed_matrix).float()
def load_pretrained_pos_vec():
config = get_config()
path = str(Path(config['word2vec_pos_path']))
embed_matrix = np.load(path)['arr_0']
return torch.from_numpy(embed_matrix).float()
def read_stdin():
# RaSCのrequirementsに対応するためにsys.stdinでなくinput()を使う
while True:
try:
yield input()
except EOFError:
logger.debug('EOF Found. Exit...')
break
def predicate_info_to_pas(predicate_info):
predicate_indices = predicate_info.nonzero()[0].tolist()
pas = []
for idx in predicate_indices:
pas.append({
'p_id': idx,
'p_type': int(predicate_info[idx]),
'args': [3] * len(predicate_info)
})
return pas | showcase/utils/subfuncs.py | import json
import os
from pathlib import Path
import numpy as np
import torch
from logzero import logger
from showcase import constants
# All showcase configuration and model/index files live under this directory.
_root_dir = os.path.expanduser('~/.config/showcase')
def get_dataset_root(create_directory=True):
    """Return the showcase root directory, creating it when requested."""
    if not create_directory:
        return _root_dir
    try:
        os.makedirs(_root_dir, exist_ok=True)
    except OSError:
        # Re-raise only if the path exists but is not a directory.
        if not os.path.isdir(_root_dir):
            raise
    return _root_dir
def get_config_path():
    """Return the path to config.json, raising FileNotFoundError when absent."""
    path = Path(constants.CONFIG_PATH)
    if not path.exists():
        logger.warn('config.json does not exist in the showcase root dir: [{}]'.format(get_dataset_root()))
        logger.warn('Please follow instructions in README')
        raise FileNotFoundError
    return str(path)
def get_config():
    """Load and return the parsed config.json as a dict."""
    # Use a context manager: the original `json.load(open(...))` left the file
    # handle open until garbage collection.
    with open(get_config_path()) as config_file:
        return json.load(config_file)
def get_word_idx_path():
    """Return the word-index file path from the config, raising if it is missing."""
    path = Path(get_config()['word_index_path'])
    if not path.exists():
        logger.warn('word index file does not exist in {}!'.format(path))
        raise FileNotFoundError
    return str(path)
def get_pos_idx_path():
    """Return the POS-index file path from the config, raising if it is missing."""
    path = Path(get_config()['pos_index_path'])
    if not path.exists():
        logger.warn('pos index file does not exist in {}!'.format(path))
        raise FileNotFoundError
    return str(path)
def get_model_path(model_type, ensemble=False):
    """Return sorted (name, path) pairs for the requested model type.

    Only the first entry is returned unless `ensemble` is True.
    """
    config = get_config()
    assert model_type in ('PREDICATE', 'ARGUMENT')
    config_key = 'pred_model_path' if model_type == 'PREDICATE' else 'arg_model_path'
    model_paths = sorted(config[config_key].items())
    return model_paths if ensemble else [model_paths[0]]
def load_word_idx(path_to_idx):
    """Read a TSV of `word<TAB>index` lines into a {word: int} mapping."""
    word2idx = {}
    with open(path_to_idx, 'r') as fi:
        for raw_line in fi:
            columns = raw_line.rstrip('\n').split('\t')
            # Every line must contain exactly one tab separator.
            assert len(columns) == 2
            word2idx[columns[0]] = int(columns[1])
    return word2idx
def load_pretrained_word_vec():
    """Load the pretrained word-embedding matrix as a float32 torch tensor."""
    embedding_file = str(Path(get_config()['word2vec_word_path']))
    weights = np.load(embedding_file)['arr_0']
    return torch.from_numpy(weights).float()
def load_pretrained_pos_vec():
    """Load the pretrained POS-embedding matrix as a float32 torch tensor."""
    embedding_file = str(Path(get_config()['word2vec_pos_path']))
    weights = np.load(embedding_file)['arr_0']
    return torch.from_numpy(weights).float()
def read_stdin():
    """Yield lines from standard input until EOF."""
    # Use input() instead of sys.stdin to satisfy RaSC's requirements
    # (translated from the original Japanese comment).
    while True:
        try:
            yield input()
        except EOFError:
            logger.debug('EOF Found. Exit...')
            break
def predicate_info_to_pas(predicate_info):
    """Convert a per-token predicate-type array into a list of PAS dicts.

    Each nonzero position becomes {'p_id', 'p_type', 'args'}, with 'args'
    initialised to the label 3 for every token.
    """
    n_tokens = len(predicate_info)
    return [
        {
            'p_id': idx,
            'p_type': int(predicate_info[idx]),
            'args': [3] * n_tokens,
        }
        for idx in predicate_info.nonzero()[0].tolist()
    ]
from nonebot import (
CommandSession, IntentCommand, NLPSession, on_command, on_natural_language,
permission
)
from coolqbot import bot
@on_command(
'whoami', aliases={'我是谁'}, permission=permission.GROUP, only_to_me=False
)
async def whoami(session: CommandSession):
msg = await session.bot.get_group_member_info(
user_id=session.ctx['sender']['user_id'],
group_id=session.ctx['group_id'],
no_cache=True
)
if msg['card']:
outName = msg['card']
else:
outName = msg['nickname']
await session.send(f'你是{outName}!')
@on_command(
'whereami', aliases={'我在哪'}, permission=permission.GROUP, only_to_me=False
)
async def whereami(session: CommandSession):
group_list = await session.bot.get_group_list()
msg = await session.bot.get_group_member_info(
user_id=session.ctx['sender']['user_id'],
group_id=session.ctx['group_id'],
no_cache=True
)
if msg['area']:
country = msg['area']
else:
country = '不知道不关心'
for group in group_list:
if group['group_id'] == session.ctx['group_id']:
await session.send(
f'\n你所在群:{group["group_name"]}\n你所在地区:{country}',
at_sender=True
)
@on_command('whoareyou', aliases={'你是谁'}, only_to_me=False)
async def whoareyou(session: CommandSession):
await session.send('我是可爱的小誓约!')
@on_command('whatiamdoing', aliases={'我在干什么', '我在做什么'}, only_to_me=False)
async def whatiamdoing(session: CommandSession):
await session.send('你在调戏我!!')
@on_natural_language(keywords={'我是谁'}, permission=permission.GROUP)
async def _(session: NLPSession):
return IntentCommand(90.0, 'whoami')
@on_natural_language(keywords={'我在哪'}, permission=permission.GROUP)
async def _(session: NLPSession):
return IntentCommand(90.0, 'whereami')
@on_natural_language(keywords={'你是谁'})
async def _(session: NLPSession):
return IntentCommand(90.0, 'whoareyou')
@on_natural_language(keywords={'我在干什么', '我在做什么'})
async def _(session: NLPSession):
return IntentCommand(90.0, 'whatiamdoing') | plugins_bak/basic.py | from nonebot import (
CommandSession, IntentCommand, NLPSession, on_command, on_natural_language,
permission
)
from coolqbot import bot
@on_command(
    'whoami', aliases={'我是谁'}, permission=permission.GROUP, only_to_me=False
)
async def whoami(session: CommandSession):
    """Reply with the sender's group card, falling back to the nickname."""
    member = await session.bot.get_group_member_info(
        user_id=session.ctx['sender']['user_id'],
        group_id=session.ctx['group_id'],
        no_cache=True
    )
    # An empty card string falls through to the nickname.
    outName = member['card'] or member['nickname']
    await session.send(f'你是{outName}!')
@on_command(
    'whereami', aliases={'我在哪'}, permission=permission.GROUP, only_to_me=False
)
async def whereami(session: CommandSession):
    """Report the current group's name and the sender's region."""
    group_list = await session.bot.get_group_list()
    member = await session.bot.get_group_member_info(
        user_id=session.ctx['sender']['user_id'],
        group_id=session.ctx['group_id'],
        no_cache=True
    )
    # Empty/missing area falls back to a "don't know, don't care" reply.
    country = member['area'] if member['area'] else '不知道不关心'
    current_id = session.ctx['group_id']
    for group in group_list:
        if group['group_id'] == current_id:
            await session.send(
                f'\n你所在群:{group["group_name"]}\n你所在地区:{country}',
                at_sender=True
            )
@on_command('whoareyou', aliases={'你是谁'}, only_to_me=False)
async def whoareyou(session: CommandSession):
    """Fixed self-introduction reply."""
    reply = '我是可爱的小誓约!'
    await session.send(reply)
@on_command('whatiamdoing', aliases={'我在干什么', '我在做什么'}, only_to_me=False)
async def whatiamdoing(session: CommandSession):
    """Fixed teasing reply."""
    reply = '你在调戏我!!'
    await session.send(reply)
@on_natural_language(keywords={'我是谁'}, permission=permission.GROUP)
async def _(session: NLPSession):
    """Route natural-language "who am I" to the whoami command."""
    intent = IntentCommand(90.0, 'whoami')
    return intent
@on_natural_language(keywords={'我在哪'}, permission=permission.GROUP)
async def _(session: NLPSession):
    """Route natural-language "where am I" to the whereami command."""
    intent = IntentCommand(90.0, 'whereami')
    return intent
@on_natural_language(keywords={'你是谁'})
async def _(session: NLPSession):
    """Route natural-language "who are you" to the whoareyou command."""
    intent = IntentCommand(90.0, 'whoareyou')
    return intent
@on_natural_language(keywords={'我在干什么', '我在做什么'})
async def _(session: NLPSession):
return IntentCommand(90.0, 'whatiamdoing') | 0.246806 | 0.066266 |
from prim import Parser, syntax_tree, fmap, lift, mzero
from operator import and_, or_
from state import (
isParseError,
isParseSuccess,
parseSuccessTree,
setParseSuccessTree,
mergeErrorsMany,
inputConsumed
)
def _sequence(*parsers):
    '''Same as 'sequence', but less efficient (it always has to
    go over all of 'parsers').
    '''
    # NOTE(review): relies on the builtin reduce() (and count() below uses
    # xrange), i.e. this module targets Python 2 -- confirm.
    # '&' (and_) chains parsers into a left-nested pair tree ((..., a), b);
    # flatten() rebuilds a flat result list from that tree.
    def flatten(tree):
        try:
            # Leaves may not support len(); TypeError marks a leaf.
            return tree if len(tree) != 2 else flatten(tree[0]) + [tree[1]]
        except TypeError:
            return tree
    return syntax_tree(flatten)(reduce(and_, parsers, mzero))
def sequence(*parsers):
    '''Run each parser in 'parsers' one after another, threading the
    parse state through. Fails fast on the first error; on success the
    result is the list of per-parser values.
    '''
    @Parser
    def processor(state):
        results = []
        for parse in parsers:
            state = parse(state)
            if isParseError(state):
                # Propagate the first failure unchanged.
                return state
            results.append(parseSuccessTree(state))
        return setParseSuccessTree(state, results)
    return processor
def _choice(*parsers):
    '''Variant of 'choice' built from the '|' operator; less efficient
    because every alternative is combined up front.
    '''
    try:
        combined = reduce(or_, parsers)
    except TypeError:
        # reduce() raises TypeError on an empty argument list.
        return mzero
    return combined
def choice(*parsers):
    '''Applies the parsers from 'parsers' in order, until one of them
    succeeds. Returns the value of the succeeding parser. If none of the
    parsers succeed, the error that occurs 'the farthest' into
    the input, is returned.
    '''
    @Parser
    def processor(state):
        errors = []
        for pr in parsers:
            newstate = pr(state)
            if isParseSuccess(newstate):
                return newstate
            # fail if any input has been consumed
            # (parsec-style commit: a failing alternative that consumed
            # input returns its error immediately, unless the parser is
            # flagged for arbitrary look-ahead)
            if not pr.arbitraryLookAhead() and inputConsumed(newstate, state):
                return newstate
            errors.append(newstate)
        # No alternative matched at all: merge every collected error.
        return mergeErrorsMany(*errors)
    return processor
def between(open, close, parser):
    '''Parses 'open' -> 'parser' -> 'close'. Returns the value returned
    by 'parser'.
    '''
    # '>>' sequences and keeps the right-hand result; '>=' is monadic
    # bind, so 'close' still runs but 'parser''s value is re-lifted as
    # the final result.
    return open >> parser >= (lambda p: close >> lift(p))
def many1(parser):
    '''Runs 'parser' one or more times. Returns a list of results
    returned by 'parser'.
    '''
    # One mandatory hit, then delegate the (possibly empty) tail to the
    # iterative 'many' to avoid deep recursion.
    return parser >= (lambda head: fmap(lambda tail: [head] + tail, many(parser)))
def manyR(parser):
    '''same as 'many', but quickly overflows stack due to recursion limit'''
    return (parser >= (lambda head: fmap(lambda tail: [head] + tail, manyR(parser)))) | mzero
def many(parser):
    '''Runs 'parser' zero or more times. Returns a list of values
    returned by 'parser'.
    '''
    # NOTE(review): if 'parser' can succeed without consuming input this
    # loop never terminates -- the standard caveat for many-style
    # combinators; confirm callers never pass such a parser.
    @Parser
    def processor(state):
        tree = []
        while True:
            newstate = parser(state)
            if isParseError(newstate):
                break
            tree.append(parseSuccessTree(newstate))
            state = newstate
        # Succeed with the collected values, unless the failing iteration
        # consumed input -- then that failure is propagated (commit).
        return setParseSuccessTree(state, tree) if not inputConsumed(newstate, state) else newstate
    return processor
def option(default_value, parser):
    '''Runs 'parser' and returns a value returned by it. If parsing failed,
    returns 'default_value'.
    '''
    # '|' is ordered choice; lift() always succeeds with 'default_value'.
    return parser | lift(default_value)
# Flatten one level: [[a], [b, c]] -> [a, b, c].
concat = lambda xs: sum(xs, [])
# Curried cons: prepend(h)(t) == [h] + t.
prepend = lambda head: lambda tail: [head] + tail
def sepBy1(parser, sep, keep=False):
    '''Parses one or more occurrences of 'parser', separated by 'sep'.
    If keep is True, returns a list of values returned by BOTH 'parser'
    and 'sep'; otherwise, a list of values returned by 'parser'.
    '''
    # Tail after the first element: flattened (sep, parser) pairs when
    # keep=True, otherwise just the parser values (separators dropped).
    rest = fmap(concat, many(sequence(sep, parser))) if keep else many(sep >> parser)
    return parser >= (lambda h: fmap(prepend(h), rest))
def sepBy(parser, sep, keep=False):
    '''Parses zero or more occurrences of 'parser', separated by 'sep'.
    If keep is True, returns a list of values returned by BOTH 'parser'
    and 'sep'; otherwise, a list of values returned by 'parser'.
    '''
    return option([], sepBy1(parser, sep, keep))
def endBy1(parser, sep, keep=False):
    '''Parses one or more occurrences of 'parser', separated and ended
    by 'sep'. If keep is True, returns a list of values returned by
    BOTH 'parser' and 'sep'; otherwise, a list of values returned by
    'parser'.
    '''
    if keep:
        # Keep separators: each unit yields [parser, sep], flattened.
        parseOne, transform = sequence(parser, sep), concat
    else:
        # Drop separators: each unit yields only the parser's value.
        parseOne, transform = parser >= (lambda p: sep >> lift(p)), lambda x: x
    return fmap(transform, many1(parseOne))
def endBy(parser, sep, keep=False):
    '''Parses zero or more occurrences of 'parser', separated and ended
    by 'sep'. If keep is True, returns a list of values returned by
    BOTH 'parser' and 'sep'; otherwise, a list of values returned by
    'parser'.
    '''
    return option([], endBy1(parser, sep, keep))
def skipMany1(parser):
    '''Applies 'parser' one or more times, ignoring the result.'''
    # One mandatory application ('>>' drops its value), then skipMany.
    return parser >> skipMany(parser)
def skipMany(parser):
    '''Run 'parser' zero or more times, discarding every result.'''
    @Parser
    def processor(state):
        result = many(parser)(state)
        # On success, drop the accumulated list but keep position/errors.
        return setParseSuccessTree(result, None) if isParseSuccess(result) else result
    return processor
def count(n, parser):
'''Applies 'parser' n times. Returns a list of n values returned by
'parser'. If n <= 0, returns [].
'''
return sequence(*[parser for _ in xrange(n)]) if n > 0 else mzero | parsefunc/combinators.py |
from prim import Parser, syntax_tree, fmap, lift, mzero
from operator import and_, or_
from state import (
isParseError,
isParseSuccess,
parseSuccessTree,
setParseSuccessTree,
mergeErrorsMany,
inputConsumed
)
def _sequence(*parsers):
'''same as 'sequence', but less efficient (due to having to
ALWAYS go over all 'parsers'.
'''
def flatten(tree):
try:
return tree if len(tree) != 2 else flatten(tree[0]) + [tree[1]]
except TypeError:
return tree
return syntax_tree(flatten)(reduce(and_, parsers, mzero))
def sequence(*parsers):
'''Applies 'parsers' in sequence. Returns a list of values returned
by parsers.
'''
@Parser
def processor(state):
tree = []
for pr in parsers:
state = pr(state)
if isParseError(state):
return state
tree.append(parseSuccessTree(state))
return setParseSuccessTree(state, tree)
return processor
def _choice(*parsers):
'''same as 'choice', but less efficient (due to having to
ALWAYS go over all 'parsers'.
'''
try:
return reduce(or_, parsers)
except TypeError:
return mzero
def choice(*parsers):
'''Applies the parsers from 'parsers' in order, until one of them
succeeds. Returns the value of the succeeding parser. If none of the
parsers succeed, the error that occurres 'the farthest' into
the input, is returned.
'''
@Parser
def processor(state):
errors = []
for pr in parsers:
newstate = pr(state)
if isParseSuccess(newstate):
return newstate
# fail if any input has been consumed
if not pr.arbitraryLookAhead() and inputConsumed(newstate, state):
return newstate
errors.append(newstate)
return mergeErrorsMany(*errors)
return processor
def between(open, close, parser):
'''Parses 'open' -> 'parser' -> 'close'. Returns the value returned
by 'parser'.
'''
return open >> parser >= (lambda p: close >> lift(p))
def many1(parser):
'''Runs 'parser' one or more times. Returns a list of results
returned py 'parser'.
'''
return parser >= (lambda head: fmap(lambda tail: [head] + tail, many(parser)))
def manyR(parser):
'''same as 'many', but quickly overflows stack due to recursion limit'''
return (parser >= (lambda head: fmap(lambda tail: [head] + tail, manyR(parser)))) | mzero
def many(parser):
'''Runs 'parser' zero or more times. Returns a list of values
returned by 'parser'.
'''
@Parser
def processor(state):
tree = []
while True:
newstate = parser(state)
if isParseError(newstate):
break
tree.append(parseSuccessTree(newstate))
state = newstate
return setParseSuccessTree(state, tree) if not inputConsumed(newstate, state) else newstate
return processor
def option(default_value, parser):
'''Runs 'parser' and returns a value returned by it. If parsing failed,
returns 'default_value'.
'''
return parser | lift(default_value)
concat = lambda xs: sum(xs, [])
prepend = lambda head: lambda tail: [head] + tail
def sepBy1(parser, sep, keep=False):
'''Parses one or more occurrences of 'parser', separated by 'sep'.
If keep is True, returns a list of values returned by BOTH 'parser'
and 'sep'; otherwise, a list of values returned by 'parser'.
'''
rest = fmap(concat, many(sequence(sep, parser))) if keep else many(sep >> parser)
return parser >= (lambda h: fmap(prepend(h), rest))
def sepBy(parser, sep, keep=False):
'''Parses zero or more occurrences of 'parser', separated by 'sep'.
If keep is True, returns a list of values returned by BOTH 'parser'
and 'sep'; otherwise, a list of values returned by 'parser'.
'''
return option([], sepBy1(parser, sep, keep))
def endBy1(parser, sep, keep=False):
'''Parses one or more occurrences of 'parser', separated and ended
by 'sep'. If keep is True, returns a list of values returned by
BOTH 'parser' and 'sep'; otherwise, a list of values returned by
'parser'.
'''
if keep:
parseOne, transform = sequence(parser, sep), concat
else:
parseOne, transform = parser >= (lambda p: sep >> lift(p)), lambda x: x
return fmap(transform, many1(parseOne))
def endBy(parser, sep, keep=False):
'''Parses zero or more occurrences of 'parser', separated and ended
by 'sep'. If keep is True, returns a list of values returned by
BOTH 'parser' and 'sep'; otherwise, a list of values returned by
'parser'.
'''
return option([], endBy1(parser, sep, keep))
def skipMany1(parser):
'''Applies 'parser' one or more times, ignoring the result.'''
return parser >> skipMany(parser)
def skipMany(parser):
'''Applies 'parser' zero or more times, ignoring the result.'''
@Parser
def processor(state):
newstate = many(parser)(state)
if isParseSuccess(newstate):
newstate = setParseSuccessTree(newstate, None)
return newstate
return processor
def count(n, parser):
'''Applies 'parser' n times. Returns a list of n values returned by
'parser'. If n <= 0, returns [].
'''
return sequence(*[parser for _ in xrange(n)]) if n > 0 else mzero | 0.414188 | 0.449332 |
import discord
from discord.ext import commands
from utils import MyContext
async def can_mute(ctx: MyContext) -> bool:
    """Whether the author may use the mute command."""
    if not ctx.bot.database_online:
        # No database: fall back to the Discord permission.
        return ctx.channel.permissions_for(ctx.author).manage_roles
    return await ctx.bot.get_cog("Servers").staff_finder(ctx.author, "mute")
async def can_warn(ctx: MyContext) -> bool:
    """Whether the author may use the warn command."""
    if not ctx.bot.database_online:
        return ctx.channel.permissions_for(ctx.author).manage_roles
    return await ctx.bot.get_cog("Servers").staff_finder(ctx.author, "warn")
async def can_kick(ctx: MyContext) -> bool:
    """Whether the author may use the kick command."""
    if not ctx.bot.database_online:
        return ctx.channel.permissions_for(ctx.author).kick_members
    return await ctx.bot.get_cog("Servers").staff_finder(ctx.author, "kick")
async def can_ban(ctx: MyContext) -> bool:
    """Whether the author may use the ban command."""
    if not ctx.bot.database_online:
        return ctx.channel.permissions_for(ctx.author).ban_members
    return await ctx.bot.get_cog("Servers").staff_finder(ctx.author, "ban")
async def can_slowmode(ctx: MyContext) -> bool:
    """Whether the author may use the slowmode command."""
    if not ctx.bot.database_online:
        return ctx.channel.permissions_for(ctx.author).manage_channels
    return await ctx.bot.get_cog("Servers").staff_finder(ctx.author, "slowmode")
async def can_clear(ctx: MyContext) -> bool:
    """Whether the author may use the clear command."""
    if not ctx.bot.database_online:
        return ctx.channel.permissions_for(ctx.author).manage_messages
    return await ctx.bot.get_cog("Servers").staff_finder(ctx.author, "clear")
async def has_admin(ctx: MyContext) -> bool:
    """Whether the author is an administrator (or a bot god)."""
    if ctx.channel.permissions_for(ctx.author).administrator:
        return True
    return await ctx.bot.get_cog("Admin").check_if_god(ctx)
async def has_manage_msg(ctx: MyContext) -> bool:
    """Whether the author can manage messages (e.g. pin) or is a bot god."""
    if ctx.channel.permissions_for(ctx.author).manage_messages:
        return True
    return await ctx.bot.get_cog("Admin").check_if_god(ctx)
async def has_manage_guild(ctx: MyContext) -> bool:
    """Whether the author can manage the server or is a bot god."""
    if ctx.channel.permissions_for(ctx.author).manage_guild:
        return True
    return await ctx.bot.get_cog('Admin').check_if_god(ctx)
async def has_manage_roles(ctx: MyContext) -> bool:
    """Whether the author can manage roles or is a bot god."""
    if ctx.channel.permissions_for(ctx.author).manage_roles:
        return True
    return await ctx.bot.get_cog('Admin').check_if_god(ctx)
async def has_manage_nicknames(ctx: MyContext) -> bool:
    """Whether the author can change nicknames or is a bot god."""
    if ctx.channel.permissions_for(ctx.author).manage_nicknames:
        return True
    return await ctx.bot.get_cog('Admin').check_if_god(ctx)
async def has_embed_links(ctx: MyContext) -> bool:
    """Whether the author can send embeds; always True outside a guild."""
    if not isinstance(ctx.author, discord.Member):
        return True
    if ctx.channel.permissions_for(ctx.author).embed_links:
        return True
    return await ctx.bot.get_cog('Admin').check_if_god(ctx)
async def verify_role_exists(ctx: MyContext) -> bool:
    """Check whether a configured verification role exists in this guild.

    Returns False outside of a guild, when no role is configured, or when
    none of the configured role IDs resolves to an actual role.
    """
    if ctx.guild is None:
        return False
    roles_raw = await ctx.bot.get_config(ctx.guild.id, "verification_role")
    if roles_raw is None:
        return False
    # str.isnumeric() is already False for the empty string, so the old
    # 'len(x) > 0' guard was redundant; any() also avoids building the
    # intermediate list just to test its length.
    return any(
        ctx.guild.get_role(int(x)) is not None
        for x in roles_raw.split(';')
        if x.isnumeric()
    )
async def database_connected(ctx: MyContext) -> bool:
    "Check if the database is online and accessible"
    if not ctx.bot.database_online:
        raise commands.CommandError("Database offline")
    return True
async def is_fun_enabled(ctx: MyContext, self=None) -> bool:
    """Check whether "fun" commands are enabled in the current context.

    'self' is the Fun cog instance; when omitted it is looked up through
    ctx.bot. Results are cached per guild in self.fun_opt, keyed by
    str(guild id).
    """
    if self is None:
        if hasattr(ctx, 'bot'):
            self = ctx.bot.get_cog("Fun")
        else:
            # No bot available (e.g. partial context): deny.
            return False
    if ctx.guild is None:
        # Direct messages: fun commands are always allowed.
        return True
    # Without the database, only members who can manage the guild may use fun commands.
    if not self.bot.database_online and not ctx.guild.channels[0].permissions_for(ctx.author).manage_guild:
        return False
    ID = ctx.guild.id
    if str(ID) not in self.fun_opt.keys():
        # Not cached yet: fetch from config and cache (value may be None).
        fun = await self.bot.get_config(ID, "enable_fun")
        self.fun_opt[str(ID)] = fun
    else:
        fun = self.fun_opt[str(ID)]
        if fun is None:
            # Cached None: retry the lookup, but only cache a real value.
            fun = await self.bot.get_config(ID, "enable_fun")
            if fun is not None:
                self.fun_opt[str(ID)] = fun
    return bool(fun)
async def is_a_cmd(msg: discord.Message, bot: commands.Bot) -> bool:
    """Check whether a message starts with one of the bot's command prefixes."""
    prefixes = await bot.get_prefix(msg)
    # any() short-circuits on the first matching prefix instead of
    # testing every prefix like the previous boolean-accumulator loop.
    return any(msg.content.startswith(p) for p in prefixes)
async def is_ttt_enabled(ctx: MyContext, self=None) -> bool:
if ctx.guild is None:
return True
mode = await ctx.bot.get_config(ctx.guild.id, "ttt_display")
return mode != 0 | fcts/checks.py | import discord
from discord.ext import commands
from utils import MyContext
async def can_mute(ctx: MyContext) -> bool:
"""Check if someone can mute"""
if ctx.bot.database_online:
return await ctx.bot.get_cog("Servers").staff_finder(ctx.author, "mute")
else:
return ctx.channel.permissions_for(ctx.author).manage_roles
async def can_warn(ctx: MyContext) -> bool:
"""Check if someone can warn"""
if ctx.bot.database_online:
return await ctx.bot.get_cog("Servers").staff_finder(ctx.author, "warn")
else:
return ctx.channel.permissions_for(ctx.author).manage_roles
async def can_kick(ctx: MyContext) -> bool:
"""Check if someone can kick"""
if ctx.bot.database_online:
return await ctx.bot.get_cog("Servers").staff_finder(ctx.author, "kick")
else:
return ctx.channel.permissions_for(ctx.author).kick_members
async def can_ban(ctx: MyContext) -> bool:
"""Check if someone can ban"""
if ctx.bot.database_online:
return await ctx.bot.get_cog("Servers").staff_finder(ctx.author, "ban")
else:
return ctx.channel.permissions_for(ctx.author).ban_members
async def can_slowmode(ctx: MyContext) -> bool:
"""Check if someone can use slowmode"""
if ctx.bot.database_online:
return await ctx.bot.get_cog("Servers").staff_finder(ctx.author, "slowmode")
else:
return ctx.channel.permissions_for(ctx.author).manage_channels
async def can_clear(ctx: MyContext) -> bool:
"""Check if someone can use clear"""
if ctx.bot.database_online:
return await ctx.bot.get_cog("Servers").staff_finder(ctx.author, "clear")
else:
return ctx.channel.permissions_for(ctx.author).manage_messages
async def has_admin(ctx: MyContext) -> bool:
"""Check if someone can see the banlist"""
return ctx.channel.permissions_for(ctx.author).administrator or await ctx.bot.get_cog("Admin").check_if_god(ctx)
async def has_manage_msg(ctx: MyContext) -> bool:
"""... if someone can pin a message"""
return ctx.channel.permissions_for(ctx.author).manage_messages or await ctx.bot.get_cog("Admin").check_if_god(ctx)
async def has_manage_guild(ctx: MyContext) -> bool:
"""... if someone can manage the server"""
return ctx.channel.permissions_for(ctx.author).manage_guild or await ctx.bot.get_cog('Admin').check_if_god(ctx)
async def has_manage_roles(ctx: MyContext) -> bool:
"""... if someone can manage the roles"""
return ctx.channel.permissions_for(ctx.author).manage_roles or await ctx.bot.get_cog('Admin').check_if_god(ctx)
async def has_manage_nicknames(ctx: MyContext) -> bool:
"""... if someone can change nicknames"""
return ctx.channel.permissions_for(ctx.author).manage_nicknames or await ctx.bot.get_cog('Admin').check_if_god(ctx)
async def has_embed_links(ctx: MyContext) -> bool:
"""... if someone can send embeds"""
if not isinstance(ctx.author, discord.Member):
return True
return ctx.channel.permissions_for(ctx.author).embed_links or await ctx.bot.get_cog('Admin').check_if_god(ctx)
async def verify_role_exists(ctx: MyContext) -> bool:
"""Check if the verify role exists"""
if ctx.guild is None:
return False
roles_raw = await ctx.bot.get_config(ctx.guild.id, "verification_role")
if roles_raw is None:
return False
roles = [r for r in [ctx.guild.get_role(int(x)) for x in roles_raw.split(';') if x.isnumeric() and len(x) > 0] if r is not None]
return len(roles) > 0
async def database_connected(ctx: MyContext) -> bool:
"Check if the database is online and accessible"
if ctx.bot.database_online:
return True
raise commands.CommandError("Database offline")
async def is_fun_enabled(ctx: MyContext, self=None) -> bool:
if self is None:
if hasattr(ctx, 'bot'):
self = ctx.bot.get_cog("Fun")
else:
return False
if ctx.guild is None:
return True
if not self.bot.database_online and not ctx.guild.channels[0].permissions_for(ctx.author).manage_guild:
return False
ID = ctx.guild.id
if str(ID) not in self.fun_opt.keys():
fun = await self.bot.get_config(ID, "enable_fun")
self.fun_opt[str(ID)] = fun
else:
fun = self.fun_opt[str(ID)]
if fun is None:
fun = await self.bot.get_config(ID, "enable_fun")
if fun is not None:
self.fun_opt[str(ID)] = fun
return bool(fun)
async def is_a_cmd(msg: discord.Message, bot: commands.Bot) -> bool:
"Check if a message is a command"
pr = await bot.get_prefix(msg)
is_cmd = False
for p in pr:
is_cmd = is_cmd or msg.content.startswith(p)
return is_cmd
async def is_ttt_enabled(ctx: MyContext, self=None) -> bool:
if ctx.guild is None:
return True
mode = await ctx.bot.get_config(ctx.guild.id, "ttt_display")
return mode != 0 | 0.463201 | 0.168686 |
import sys
import chardet
import argparse
from pyflowchart.flowchart import Flowchart
def detect_decode(file_content: bytes) -> str:
    """Detect the encoding of *file_content* and decode it.

    PyFlowchart converts Python 3 source, which is UTF-8 by default, so
    the detected encoding is only trusted when chardet reports a
    confidence above 0.9; otherwise UTF-8 is used.

    Args:
        file_content: binary file content to decode
    Returns:
        str: decoded content
    """
    detect_result = chardet.detect(file_content)
    encoding = detect_result.get("encoding")
    confidence = detect_result.get("confidence")
    # Fall back to UTF-8 when detection failed outright (chardet reports
    # encoding=None, e.g. for empty input) or is not confident enough.
    # Handling None here also removes the need to catch the TypeError
    # that bytes.decode(encoding=None) used to raise.
    if not encoding or not confidence or confidence < 0.9:
        encoding = "UTF-8"
    return file_content.decode(encoding=encoding)
def main(code_file, field, inner, simplify):
    """Read *code_file*, decode it, and print its flowchart."""
    # The file is opened in binary mode; decoding happens here, after
    # encoding detection.
    raw: bytes = code_file.read()
    source = detect_decode(raw)
    chart = Flowchart.from_code(source, field=field, inner=inner, simplify=simplify)
    print(chart.flowchart())
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Python code to flowchart.')
    # Open as binary; decoding is deferred to main() after detection.
    parser.add_argument('code_file', type=argparse.FileType('rb'))
    parser.add_argument('-f', '--field', default="", type=str,
                        help="field to draw flowchart. (e.g. Class.method)")
    parser.add_argument('-i', '--inner', action="store_true",
                        help="parse the body of field")
    parser.add_argument('--no-simplify', action="store_false",
                        help="do not simplify the one-line-body If/Loop")
    args = parser.parse_args()
    # An empty field means the whole module (ast Module): walk its body.
    if not args.field:
        args.inner = True
main(args.code_file, args.field, args.inner, args.no_simplify) | pyflowchart/__main__.py | import sys
import chardet
import argparse
from pyflowchart.flowchart import Flowchart
def detect_decode(file_content: bytes) -> str:
"""detect_decode detect the encoding of file_content,
then decode file_content on the detected encoding.
If the confidence of detect result is less then 0.9,
the UTF-8 will be used to decode. PyFlowchart is
designed to convert Python 3 codes into flowcharts.
And Python 3 is coding in UTF-8 in default. So only
if we can make sure the file is not UTF-8 encoded (
i.e. confidence > 0.9) than we will use that no
default encoding to decoded it.
Args:
file_content: bytes: binary file content to decode
Returns:
str: decoded content
"""
# detect encoding
detect_result = chardet.detect(file_content)
# print("DEBUG detect_result =", detect_result)
encoding = detect_result.get("encoding")
confidence = detect_result.get("confidence")
if confidence < 0.9:
encoding = "UTF-8"
# decode file content by detected encoding
try:
content = file_content.decode(encoding=encoding)
except TypeError: # TypeError: decode() argument 1 must be str, not None
content = file_content.decode()
return content
def main(code_file, field, inner, simplify):
# read file content: binary
file_content: bytes = code_file.read()
# detect encoding and decode file content by detected encoding
code = detect_decode(file_content)
flowchart = Flowchart.from_code(code, field=field, inner=inner, simplify=simplify)
print(flowchart.flowchart())
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Python code to flowchart.')
# code_file: open as binary, detect encoding and decode in main later
parser.add_argument('code_file', type=argparse.FileType('rb'))
parser.add_argument('-f', '--field', default="", type=str, help="field to draw flowchart. (e.g. Class.method)")
parser.add_argument('-i', '--inner', action="store_true", help="parse the body of field")
parser.add_argument('--no-simplify', action="store_false", help="do not simplify the one-line-body If/Loop")
args = parser.parse_args()
if not args.field: # field="", parse the whole file (ast Module), should use the body
args.inner = True
main(args.code_file, args.field, args.inner, args.no_simplify) | 0.441673 | 0.254787 |
import json
from datetime import date, datetime
import sqlalchemy.types as types
from dbbase import DB
db = DB(config=":memory:")
status_codes = [
[0, "New"],
[1, "Active"],
[2, "Suspended"],
[3, "Inactive"],
]
class StatusCodes(types.TypeDecorator):
    """Custom column type mapping human-readable status strings to the
    integer codes stored in the database.
    """
    impl = types.Integer
    def __init__(self, status_codes, **kw):
        # {code: label} lookup built from the [code, label] pairs.
        self.choices = dict(status_codes)
        super(StatusCodes, self).__init__(**kw)
    def process_bind_param(self, value, dialect):
        """called when saving to the database"""
        # Reverse lookup label -> code; IndexError on an unknown label.
        matches = [code for code, label in self.choices.items() if label == value]
        return matches[0]
    def process_result_value(self, value, dialect):
        """called when pulling from database"""
        return self.choices[value]
class BigTable(db.Model):
    """Test class with a variety of column types."""
    __tablename__ = "big_table"
    # NOTE(review): nullable=True on a primary key is contradictory;
    # presumably the ORM forces primary keys to NOT NULL anyway -- confirm.
    id = db.Column(
        db.Integer,
        primary_key=True,
        nullable=True,
        comment="Primary key with a value assigned by the database",
        info={"extra": "info here"},
    )
    status_id = db.Column(
        StatusCodes(status_codes),
        nullable=False,
        comment="Choices from a list. String descriptors "
        "change to integer upon saving. Enums without the headache.",
    )
    @db.orm.validates("status_id")
    def _validate_id(self, key, id):
        """Reject status values that are not in the status_codes list.

        Args:
            id: the incoming status value; must be a key of status_codes
        Raises:
            ValueError: if the value is not a known status code
        """
        if id not in dict(status_codes):
            raise ValueError("{} is not valid".format(id))
        return id
    # nullable / not nullable
    name1 = db.Column(
        db.String(50), nullable=False, comment="This field is required"
    )
    name2 = db.Column(
        db.String(50), nullable=True, comment="This field is not required",
    )
    # text default
    name3 = db.Column(
        db.Text,
        default="test",
        nullable=False,
        comment="This field has a default value",
        index=True,
    )
    item_length = db.Column(
        db.Float, nullable=False, comment="This field is a float value"
    )
    item_amount = db.Column(db.Numeric(17, 6), default=0.0)
    # integer and default
    some_small_int = db.Column(
        db.SmallInteger,
        default=0,
        nullable=False,
        comment="This field is a small integer",
    )
    some_int = db.Column(
        db.Integer,
        default=0,
        nullable=False,
        comment="This field is a 32 bit integer",
    )
    some_big_int = db.Column(
        db.BigInteger,
        default=0,
        nullable=False,
        comment="This field is a big integer",
    )
    fk_id = db.Column(
        db.Integer,
        db.ForeignKey("other_table.id"),
        nullable=False,
        # BUG FIX: adjacent string literals concatenated without a space,
        # producing "...foreign key onanother table".
        comment="This field is constrained by a foreign key on "
        "another table",
    )
    today = db.Column(
        db.Date,
        doc="this is a test",
        info={"test": "this is"},
        comment="This field defaults to today, created at model level",
        default=date.today,
    )
    created_at1 = db.Column(
        db.DateTime,
        default=datetime.now,
        comment="This field defaults to now, created at model level",
    )
    created_at2 = db.Column(
        db.DateTime,
        server_default=db.func.now(),
        # BUG FIX: missing space produced "...at the serverlevel".
        comment="This field defaults to now, created at the server "
        "level",
    )
    update_time1 = db.Column(
        db.DateTime,
        onupdate=datetime.now,
        comment="This field defaults only on updates",
    )
    update_time2 = db.Column(
        db.DateTime,
        server_onupdate=db.func.now(),
        # BUG FIX: missing space produced "...but on theserver".
        comment="This field defaults only on updates, but on the "
        "server",
    )
    unique_col = db.Column(
        db.String(20),
        unique=True,
        comment="This must be a unique value in the database.",
    )
    # adapted from sqlalchemy docs
    abc = db.Column(
        db.String(20),
        server_default="abc",
        comment="This field defaults to text but on the server.",
    )
    index_value = db.Column(
        db.Integer,
        server_default=db.text("0"),
        comment="This field defaults to an integer on the server.",
    )
    # BUG FIX: the attribute was misspelled '__table_args' (missing the
    # trailing underscores), so SQLAlchemy silently ignored it and the
    # composite unique index was never created.
    __table_args__ = (db.Index("ix_name1_name2", "name1", "name2", unique=True),)
class OtherTable(db.Model):
    """
    This table is used solely to enable an option for a foreign key.
    """
    __tablename__ = "other_table"
    # NOTE(review): nullable=True on a primary key is contradictory;
    # presumably the ORM forces primary keys to NOT NULL -- confirm.
    id = db.Column(db.Integer, primary_key=True, nullable=True)
# using JSON for a better output
print(json.dumps(db.doc_table(BigTable), indent=4)) | examples/table_documentation.py | import json
from datetime import date, datetime
import sqlalchemy.types as types
from dbbase import DB
db = DB(config=":memory:")
status_codes = [
[0, "New"],
[1, "Active"],
[2, "Suspended"],
[3, "Inactive"],
]
class StatusCodes(types.TypeDecorator):
"""
Status codes are entered as strings and converted to
integers when saved to the database.
"""
impl = types.Integer
def __init__(self, status_codes, **kw):
self.choices = dict(status_codes)
super(StatusCodes, self).__init__(**kw)
def process_bind_param(self, value, dialect):
"""called when saving to the database"""
return [k for k, v in self.choices.items() if v == value][0]
def process_result_value(self, value, dialect):
"""called when pulling from database"""
return self.choices[value]
class BigTable(db.Model):
"""Test class with a variety of column types"""
__tablename__ = "big_table"
id = db.Column(
db.Integer,
primary_key=True,
nullable=True,
comment="Primary key with a value assigned by the database",
info={"extra": "info here"},
)
status_id = db.Column(
StatusCodes(status_codes),
nullable=False,
comment="Choices from a list. String descriptors "
"change to integer upon saving. Enums without the headache.",
)
@db.orm.validates("status_id")
def _validate_id(self, key, id):
"""_validate_id
Args:
id: (int) : id must be in cls.choices
"""
if id not in dict(status_codes):
raise ValueError("{} is not valid".format(id))
return id
# nullable / not nullable
name1 = db.Column(
db.String(50), nullable=False, comment="This field is required"
)
name2 = db.Column(
db.String(50), nullable=True, comment="This field is not required",
)
# text default
name3 = db.Column(
db.Text,
default="test",
nullable=False,
comment="This field has a default value",
index=True,
)
item_length = db.Column(
db.Float, nullable=False, comment="This field is a float value"
)
item_amount = db.Column(db.Numeric(17, 6), default=0.0)
# integer and default
some_small_int = db.Column(
db.SmallInteger,
default=0,
nullable=False,
comment="This field is a small integer",
)
some_int = db.Column(
db.Integer,
default=0,
nullable=False,
comment="This field is a 32 bit integer",
)
some_big_int = db.Column(
db.BigInteger,
default=0,
nullable=False,
comment="This field is a big integer",
)
fk_id = db.Column(
db.Integer,
db.ForeignKey("other_table.id"),
nullable=False,
comment="This field is constrained by a foreign key on"
"another table",
)
today = db.Column(
db.Date,
doc="this is a test",
info={"test": "this is"},
comment="This field defaults to today, created at model level",
default=date.today,
)
created_at1 = db.Column(
db.DateTime,
default=datetime.now,
comment="This field defaults to now, created at model level",
)
created_at2 = db.Column(
db.DateTime,
server_default=db.func.now(),
comment="This field defaults to now, created at the server" "level",
)
update_time1 = db.Column(
db.DateTime,
onupdate=datetime.now,
comment="This field defaults only on updates",
)
update_time2 = db.Column(
db.DateTime,
server_onupdate=db.func.now(),
comment="This field defaults only on updates, but on the" "server",
)
unique_col = db.Column(
db.String(20),
unique=True,
comment="This must be a unique value in the database.",
)
# adapted from sqlalchemy docs
abc = db.Column(
db.String(20),
server_default="abc",
comment="This field defaults to text but on the server.",
)
index_value = db.Column(
db.Integer,
server_default=db.text("0"),
comment="This field defaults to an integer on the server.",
)
__table_args = (db.Index("ix_name1_name2", "name1", "name2", unique=True),)
class OtherTable(db.Model):
"""
This table is used solely to enable an option for a foreign key.
"""
__tablename__ = "other_table"
id = db.Column(db.Integer, primary_key=True, nullable=True)
# using JSON for a better output
print(json.dumps(db.doc_table(BigTable), indent=4)) | 0.652574 | 0.275978 |
import argparse
import os
import pathlib
import subprocess
import sys
def error(msg):
    """Print *msg* to stderr and terminate the process with exit code 1."""
    # Diagnostics belong on stderr so they don't pollute captured stdout.
    print(msg, file=sys.stderr)
    # raise SystemExit instead of the site-provided exit() helper, which
    # is not guaranteed to exist outside interactive interpreters.
    raise SystemExit(1)
def is_windows():
    """Return True when running on Windows (os.name == 'nt')."""
    # Return the comparison directly instead of if/else with literals.
    return os.name == 'nt'
def get_shader_list(project_path, asset_platform, shader_type, shader_platform, shadergen_path):
    """
    Gets the shader list for a specific platform using ShaderCacheGen.

    Right now the shader list will always output at <project-path>/user/Cache/Shaders
    That will change when this is updated to take a destination path.

    :param project_path:    path to the project root
    :param asset_platform:  asset cache sub folder used for shader generation
    :param shader_type:     shader type to request
    :param shader_platform: currently unused here; kept for call-site compatibility
    :param shadergen_path:  directory containing the ShaderCacheGen executable
    """
    shadergen_path = os.path.join(shadergen_path, 'ShaderCacheGen')
    if is_windows():
        shadergen_path += '.exe'
    command_args = [
        shadergen_path,
        # BUG FIX: a missing comma after this element caused implicit string
        # concatenation, producing the single malformed argument
        # "--project-path=<path>--GetShaderList".
        f'--project-path={project_path}',
        '--GetShaderList',
        '--ShadersPlatform={}'.format(shader_type),
        '--TargetPlatform={}'.format(asset_platform)
    ]
    if not os.path.isfile(shadergen_path):
        error("[ERROR] ShaderCacheGen could not be found at {}".format(shadergen_path))
    else:
        command = ' '.join(command_args)
        print('[INFO] get_shader_list: Running command - {}'.format(command))
        try:
            subprocess.check_call(command, shell=True)
        except subprocess.CalledProcessError:
            error('[ERROR] Failed to get the shader list for {}'.format(shader_type))
parser = argparse.ArgumentParser(description='Gets the shader list for a specific platform from the current shader compiler server')
parser.add_argument('-g', '--project-path', type=pathlib.Path, required=True, help="Path to the project")
# BUG FIX: the positional names used dashes ('asset-platform', 'shader-type').
# argparse keeps a positional's dest exactly as written, so `args.asset_platform`
# raised AttributeError. Underscored names keep the command line identical
# (positionals are not typed by name) while making the attributes accessible.
parser.add_argument('asset_platform', type=str, help="The asset cache sub folder to use for shader generation")
parser.add_argument('shader_type', type=str, help="The shader type to use")
parser.add_argument('-p', '--shader_platform', type=str, required=False, default='', help="The target platform to generate shaders for.")
parser.add_argument('-s', '--shadergen_path', type=str, help="Path to where the ShaderCacheGen executable lives")
args = parser.parse_args()
print('Getting shader list for {}'.format(args.asset_platform))
get_shader_list(args.project_path, args.asset_platform, args.shader_type, args.shader_platform, args.shadergen_path)
print('Finish getting shader list') | scripts/bundler/get_shader_list.py | import argparse
import os
import pathlib
import subprocess
def error(msg):
    """Print *msg* and abort the process with exit status 1."""
    print(msg)
    # Raise SystemExit directly: the builtin `exit()` is an interactive helper
    # injected by the `site` module and may be absent (e.g. under `python -S`).
    raise SystemExit(1)
def is_windows():
    """Return True when running on a Windows host (``os.name == 'nt'``)."""
    return os.name == 'nt'
def get_shader_list(project_path, asset_platform, shader_type, shader_platform, shadergen_path):
    """
    Gets the shader list for a specific platform using ShaderCacheGen.

    Right now the shader list will always output at <project-path>/user/Cache/Shaders
    That will change when this is updated to take a destination path.

    :param project_path:    path to the project root
    :param asset_platform:  asset cache sub folder used for shader generation
    :param shader_type:     shader type to request
    :param shader_platform: currently unused here; kept for call-site compatibility
    :param shadergen_path:  directory containing the ShaderCacheGen executable
    """
    shadergen_path = os.path.join(shadergen_path, 'ShaderCacheGen')
    if is_windows():
        shadergen_path += '.exe'
    command_args = [
        shadergen_path,
        # BUG FIX: a missing comma after this element caused implicit string
        # concatenation, producing the single malformed argument
        # "--project-path=<path>--GetShaderList".
        f'--project-path={project_path}',
        '--GetShaderList',
        '--ShadersPlatform={}'.format(shader_type),
        '--TargetPlatform={}'.format(asset_platform)
    ]
    if not os.path.isfile(shadergen_path):
        error("[ERROR] ShaderCacheGen could not be found at {}".format(shadergen_path))
    else:
        command = ' '.join(command_args)
        print('[INFO] get_shader_list: Running command - {}'.format(command))
        try:
            subprocess.check_call(command, shell=True)
        except subprocess.CalledProcessError:
            error('[ERROR] Failed to get the shader list for {}'.format(shader_type))
parser = argparse.ArgumentParser(description='Gets the shader list for a specific platform from the current shader compiler server')
parser.add_argument('-g', '--project-path', type=pathlib.Path, required=True, help="Path to the project")
# BUG FIX: the positional names used dashes ('asset-platform', 'shader-type').
# argparse keeps a positional's dest exactly as written, so `args.asset_platform`
# raised AttributeError. Underscored names keep the command line identical
# (positionals are not typed by name) while making the attributes accessible.
parser.add_argument('asset_platform', type=str, help="The asset cache sub folder to use for shader generation")
parser.add_argument('shader_type', type=str, help="The shader type to use")
parser.add_argument('-p', '--shader_platform', type=str, required=False, default='', help="The target platform to generate shaders for.")
parser.add_argument('-s', '--shadergen_path', type=str, help="Path to where the ShaderCacheGen executable lives")
args = parser.parse_args()
print('Getting shader list for {}'.format(args.asset_platform))
get_shader_list(args.project_path, args.asset_platform, args.shader_type, args.shader_platform, args.shadergen_path)
print('Finish getting shader list') | 0.321993 | 0.071429 |
import logging
from getpass import getpass
from argparse import ArgumentParser
from configparser import SafeConfigParser
import concurrent.futures
import os
import uuid
import datetime
import asyncio
from urllib.parse import urljoin
import slixmpp
import pyinotify
class EventHandler(pyinotify.ProcessEvent):
    """pyinotify handler that publishes files moved into a watched directory.

    For every IN_MOVED_TO event it creates a uniquely named symlink in
    ``linkto`` and sends the matching URL to ``recipient`` over XMPP.
    """

    def my_init(self, xmppclient, linkto, baseurl, recipient, loop=None):
        # pyinotify forwards the kwargs given to ProcessEvent() into my_init()
        self.loop = loop or asyncio.get_event_loop()
        self.xmppclient = xmppclient
        self.linkto = linkto
        self.baseurl = baseurl
        self.recipient = recipient

    def process_IN_MOVED_TO(self, event):
        # Collision-resistant name: timestamp + short random hex + original extension.
        stamp = datetime.datetime.now().strftime("%Y%m%d_%H.%M.%S")
        token = uuid.uuid4().hex[:8]
        ext = os.path.splitext(event.pathname)[1]
        linkname = "%s%s%s" % (stamp, token, ext)
        os.symlink(event.pathname, os.path.join(self.linkto, linkname))
        self.xmppclient.send_message(mto=self.recipient,
                                     mbody=urljoin(self.baseurl, linkname),
                                     mtype='chat')
class SendMsgBot(slixmpp.ClientXMPP):
    """
    XMPP bot that will hold a connection open while watching for pyinotify events.
    """

    def __init__(self, jid, password, auto_reconnect=False):
        slixmpp.ClientXMPP.__init__(self, jid, password)

        # The session_start event will be triggered when the bot establishes
        # its connection with the server and the XML streams are ready for
        # use. We want to listen for this event so that we can initialize
        # our roster.
        self.add_event_handler("session_start", self.start)
        self.add_event_handler("message", self.echo)
        self.add_event_handler("disconnected", self.end)
        # When auto-reconnect is requested, keep the session alive across drops.
        self.end_session_on_disconnect = not auto_reconnect

    def start(self, event):
        """
        Process the session_start event.

        Typical actions for the session_start event are
        requesting the roster and broadcasting an initial
        presence stanza.

        Arguments:
            event -- An empty dictionary. The session_start
                     event does not provide any additional
                     data.
        """
        self.send_presence()
        self.get_roster()

    def end(self, event):
        """
        Process the session_end event. In this case, reconnect unless
        we were specifically told to "die".
        """
        if not self.end_session_on_disconnect:
            self.connect(address=('talk.google.com', 5222))

    # MODERNIZED: `@asyncio.coroutine` + `yield from` was deprecated in
    # Python 3.8 and removed in 3.11; a native coroutine is the drop-in
    # replacement and is supported by slixmpp event handlers.
    async def echo(self, msg):
        """Reply to incoming chat messages, honouring a few command words
        ('forget on', 'forget off', 'die'); any other body is echoed back."""
        if msg['type'] in ('chat', 'normal'):
            if msg['body'] == 'forget on':
                ret = await self.plugin['google']['nosave'].enable(jid=msg['from'].bare)
                msg.reply("%s recording disabled" % msg['from']).send()
            elif msg['body'] == 'forget off':
                msg.reply("%s recording enabled" % msg['from']).send()
                ret = await self.plugin['google']['nosave'].disable(jid=msg['from'].bare)
            elif msg['body'] == 'die':
                # Stop reconnecting and drop the connection for good.
                self.end_session_on_disconnect = True
                self.disconnect()
            else:
                msg.reply("%s sent %s" % (msg["from"], msg["body"])).send()
if __name__ == '__main__':
# Setup the command line arguments.
parser = ArgumentParser(description=SendMsgBot.__doc__)
# Config file location
parser.add_argument('-c', '--conf', help='location of config file',
dest="conf", default='imgnotifybot.conf', metavar='FILE')
# Output verbosity options.
parser.add_argument("-q", "--quiet", help="set logging to ERROR",
action="store_const", dest="loglevel",
const=logging.ERROR, default=logging.INFO)
parser.add_argument("-d", "--debug", help="set logging to DEBUG",
action="store_const", dest="loglevel",
const=logging.DEBUG, default=logging.INFO)
args = parser.parse_args()
# Setup logging
logging.basicConfig(level=args.loglevel,
format='%(levelname)-8s %(message)s')
# Load config. SafeConfigParser was renamed to ConfigParser in Python 3.2;
# the old alias emits a DeprecationWarning and was removed in Python 3.12.
from configparser import ConfigParser
config = ConfigParser()
config.read(args.conf)
# Initialize our XMPP bot and register plugins
xmpp = SendMsgBot(config['credentials']['jid'], config['credentials']['password'],
auto_reconnect=True)
xmpp.register_plugin('xep_0030') # Service Discovery
xmpp.register_plugin('xep_0199') # XMPP Ping
xmpp.register_plugin('google')
# Set a "breakpoint" in the event loop when we're ready to run messages
loop = asyncio.get_event_loop()
xmpp.connected_event_one = asyncio.Event()
callback_one = lambda _: xmpp.connected_event_one.set()
xmpp.add_event_handler('session_start', callback_one)
xmpp.add_event_handler('session_end', lambda _: loop.stop())
# Connect to the XMPP server and run until we're ready to send messages.
xmpp.connect(address=('talk.google.com', 5222))
loop.run_until_complete(xmpp.connected_event_one.wait())
# For each [watch.*] section in the config, register a pyinotify listener
for watcher in [dict(config[x]) for x in config.sections() if x.startswith("watch")]:
wm = pyinotify.WatchManager()
mask = pyinotify.IN_MOVED_TO # watched events
wm.add_watch(watcher["watchdir"], mask)
handler = EventHandler(xmppclient=xmpp, linkto=watcher["linkto"],
baseurl=watcher["baseurl"], recipient=watcher["msgto"],
loop=loop)
notifier = pyinotify.AsyncioNotifier(wm, loop, default_proc_fun=handler)
# Start turning the event crank
loop.run_forever() | imgnotifybot.py |
import logging
from getpass import getpass
from argparse import ArgumentParser
from configparser import SafeConfigParser
import concurrent.futures
import os
import uuid
import datetime
import asyncio
from urllib.parse import urljoin
import slixmpp
import pyinotify
class EventHandler(pyinotify.ProcessEvent):
def my_init(self, xmppclient, linkto, baseurl, recipient, loop=None):
self.loop = loop if loop else asyncio.get_event_loop()
self.xmppclient = xmppclient
self.linkto = linkto
self.baseurl = baseurl
self.recipient = recipient
def process_IN_MOVED_TO(self, event):
datestr = datetime.datetime.now().strftime("%Y%m%d_%H.%M.%S")
uuidstr = uuid.uuid4().hex[:8]
extstr = os.path.splitext(event.pathname)[1]
filename = "%s%s%s" % (datestr, uuidstr, extstr)
os.symlink(event.pathname, os.path.join(self.linkto, filename))
self.xmppclient.send_message(mto=self.recipient,
mbody=urljoin(self.baseurl, filename),
mtype='chat')
class SendMsgBot(slixmpp.ClientXMPP):
"""
XMPP bot that will hold a connection open while watching for pyinotify events.
"""
def __init__(self, jid, password, auto_reconnect=False):
slixmpp.ClientXMPP.__init__(self, jid, password)
# The session_start event will be triggered when
# the bot establishes its connection with the server
# and the XML streams are ready for use. We want to
# listen for this event so that we we can initialize
# our roster.
self.add_event_handler("session_start", self.start)
self.add_event_handler("message", self.echo)
self.add_event_handler("disconnected", self.end)
self.end_session_on_disconnect = not auto_reconnect
def start(self, event):
"""
Process the session_start event.
Typical actions for the session_start event are
requesting the roster and broadcasting an initial
presence stanza.
Arguments:
event -- An empty dictionary. The session_start
event does not provide any additional
data.
"""
self.send_presence()
self.get_roster()
def end(self, event):
"""
Process the session_end event. In this case, reconnect unless
we were specifically told to "die".
"""
if not self.end_session_on_disconnect:
self.connect(address=('talk.google.com', 5222))
@asyncio.coroutine
def echo(self, msg):
if msg['type'] in ('chat', 'normal'):
if msg['body'] == 'forget on':
ret = yield from self.plugin['google']['nosave'].enable(jid=msg['from'].bare)
msg.reply("%s recording disabled" % msg['from']).send()
elif msg['body'] == 'forget off':
msg.reply("%s recording enabled" % msg['from']).send()
ret = yield from self.plugin['google']['nosave'].disable(jid=msg['from'].bare)
elif msg['body'] == 'die':
self.end_session_on_disconnect = True
self.disconnect()
else:
msg.reply("%s sent %s" % (msg["from"], msg["body"])).send()
if __name__ == '__main__':
# Setup the command line arguments.
parser = ArgumentParser(description=SendMsgBot.__doc__)
# Config file location
parser.add_argument('-c', '--conf', help='location of config file',
dest="conf", default='imgnotifybot.conf', metavar='FILE')
# Output verbosity options.
parser.add_argument("-q", "--quiet", help="set logging to ERROR",
action="store_const", dest="loglevel",
const=logging.ERROR, default=logging.INFO)
parser.add_argument("-d", "--debug", help="set logging to DEBUG",
action="store_const", dest="loglevel",
const=logging.DEBUG, default=logging.INFO)
args = parser.parse_args()
# Setup logging
logging.basicConfig(level=args.loglevel,
format='%(levelname)-8s %(message)s')
# Load config
config = SafeConfigParser()
config.read(args.conf)
# Initialize our XMPP bot and register plugins
xmpp = SendMsgBot(config['credentials']['jid'], config['credentials']['password'],
auto_reconnect=True)
xmpp.register_plugin('xep_0030') # Service Discovery
xmpp.register_plugin('xep_0199') # XMPP Ping
xmpp.register_plugin('google')
# Set a "breakpoint" in the event loop when we're ready to run messages
loop = asyncio.get_event_loop()
xmpp.connected_event_one = asyncio.Event()
callback_one = lambda _: xmpp.connected_event_one.set()
xmpp.add_event_handler('session_start', callback_one)
xmpp.add_event_handler('session_end', lambda _: loop.stop())
# Connect to the XMPP server and run until we're ready to send messages.
xmpp.connect(address=('talk.google.com', 5222))
loop.run_until_complete(xmpp.connected_event_one.wait())
# For each [watch.*] section in the config, register a pyinotify listener
for watcher in [dict(config[x]) for x in config.sections() if x.startswith("watch")]:
wm = pyinotify.WatchManager()
mask = pyinotify.IN_MOVED_TO # watched events
wm.add_watch(watcher["watchdir"], mask)
handler = EventHandler(xmppclient=xmpp, linkto=watcher["linkto"],
baseurl=watcher["baseurl"], recipient=watcher["msgto"],
loop=loop)
notifier = pyinotify.AsyncioNotifier(wm, loop, default_proc_fun=handler)
# Start turning the event crank
loop.run_forever() | 0.50293 | 0.066327 |
def axisymmetric_file(geometry_type, geometry_parameters, Nrank, wavelength,
                      index, index_m, kb=None, conducting=False, Nparam=1,
                      use_ds=True, complex_plane=True, eps_z_re_im=0.95, Nint=200):
    """Create input file for axisymmetric particles.

    Returns the input-file contents as a single string (built from the
    template below with all lengths normalized by the wavelength).

    Arguments:
        geometry_type       (int)     choose from 1 (spheroid), 2 (cylinder), 3 (rounded oblate cylinder)
        geometry_parameters (list)    geometric parameters ([radius along symmetry axis, radius along other axes])
        Nrank               (int)     maximum number of multipoles
        wavelength          (float)   wavelength of incident light
        index               (complex) index of refraction of the particle
        index_m             (float)   index of refraction of the medium
        kb                  (float)   parameter of chirality (default: None [no chirality])
        conducting          (bool)    if True, particle is conducting (default: False)
        Nparam              (int)     number of smooth curves used in approximate surface (default: 1)
        use_ds              (bool)    if True, use discrete sources (default: True)
        complex_plane       (bool)    if True, distribute discrete sources in complex plane (default: True)
        eps_z_re_im         (float)   parameter used to distribute discrete sources (default: 0.95)
        Nint                (int)     number of points used in integration (default: 200)
    """
    # All lengths are expressed in units of the wavelength: scale the particle
    # dimensions, then work with a unit wavelength in the template below.
    # NOTE(review): geometry_xy is taken from geometry_parameters[0], which the
    # docstring calls the symmetry-axis radius — confirm the xy/z naming.
    geometry_xy = geometry_parameters[0]/wavelength
    geometry_z = geometry_parameters[1]/wavelength
    wavelength = 1
    # kb=None means "not chiral", but the template still needs a numeric kb.
    if kb is None:
        chiral = False
        kb = 1
    else:
        chiral = True
file_str_template = """OptProp
{wavelength}
{index_m.real}
({index.real}, {index.imag})
Variables:
- wavelength - wavelength of the incident light in vacuo.
- ind_refMed - refractive index of the ambient medium.
- ind_refRel - relative refractive index of the particle.
MatProp
.{conducting}.
.{chiral}.
{kb}
Variables:
- perfectcond - if perfectcond = t, the particle is perfectly conducting.
- chiral - if chiral = t, the particle is optical active (chiral).
- kb - parameter of chirality.
GeomProp
.false.
'../GEOMFILES/prolate.fem'
{geometry_type}
2
{geometry_xy}
{geometry_z}
{Nparam}
1.0
1.0
.false.
Variables:
- FileGeom - if FileGeom = t, the particle geometry is supplied by the
input file FileFEM.
- FileFEM - name of the file containing the particle geometry.
- TypeGeom - parameter specifying the type of the particle geometry.
- Nsurf - number of surface parameters.
- surf(1) - surface parameter.
- ...
- surf(Nsurf
- Nparam - number of smooth curves forming the generatrix curve.
- anorm - characteristic length of the particle which is used to
normalize the differential scattering cross sections.
- Rcirc - characteristic length of the particle for computing Nrank.
- miror - if miror = t, the particle is mirror symmetric.
NOTE: FOR CHIRAL PARTICLES AND DISTRIBUTED SOURCES SET miror = f.
ConvTest
.false.
.false.
Variables:
- DoConvTest - if DoConvTest = t, the interactive convergence tests
over Nint and Nrank are performed.
- MishConvTest - if MishConvTest = t, estimates of Nint and Nrank are
computed with the convergence criterion proposed by
Mishchenko.
NOTE: IF THE PARTICLE IS OPTICAL ACTIVE (chiral = t) OR THE PARTICLE
GEOMETRY IS SUPPLIED BY THE FILE FileFEM (FileGeom = t), THE CODE SETS
MishConvTest = f. IN FACT, MISHCHENKOS CONVERGENCE TEST WILL BE
PERFORMED IF (DS = f AND DoConvTest = t AND chiral = f AND FileGeom = f),
OR (DS = t AND autGenDS = t AND DoConvTest = t AND chiral = f AND
FileGeom = f).
Sources
.{use_ds}.
.true.
Variables:
- DS - if DS = t, distributed sources are used for T-matrix
calculation.
- autGenDS - if autGenDS = t, the coordinates of the distributed sources
are generated by the code.
NOTE: IF THE PARTICLE GEOMETRY IS READ FROM FILE (FileGeom = t),
THE CODE SETS autgenDS = f.
SourcePosAut
.{complex_plane}.
{eps_z_re_im}
Variables:
- ComplexPlane - if ComplexPlane = t, the distributed sources are placed
in the complex plane.
- EpsZReIm - parameter controlling the distribution of the discrete
sources.
NOTE: THESE VARIABLES MUST BE PROVIDED IF (DS = t AND autgenDS = t).
NintNrank
{Nint}
{Nrank}
Variables:
- Nint - number of integration points in computing integrals over the
generatrix curve.
- Nrank - maximum expansion order.
NOTE: THESE VARIABLES MUST BE PROVIDED IF ((DoConvTest = f) OR
(DS = t AND autgenDS = f)).
Errors
5.e-2
5.e-2
1.e-2
4
50
Variables:
- epsNint - error tolerance for the integration test.
- epsNrank - error tolerance for the expansion order test.
- epsMrank - error tolerance for the azimuthal order test.
- dNint - number of division points for the integration test
and Mishchenkos convergence test.
- dNintMrank - number of division points for azimuthal mode
calculation.
Tmat
'../TMATFILES/tmatrix.dat'
Variable:
- FileTmat - name of the file to which the T matrix is written.
PrintProgress
.false.
Variable:
- PrnProgress - if PrnProgress = t, the progress of calculation
is printed.
"""
return file_str_template.format(geometry_type=geometry_type, geometry_xy=geometry_xy, geometry_z=geometry_z,
Nrank=Nrank, wavelength=wavelength, index=index/index_m, index_m=index_m, chiral=str(chiral).lower(),
kb=kb, conducting=str(conducting).lower(), Nparam=Nparam, use_ds=str(use_ds).lower(),
complex_plane=str(complex_plane).lower(), eps_z_re_im=eps_z_re_im, Nint=Nint) | miepy/tmatrix/axisymmetric_file.py | def axisymmetric_file(geometry_type, geometry_parameters, Nrank, wavelength,
index, index_m, kb=None, conducting=False, Nparam=1,
use_ds=True, complex_plane=True, eps_z_re_im=0.95, Nint=200):
"""Create input file for axisymmetric particles
Arguments:
geometry_type (int) choose from 1 (spheroid), 2 (cylinder), 3 (rounded oblate cylinder)
geometry_parameters (list) geometric parameters ([radius along symmetry axius, radius along other axes])
Nrank (int) maximum number of multipoles
wavelength (float) wavelength of incident light
index (complex) index of refraction of the particle
index_m (float) index of refraction of the medium
kb (float) parameter of chirality (default: None [no chirality])
conducting (bool) if True, particle is conducting (default: False)
Nparam (int) number of smooth curves used in approximate surface (default: 1)
use_ds (bool) if True, use discrete sources (default: True)
complex_plane (bool) if True, distribute discrete sources in complex plane (default: True)
eps_z_re_im (float) parameter used to distribute discrete sources (default: 0.95)
Nint (int) number of points used in integration (default: 200)
"""
geometry_xy = geometry_parameters[0]/wavelength
geometry_z = geometry_parameters[1]/wavelength
wavelength = 1
if kb is None:
chiral = False
kb = 1
else:
chiral = True
file_str_template = """OptProp
{wavelength}
{index_m.real}
({index.real}, {index.imag})
Variables:
- wavelength - wavelength of the incident light in vacuo.
- ind_refMed - refractive index of the ambient medium.
- ind_refRel - relative refractive index of the particle.
MatProp
.{conducting}.
.{chiral}.
{kb}
Variables:
- perfectcond - if perfectcond = t, the particle is perfectly conducting.
- chiral - if chiral = t, the particle is optical active (chiral).
- kb - parameter of chirality.
GeomProp
.false.
'../GEOMFILES/prolate.fem'
{geometry_type}
2
{geometry_xy}
{geometry_z}
{Nparam}
1.0
1.0
.false.
Variables:
- FileGeom - if FileGeom = t, the particle geometry is supplied by the
input file FileFEM.
- FileFEM - name of the file containing the particle geometry.
- TypeGeom - parameter specifying the type of the particle geometry.
- Nsurf - number of surface parameters.
- surf(1) - surface parameter.
- ...
- surf(Nsurf
- Nparam - number of smooth curves forming the generatrix curve.
- anorm - characteristic length of the particle which is used to
normalize the differential scattering cross sections.
- Rcirc - characteristic length of the particle for computing Nrank.
- miror - if miror = t, the particle is mirror symmetric.
NOTE: FOR CHIRAL PARTICLES AND DISTRIBUTED SOURCES SET miror = f.
ConvTest
.false.
.false.
Variables:
- DoConvTest - if DoConvTest = t, the interactive convergence tests
over Nint and Nrank are performed.
- MishConvTest - if MishConvTest = t, estimates of Nint and Nrank are
computed with the convergence criterion proposed by
Mishchenko.
NOTE: IF THE PARTICLE IS OPTICAL ACTIVE (chiral = t) OR THE PARTICLE
GEOMETRY IS SUPPLIED BY THE FILE FileFEM (FileGeom = t), THE CODE SETS
MishConvTest = f. IN FACT, MISHCHENKOS CONVERGENCE TEST WILL BE
PERFORMED IF (DS = f AND DoConvTest = t AND chiral = f AND FileGeom = f),
OR (DS = t AND autGenDS = t AND DoConvTest = t AND chiral = f AND
FileGeom = f).
Sources
.{use_ds}.
.true.
Variables:
- DS - if DS = t, distributed sources are used for T-matrix
calculation.
- autGenDS - if autGenDS = t, the coordinates of the distributed sources
are generated by the code.
NOTE: IF THE PARTICLE GEOMETRY IS READ FROM FILE (FileGeom = t),
THE CODE SETS autgenDS = f.
SourcePosAut
.{complex_plane}.
{eps_z_re_im}
Variables:
- ComplexPlane - if ComplexPlane = t, the distributed sources are placed
in the complex plane.
- EpsZReIm - parameter controlling the distribution of the discrete
sources.
NOTE: THESE VARIABLES MUST BE PROVIDED IF (DS = t AND autgenDS = t).
NintNrank
{Nint}
{Nrank}
Variables:
- Nint - number of integration points in computing integrals over the
generatrix curve.
- Nrank - maximum expansion order.
NOTE: THESE VARIABLES MUST BE PROVIDED IF ((DoConvTest = f) OR
(DS = t AND autgenDS = f)).
Errors
5.e-2
5.e-2
1.e-2
4
50
Variables:
- epsNint - error tolerance for the integration test.
- epsNrank - error tolerance for the expansion order test.
- epsMrank - error tolerance for the azimuthal order test.
- dNint - number of division points for the integration test
and Mishchenkos convergence test.
- dNintMrank - number of division points for azimuthal mode
calculation.
Tmat
'../TMATFILES/tmatrix.dat'
Variable:
- FileTmat - name of the file to which the T matrix is written.
PrintProgress
.false.
Variable:
- PrnProgress - if PrnProgress = t, the progress of calculation
is printed.
"""
return file_str_template.format(geometry_type=geometry_type, geometry_xy=geometry_xy, geometry_z=geometry_z,
Nrank=Nrank, wavelength=wavelength, index=index/index_m, index_m=index_m, chiral=str(chiral).lower(),
kb=kb, conducting=str(conducting).lower(), Nparam=Nparam, use_ds=str(use_ds).lower(),
complex_plane=str(complex_plane).lower(), eps_z_re_im=eps_z_re_im, Nint=Nint) | 0.809351 | 0.596609 |
import argparse
import datetime
import json
import os
import pdfminer.high_level
import sys
import wget
import sys
def query_yes_no(question, default="yes"):
    """Ask *question* on stdout and read a yes/no answer from stdin.

    ``default`` ("yes", "no" or None) is the answer assumed when the user just
    presses Enter; with None an explicit answer is required. Returns True for
    yes, False for no. Raises ValueError for an unrecognized default.
    """
    answers = {"yes": True, "y": True, "ye": True,
               "no": False, "n": False}
    prompts = {None: " [y/n] ", "yes": " [Y/n] ", "no": " [y/N] "}
    if default not in prompts:
        raise ValueError("invalid default answer: '%s'" % default)
    prompt = prompts[default]
    while True:
        sys.stdout.write(question + prompt)
        reply = input().lower()
        if not reply and default is not None:
            return answers[default]
        if reply in answers:
            return answers[reply]
        sys.stdout.write("Please respond with 'yes' or 'no' "
                         "(or 'y' or 'n').\n")
parser = argparse.ArgumentParser(description='Convert MK PDF to a human readable format.')
parser.add_argument("-y", "--year", type=int, default=datetime.datetime.now().year,
help="Change year in URL because sometimes it is wrong")
parser.add_argument('--urls', nargs='*',
help='PDF URLs in case you wish to add them manually for some reason')
parser.add_argument('-j', '--json',
help='Generate JSON output', dest='json', action='store_true')
parser.set_defaults(json=False)
args = vars(parser.parse_args())
files = []
if args['urls']:
    # BUG FIX: explicitly supplied --urls were previously ignored (only the
    # "no urls" branch populated `files`, so nothing was ever downloaded).
    files = list(args['urls'])
else:
    def get_pdf_url(year, period):
        """Build the schedule-PDF URL for the given exam period and year."""
        # Schedules are published under the academic year's starting year.
        if datetime.datetime.now().month < 10:
            year -= 1
        return f'http://poincare.matf.bg.ac.rs/~kmiljan/raspored/RASPORED_ISPITA_{period}_{year}.pdf'
    for period in ['JAN', 'FEB', 'JUN', 'JUL', 'SEP', 'OKT']:
        files.append(get_pdf_url(args['year'], period))
pdfs = []
print('Downloading PDFs...')
print(files)
for f in files:
overwrite = True
filename = f.split('/')[-1]
if os.path.exists(filename):
print(f'File exists: {f}')
overwrite = query_yes_no('Overwrite?', default='no')
if overwrite:
wget.download(f, out=filename)
pdfs.append(filename)
print('Downloaded PDFs.')
import conv
print('Parsing PDFs ...')
for pdf in pdfs:
print(f'======== {pdf} ========')
text = pdfminer.high_level.extract_text(pdf)
schedule = conv.convert(text)
if args['json']:
print(conv.schedule_to_json(schedule))
else:
conv.print_schedule(schedule)
print(f'=======================')
print('Done! Have a nice day.') | mkparser.py | import argparse
import datetime
import json
import os
import pdfminer.high_level
import sys
import wget
import sys
def query_yes_no(question, default="yes"):
valid = {"yes": True, "y": True, "ye": True,
"no": False, "n": False}
if default is None:
prompt = " [y/n] "
elif default == "yes":
prompt = " [Y/n] "
elif default == "no":
prompt = " [y/N] "
else:
raise ValueError("invalid default answer: '%s'" % default)
while True:
sys.stdout.write(question + prompt)
choice = input().lower()
if default is not None and choice == '':
return valid[default]
elif choice in valid:
return valid[choice]
else:
sys.stdout.write("Please respond with 'yes' or 'no' "
"(or 'y' or 'n').\n")
parser = argparse.ArgumentParser(description='Convert MK PDF to a human readable format.')
parser.add_argument("-y", "--year", type=int, default=datetime.datetime.now().year,
help="Change year in URL because sometimes it is wrong")
parser.add_argument('--urls', nargs='*',
help='PDF URLs in case you wish to add them manually for some reason')
parser.add_argument('-j', '--json',
help='Generate JSON output', dest='json', action='store_true')
parser.set_defaults(json=False)
args = vars(parser.parse_args())
files = []
if not args['urls']:
def get_pdf_url(year, period):
if datetime.datetime.now().month < 10:
year -= 1
return f'http://poincare.matf.bg.ac.rs/~kmiljan/raspored/RASPORED_ISPITA_{period}_{year}.pdf'
for period in ['JAN', 'FEB', 'JUN', 'JUL', 'SEP', 'OKT']:
files.append(get_pdf_url(args['year'], period))
pdfs = []
print('Downloading PDFs...')
print(files)
for f in files:
overwrite = True
filename = f.split('/')[-1]
if os.path.exists(filename):
print(f'File exists: {f}')
overwrite = query_yes_no('Overwrite?', default='no')
if overwrite:
wget.download(f, out=filename)
pdfs.append(filename)
print('Downloaded PDFs.')
import conv
print('Parsing PDFs ...')
for pdf in pdfs:
print(f'======== {pdf} ========')
text = pdfminer.high_level.extract_text(pdf)
schedule = conv.convert(text)
if args['json']:
print(conv.schedule_to_json(schedule))
else:
conv.print_schedule(schedule)
print(f'=======================')
print('Done! Have a nice day.') | 0.244814 | 0.091423 |
import time
import sched
import threading
from synapse.config import config
from synapse.logger import logger
@logger
class SynSched(threading.Thread):
def __init__(self):
    """Create the scheduler thread and its underlying `sched` queue."""
    self.logger.debug("Initializing the scheduler...")
    threading.Thread.__init__(self, name="SCHEDULER")
    # Start the scheduler
    # The delayfunc sleeps in 0.1 s slices (ignoring the requested delay) so
    # newly added or cancelled events are picked up quickly by run().
    self.scheduler = sched.scheduler(time.time, lambda x: time.sleep(.1))
def run(self):
    """Thread entry point: pump the sched queue until it is empty."""
    # BUG FIX: log before blocking. scheduler.run() only returns once the
    # event queue drains, so the old code logged "started" at shutdown time.
    self.logger.debug("Scheduler started...")
    self.scheduler.run()
def add_job(self, job, interval, actionargs=()):
    """Schedule `job(*actionargs)` to repeat every `interval` seconds.

    The first invocation happens immediately (inside _periodic).
    """
    self.logger.debug("Adding job '%s' to scheduler every %d seconds" %
                      (job, interval))
    self._periodic(self.scheduler, interval, job, actionargs=actionargs)
def update_job(self, job, interval, actionargs=()):
    """Add the job if unknown, or cancel and re-add it when its interval or
    arguments changed.

    The job is identified by `actionargs[0].__name__`, mirroring get_job().
    """
    job_name = actionargs[0].__name__
    existing_job = self.get_job(job_name)
    if existing_job is None:
        self.add_job(job, interval, actionargs)
    # event.argument is the (scheduler, interval, action, actionargs) tuple
    # that was passed to _periodic when the job was scheduled.
    elif (interval != existing_job.argument[1] or
            actionargs != existing_job.argument[3]):
        self.scheduler.cancel(existing_job)
        self.add_job(job, interval, actionargs)
def get_job(self, job_name):
    """Return the queued event for the job named `job_name`, or None.

    Each queued event's argument is (scheduler, interval, action, actionargs);
    jobs scheduled with actionargs are identified by actionargs[0].__name__,
    otherwise by the action's own __name__.
    """
    job = None
    for event in self.scheduler.queue:
        if len(event.argument[3]):
            if job_name == event.argument[3][0].__name__:
                job = event
        else:
            if job_name == event.argument[2].__name__:
                job = event
    return job
def _periodic(self, scheduler, interval, action, actionargs=()):
    """Run `action(*actionargs)` now, then re-enter itself into the sched
    queue so the job repeats every `interval` seconds."""
    # Re-arm before running the action so a failure cannot stop the cycle.
    args = (scheduler, interval, action, actionargs)
    scheduler.enter(interval, 1, self._periodic, args)
    try:
        action(*actionargs)
    except NotImplementedError:
        # Jobs may deliberately opt out of a run by raising NotImplementedError.
        pass
    except Exception as err:
        self.logger.error("Could not run job \'%s\' (%s)", action, err)
def shutdown(self):
    """Shuts down the scheduler by cancelling every queued event (run()
    returns once the queue is empty)."""
    self.logger.debug("Canceling scheduled events")
    # scheduler.queue is a fresh snapshot list, so cancelling while iterating
    # it is safe.
    for event in self.scheduler.queue:
self.scheduler.cancel(event) | synapse/scheduler.py | import time
import sched
import threading
from synapse.config import config
from synapse.logger import logger
@logger
class SynSched(threading.Thread):
def __init__(self):
self.logger.debug("Initializing the scheduler...")
threading.Thread.__init__(self, name="SCHEDULER")
# Start the scheduler
self.scheduler = sched.scheduler(time.time, lambda x: time.sleep(.1))
def run(self):
self.scheduler.run()
self.logger.debug("Scheduler started...")
def add_job(self, job, interval, actionargs=()):
self.logger.debug("Adding job '%s' to scheduler every %d seconds" %
(job, interval))
self._periodic(self.scheduler, interval, job, actionargs=actionargs)
def update_job(self, job, interval, actionargs=()):
job_name = actionargs[0].__name__
existing_job = self.get_job(job_name)
if existing_job is None:
self.add_job(job, interval, actionargs)
elif (interval != existing_job.argument[1] or
actionargs != existing_job.argument[3]):
self.scheduler.cancel(existing_job)
self.add_job(job, interval, actionargs)
def get_job(self, job_name):
job = None
for event in self.scheduler.queue:
if len(event.argument[3]):
if job_name == event.argument[3][0].__name__:
job = event
else:
if job_name == event.argument[2].__name__:
job = event
return job
def _periodic(self, scheduler, interval, action, actionargs=()):
args = (scheduler, interval, action, actionargs)
scheduler.enter(interval, 1, self._periodic, args)
try:
action(*actionargs)
except NotImplementedError:
pass
except Exception as err:
self.logger.error("Could not run job \'%s\' (%s)", action, err)
def shutdown(self):
"""Shuts down the scheduler."""
self.logger.debug("Canceling scheduled events")
for event in self.scheduler.queue:
self.scheduler.cancel(event) | 0.447702 | 0.075927 |
from typing import Dict, Callable, Union
import random
from ..calc.combat_data import AttackData
from ..calc import stats
def _(_: AttackData) -> None:
    """No-op hook used as the dispatch fallback for units without a skill."""
    pass
def fe7_silencer(atk: AttackData) -> None:
"""
With a crit/2% chance to activate, deals damage equal to the opponent's remaining HP.
Prevents other skills from activating on this attack.
Actually simply runs the hit calculation using 2RN, runs the crit calculation, and if Silencer
would activate, sets them both to 100%
"""
if atk.skillable:
avg_roll = (random.randint(0, 99) + random.randint(0, 99)) // 2
if avg_roll < atk.hit - atk.avo and random.randint(0, 99) < (atk.crit - atk.ddg) / 2:
# silencer activates
atk.dmg = atk.against.current_hp
atk.hit = 999
atk.crit = 999
atk.tags.append("silencer")
atk.skillable = False
return None
def fe7_devil(atk: AttackData) -> None:
"""
Rolls a random number [0-99], and if the number is less than (31 - Unit's Luck),
then sets the attacker as the defender for their own attack. Does not change any
other numbers (Hit, Crit, Prt, etc.) remain the same).
Also, does not return any particular message.
"""
luk = stats.calc_luk(atk.by)
if random.randint(0, 99) < (31 - luk):
atk.against = atk.by
return None
def brave(atk: AttackData) -> None:
"""
Adds another attack after this one, identical to it.
"""
if "brave" not in atk.tags:
atk.append(AttackData(
by=atk.by,
against=atk.against,
with_weapon=atk.with_weapon,
against_weapon=atk.against_weapon,
atk=atk.atk,
prt_rsl=atk.prt_rsl,
hit=atk.hit,
avo=atk.avo,
crit=atk.crit,
ddg=atk.ddg,
skillable=atk.skillable,
counterattack=atk.counterattack,
followup=atk.followup,
tags=["brave"] + atk.tags[:]
))
before_attack: Dict[Union[str, None], Callable[[AttackData], Union[Dict, None]]] = {
'brave': brave,
'fe7_devil': fe7_devil,
'fe7_silencer': fe7_silencer,
None: _,
} | FEArena/feaapi/api/skills/before_attack.py | from typing import Dict, Callable, Union
import random
from ..calc.combat_data import AttackData
from ..calc import stats
def _(_: AttackData) -> None:
pass
def fe7_silencer(atk: AttackData) -> None:
"""
With a crit/2% chance to activate, deals damage equal to the opponent's remaining HP.
Prevents other skills from activating on this attack.
Actually simply runs the hit calculation using 2RN, runs the crit calculation, and if Silencer
would activate, sets them both to 100%
"""
if atk.skillable:
avg_roll = (random.randint(0, 99) + random.randint(0, 99)) // 2
if avg_roll < atk.hit - atk.avo and random.randint(0, 99) < (atk.crit - atk.ddg) / 2:
# silencer activates
atk.dmg = atk.against.current_hp
atk.hit = 999
atk.crit = 999
atk.tags.append("silencer")
atk.skillable = False
return None
def fe7_devil(atk: AttackData) -> None:
"""
Rolls a random number [0-99], and if the number is less than (31 - Unit's Luck),
then sets the attacker as the defender for their own attack. Does not change any
other numbers (Hit, Crit, Prt, etc.) remain the same).
Also, does not return any particular message.
"""
luk = stats.calc_luk(atk.by)
if random.randint(0, 99) < (31 - luk):
atk.against = atk.by
return None
def brave(atk: AttackData) -> None:
"""
Adds another attack after this one, identical to it.
"""
if "brave" not in atk.tags:
atk.append(AttackData(
by=atk.by,
against=atk.against,
with_weapon=atk.with_weapon,
against_weapon=atk.against_weapon,
atk=atk.atk,
prt_rsl=atk.prt_rsl,
hit=atk.hit,
avo=atk.avo,
crit=atk.crit,
ddg=atk.ddg,
skillable=atk.skillable,
counterattack=atk.counterattack,
followup=atk.followup,
tags=["brave"] + atk.tags[:]
))
before_attack: Dict[Union[str, None], Callable[[AttackData], Union[Dict, None]]] = {
'brave': brave,
'fe7_devil': fe7_devil,
'fe7_silencer': fe7_silencer,
None: _,
} | 0.67405 | 0.4436 |
import pandas as pd
import numpy as np
import tensorflow as tf
class SlidingWindow(tf.keras.utils.Sequence):
def __init__(
self,
df: pd.DataFrame,
window_size: int,
target_features: list[str],
feature_names: list[str] = None,
horizon_size: int = 1,
jump: int = 0,
stride: int = 1,
):
if feature_names is None:
feature_names = list(df.columns)
self.x = df[feature_names].to_numpy()
self.y = df[target_features].to_numpy()
self.window_size = window_size
self.horizon_size = horizon_size
total = len(self.x) - horizon_size + 1
offset = window_size + jump
self.x_idx = list(range(0, total - offset, stride))
self.y_idx = list(range(offset, total, stride))
def __len__(self):
return len(self.x_idx)
def __getitem__(self, idx):
X = self.x[self.x_idx[idx] : self.x_idx[idx] + self.window_size]
y = self.y[self.y_idx[idx] : self.y_idx[idx] + self.horizon_size]
return X, y
def tabularize_dataframe(
df: pd.DataFrame,
window_size: int,
target_features: list[str],
feature_names: list[str] = None,
horizon_size: int = 1,
jump: int = 0,
stride: int = 1,
):
"""Tabularizes a Pandas dataframe.
Args:
df (pd.DataFrame): The dataframe.
window_size (int): Window size determines the number of rows included in each entry of X.
target_features (list[str]): A list of target names
feature_names (list[str], optional): A list of feature names. Defaults to None.
Returns:
(np.ndarray, np.ndarray): A tuple of (X,y)
"""
sw = SlidingWindow(
df,
window_size,
target_features,
feature_names,
horizon_size,
jump,
stride,
)
X, y = [], []
for _x, _y in sw:
X.append(_x)
y.append(_y)
return np.array(X), np.array(y)
if __name__ == "__main__":
from dataset import load_dataset
df = load_dataset("1D")
X, y = tabularize_dataframe(df, 10, ["active power"], ["active power"], 2)
print(X.shape, y.shape)
print(X[-1], y[-1]) | windpower/utils/tabularize.py | import pandas as pd
import numpy as np
import tensorflow as tf
class SlidingWindow(tf.keras.utils.Sequence):
def __init__(
self,
df: pd.DataFrame,
window_size: int,
target_features: list[str],
feature_names: list[str] = None,
horizon_size: int = 1,
jump: int = 0,
stride: int = 1,
):
if feature_names is None:
feature_names = list(df.columns)
self.x = df[feature_names].to_numpy()
self.y = df[target_features].to_numpy()
self.window_size = window_size
self.horizon_size = horizon_size
total = len(self.x) - horizon_size + 1
offset = window_size + jump
self.x_idx = list(range(0, total - offset, stride))
self.y_idx = list(range(offset, total, stride))
def __len__(self):
return len(self.x_idx)
def __getitem__(self, idx):
X = self.x[self.x_idx[idx] : self.x_idx[idx] + self.window_size]
y = self.y[self.y_idx[idx] : self.y_idx[idx] + self.horizon_size]
return X, y
def tabularize_dataframe(
df: pd.DataFrame,
window_size: int,
target_features: list[str],
feature_names: list[str] = None,
horizon_size: int = 1,
jump: int = 0,
stride: int = 1,
):
"""Tabularizes a Pandas dataframe.
Args:
df (pd.DataFrame): The dataframe.
window_size (int): Window size determines the number of rows included in each entry of X.
target_features (list[str]): A list of target names
feature_names (list[str], optional): A list of feature names. Defaults to None.
Returns:
(np.ndarray, np.ndarray): A tuple of (X,y)
"""
sw = SlidingWindow(
df,
window_size,
target_features,
feature_names,
horizon_size,
jump,
stride,
)
X, y = [], []
for _x, _y in sw:
X.append(_x)
y.append(_y)
return np.array(X), np.array(y)
if __name__ == "__main__":
from dataset import load_dataset
df = load_dataset("1D")
X, y = tabularize_dataframe(df, 10, ["active power"], ["active power"], 2)
print(X.shape, y.shape)
print(X[-1], y[-1]) | 0.778776 | 0.400955 |
# XKCD password generator
import argparse
import collections
import os.path
import random
# Parse the command line options.
parser = argparse.ArgumentParser(description="XKCD password generator https://xkcd.com/936/")
parser.add_argument("-d", "--dictionary", default="en", help="Dictionary to use")
parser.add_argument("--min-words", type=int, default=4, help="Minimum number of words to use")
parser.add_argument("--max-words", type=int, default=4, help="Maximum number of words to use")
parser.add_argument("--min-length", type=int, default=4, help="Minimum length of the words to use")
parser.add_argument("--max-length", type=int, default=8, help="Maximum length of the words to use")
args = parser.parse_args()
if not os.path.exists(args.dictionary):
if not "." in args.dictionary:
args.dictionary = args.dictionary + ".txt"
if not os.path.exists(args.dictionary):
if not os.path.sep in args.dictionary:
args.dictionary = os.path.abspath(os.path.join(os.path.dirname(__file__), args.dictionary))
if not os.path.exists(args.dictionary):
parser.error("Could not find dictionary: %s" % args.dictionary)
print args
# Load the dictionary, skipping words of the wrong length.
min_length = args.min_length
max_length = args.max_length
dictionary = collections.defaultdict(list)
with open(args.dictionary, "rU") as fd:
for line in fd:
for word in line.strip().split(" "):
word = word.strip().lower()
if "'" in word:
continue
length = len(word)
if min_length <= length <= max_length:
dictionary[length].append(word)
# Pick the random words for the password.
words = []
lengths = dictionary.keys()
lengths.sort()
count = random.randint(args.min_words, args.max_words)
while count > 0:
length = random.choice(lengths)
word = random.choice(dictionary[length])
words.append(word)
count = count - 1
# Print out the chosen password.
print " ".join(words) | network/xkcd/xkcd.py |
# XKCD password generator
import argparse
import collections
import os.path
import random
# Parse the command line options.
parser = argparse.ArgumentParser(description="XKCD password generator https://xkcd.com/936/")
parser.add_argument("-d", "--dictionary", default="en", help="Dictionary to use")
parser.add_argument("--min-words", type=int, default=4, help="Minimum number of words to use")
parser.add_argument("--max-words", type=int, default=4, help="Maximum number of words to use")
parser.add_argument("--min-length", type=int, default=4, help="Minimum length of the words to use")
parser.add_argument("--max-length", type=int, default=8, help="Maximum length of the words to use")
args = parser.parse_args()
if not os.path.exists(args.dictionary):
if not "." in args.dictionary:
args.dictionary = args.dictionary + ".txt"
if not os.path.exists(args.dictionary):
if not os.path.sep in args.dictionary:
args.dictionary = os.path.abspath(os.path.join(os.path.dirname(__file__), args.dictionary))
if not os.path.exists(args.dictionary):
parser.error("Could not find dictionary: %s" % args.dictionary)
print args
# Load the dictionary, skipping words of the wrong length.
min_length = args.min_length
max_length = args.max_length
dictionary = collections.defaultdict(list)
with open(args.dictionary, "rU") as fd:
for line in fd:
for word in line.strip().split(" "):
word = word.strip().lower()
if "'" in word:
continue
length = len(word)
if min_length <= length <= max_length:
dictionary[length].append(word)
# Pick the random words for the password.
words = []
lengths = dictionary.keys()
lengths.sort()
count = random.randint(args.min_words, args.max_words)
while count > 0:
length = random.choice(lengths)
word = random.choice(dictionary[length])
words.append(word)
count = count - 1
# Print out the chosen password.
print " ".join(words) | 0.502686 | 0.064359 |
import numpy as np
class Distance_metrics:
"""
Calculate distance between each corresponding points
of two arrays using different distance metrics
"""
def Eucledian_Distance(X1,X2):
""""
Returns the list of eucledian distance
between two corresponding points of
two arrays
PARAMETERS
==========
X1:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
X2:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
RETURNS
=========
distance:list
Returns the list of eucledian distance
between two corresponding points of
two arrays
"""
distance=[]
for i in range(len(X1)):
single=0
single=np.sum((X1[i]-X2[i])**2)
distance.append(np.sqrt(single))
return(distance)
def Manhattan_Distance(X1,X2):
""""
Returns the list of manhattan distance
between two corresponding points of
two arrays
PARAMETERS
==========
X1:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
X2:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
RETURNS
=========
distance:list
Returns the list of manhattan distance
between two corresponding points of
two arrays
"""
distance=[]
for i in range(len(X1)):
single=0
single=np.sum(abs(X1[i]-X2[i]))
distance.append(single)
return(distance)
def Chebyshev_Distance(X1,X2):
""""
Returns the list of chebyshev distance
between two corresponding points of
two arrays
PARAMETERS
==========
X1:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
X2:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
RETURNS
=========
distance:list
Returns the list of chebyshev distance
between two corresponding points of
two arrays
"""
distance=[]
for i in range(len(X1)):
single=0
single=np.sum(max(X1[i]-X2[i]))
distance.append(single)
return(distance)
def Minkowski_Distance(X1,X2,p):
""""
Returns list of minkowski distance of order 'p'
between two corresponding vectors of
two arrays
PARAMETERS
==========
X1:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
X2:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
p:float
input order value between 1 and 2 inclusive
RETURNS
=========
distance:list
Returns the list of minkowski distance
between two corresponding vectors of
two arrays
"""
distance=[]
for i in range(len(X1)):
single=0
single=np.sum((abs(X1[i]-X2[i]))**p)
distance.append((single)**(1/p))
return(distance)
def WMinkowski_Distance(X1,X2,p,W):
""""
Returns list of weighted minkowski distance of order 'p'
between two corresponding vectors weighted by W of
two arrays
PARAMETERS
==========
X1:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
X2:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
p:float
input order value between 1 and 2 inclusive
W:array(dtype=int,axis=1)
input 1 dimensional array
RETURNS
=========
distance:list
Returns the list of weighted minkowski distance
between two corresponding vectors of
two arrays
"""
distance=[]
for i in range(len(X1)):
single=0
single=np.sum((abs(W*(X1[i]-X2[i])))**p)
distance.append((single)**(1/p))
return(distance) | MLlib/distance_metrics.py | import numpy as np
class Distance_metrics:
"""
Calculate distance between each corresponding points
of two arrays using different distance metrics
"""
def Eucledian_Distance(X1,X2):
""""
Returns the list of eucledian distance
between two corresponding points of
two arrays
PARAMETERS
==========
X1:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
X2:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
RETURNS
=========
distance:list
Returns the list of eucledian distance
between two corresponding points of
two arrays
"""
distance=[]
for i in range(len(X1)):
single=0
single=np.sum((X1[i]-X2[i])**2)
distance.append(np.sqrt(single))
return(distance)
def Manhattan_Distance(X1,X2):
""""
Returns the list of manhattan distance
between two corresponding points of
two arrays
PARAMETERS
==========
X1:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
X2:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
RETURNS
=========
distance:list
Returns the list of manhattan distance
between two corresponding points of
two arrays
"""
distance=[]
for i in range(len(X1)):
single=0
single=np.sum(abs(X1[i]-X2[i]))
distance.append(single)
return(distance)
def Chebyshev_Distance(X1,X2):
""""
Returns the list of chebyshev distance
between two corresponding points of
two arrays
PARAMETERS
==========
X1:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
X2:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
RETURNS
=========
distance:list
Returns the list of chebyshev distance
between two corresponding points of
two arrays
"""
distance=[]
for i in range(len(X1)):
single=0
single=np.sum(max(X1[i]-X2[i]))
distance.append(single)
return(distance)
def Minkowski_Distance(X1,X2,p):
""""
Returns list of minkowski distance of order 'p'
between two corresponding vectors of
two arrays
PARAMETERS
==========
X1:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
X2:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
p:float
input order value between 1 and 2 inclusive
RETURNS
=========
distance:list
Returns the list of minkowski distance
between two corresponding vectors of
two arrays
"""
distance=[]
for i in range(len(X1)):
single=0
single=np.sum((abs(X1[i]-X2[i]))**p)
distance.append((single)**(1/p))
return(distance)
def WMinkowski_Distance(X1,X2,p,W):
""""
Returns list of weighted minkowski distance of order 'p'
between two corresponding vectors weighted by W of
two arrays
PARAMETERS
==========
X1:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
X2:ndarray(dtype=int,axis=1)
input array with more than 1 dimension
p:float
input order value between 1 and 2 inclusive
W:array(dtype=int,axis=1)
input 1 dimensional array
RETURNS
=========
distance:list
Returns the list of weighted minkowski distance
between two corresponding vectors of
two arrays
"""
distance=[]
for i in range(len(X1)):
single=0
single=np.sum((abs(W*(X1[i]-X2[i])))**p)
distance.append((single)**(1/p))
return(distance) | 0.805173 | 0.837487 |
b_w = 'QPushButton{border-top-left-radius: 10px;border-top-right-radius: 10px;border-bottom-right-radius: ' \
'10px;border-bottom-left-radius: 10px;background-color: rgb(234, 234, 234);}' \
'QPushButton:pressed {background-color: rgb(188, 188, 188);}' \
'QPushButton {text-align: left;}'
b_g = 'QPushButton{border-top-left-radius: 10px;border-top-right-radius: 10px;border-bottom-right-radius: ' \
'10px;border-bottom-left-radius: 10px;background-color: rgb(59, 190, 190);}' \
'QPushButton:pressed {background-color: rgb(188, 188, 188);}' \
'QPushButton {text-align: left;}'
ti_b ='border-top-left-radius: 10px; border-top-right-radius: 10px; border-bottom-right-radius: 10px;' \
'border-bottom-left-radius: 10px; background-color: rgb(59, 190, 190); border: 1px solid black;'
guid_dis_back_color = 'background-color: rgb(59, 190, 190);'
test_col = 'background-color: rgb(59, 100, 100);'
button_False = """
Cont_label{
background-color: rgb(234,234,234); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial";
}
Cont_label:hover{
background-color: rgb(150,150,150); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial";
}
Cont_label:pressed{
background-color: rgb(59,190,190); color: rgb(0,0,0); border-radius: 10px border: 1px solid black; font: 14pt "Arial";
}"""
button_True = """
Cont_label{
background-color: rgb(59,190,190); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial";
}
Cont_label:hover{
background-color: rgb(150,150,150); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial";
}
Cont_label:pressed{
background-color: rgb(234,234,234); color: rgb(0,0,0); border-radius: 10px border: 1px solid black; font: 14pt "Arial";
}"""
button_switch_False = """
Cont_open_close{
background-color: rgb(234,234,234); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial"; text-align: center;
}
Cont_open_close:hover{
background-color: rgb(150,150,150); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial"; text-align: center;
}
Cont_open_close:pressed{
background-color: rgb(59,190,190); color: rgb(0,0,0); border-radius: 10px border: 1px solid black; font: 14pt "Arial"; text-align: center;
}"""
button_switch_True = """
Cont_open_close{
background-color: rgb(59,190,190); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial"; text-align: center;
}
Cont_open_close:hover{
background-color: rgb(150,150,150); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial"; text-align: center;
}
Cont_open_close:pressed{
background-color: rgb(234,234,234); color: rgb(0,0,0); border-radius: 10px border: 1px solid black; font: 14pt "Arial"; text-align: center;
}"""
button_ok_False = """
Cont_ok{
background-color: rgb(234,234,234); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial";
}
Cont_ok:hover{
background-color: rgb(150,150,150); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial";
}
Cont_ok:pressed{
background-color: rgb(59,190,190); color: rgb(0,0,0); border-radius: 10px border: 1px solid black; font: 14pt "Arial";
}"""
button_ok_True = """
Cont_ok{
background-color: rgb(59,190,190); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial";
}
Cont_ok:hover{
background-color: rgb(150,150,150); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial";
}
Cont_ok:pressed{
background-color: rgb(234,234,234); color: rgb(0,0,0); border-radius: 10px border: 1px solid black; font: 14pt "Arial";
}""" | SAMG/COL.py | b_w = 'QPushButton{border-top-left-radius: 10px;border-top-right-radius: 10px;border-bottom-right-radius: ' \
'10px;border-bottom-left-radius: 10px;background-color: rgb(234, 234, 234);}' \
'QPushButton:pressed {background-color: rgb(188, 188, 188);}' \
'QPushButton {text-align: left;}'
b_g = 'QPushButton{border-top-left-radius: 10px;border-top-right-radius: 10px;border-bottom-right-radius: ' \
'10px;border-bottom-left-radius: 10px;background-color: rgb(59, 190, 190);}' \
'QPushButton:pressed {background-color: rgb(188, 188, 188);}' \
'QPushButton {text-align: left;}'
ti_b ='border-top-left-radius: 10px; border-top-right-radius: 10px; border-bottom-right-radius: 10px;' \
'border-bottom-left-radius: 10px; background-color: rgb(59, 190, 190); border: 1px solid black;'
guid_dis_back_color = 'background-color: rgb(59, 190, 190);'
test_col = 'background-color: rgb(59, 100, 100);'
button_False = """
Cont_label{
background-color: rgb(234,234,234); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial";
}
Cont_label:hover{
background-color: rgb(150,150,150); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial";
}
Cont_label:pressed{
background-color: rgb(59,190,190); color: rgb(0,0,0); border-radius: 10px border: 1px solid black; font: 14pt "Arial";
}"""
button_True = """
Cont_label{
background-color: rgb(59,190,190); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial";
}
Cont_label:hover{
background-color: rgb(150,150,150); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial";
}
Cont_label:pressed{
background-color: rgb(234,234,234); color: rgb(0,0,0); border-radius: 10px border: 1px solid black; font: 14pt "Arial";
}"""
button_switch_False = """
Cont_open_close{
background-color: rgb(234,234,234); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial"; text-align: center;
}
Cont_open_close:hover{
background-color: rgb(150,150,150); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial"; text-align: center;
}
Cont_open_close:pressed{
background-color: rgb(59,190,190); color: rgb(0,0,0); border-radius: 10px border: 1px solid black; font: 14pt "Arial"; text-align: center;
}"""
button_switch_True = """
Cont_open_close{
background-color: rgb(59,190,190); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial"; text-align: center;
}
Cont_open_close:hover{
background-color: rgb(150,150,150); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial"; text-align: center;
}
Cont_open_close:pressed{
background-color: rgb(234,234,234); color: rgb(0,0,0); border-radius: 10px border: 1px solid black; font: 14pt "Arial"; text-align: center;
}"""
button_ok_False = """
Cont_ok{
background-color: rgb(234,234,234); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial";
}
Cont_ok:hover{
background-color: rgb(150,150,150); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial";
}
Cont_ok:pressed{
background-color: rgb(59,190,190); color: rgb(0,0,0); border-radius: 10px border: 1px solid black; font: 14pt "Arial";
}"""
button_ok_True = """
Cont_ok{
background-color: rgb(59,190,190); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial";
}
Cont_ok:hover{
background-color: rgb(150,150,150); color: rgb(0,0,0); border-radius: 10px; border: 1px solid black; font: 14pt "Arial";
}
Cont_ok:pressed{
background-color: rgb(234,234,234); color: rgb(0,0,0); border-radius: 10px border: 1px solid black; font: 14pt "Arial";
}""" | 0.742235 | 0.268384 |
import os
from PySide2 import QtWidgets, QtCore, QtGui
from propsettings_qt.ui_settings_area import SettingsAreaWidget
from pyrulo import class_imports
class ConfigurableSelector(QtWidgets.QWidget):
"""
Widget para cargar clases que hereden de una clase base especificada
e inicializar un combobox con instancias de dichas clases. Consta de dos elementos agrupados en un vertical layout.
El primero es el combobox. El segundo es un area para configurar las uiproperties del objeto seleccionado.
"""
eventObjectSelected = QtCore.Signal(object)
def __init__(self, dir_key=None, base_class: type = None, parent=None):
assert dir_key is not None or base_class is not None, f"dir_key or base_class must be specified"
super(ConfigurableSelector, self).__init__(parent)
self._dir_key = dir_key
self._base_class = base_class
self._dir_key_based = dir_key is not None
self._classes = []
self._added_classes = []
self._objects = {}
self._custom_object = None
self._current_index = 0
layout = QtWidgets.QVBoxLayout(self)
layout.setContentsMargins(0, 0, 0, 0)
self.setLayout(layout)
self._toggle_button = QtWidgets.QToolButton()
self._toggle_button.setStyleSheet("QToolButton { border: none; }")
self._toggle_button.setToolButtonStyle(QtCore.Qt.ToolButtonIconOnly)
self._toggle_button.setArrowType(QtCore.Qt.RightArrow)
self._toggle_button.setCheckable(True)
self._toggle_button.setChecked(False)
self._toggle_button.clicked.connect(self._collapse_or_expand)
self._combobox = QtWidgets.QComboBox(self)
self._combobox.currentIndexChanged.connect(self._selection_changed)
self._combobox.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
combobox_containter = QtWidgets.QWidget()
combobox_containter_layout = QtWidgets.QHBoxLayout()
combobox_containter_layout.setContentsMargins(0, 0, 0, 0)
combobox_containter.setLayout(combobox_containter_layout)
combobox_containter_layout.addWidget(self._toggle_button)
combobox_containter_layout.addWidget(self._combobox)
layout.addWidget(combobox_containter)
self._custom_script_widget = QtWidgets.QWidget()
custom_script_widget_layout = QtWidgets.QVBoxLayout()
custom_script_widget_layout.setContentsMargins(0, 0, 0, 0)
self._custom_script_widget.setLayout(custom_script_widget_layout)
self._custom_script_widget.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
self._script_dir_widget = QtWidgets.QWidget()
script_dir_widget_layout = QtWidgets.QHBoxLayout()
script_dir_widget_layout.setContentsMargins(0, 0, 0, 0)
self._script_dir_widget.setLayout(script_dir_widget_layout)
self._script_dir_label = QtWidgets.QLabel()
self._script_dir_label.setText(self.tr("Script not selected"))
self._script_dir_label.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed)
self._script_dir_button = QtWidgets.QPushButton()
self._script_dir_button.setIcon(self.style().standardIcon(QtWidgets.QStyle.SP_DialogOpenButton))
self._script_dir_button.clicked.connect(self._load_object_from_custom_script)
self._script_dir_widget.layout().addWidget(self._script_dir_label)
self._script_dir_widget.layout().addWidget(self._script_dir_button)
self._script_class_name_label = QtWidgets.QLabel()
self._script_class_name_label.setText(self.tr("None"))
custom_script_widget_layout.addWidget(self._script_dir_widget)
custom_script_widget_layout.addWidget(self._script_class_name_label)
self._custom_script_widget.hide()
layout.addWidget(self._custom_script_widget)
self._collapsible_widget = QtWidgets.QWidget()
collapsible_widget_layout = QtWidgets.QVBoxLayout()
collapsible_widget_layout.setContentsMargins(0, 0, 0, 0)
self._collapsible_widget.setLayout(collapsible_widget_layout)
self._collapsible_widget.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding)
layout.addWidget(self._collapsible_widget)
self._conf_properties = SettingsAreaWidget()
self._conf_properties.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding)
self._collapsible_widget.layout().addWidget(self._conf_properties)
self._collapsible_widget.hide()
self._populate_objects()
def current_object(self):
classes_count = len(self._classes)
if classes_count > 0:
if self._current_index == classes_count:
return self._custom_object
else:
clazz = self._classes[self._current_index]
if clazz not in self._objects:
self._objects[clazz] = clazz()
return self._objects[clazz]
else:
return None
def populate_class(self, dir_key):
"""
Inicializar el combobox con una nueva clase.
:param class_dir:
:param clazz:
:return:
"""
self._dir_key = dir_key
self._populate_objects()
def add_class(self, clazz: type):
if clazz not in self._classes:
self._added_classes.append(clazz)
self._populate_objects()
def set_object_for_class(self, clazz: type, obj):
"""
Set the object value for a given class.
:param clazz:
:return:
"""
if clazz in self._classes and isinstance(obj, clazz):
self._objects[clazz] = obj
class_index = self._classes.index(clazz)
if self._combobox.currentIndex() == class_index:
self._populate_current_object_properties()
else:
raise TypeError(f"Class {clazz} must be present in this selector and object {obj} must be of type {clazz}.")
def select_class(self, clazz: type):
if clazz in self._classes:
index = self._classes.index(clazz)
self._combobox.setCurrentIndex(index)
def set_current_index(self, index: int):
self._combobox.setCurrentIndex(index)
def _populate_objects(self):
"""
Inicializar el combobox.
:return:
"""
self._clear_objects()
classes = self._get_classes()
classes = sorted(classes, key=lambda cls: str(cls))
classes.extend(self._added_classes)
for cls in classes:
self._classes.append(cls)
self._combobox.addItem(cls.__name__)
self._combobox.addItem(self.tr("Custom script..."))
self.eventObjectSelected.emit(self.current_object())
def _clear_objects(self):
self._classes.clear()
self._objects.clear()
self._custom_object = None
self._combobox.clear()
self._conf_properties.clear()
def _selection_changed(self, index):
if index == len(self._classes):
self._custom_script_widget.show()
else:
self._custom_script_widget.hide()
self._current_index = index
self._populate_current_object_properties()
def _populate_current_object_properties(self):
current_object = self.current_object()
self._conf_properties.populate_object(current_object)
if self._conf_properties.children_count > 0:
self._enable_collapsible_feature()
else:
self._disable_collapsible_feature()
self.eventObjectSelected.emit(current_object)
def _load_object_from_custom_script(self):
file_path, file_filter = QtWidgets.QFileDialog.getOpenFileName(
self,
self.tr("Select custom script"),
os.getcwd(),
"Python script (*.py)"
)
if file_path != "":
classes = self._get_specific_file_classes(file_path)
if len(classes) > 0:
first_class = classes[0]
self._custom_object = first_class()
self._update_custom_script_texts(file_path, first_class.__name__)
else:
QtWidgets.QMessageBox.critical(
self,
self.tr("Error"),
self.tr("Invalid script"),
QtWidgets.QMessageBox.StandardButton.Ok)
self._populate_current_object_properties()
def _update_custom_script_texts(self, file_path, class_name):
metrics = QtGui.QFontMetrics(self._script_dir_label.font())
elided_text = metrics.elidedText(
file_path,
QtCore.Qt.TextElideMode.ElideMiddle,
self._script_dir_label.width())
self._script_dir_label.setText(elided_text)
self._script_class_name_label.setText(class_name)
def _disable_collapsible_feature(self):
self._toggle_button.hide()
self._collapsible_widget.hide()
def _enable_collapsible_feature(self):
self._toggle_button.show()
@QtCore.Slot()
def _collapse_or_expand(self, expand):
arrow_type = QtCore.Qt.DownArrow if expand else QtCore.Qt.RightArrow
self._toggle_button.setArrowType(arrow_type)
if expand:
self._collapsible_widget.show()
else:
self._collapsible_widget.hide()
def _get_classes(self):
if self._dir_key_based:
classes = class_imports.import_classes_by_key(self._dir_key)
else:
classes = self._base_class.__subclasses__()
return classes
def _get_specific_file_classes(self, file_path):
if self._dir_key_based:
classes = class_imports.import_classes_in_file_by_key(file_path, self._dir_key)
else:
classes = class_imports.import_classes_in_file(file_path, self._base_class)
return classes | pyrulo_qt/ui_configurable_selector.py | import os
from PySide2 import QtWidgets, QtCore, QtGui
from propsettings_qt.ui_settings_area import SettingsAreaWidget
from pyrulo import class_imports
class ConfigurableSelector(QtWidgets.QWidget):
    """
    Widget that discovers classes deriving from a given base class (or registered
    under a pyrulo directory key) and offers instances of them through a combobox.
    Two elements are stacked in a vertical layout: the combobox row and an area
    to configure the uiproperties of the currently selected object.
    """

    eventObjectSelected = QtCore.Signal(object)

    def __init__(self, dir_key=None, base_class: type = None, parent=None):
        assert dir_key is not None or base_class is not None, "dir_key or base_class must be specified"
        super(ConfigurableSelector, self).__init__(parent)
        self._dir_key = dir_key
        self._base_class = base_class
        self._dir_key_based = dir_key is not None
        self._classes = []          # classes currently offered by the combobox
        self._added_classes = []    # classes registered manually via add_class()
        self._objects = {}          # lazily created instances, keyed by class
        self._custom_object = None  # instance loaded from a user-picked script
        self._current_index = 0

        root_layout = QtWidgets.QVBoxLayout(self)
        root_layout.setContentsMargins(0, 0, 0, 0)
        self.setLayout(root_layout)

        # Arrow button that collapses/expands the settings area.
        self._toggle_button = QtWidgets.QToolButton()
        self._toggle_button.setStyleSheet("QToolButton { border: none; }")
        self._toggle_button.setToolButtonStyle(QtCore.Qt.ToolButtonIconOnly)
        self._toggle_button.setArrowType(QtCore.Qt.RightArrow)
        self._toggle_button.setCheckable(True)
        self._toggle_button.setChecked(False)
        self._toggle_button.clicked.connect(self._collapse_or_expand)

        self._combobox = QtWidgets.QComboBox(self)
        self._combobox.currentIndexChanged.connect(self._selection_changed)
        self._combobox.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)

        combo_row = QtWidgets.QWidget()
        combo_row_layout = QtWidgets.QHBoxLayout()
        combo_row_layout.setContentsMargins(0, 0, 0, 0)
        combo_row.setLayout(combo_row_layout)
        combo_row_layout.addWidget(self._toggle_button)
        combo_row_layout.addWidget(self._combobox)
        root_layout.addWidget(combo_row)

        # Widgets for the "Custom script..." entry: a path label with a browse
        # button, plus a label showing the loaded class name.
        self._custom_script_widget = QtWidgets.QWidget()
        custom_script_layout = QtWidgets.QVBoxLayout()
        custom_script_layout.setContentsMargins(0, 0, 0, 0)
        self._custom_script_widget.setLayout(custom_script_layout)
        self._custom_script_widget.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        self._script_dir_widget = QtWidgets.QWidget()
        script_dir_layout = QtWidgets.QHBoxLayout()
        script_dir_layout.setContentsMargins(0, 0, 0, 0)
        self._script_dir_widget.setLayout(script_dir_layout)
        self._script_dir_label = QtWidgets.QLabel()
        self._script_dir_label.setText(self.tr("Script not selected"))
        self._script_dir_label.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed)
        self._script_dir_button = QtWidgets.QPushButton()
        self._script_dir_button.setIcon(self.style().standardIcon(QtWidgets.QStyle.SP_DialogOpenButton))
        self._script_dir_button.clicked.connect(self._load_object_from_custom_script)
        self._script_dir_widget.layout().addWidget(self._script_dir_label)
        self._script_dir_widget.layout().addWidget(self._script_dir_button)
        self._script_class_name_label = QtWidgets.QLabel()
        self._script_class_name_label.setText(self.tr("None"))
        custom_script_layout.addWidget(self._script_dir_widget)
        custom_script_layout.addWidget(self._script_class_name_label)
        self._custom_script_widget.hide()
        root_layout.addWidget(self._custom_script_widget)

        # Collapsible area hosting the settings editor.
        self._collapsible_widget = QtWidgets.QWidget()
        collapsible_layout = QtWidgets.QVBoxLayout()
        collapsible_layout.setContentsMargins(0, 0, 0, 0)
        self._collapsible_widget.setLayout(collapsible_layout)
        self._collapsible_widget.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding)
        root_layout.addWidget(self._collapsible_widget)
        self._conf_properties = SettingsAreaWidget()
        self._conf_properties.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding)
        self._collapsible_widget.layout().addWidget(self._conf_properties)
        self._collapsible_widget.hide()

        self._populate_objects()

    def current_object(self):
        """Return the instance for the current selection, creating it lazily.

        Returns the custom-script object when the trailing "Custom script..."
        entry is selected, or None when no classes are available.
        """
        count = len(self._classes)
        if count == 0:
            return None
        if self._current_index == count:
            return self._custom_object
        clazz = self._classes[self._current_index]
        if clazz not in self._objects:
            self._objects[clazz] = clazz()
        return self._objects[clazz]

    def populate_class(self, dir_key):
        """
        Re-initialize the combobox contents using a new pyrulo directory key.
        :param dir_key: key identifying the directory of importable classes
        :return:
        """
        self._dir_key = dir_key
        self._populate_objects()

    def add_class(self, clazz: type):
        """Manually register an extra class and rebuild the combobox."""
        if clazz not in self._classes:
            self._added_classes.append(clazz)
            self._populate_objects()

    def set_object_for_class(self, clazz: type, obj):
        """
        Set the object value for a given class already present in the selector.
        :param clazz: class whose instance to replace
        :param obj: replacement instance; must be of type *clazz*
        :return:
        """
        if clazz not in self._classes or not isinstance(obj, clazz):
            raise TypeError(f"Class {clazz} must be present in this selector and object {obj} must be of type {clazz}.")
        self._objects[clazz] = obj
        if self._combobox.currentIndex() == self._classes.index(clazz):
            self._populate_current_object_properties()

    def select_class(self, clazz: type):
        """Move the combobox selection to *clazz* if it is present."""
        if clazz in self._classes:
            self._combobox.setCurrentIndex(self._classes.index(clazz))

    def set_current_index(self, index: int):
        """Select the combobox entry at *index*."""
        self._combobox.setCurrentIndex(index)

    def _populate_objects(self):
        """Fill the combobox with the discovered classes plus a custom-script entry."""
        self._clear_objects()
        discovered = sorted(self._get_classes(), key=str)
        discovered.extend(self._added_classes)
        for cls in discovered:
            self._classes.append(cls)
            self._combobox.addItem(cls.__name__)
        self._combobox.addItem(self.tr("Custom script..."))
        self.eventObjectSelected.emit(self.current_object())

    def _clear_objects(self):
        """Drop every cached class/instance and reset the widgets that display them."""
        self._custom_object = None
        self._classes.clear()
        self._objects.clear()
        self._combobox.clear()
        self._conf_properties.clear()

    def _selection_changed(self, index):
        """Show the custom-script UI when the trailing entry is picked, then refresh."""
        if index == len(self._classes):
            self._custom_script_widget.show()
        else:
            self._custom_script_widget.hide()
        self._current_index = index
        self._populate_current_object_properties()

    def _populate_current_object_properties(self):
        """Rebuild the settings area for the selected object and notify listeners."""
        obj = self.current_object()
        self._conf_properties.populate_object(obj)
        # Only offer the collapse toggle when there is something to collapse.
        if self._conf_properties.children_count > 0:
            self._enable_collapsible_feature()
        else:
            self._disable_collapsible_feature()
        self.eventObjectSelected.emit(obj)

    def _load_object_from_custom_script(self):
        """Ask the user for a .py file and instantiate the first matching class in it."""
        file_path, _selected_filter = QtWidgets.QFileDialog.getOpenFileName(
            self,
            self.tr("Select custom script"),
            os.getcwd(),
            "Python script (*.py)")
        if file_path != "":
            classes = self._get_specific_file_classes(file_path)
            if classes:
                first_class = classes[0]
                self._custom_object = first_class()
                self._update_custom_script_texts(file_path, first_class.__name__)
            else:
                # Nothing suitable was found in the selected file.
                QtWidgets.QMessageBox.critical(
                    self,
                    self.tr("Error"),
                    self.tr("Invalid script"),
                    QtWidgets.QMessageBox.StandardButton.Ok)
            self._populate_current_object_properties()

    def _update_custom_script_texts(self, file_path, class_name):
        """Show the chosen script path (middle-elided to fit) and the class name."""
        path_label = self._script_dir_label
        metrics = QtGui.QFontMetrics(path_label.font())
        elided = metrics.elidedText(
            file_path,
            QtCore.Qt.TextElideMode.ElideMiddle,
            path_label.width())
        path_label.setText(elided)
        self._script_class_name_label.setText(class_name)

    def _disable_collapsible_feature(self):
        """Hide both the collapse toggle and the collapsible settings area."""
        self._toggle_button.hide()
        self._collapsible_widget.hide()

    def _enable_collapsible_feature(self):
        """Show the collapse toggle; the settings area itself appears only on expand."""
        self._toggle_button.show()

    @QtCore.Slot()
    def _collapse_or_expand(self, expand):
        """Toggle the settings area, pointing the arrow down while expanded."""
        if expand:
            self._toggle_button.setArrowType(QtCore.Qt.DownArrow)
            self._collapsible_widget.show()
        else:
            self._toggle_button.setArrowType(QtCore.Qt.RightArrow)
            self._collapsible_widget.hide()

    def _get_classes(self):
        """Return candidate classes from the pyrulo key registry or by subclass lookup."""
        if self._dir_key_based:
            return class_imports.import_classes_by_key(self._dir_key)
        return self._base_class.__subclasses__()

    def _get_specific_file_classes(self, file_path):
        """Return the classes found in *file_path* that match this selector's criteria."""
        if self._dir_key_based:
            return class_imports.import_classes_in_file_by_key(file_path, self._dir_key)
        return class_imports.import_classes_in_file(file_path, self._base_class)
import datetime
from typing import List, Dict
from lxml import etree as ET
import os
import numpy as np
from collections import OrderedDict
from miso.training.parameters import MisoParameters
class ModelInfo:
    """
    Metadata for a trained model (inputs/outputs, data source, per-label metrics,
    preprocessing and training statistics), serializable to XML (format "2.1").
    """

    def __init__(self,
                 name: str,
                 description: str,
                 type: str,
                 date: datetime.datetime,
                 protobuf: str,
                 params: MisoParameters,
                 inputs: OrderedDict,
                 outputs: OrderedDict,
                 data_source_name: str,
                 labels: List[str],
                 counts: List[int],
                 prepro_name: str,
                 prepro_params: List,
                 accuracy: float,
                 precision: float,
                 recall: float,
                 f1score: float,
                 support: float,
                 training_epochs: int,
                 training_time: float,
                 training_split: float,
                 inference_time_per_image: float):
        # NOTE(review): precision/recall/f1score/support are indexed per label in
        # to_xml(), so despite the float annotations they appear to be per-class
        # sequences — confirm against callers.
        self.name = name
        self.description = description
        self.type = type
        self.date = date
        self.params = params
        self.inputs = inputs
        self.outputs = outputs
        self.data_source_name = data_source_name
        self.labels = labels
        self.counts = counts
        self.prepro_name = prepro_name
        self.prepro_params = prepro_params
        self.protobuf = protobuf
        self.accuracy = accuracy
        self.precision = precision
        self.recall = recall
        self.f1score = f1score
        self.support = support
        self.training_epochs = training_epochs
        self.training_time = training_time
        self.training_split = training_split
        self.inference_time_per_image = inference_time_per_image
        self.version = "2.1"

    def save(self, filename):
        """Write the XML description to *filename*, creating parent directories."""
        os.makedirs(os.path.dirname(filename), exist_ok=True)
        # to_xml() returns bytes (lxml), hence binary mode; 'with' guarantees the
        # handle is closed even if the write fails (the original leaked it on error).
        with open(filename, 'wb') as f:
            f.write(self.to_xml())

    @staticmethod
    def _append_tensor_node(parent_node, tag, name, tensor):
        """Append a <tag> element describing a named tensor; absent dims become "0"."""
        node = ET.SubElement(parent_node, tag)
        ET.SubElement(node, "name").text = name
        ET.SubElement(node, "operation").text = tensor.op.name
        ET.SubElement(node, "height").text = str(tensor.shape[1])
        ET.SubElement(node, "width").text = str(tensor.shape[2]) if len(tensor.shape) > 2 else "0"
        ET.SubElement(node, "channels").text = str(tensor.shape[3]) if len(tensor.shape) > 3 else "0"

    def to_xml(self):
        """Serialize this model description to pretty-printed XML bytes."""
        root = ET.Element("network", version=self.version)
        ET.SubElement(root, "name").text = self.name
        ET.SubElement(root, "description").text = self.description
        ET.SubElement(root, "type").text = self.type
        ET.SubElement(root, "date").text = "{0:%Y-%m-%d_%H%M%S}".format(self.date)
        ET.SubElement(root, "protobuf").text = self.protobuf

        params_node = ET.SubElement(root, "params")
        for key, value in self.params.asdict().items():
            ET.SubElement(params_node, key).text = str(value)

        inputs_node = ET.SubElement(root, "inputs")
        for name, tensor in self.inputs.items():
            self._append_tensor_node(inputs_node, "input", name, tensor)
        outputs_node = ET.SubElement(root, "outputs")
        for name, tensor in self.outputs.items():
            self._append_tensor_node(outputs_node, "output", name, tensor)

        ET.SubElement(root, "source_data").text = str(self.data_source_name)
        ET.SubElement(root, "source_size").text = str(np.sum(self.counts))

        labels_node = ET.SubElement(root, "labels")
        for idx, label in enumerate(self.labels):
            node = ET.SubElement(labels_node, "label")
            ET.SubElement(node, "code").text = label
            ET.SubElement(node, "count").text = str(self.counts[idx])
            ET.SubElement(node, "precision").text = str(self.precision[idx])
            ET.SubElement(node, "recall").text = str(self.recall[idx])
            ET.SubElement(node, "f1score").text = str(self.f1score[idx])
            ET.SubElement(node, "support").text = str(self.support[idx])

        prepro_node = ET.SubElement(root, "prepro")
        ET.SubElement(prepro_node, "name").text = self.prepro_name
        prepro_params_node = ET.SubElement(prepro_node, "params")
        for value in self.prepro_params:
            ET.SubElement(prepro_params_node, "param").text = str(value)

        ET.SubElement(root, "accuracy").text = str(self.accuracy)
        ET.SubElement(root, "precision").text = str(np.mean(self.precision))
        ET.SubElement(root, "recall").text = str(np.mean(self.recall))
        ET.SubElement(root, "f1score").text = str(np.mean(self.f1score))

        load_node = ET.SubElement(root, "load")
        ET.SubElement(load_node, "training_epochs").text = str(self.training_epochs)
        ET.SubElement(load_node, "training_time").text = str(self.training_time)
        ET.SubElement(load_node, "training_split").text = str(self.training_split)
        ET.SubElement(load_node, "training_time_per_image").text = str(
            self.training_time / self.training_epochs / (np.sum(self.counts) * (1 - self.training_split)))
        # Bug fix: this element previously serialized np.mean(self.f1score)
        # (copy-paste from the metrics block above) instead of the stored value.
        ET.SubElement(load_node, "inference_time_per_image").text = str(self.inference_time_per_image)
        return ET.tostring(root, pretty_print=True)
from typing import List, Dict
from lxml import etree as ET
import os
import numpy as np
from collections import OrderedDict
from miso.training.parameters import MisoParameters
class ModelInfo:
    """
    Metadata for a trained model (inputs/outputs, data source, per-label metrics,
    preprocessing and training statistics), serializable to XML (format "2.1").
    """

    def __init__(self,
                 name: str,
                 description: str,
                 type: str,
                 date: datetime.datetime,
                 protobuf: str,
                 params: MisoParameters,
                 inputs: OrderedDict,
                 outputs: OrderedDict,
                 data_source_name: str,
                 labels: List[str],
                 counts: List[int],
                 prepro_name: str,
                 prepro_params: List,
                 accuracy: float,
                 precision: float,
                 recall: float,
                 f1score: float,
                 support: float,
                 training_epochs: int,
                 training_time: float,
                 training_split: float,
                 inference_time_per_image: float):
        # NOTE(review): precision/recall/f1score/support are indexed per label in
        # to_xml(), so despite the float annotations they appear to be per-class
        # sequences — confirm against callers.
        self.name = name
        self.description = description
        self.type = type
        self.date = date
        self.params = params
        self.inputs = inputs
        self.outputs = outputs
        self.data_source_name = data_source_name
        self.labels = labels
        self.counts = counts
        self.prepro_name = prepro_name
        self.prepro_params = prepro_params
        self.protobuf = protobuf
        self.accuracy = accuracy
        self.precision = precision
        self.recall = recall
        self.f1score = f1score
        self.support = support
        self.training_epochs = training_epochs
        self.training_time = training_time
        self.training_split = training_split
        self.inference_time_per_image = inference_time_per_image
        self.version = "2.1"

    def save(self, filename):
        """Write the XML description to *filename*, creating parent directories."""
        os.makedirs(os.path.dirname(filename), exist_ok=True)
        # to_xml() returns bytes (lxml), hence binary mode; 'with' guarantees the
        # handle is closed even if the write fails (the original leaked it on error).
        with open(filename, 'wb') as f:
            f.write(self.to_xml())

    @staticmethod
    def _append_tensor_node(parent_node, tag, name, tensor):
        """Append a <tag> element describing a named tensor; absent dims become "0"."""
        node = ET.SubElement(parent_node, tag)
        ET.SubElement(node, "name").text = name
        ET.SubElement(node, "operation").text = tensor.op.name
        ET.SubElement(node, "height").text = str(tensor.shape[1])
        ET.SubElement(node, "width").text = str(tensor.shape[2]) if len(tensor.shape) > 2 else "0"
        ET.SubElement(node, "channels").text = str(tensor.shape[3]) if len(tensor.shape) > 3 else "0"

    def to_xml(self):
        """Serialize this model description to pretty-printed XML bytes."""
        root = ET.Element("network", version=self.version)
        ET.SubElement(root, "name").text = self.name
        ET.SubElement(root, "description").text = self.description
        ET.SubElement(root, "type").text = self.type
        ET.SubElement(root, "date").text = "{0:%Y-%m-%d_%H%M%S}".format(self.date)
        ET.SubElement(root, "protobuf").text = self.protobuf

        params_node = ET.SubElement(root, "params")
        for key, value in self.params.asdict().items():
            ET.SubElement(params_node, key).text = str(value)

        inputs_node = ET.SubElement(root, "inputs")
        for name, tensor in self.inputs.items():
            self._append_tensor_node(inputs_node, "input", name, tensor)
        outputs_node = ET.SubElement(root, "outputs")
        for name, tensor in self.outputs.items():
            self._append_tensor_node(outputs_node, "output", name, tensor)

        ET.SubElement(root, "source_data").text = str(self.data_source_name)
        ET.SubElement(root, "source_size").text = str(np.sum(self.counts))

        labels_node = ET.SubElement(root, "labels")
        for idx, label in enumerate(self.labels):
            node = ET.SubElement(labels_node, "label")
            ET.SubElement(node, "code").text = label
            ET.SubElement(node, "count").text = str(self.counts[idx])
            ET.SubElement(node, "precision").text = str(self.precision[idx])
            ET.SubElement(node, "recall").text = str(self.recall[idx])
            ET.SubElement(node, "f1score").text = str(self.f1score[idx])
            ET.SubElement(node, "support").text = str(self.support[idx])

        prepro_node = ET.SubElement(root, "prepro")
        ET.SubElement(prepro_node, "name").text = self.prepro_name
        prepro_params_node = ET.SubElement(prepro_node, "params")
        for value in self.prepro_params:
            ET.SubElement(prepro_params_node, "param").text = str(value)

        ET.SubElement(root, "accuracy").text = str(self.accuracy)
        ET.SubElement(root, "precision").text = str(np.mean(self.precision))
        ET.SubElement(root, "recall").text = str(np.mean(self.recall))
        ET.SubElement(root, "f1score").text = str(np.mean(self.f1score))

        load_node = ET.SubElement(root, "load")
        ET.SubElement(load_node, "training_epochs").text = str(self.training_epochs)
        ET.SubElement(load_node, "training_time").text = str(self.training_time)
        ET.SubElement(load_node, "training_split").text = str(self.training_split)
        ET.SubElement(load_node, "training_time_per_image").text = str(
            self.training_time / self.training_epochs / (np.sum(self.counts) * (1 - self.training_split)))
        # Bug fix: this element previously serialized np.mean(self.f1score)
        # (copy-paste from the metrics block above) instead of the stored value.
        ET.SubElement(load_node, "inference_time_per_image").text = str(self.inference_time_per_image)
        return ET.tostring(root, pretty_print=True)
from uuid import uuid4
import pytest
from common.test.acceptance.fixtures.course import CourseFixture # lint-amnesty, pylint: disable=unused-import
from common.test.acceptance.fixtures.discussion import (
Comment,
Response,
SingleThreadViewFixture,
Thread,
)
from common.test.acceptance.pages.common.auto_auth import AutoAuthPage
from common.test.acceptance.pages.lms.discussion import (
DiscussionTabHomePage,
DiscussionTabSingleThreadPage,
)
from common.test.acceptance.tests.discussion.helpers import BaseDiscussionMixin, BaseDiscussionTestCase
from common.test.acceptance.tests.helpers import UniqueCourseTest
from openedx.core.lib.tests import attr
THREAD_CONTENT_WITH_LATEX = """Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt # lint-amnesty, pylint: disable=line-too-long
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
\n\n----------\n\nLorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur. (b).\n\n
**(a)** $H_1(e^{j\\omega}) = \\sum_{n=-\\infty}^{\\infty}h_1[n]e^{-j\\omega n} =
\\sum_{n=-\\infty} ^{\\infty}h[n]e^{-j\\omega n}+\\delta_2e^{-j\\omega n_0}$
$= H(e^{j\\omega})+\\delta_2e^{-j\\omega n_0}=A_e (e^{j\\omega}) e^{-j\\omega n_0}
+\\delta_2e^{-j\\omega n_0}=e^{-j\\omega n_0} (A_e(e^{j\\omega})+\\delta_2)
$H_3(e^{j\\omega})=A_e(e^{j\\omega})+\\delta_2$. Dummy $A_e(e^{j\\omega})$ dummy post $.
$A_e(e^{j\\omega}) \\ge -\\delta_2$, it follows that $H_3(e^{j\\omega})$ is real and
$H_3(e^{j\\omega})\\ge 0$.\n\n**(b)** Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.\n\n
**Case 1:** If $re^{j\\theta}$ is a Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
\n\n**Case 3:** Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
Lorem $H_3(e^{j\\omega}) = P(cos\\omega)(cos\\omega - cos\\theta)^k$,
Lorem Lorem Lorem Lorem Lorem Lorem $P(cos\\omega)$ has no
$(cos\\omega - cos\\theta)$ factor.
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
$P(cos\\theta) \\neq 0$. Since $P(cos\\omega)$ this is a dummy data post $\\omega$,
dummy $\\delta > 0$ such that for all $\\omega$ dummy $|\\omega - \\theta|
< \\delta$, $P(cos\\omega)$ Lorem ipsum dolor sit amet, consectetur adipiscing elit,
sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim
veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
consequat. Duis aute irure dolor in reprehenderit in voluptate velit sse cillum dolore
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
"""
@attr(shard=2)
class DiscussionHomePageTest(BaseDiscussionTestCase):
    """
    Tests for the discussion home page.
    """
    SEARCHED_USERNAME = "gizmo"

    def setUp(self):
        super().setUp()
        AutoAuthPage(self.browser, course_id=self.course_id).visit()
        self.page = DiscussionTabHomePage(self.browser, self.course_id)
        self.page.visit()

    @attr('a11y')
    def test_page_accessibility(self):
        # Exclude known, ticketed rule violations from the audit.
        ignored_rules = [
            'section',  # TODO: AC-491
            'aria-required-children',  # TODO: AC-534
            'aria-valid-attr',  # TODO: LEARNER-6611 & LEARNER-6865
            'region',  # TODO: AC-932
        ]
        self.page.a11y_audit.config.set_rules({"ignore": ignored_rules})
        self.page.a11y_audit.check_for_accessibility_errors()
class DiscussionTabMultipleThreadTest(BaseDiscussionTestCase, BaseDiscussionMixin):
    """
    Tests for the discussion page with multiple threads
    """

    def setUp(self):
        super().setUp()
        AutoAuthPage(self.browser, course_id=self.course_id).visit()
        self.thread_count = 2
        self.thread_ids = []
        self.setup_multiple_threads(thread_count=self.thread_count)

        def make_page(thread_id):
            # One single-thread page per fixture thread.
            return DiscussionTabSingleThreadPage(
                self.browser,
                self.course_id,
                self.discussion_id,
                thread_id
            )

        self.thread_page_1 = make_page(self.thread_ids[0])
        self.thread_page_2 = make_page(self.thread_ids[1])
        self.thread_page_1.visit()

    @attr('a11y')
    def test_page_accessibility(self):
        self.thread_page_1.a11y_audit.config.set_rules({
            "ignore": [
                'section',  # TODO: AC-491
                'aria-required-children',  # TODO: AC-534
                'aria-valid-attr',  # TODO: LEARNER-6611 & LEARNER-6865
                'region',  # TODO: AC-932
            ]
        })
        self.thread_page_1.a11y_audit.check_for_accessibility_errors()
        # The second page keeps the aria-valid-attr check enabled.
        self.thread_page_2.a11y_audit.config.set_rules({
            "ignore": [
                'section',  # TODO: AC-491
                'aria-required-children',  # TODO: AC-534
                'region',  # TODO: AC-932
            ]
        })
        self.thread_page_2.a11y_audit.check_for_accessibility_errors()
class DiscussionOpenClosedThreadTest(BaseDiscussionTestCase):
    """
    Tests for checking the display of attributes on open and closed threads
    """

    def setUp(self):
        super().setUp()
        self.thread_id = f"test_thread_{uuid4().hex}"

    def setup_user(self, roles=None):
        """Create and sign in a test user with the given (possibly empty) roles."""
        # Fix: the original used a mutable default argument (roles=[]).
        roles_str = ','.join(roles or [])
        self.user_id = AutoAuthPage(self.browser, course_id=self.course_id, roles=roles_str).visit().get_user_id()  # pylint: disable=attribute-defined-outside-init

    def setup_view(self, **thread_kwargs):
        """Push a single-thread fixture (with one response) for self.thread_id."""
        thread_kwargs.update({'commentable_id': self.discussion_id})
        view = SingleThreadViewFixture(
            Thread(id=self.thread_id, **thread_kwargs)
        )
        view.addResponse(Response(id="response1"))
        view.push()

    def setup_openclosed_thread_page(self, closed=False):
        """Build the thread page as a moderator and toggle its open/closed state."""
        self.setup_user(roles=['Moderator'])
        if closed:
            self.setup_view(closed=True)
        else:
            self.setup_view()
        page = self.create_single_thread_page(self.thread_id)
        page.visit()
        page.close_open_thread()
        return page

    @attr('a11y')
    def test_page_accessibility(self):
        # Audit both the open and the closed variant with the same exclusions.
        for closed in (False, True):
            page = self.setup_openclosed_thread_page(closed)
            page.a11y_audit.config.set_rules({
                'ignore': [
                    'section',  # TODO: AC-491
                    'aria-required-children',  # TODO: AC-534
                    'color-contrast',  # reproducibly fails on Jenkins but not locally
                    'aria-valid-attr',  # TODO: LEARNER-6611 & LEARNER-6865
                    'region',  # TODO: AC-932
                ]
            })
            page.a11y_audit.check_for_accessibility_errors()
class DiscussionResponseEditTest(BaseDiscussionTestCase):
    """
    Tests for editing responses displayed beneath thread in the single thread view.
    """

    def setup_user(self, roles=None):
        """Create and sign in a test user with the given (possibly empty) roles."""
        # Fix: the original used a mutable default argument (roles=[]).
        roles_str = ','.join(roles or [])
        self.user_id = AutoAuthPage(self.browser, course_id=self.course_id, roles=roles_str).visit().get_user_id()  # pylint: disable=attribute-defined-outside-init

    def setup_view(self):
        """Push a thread fixture with one response by another user and one by self."""
        view = SingleThreadViewFixture(Thread(id="response_edit_test_thread", commentable_id=self.discussion_id))
        view.addResponse(
            Response(id="response_other_author", user_id="other", thread_id="response_edit_test_thread"),
        )
        view.addResponse(
            Response(id="response_self_author", user_id=self.user_id, thread_id="response_edit_test_thread"),
        )
        view.push()

    @attr('a11y')
    def test_page_accessibility(self):
        self.setup_user()
        self.setup_view()
        page = self.create_single_thread_page("response_edit_test_thread")
        page.a11y_audit.config.set_rules({
            'ignore': [
                'section',  # TODO: AC-491
                'aria-required-children',  # TODO: AC-534
                'aria-valid-attr',  # TODO: LEARNER-6611 & LEARNER-6865
                'region',  # TODO: AC-932
            ]
        })
        page.visit()
        page.a11y_audit.check_for_accessibility_errors()
class DiscussionCommentEditTest(BaseDiscussionTestCase):
    """
    Tests for editing comments displayed beneath responses in the single thread view.
    """

    def setup_user(self, roles=None):
        """Create and sign in a test user with the given (possibly empty) roles."""
        # Fix: the original used a mutable default argument (roles=[]).
        roles_str = ','.join(roles or [])
        self.user_id = AutoAuthPage(self.browser, course_id=self.course_id, roles=roles_str).visit().get_user_id()  # pylint: disable=attribute-defined-outside-init

    def setup_view(self):
        """Push a thread whose response carries a comment by another user and one by self."""
        view = SingleThreadViewFixture(Thread(id="comment_edit_test_thread", commentable_id=self.discussion_id))
        view.addResponse(
            Response(id="response1"),
            [Comment(id="comment_other_author", user_id="other"),
             Comment(id="comment_self_author", user_id=self.user_id)])
        view.push()

    @attr('a11y')
    def test_page_accessibility(self):
        self.setup_user()
        self.setup_view()
        page = self.create_single_thread_page("comment_edit_test_thread")
        page.visit()
        page.a11y_audit.config.set_rules({
            'ignore': [
                'section',  # TODO: AC-491
                'aria-required-children',  # TODO: AC-534
                'aria-valid-attr',  # TODO: LEARNER-6611 & LEARNER-6865
                'region',  # TODO: AC-932
            ]
        })
        page.a11y_audit.check_for_accessibility_errors()

    @attr('a11y')
    @pytest.mark.skip(reason='This test is too flaky to run at all. TNL-6215')
    def test_inline_a11y(self):
        """
        Tests Inline Discussion for accessibility issues.
        """
        # NOTE(review): this method references self.discussion_page, which nothing
        # in this class sets up — it looks copied from an inline-discussion test
        # case. It is permanently skipped above; confirm before re-enabling.
        self.setup_multiple_threads(thread_count=3)
        # First test the a11y of the expanded list of threads
        self.discussion_page.expand_discussion()
        self.discussion_page.a11y_audit.config.set_rules({
            'ignore': [
                'section'
            ]
        })
        self.discussion_page.a11y_audit.check_for_accessibility_errors()
        # Now show the first thread and test the a11y again
        self.discussion_page.show_thread(self.thread_ids[0])
        self.discussion_page.a11y_audit.check_for_accessibility_errors()
        # Finally show the new post form and test its a11y
        self.discussion_page.click_new_post_button()
        self.discussion_page.a11y_audit.check_for_accessibility_errors()
class DiscussionSearchAlertTest(UniqueCourseTest):
    """
    Tests for spawning and dismissing alerts related to user search actions and their results.
    """

    SEARCHED_USERNAME = "gizmo"

    def setUp(self):
        super().setUp()
        CourseFixture(**self.course_info).install()
        # The first auto-auth call creates the user that some tests search for.
        self.searched_user_id = AutoAuthPage(
            self.browser,
            username=self.SEARCHED_USERNAME,
            course_id=self.course_id
        ).visit().get_user_id()
        # The second call creates the actual session user.
        AutoAuthPage(self.browser, course_id=self.course_id).visit()
        self.page = DiscussionTabHomePage(self.browser, self.course_id)
        self.page.visit()

    @attr('a11y')
    def test_page_accessibility(self):
        ignored_rules = [
            'section',  # TODO: AC-491
            'aria-required-children',  # TODO: AC-534
            'aria-valid-attr',  # TODO: LEARNER-6611 & LEARNER-6865
            'region',  # TODO: AC-932
        ]
        self.page.a11y_audit.config.set_rules({'ignore': ignored_rules})
        self.page.a11y_audit.check_for_accessibility_errors()
import pytest
from common.test.acceptance.fixtures.course import CourseFixture # lint-amnesty, pylint: disable=unused-import
from common.test.acceptance.fixtures.discussion import (
Comment,
Response,
SingleThreadViewFixture,
Thread,
)
from common.test.acceptance.pages.common.auto_auth import AutoAuthPage
from common.test.acceptance.pages.lms.discussion import (
DiscussionTabHomePage,
DiscussionTabSingleThreadPage,
)
from common.test.acceptance.tests.discussion.helpers import BaseDiscussionMixin, BaseDiscussionTestCase
from common.test.acceptance.tests.helpers import UniqueCourseTest
from openedx.core.lib.tests import attr
THREAD_CONTENT_WITH_LATEX = """Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt # lint-amnesty, pylint: disable=line-too-long
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
\n\n----------\n\nLorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur. (b).\n\n
**(a)** $H_1(e^{j\\omega}) = \\sum_{n=-\\infty}^{\\infty}h_1[n]e^{-j\\omega n} =
\\sum_{n=-\\infty} ^{\\infty}h[n]e^{-j\\omega n}+\\delta_2e^{-j\\omega n_0}$
$= H(e^{j\\omega})+\\delta_2e^{-j\\omega n_0}=A_e (e^{j\\omega}) e^{-j\\omega n_0}
+\\delta_2e^{-j\\omega n_0}=e^{-j\\omega n_0} (A_e(e^{j\\omega})+\\delta_2)
$H_3(e^{j\\omega})=A_e(e^{j\\omega})+\\delta_2$. Dummy $A_e(e^{j\\omega})$ dummy post $.
$A_e(e^{j\\omega}) \\ge -\\delta_2$, it follows that $H_3(e^{j\\omega})$ is real and
$H_3(e^{j\\omega})\\ge 0$.\n\n**(b)** Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.\n\n
**Case 1:** If $re^{j\\theta}$ is a Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
\n\n**Case 3:** Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
Lorem $H_3(e^{j\\omega}) = P(cos\\omega)(cos\\omega - cos\\theta)^k$,
Lorem Lorem Lorem Lorem Lorem Lorem $P(cos\\omega)$ has no
$(cos\\omega - cos\\theta)$ factor.
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
$P(cos\\theta) \\neq 0$. Since $P(cos\\omega)$ this is a dummy data post $\\omega$,
dummy $\\delta > 0$ such that for all $\\omega$ dummy $|\\omega - \\theta|
< \\delta$, $P(cos\\omega)$ Lorem ipsum dolor sit amet, consectetur adipiscing elit,
sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim
veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
consequat. Duis aute irure dolor in reprehenderit in voluptate velit sse cillum dolore
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
reprehenderit in voluptate velit sse cillum dolore eu fugiat nulla pariatur.
"""
@attr(shard=2)
class DiscussionHomePageTest(BaseDiscussionTestCase):
    """
    Tests for the discussion home page.
    """
    SEARCHED_USERNAME = "gizmo"

    def setUp(self):
        """Log in via auto-auth and open the discussion tab home page."""
        super().setUp()
        AutoAuthPage(self.browser, course_id=self.course_id).visit()
        self.page = DiscussionTabHomePage(self.browser, self.course_id)
        self.page.visit()

    @attr('a11y')
    def test_page_accessibility(self):
        """Audit the page for accessibility errors, ignoring known failures."""
        self.page.a11y_audit.config.set_rules({
            "ignore": [
                'section',  # TODO: AC-491
                'aria-required-children',  # TODO: AC-534
                'aria-valid-attr',  # TODO: LEARNER-6611 & LEARNER-6865
                'region'  # TODO: AC-932
            ]
        })
        self.page.a11y_audit.check_for_accessibility_errors()
class DiscussionTabMultipleThreadTest(BaseDiscussionTestCase, BaseDiscussionMixin):
    """
    Tests for the discussion page with multiple threads
    """

    def setUp(self):
        """Create two threads and single-thread page objects for each; visit the first."""
        super().setUp()
        AutoAuthPage(self.browser, course_id=self.course_id).visit()
        self.thread_count = 2
        self.thread_ids = []
        self.setup_multiple_threads(thread_count=self.thread_count)

        self.thread_page_1 = DiscussionTabSingleThreadPage(
            self.browser,
            self.course_id,
            self.discussion_id,
            self.thread_ids[0]
        )
        self.thread_page_2 = DiscussionTabSingleThreadPage(
            self.browser,
            self.course_id,
            self.discussion_id,
            self.thread_ids[1]
        )
        self.thread_page_1.visit()

    @attr('a11y')
    def test_page_accessibility(self):
        """Audit both thread pages, each with its own ignore list."""
        self.thread_page_1.a11y_audit.config.set_rules({
            "ignore": [
                'section',  # TODO: AC-491
                'aria-required-children',  # TODO: AC-534
                'aria-valid-attr',  # TODO: LEARNER-6611 & LEARNER-6865
                'region',  # TODO: AC-932
            ]
        })
        self.thread_page_1.a11y_audit.check_for_accessibility_errors()

        self.thread_page_2.a11y_audit.config.set_rules({
            "ignore": [
                'section',  # TODO: AC-491
                'aria-required-children',  # TODO: AC-534
                'region'  # TODO: AC-932
            ]
        })
        self.thread_page_2.a11y_audit.check_for_accessibility_errors()
class DiscussionOpenClosedThreadTest(BaseDiscussionTestCase):
    """
    Tests for checking the display of attributes on open and closed threads
    """

    def setUp(self):
        super().setUp()
        self.thread_id = f"test_thread_{uuid4().hex}"

    def setup_user(self, roles=None):
        """Create and log in a user, optionally granting the given forum roles.

        Fix: ``roles`` previously defaulted to a mutable list shared across
        calls (pylint dangerous-default-value); ``None`` is the sentinel now,
        with identical behavior for all callers.
        """
        roles_str = ','.join(roles or [])
        self.user_id = AutoAuthPage(self.browser, course_id=self.course_id, roles=roles_str).visit().get_user_id()  # lint-amnesty, pylint: disable=attribute-defined-outside-init

    def setup_view(self, **thread_kwargs):
        """Push a single-thread fixture (with one response) for this discussion."""
        thread_kwargs.update({'commentable_id': self.discussion_id})
        view = SingleThreadViewFixture(
            Thread(id=self.thread_id, **thread_kwargs)
        )
        view.addResponse(Response(id="response1"))
        view.push()

    def setup_openclosed_thread_page(self, closed=False):
        """Create the thread (closed or open), visit it as a moderator, and toggle it."""
        self.setup_user(roles=['Moderator'])
        if closed:
            self.setup_view(closed=True)
        else:
            self.setup_view()
        page = self.create_single_thread_page(self.thread_id)
        page.visit()
        page.close_open_thread()
        return page

    @attr('a11y')
    def test_page_accessibility(self):
        """Audit both the open and the closed thread views."""
        page = self.setup_openclosed_thread_page()
        page.a11y_audit.config.set_rules({
            'ignore': [
                'section',  # TODO: AC-491
                'aria-required-children',  # TODO: AC-534
                'color-contrast',  # Commented out for now because they reproducibly fail on Jenkins but not locally
                'aria-valid-attr',  # TODO: LEARNER-6611 & LEARNER-6865
                'region',  # TODO: AC-932
            ]
        })
        page.a11y_audit.check_for_accessibility_errors()

        page = self.setup_openclosed_thread_page(True)
        page.a11y_audit.config.set_rules({
            'ignore': [
                'section',  # TODO: AC-491
                'aria-required-children',  # TODO: AC-534
                'color-contrast',  # Commented out for now because they reproducibly fail on Jenkins but not locally
                'aria-valid-attr',  # TODO: LEARNER-6611 & LEARNER-6865
                'region',  # TODO: AC-932
            ]
        })
        page.a11y_audit.check_for_accessibility_errors()
class DiscussionResponseEditTest(BaseDiscussionTestCase):
    """
    Tests for editing responses displayed beneath thread in the single thread view.
    """

    def setup_user(self, roles=None):
        """Create and log in a user, optionally granting the given forum roles.

        Fix: ``roles`` previously defaulted to a mutable list shared across
        calls (pylint dangerous-default-value); ``None`` is the sentinel now,
        with identical behavior for all callers.
        """
        roles_str = ','.join(roles or [])
        self.user_id = AutoAuthPage(self.browser, course_id=self.course_id, roles=roles_str).visit().get_user_id()  # lint-amnesty, pylint: disable=attribute-defined-outside-init

    def setup_view(self):
        """Push a thread fixture with one response by another user and one by self."""
        view = SingleThreadViewFixture(Thread(id="response_edit_test_thread", commentable_id=self.discussion_id))
        view.addResponse(
            Response(id="response_other_author", user_id="other", thread_id="response_edit_test_thread"),
        )
        view.addResponse(
            Response(id="response_self_author", user_id=self.user_id, thread_id="response_edit_test_thread"),
        )
        view.push()

    @attr('a11y')
    def test_page_accessibility(self):
        """Audit the single-thread page, ignoring known rule failures."""
        self.setup_user()
        self.setup_view()
        page = self.create_single_thread_page("response_edit_test_thread")
        page.a11y_audit.config.set_rules({
            'ignore': [
                'section',  # TODO: AC-491
                'aria-required-children',  # TODO: AC-534
                'aria-valid-attr',  # TODO: LEARNER-6611 & LEARNER-6865
                'region',  # TODO: AC-932
            ]
        })
        page.visit()
        page.a11y_audit.check_for_accessibility_errors()
class DiscussionCommentEditTest(BaseDiscussionTestCase):
    """
    Tests for editing comments displayed beneath responses in the single thread view.
    """

    def setup_user(self, roles=None):
        """Create and log in a user, optionally granting the given forum roles.

        Fix: ``roles`` previously defaulted to a mutable list shared across
        calls (pylint dangerous-default-value); ``None`` is the sentinel now,
        with identical behavior for all callers.
        """
        roles_str = ','.join(roles or [])
        self.user_id = AutoAuthPage(self.browser, course_id=self.course_id, roles=roles_str).visit().get_user_id()  # lint-amnesty, pylint: disable=attribute-defined-outside-init

    def setup_view(self):
        """Push a thread fixture whose response carries one comment per author."""
        view = SingleThreadViewFixture(Thread(id="comment_edit_test_thread", commentable_id=self.discussion_id))
        view.addResponse(
            Response(id="response1"),
            [Comment(id="comment_other_author", user_id="other"), Comment(id="comment_self_author", user_id=self.user_id)])  # lint-amnesty, pylint: disable=line-too-long
        view.push()

    @attr('a11y')
    def test_page_accessibility(self):
        """Audit the single-thread page, ignoring known rule failures."""
        self.setup_user()
        self.setup_view()
        page = self.create_single_thread_page("comment_edit_test_thread")
        page.visit()
        page.a11y_audit.config.set_rules({
            'ignore': [
                'section',  # TODO: AC-491
                'aria-required-children',  # TODO: AC-534
                'aria-valid-attr',  # TODO: LEARNER-6611 & LEARNER-6865
                'region',  # TODO: AC-932
            ]
        })
        page.a11y_audit.check_for_accessibility_errors()
    @attr('a11y')
    @pytest.mark.skip(reason='This test is too flaky to run at all. TNL-6215')
    def test_inline_a11y(self):
        """
        Tests Inline Discussion for accessibility issues.

        Audits three page states in sequence: the expanded thread list,
        a single expanded thread, and the new-post form.
        """
        # NOTE(review): assumes setup_multiple_threads populates
        # self.thread_ids and self.discussion_page on the enclosing class
        # (not visible here) -- confirm before relying on this.
        self.setup_multiple_threads(thread_count=3)
        # First test the a11y of the expanded list of threads
        self.discussion_page.expand_discussion()
        self.discussion_page.a11y_audit.config.set_rules({
            'ignore': [
                'section'
            ]
        })
        self.discussion_page.a11y_audit.check_for_accessibility_errors()
        # Now show the first thread and test the a11y again
        self.discussion_page.show_thread(self.thread_ids[0])
        self.discussion_page.a11y_audit.check_for_accessibility_errors()
        # Finally show the new post form and test its a11y
        self.discussion_page.click_new_post_button()
        self.discussion_page.a11y_audit.check_for_accessibility_errors()
class DiscussionSearchAlertTest(UniqueCourseTest):
    """
    Tests for spawning and dismissing alerts related to user search actions and their results.
    """
    SEARCHED_USERNAME = "gizmo"

    def setUp(self):
        """Install the course and create both the searched-for user and the session user."""
        super().setUp()
        CourseFixture(**self.course_info).install()
        # first auto auth call sets up a user that we will search for in some tests
        self.searched_user_id = AutoAuthPage(
            self.browser,
            username=self.SEARCHED_USERNAME,
            course_id=self.course_id
        ).visit().get_user_id()
        # this auto auth call creates the actual session user
        AutoAuthPage(self.browser, course_id=self.course_id).visit()
        self.page = DiscussionTabHomePage(self.browser, self.course_id)
        self.page.visit()

    @attr('a11y')
    def test_page_accessibility(self):
        """Audit the discussion home page, ignoring known rule failures."""
        self.page.a11y_audit.config.set_rules({
            'ignore': [
                'section',  # TODO: AC-491
                'aria-required-children',  # TODO: AC-534
                'aria-valid-attr',  # TODO: LEARNER-6611 & LEARNER-6865
                'region',  # TODO: AC-932
            ]
        })
self.page.a11y_audit.check_for_accessibility_errors() | 0.345768 | 0.427875 |
import numpy as np
def integerized(sequence):
    """Map each element of *sequence* to its rank among the distinct elements.

    The distinct elements are sorted; each element is replaced by its index
    in that sorted order.  Returns a list of ints.

    Fix: the original called ``list.index`` once per element (O(n * k) for
    alphabet size k); a precomputed rank dictionary makes each lookup O(1).
    """
    rank = {element: i for i, element in enumerate(sorted(set(sequence)))}
    return [rank[element] for element in sequence]
def preprocess(sequences, ignoreLower=True):
    """Integerize a collection of sequences and truncate them to equal length.

    Each sequence is either filtered down to its uppercase A-Z characters
    (``ignoreLower=True``) or uppercased wholesale, then recoded through
    ``integerized``.  Every result is cut to the length of the shortest
    processed sequence so the output is rectangular.
    """
    encoded = []
    lengths = []
    for raw in sequences:
        if ignoreLower:
            cleaned = [ch for ch in raw if 'A' <= ch <= 'Z']
        else:
            cleaned = raw.upper()
        encoded.append(integerized(cleaned))
        lengths.append(len(cleaned))
    shortest = min(lengths)
    return [codes[:shortest] for codes in encoded]
def normalize_kernel(kernel):
    """Return a cosine-normalized copy of a square kernel matrix.

    Each off-diagonal entry K[i, j] is divided by sqrt(K[i, i] * K[j, j]);
    entries whose normalizer is 0 are left untouched.  The diagonal is set
    to 1.  The input is not modified.

    Fix: the copy is taken with ``dtype=float`` so integer-valued kernels no
    longer fail on the in-place true division (``/=`` cannot cast back to
    an integer array).
    """
    nkernel = np.array(kernel, dtype=float)
    assert nkernel.ndim == 2
    assert nkernel.shape[0] == nkernel.shape[1]
    for i in range(nkernel.shape[0]):
        for j in range(i + 1, nkernel.shape[0]):
            q = np.sqrt(nkernel[i, i] * nkernel[j, j])
            if q > 0:
                nkernel[i, j] /= q
                nkernel[j, i] = nkernel[i, j]  # keep the matrix symmetric
    np.fill_diagonal(nkernel, 1.)
    return nkernel
class MismatchTrie(object):
    """Node of a (k, m)-mismatch trie used to compute the mismatch string kernel.

    Each node represents a fixed-length prefix (``full_label``) over an
    integer alphabet; ``kmers`` maps a training-sample index to an array of
    (offset, mismatch_count) pointers for the k-mers of that sample still
    consistent with the prefix within ``m`` mismatches.
    """

    def __init__(self, label=None, parent=None):
        self.label = label          # symbol on the edge from the parent (None for the root)
        self.level = 0              # depth in the trie (root = 0)
        self.children = {}          # child label -> MismatchTrie
        self.full_label = ""        # concatenation of edge labels from the root
        self.kmers = {}             # sample index -> array of (offset, mismatch) pairs
        self.parent = parent
        if not parent is None:
            parent.add_child(self)

    def is_root(self):
        """True if this node has no parent."""
        return self.parent is None

    def is_leaf(self):
        """True if this node has no children."""
        return len(self.children) == 0

    def is_empty(self):
        """True if no surviving k-mer pointers remain at this node."""
        return len(self.kmers) == 0

    def copy_kmers(self):
        """Return a per-array copy of the kmers mapping (arrays are not shared)."""
        return {index: np.array(substring_pointers) for index, substring_pointers in self.kmers.items()}

    def add_child(self, child):
        """Attach *child*, handing it a copy of this node's k-mer pointers."""
        child.kmers = self.copy_kmers()
        child.level = self.level + 1
        child.full_label = '%s%s' % (self.full_label, child.label)
        self.children[child.label] = child
        child.parent = self

    def delete_child(self, child):
        """Remove a child, given either the node itself or its label."""
        label = child.label if isinstance(child, MismatchTrie) else child
        del self.children[label]

    def compute_kmers(self, training_data, k):
        """Initialize pointers to every length-k window of every sample (root only)."""
        for index in range(len(training_data)):
            self.kmers[index] = np.array([(offset, 0) for offset in range(len(training_data[index])-k+1)])

    def process_node(self, training_data, k, m):
        """Update mismatch counts against this node's label and prune dead pointers.

        Returns True if any k-mer pointer survives, i.e. traversal should
        continue below this node.
        """
        if self.is_root():
            self.compute_kmers(training_data, k)
        else:
            for index, substring_pointers in self.kmers.items():
                # A pointer gains one mismatch when the sample symbol at this
                # depth differs from the node's label.
                substring_pointers[..., 1] += (training_data[index][
                    substring_pointers[..., 0] + self.level - 1
                ] != self.label)
                # Drop pointers that exceeded the mismatch budget m.
                self.kmers[index] = np.delete(substring_pointers,
                                              np.nonzero(substring_pointers[..., 1] > m),
                                              axis=0)
            # Drop samples with no surviving pointers at all.
            self.kmers = {index: substring_pointers for (
                index, substring_pointers) in self.kmers.items(
            ) if len(substring_pointers)}
        return not self.is_empty()

    def update_kernel(self, kernel):
        """Accumulate this leaf's contribution: product of surviving k-mer counts per sample pair."""
        for i in self.kmers:
            for j in self.kmers:
                kernel[i, j] += len(self.kmers[i]) * len(self.kmers[j])

    def traverse(self, training_data, l, k, m, kernel=None, kernel_update_callback=None):
        """Depth-first expansion over an alphabet of size *l*.

        Descends while pointers survive; when k reaches 0 the kernel is
        updated.  Returns (kernel, n_surviving_kmers, go_ahead).
        """
        if kernel is None:
            kernel = np.zeros((len(training_data), len(training_data)))
        n_surviving_kmers = 0
        go_ahead = self.process_node(training_data, k, m)
        if go_ahead:
            if k == 0:
                n_surviving_kmers += 1
                self.update_kernel(kernel)
            else:
                for j in range(l):
                    child = MismatchTrie(label=j, parent=self)
                    kernel, child_n_surviving_kmers, \
                        child_go_ahead = child.traverse(
                            training_data, l, k - 1, m, kernel=kernel)
                    # Prune exhausted branches to keep the trie small.
                    if child.is_empty():
                        self.delete_child(child)
                    n_surviving_kmers += child_n_surviving_kmers if \
                        child_go_ahead else 0
        return kernel, n_surviving_kmers, go_ahead
class MismatchKernel(MismatchTrie):
    """Convenience wrapper computing the (k, m)-mismatch kernel over an alphabet of size l."""

    def __init__(self, l= 4, k=None, m=None, **kwargs):
        MismatchTrie.__init__(self, **kwargs)
        self.l = l  # alphabet size
        self.k = k  # k-mer length
        self.m = m  # maximum mismatches allowed

    def get_kernel(self, X, normalize = True, **kwargs):
        """Compute (and optionally cosine-normalize) the kernel matrix for X."""
        self.kernel, _, _ = self.traverse(X, self.l, self.k, self.m, **kwargs)
        if normalize:
            self.kernel = normalize_kernel(self.kernel)
return self | utils/mismatchtree.py |
import numpy as np
def integerized(sequence):
    """Map each element of *sequence* to its rank among the distinct elements.

    The distinct elements are sorted; each element is replaced by its index
    in that sorted order.  Returns a list of ints.

    Fix: the original called ``list.index`` once per element (O(n * k) for
    alphabet size k); a precomputed rank dictionary makes each lookup O(1).
    """
    rank = {element: i for i, element in enumerate(sorted(set(sequence)))}
    return [rank[element] for element in sequence]
def preprocess(sequences, ignoreLower=True):
    """Integerize a collection of sequences and truncate them to equal length.

    Each sequence is either filtered down to its uppercase A-Z characters
    (``ignoreLower=True``) or uppercased wholesale, then recoded through
    ``integerized``.  Every result is cut to the length of the shortest
    processed sequence so the output is rectangular.
    """
    encoded = []
    lengths = []
    for raw in sequences:
        if ignoreLower:
            cleaned = [ch for ch in raw if 'A' <= ch <= 'Z']
        else:
            cleaned = raw.upper()
        encoded.append(integerized(cleaned))
        lengths.append(len(cleaned))
    shortest = min(lengths)
    return [codes[:shortest] for codes in encoded]
def normalize_kernel(kernel):
    """Return a cosine-normalized copy of a square kernel matrix.

    Each off-diagonal entry K[i, j] is divided by sqrt(K[i, i] * K[j, j]);
    entries whose normalizer is 0 are left untouched.  The diagonal is set
    to 1.  The input is not modified.

    Fix: the copy is taken with ``dtype=float`` so integer-valued kernels no
    longer fail on the in-place true division (``/=`` cannot cast back to
    an integer array).
    """
    nkernel = np.array(kernel, dtype=float)
    assert nkernel.ndim == 2
    assert nkernel.shape[0] == nkernel.shape[1]
    for i in range(nkernel.shape[0]):
        for j in range(i + 1, nkernel.shape[0]):
            q = np.sqrt(nkernel[i, i] * nkernel[j, j])
            if q > 0:
                nkernel[i, j] /= q
                nkernel[j, i] = nkernel[i, j]  # keep the matrix symmetric
    np.fill_diagonal(nkernel, 1.)
    return nkernel
class MismatchTrie(object):
    """Node of a (k, m)-mismatch trie used to compute the mismatch string kernel.

    Each node represents a fixed-length prefix (``full_label``) over an
    integer alphabet; ``kmers`` maps a training-sample index to an array of
    (offset, mismatch_count) pointers for the k-mers of that sample still
    consistent with the prefix within ``m`` mismatches.
    """

    def __init__(self, label=None, parent=None):
        self.label = label          # symbol on the edge from the parent (None for the root)
        self.level = 0              # depth in the trie (root = 0)
        self.children = {}          # child label -> MismatchTrie
        self.full_label = ""        # concatenation of edge labels from the root
        self.kmers = {}             # sample index -> array of (offset, mismatch) pairs
        self.parent = parent
        if not parent is None:
            parent.add_child(self)

    def is_root(self):
        """True if this node has no parent."""
        return self.parent is None

    def is_leaf(self):
        """True if this node has no children."""
        return len(self.children) == 0

    def is_empty(self):
        """True if no surviving k-mer pointers remain at this node."""
        return len(self.kmers) == 0

    def copy_kmers(self):
        """Return a per-array copy of the kmers mapping (arrays are not shared)."""
        return {index: np.array(substring_pointers) for index, substring_pointers in self.kmers.items()}

    def add_child(self, child):
        """Attach *child*, handing it a copy of this node's k-mer pointers."""
        child.kmers = self.copy_kmers()
        child.level = self.level + 1
        child.full_label = '%s%s' % (self.full_label, child.label)
        self.children[child.label] = child
        child.parent = self

    def delete_child(self, child):
        """Remove a child, given either the node itself or its label."""
        label = child.label if isinstance(child, MismatchTrie) else child
        del self.children[label]

    def compute_kmers(self, training_data, k):
        """Initialize pointers to every length-k window of every sample (root only)."""
        for index in range(len(training_data)):
            self.kmers[index] = np.array([(offset, 0) for offset in range(len(training_data[index])-k+1)])

    def process_node(self, training_data, k, m):
        """Update mismatch counts against this node's label and prune dead pointers.

        Returns True if any k-mer pointer survives, i.e. traversal should
        continue below this node.
        """
        if self.is_root():
            self.compute_kmers(training_data, k)
        else:
            for index, substring_pointers in self.kmers.items():
                # A pointer gains one mismatch when the sample symbol at this
                # depth differs from the node's label.
                substring_pointers[..., 1] += (training_data[index][
                    substring_pointers[..., 0] + self.level - 1
                ] != self.label)
                # Drop pointers that exceeded the mismatch budget m.
                self.kmers[index] = np.delete(substring_pointers,
                                              np.nonzero(substring_pointers[..., 1] > m),
                                              axis=0)
            # Drop samples with no surviving pointers at all.
            self.kmers = {index: substring_pointers for (
                index, substring_pointers) in self.kmers.items(
            ) if len(substring_pointers)}
        return not self.is_empty()

    def update_kernel(self, kernel):
        """Accumulate this leaf's contribution: product of surviving k-mer counts per sample pair."""
        for i in self.kmers:
            for j in self.kmers:
                kernel[i, j] += len(self.kmers[i]) * len(self.kmers[j])

    def traverse(self, training_data, l, k, m, kernel=None, kernel_update_callback=None):
        """Depth-first expansion over an alphabet of size *l*.

        Descends while pointers survive; when k reaches 0 the kernel is
        updated.  Returns (kernel, n_surviving_kmers, go_ahead).
        """
        if kernel is None:
            kernel = np.zeros((len(training_data), len(training_data)))
        n_surviving_kmers = 0
        go_ahead = self.process_node(training_data, k, m)
        if go_ahead:
            if k == 0:
                n_surviving_kmers += 1
                self.update_kernel(kernel)
            else:
                for j in range(l):
                    child = MismatchTrie(label=j, parent=self)
                    kernel, child_n_surviving_kmers, \
                        child_go_ahead = child.traverse(
                            training_data, l, k - 1, m, kernel=kernel)
                    # Prune exhausted branches to keep the trie small.
                    if child.is_empty():
                        self.delete_child(child)
                    n_surviving_kmers += child_n_surviving_kmers if \
                        child_go_ahead else 0
        return kernel, n_surviving_kmers, go_ahead
class MismatchKernel(MismatchTrie):
    """Convenience wrapper computing the (k, m)-mismatch kernel over an alphabet of size l."""

    def __init__(self, l= 4, k=None, m=None, **kwargs):
        MismatchTrie.__init__(self, **kwargs)
        self.l = l  # alphabet size
        self.k = k  # k-mer length
        self.m = m  # maximum mismatches allowed

    def get_kernel(self, X, normalize = True, **kwargs):
        """Compute (and optionally cosine-normalize) the kernel matrix for X."""
        self.kernel, _, _ = self.traverse(X, self.l, self.k, self.m, **kwargs)
        if normalize:
            self.kernel = normalize_kernel(self.kernel)
return self | 0.432782 | 0.521227 |
import os
import cv2
import torch
import argparse
import numpy as np
from PIL import Image
from torchvision import transforms
from config import get_config
from Learner import face_learner
from data.data_pipe import get_val_pair
from mtcnn_pytorch.crop_and_aligned import mctnn_crop_face
def initialize_learner(conf, mdl_name):
    """Build a face_learner in inference mode and load the named checkpoint."""
    learner = face_learner(conf, inference=True)
    learner.load_state(conf, mdl_name, model_only=True,
                       from_save_folder=False, strict=True, model_atten=True)
    return learner
def plotResults(conf, learner, exdir, img1, img2,
                filename, dataset_name='lfw'):
    """Crop, align and compare two face images, saving a visualisation.

    Both image paths are opened, MTCNN-cropped, resized to 112x112 and
    normalised, then passed (duplicated -- see XXX note below) to
    ``learner.plot_Examples``.  Returns (img_base64, meta, xcos_score).
    """
    transforms_mine = transforms.Compose([
        transforms.Resize([112, 112]),
        transforms.ToTensor(),
        transforms.Normalize([.5, .5, .5], [.5, .5, .5]),
    ])
    assert dataset_name in ['lfw', 'agedb_30', 'cfp_fp']
    # dataset, dataset_issame = get_val_pair(conf.emore_folder, dataset_name)
    img1 = Image.open(img1).convert('RGB')
    img2 = Image.open(img2).convert('RGB')
    # In fact, BGR2RGB=True turn RGB into BGR
    img1 = mctnn_crop_face(img1, BGR2RGB=True)
    img2 = mctnn_crop_face(img2, BGR2RGB=True)
    # img1 = cv2.imread(img1)
    # img2 = cv2.imread(img2)
    # img1 = Image.fromarray(img1)
    # img2 = Image.fromarray(img2)
    img1 = transforms_mine(img1)
    img2 = transforms_mine(img2)
    # img1 = np.array(img1)
    # img2 = np.array(img2)
    # img1 = cv2.resize(img1, dim, interpolation=cv2.INTER_AREA)
    # img2 = cv2.resize(img2, dim, interpolation=cv2.INTER_AREA)
    # dataset = np.array([img1, img2])
    # XXX It causes the meta and image returned with one reduntdant result
    dataset = torch.stack([img1, img2, img1, img2])
    print(dataset.size())
    dataset_issame = np.array([1, 1])
    img_base64, meta, xcos_score = learner.plot_Examples(conf,
                                                         dataset, dataset_issame,
                                                         nrof_folds=10, tta=False,
                                                         attention=None,
                                                         exDir=exdir,
                                                         filename=filename,
                                                         return_xcos=True)
    return img_base64, meta, xcos_score
def getCroppedTensorFromFilename(filename, transform):
    """Load an image file, MTCNN-crop the face, and apply *transform*."""
    img = Image.open(filename).convert('RGB')
    img = mctnn_crop_face(img)
    img = transform(img)
    return img
def getPairedTensors(query_filename, filesToCompare):
    """Stack (query, target) tensor pairs for each file in *filesToCompare*.

    The query image is cropped/transformed once and repeated before every
    target image, giving a stack of 2 * len(filesToCompare) tensors.
    """
    transforms_mine = transforms.Compose([
        transforms.Resize([112, 112]),
        transforms.ToTensor(),
        transforms.Normalize([.5, .5, .5], [.5, .5, .5]),
    ])
    image_stack = []
    query_img_tensor = getCroppedTensorFromFilename(query_filename,
                                                    transforms_mine)
    for filename in filesToCompare:
        target_img_tensor = getCroppedTensorFromFilename(filename,
                                                         transforms_mine)
        image_stack.append(query_img_tensor)
        image_stack.append(target_img_tensor)
    image_stack = torch.stack(image_stack)
    return image_stack
def main(conf, mdl_name, exdir, dataset_name, img1, img2, filename):
    """Load the model and plot the comparison of *img1* and *img2*.

    Fix: the second line of the status message was a plain string literal,
    so ``{mdl_name}``/``{dataset_name}``/``{exdir}`` printed verbatim; it
    now carries the ``f`` prefix like the first line.
    """
    print(f'>>>> Plot comparison of {img1} and {img2} '
          f'{mdl_name} on {dataset_name}, and save to {exdir}...')
    learner = initialize_learner(conf, mdl_name)
    plotResults(conf, learner, exdir, img1, img2, filename, dataset_name)
if __name__ == "__main__":
    # Checkpoint loaded by default when --model is not given.
    mdl_name_default = '2019-08-25-14-35_accuracy:0.9931666666666666_step:218349_None.pth'
    parser = argparse.ArgumentParser(description='feature extraction')
    # general
    parser.add_argument('--model', default=mdl_name_default,
                        help='model to test')
    parser.add_argument('--dataset', default='lfw',
                        help='plot on which dataset')
    parser.add_argument('--img1', default='gakki1.jpg',
                        help='img to test')
    parser.add_argument('--img2', default='gakki2.jpg',
                        help='img to test')
    parser.add_argument('--filename', default='result.jpg',
                        help='img to test')
    parser.add_argument('--exdir',
                        default='work_space/results/defaultPlotExDir_1124',
                        help='dir to save imgs')
    args = parser.parse_args()
    conf = get_config(training=False)
    # Why bs_size can only be the number that divide 6000 well?
    conf.batch_size = 200
    # exdir = 'cosPatchFtWithMs1M_learnedAtten_LFW_1104_'
    exdir = args.exdir
    dataset_name = args.dataset
    mdl_name = args.model
    os.makedirs(exdir, exist_ok=True)
main(conf, mdl_name, exdir, dataset_name, args.img1, args.img2, args.filename) | plot_qualitative_results_given_2_imgs.py | import os
import cv2
import torch
import argparse
import numpy as np
from PIL import Image
from torchvision import transforms
from config import get_config
from Learner import face_learner
from data.data_pipe import get_val_pair
from mtcnn_pytorch.crop_and_aligned import mctnn_crop_face
def initialize_learner(conf, mdl_name):
    """Build a face_learner in inference mode and load the named checkpoint."""
    learner = face_learner(conf, inference=True)
    learner.load_state(conf, mdl_name, model_only=True,
                       from_save_folder=False, strict=True, model_atten=True)
    return learner
def plotResults(conf, learner, exdir, img1, img2,
                filename, dataset_name='lfw'):
    """Crop, align and compare two face images, saving a visualisation.

    Both image paths are opened, MTCNN-cropped, resized to 112x112 and
    normalised, then passed (duplicated -- see XXX note below) to
    ``learner.plot_Examples``.  Returns (img_base64, meta, xcos_score).
    """
    transforms_mine = transforms.Compose([
        transforms.Resize([112, 112]),
        transforms.ToTensor(),
        transforms.Normalize([.5, .5, .5], [.5, .5, .5]),
    ])
    assert dataset_name in ['lfw', 'agedb_30', 'cfp_fp']
    # dataset, dataset_issame = get_val_pair(conf.emore_folder, dataset_name)
    img1 = Image.open(img1).convert('RGB')
    img2 = Image.open(img2).convert('RGB')
    # In fact, BGR2RGB=True turn RGB into BGR
    img1 = mctnn_crop_face(img1, BGR2RGB=True)
    img2 = mctnn_crop_face(img2, BGR2RGB=True)
    # img1 = cv2.imread(img1)
    # img2 = cv2.imread(img2)
    # img1 = Image.fromarray(img1)
    # img2 = Image.fromarray(img2)
    img1 = transforms_mine(img1)
    img2 = transforms_mine(img2)
    # img1 = np.array(img1)
    # img2 = np.array(img2)
    # img1 = cv2.resize(img1, dim, interpolation=cv2.INTER_AREA)
    # img2 = cv2.resize(img2, dim, interpolation=cv2.INTER_AREA)
    # dataset = np.array([img1, img2])
    # XXX It causes the meta and image returned with one reduntdant result
    dataset = torch.stack([img1, img2, img1, img2])
    print(dataset.size())
    dataset_issame = np.array([1, 1])
    img_base64, meta, xcos_score = learner.plot_Examples(conf,
                                                         dataset, dataset_issame,
                                                         nrof_folds=10, tta=False,
                                                         attention=None,
                                                         exDir=exdir,
                                                         filename=filename,
                                                         return_xcos=True)
    return img_base64, meta, xcos_score
def getCroppedTensorFromFilename(filename, transform):
    """Load an image file, MTCNN-crop the face, and apply *transform*."""
    img = Image.open(filename).convert('RGB')
    img = mctnn_crop_face(img)
    img = transform(img)
    return img
def getPairedTensors(query_filename, filesToCompare):
    """Stack (query, target) tensor pairs for each file in *filesToCompare*.

    The query image is cropped/transformed once and repeated before every
    target image, giving a stack of 2 * len(filesToCompare) tensors.
    """
    transforms_mine = transforms.Compose([
        transforms.Resize([112, 112]),
        transforms.ToTensor(),
        transforms.Normalize([.5, .5, .5], [.5, .5, .5]),
    ])
    image_stack = []
    query_img_tensor = getCroppedTensorFromFilename(query_filename,
                                                    transforms_mine)
    for filename in filesToCompare:
        target_img_tensor = getCroppedTensorFromFilename(filename,
                                                         transforms_mine)
        image_stack.append(query_img_tensor)
        image_stack.append(target_img_tensor)
    image_stack = torch.stack(image_stack)
    return image_stack
def main(conf, mdl_name, exdir, dataset_name, img1, img2, filename):
    """Load the model and plot the comparison of *img1* and *img2*.

    Fix: the second line of the status message was a plain string literal,
    so ``{mdl_name}``/``{dataset_name}``/``{exdir}`` printed verbatim; it
    now carries the ``f`` prefix like the first line.
    """
    print(f'>>>> Plot comparison of {img1} and {img2} '
          f'{mdl_name} on {dataset_name}, and save to {exdir}...')
    learner = initialize_learner(conf, mdl_name)
    plotResults(conf, learner, exdir, img1, img2, filename, dataset_name)
if __name__ == "__main__":
    # Checkpoint loaded by default when --model is not given.
    mdl_name_default = '2019-08-25-14-35_accuracy:0.9931666666666666_step:218349_None.pth'
    parser = argparse.ArgumentParser(description='feature extraction')
    # general
    parser.add_argument('--model', default=mdl_name_default,
                        help='model to test')
    parser.add_argument('--dataset', default='lfw',
                        help='plot on which dataset')
    parser.add_argument('--img1', default='gakki1.jpg',
                        help='img to test')
    parser.add_argument('--img2', default='gakki2.jpg',
                        help='img to test')
    parser.add_argument('--filename', default='result.jpg',
                        help='img to test')
    parser.add_argument('--exdir',
                        default='work_space/results/defaultPlotExDir_1124',
                        help='dir to save imgs')
    args = parser.parse_args()
    conf = get_config(training=False)
    # Why bs_size can only be the number that divide 6000 well?
    conf.batch_size = 200
    # exdir = 'cosPatchFtWithMs1M_learnedAtten_LFW_1104_'
    exdir = args.exdir
    dataset_name = args.dataset
    mdl_name = args.model
    os.makedirs(exdir, exist_ok=True)
main(conf, mdl_name, exdir, dataset_name, args.img1, args.img2, args.filename) | 0.661267 | 0.397061 |
import random
print("This simulates f-pairs, L2-L3.")
f_his = []
f = 178  # total number of simulated rounds
# Draw ~5% of the post-seed rounds as random "perturbation" rounds.
perturbation_pos_f = random.sample(range(6, f), round(f * 0.05))
perturbation_pos_f = sorted(perturbation_pos_f)
print(perturbation_pos_f)
def calculate_strategy_f(memoryA, memoryB):
    """Return the best-reply strategy pair [n, m] from sampled memories.

    Player A best-replies to the empirical distribution of B's past
    actions (second components of memoryA); player B best-replies to
    A's past actions (first components of memoryB).  Ties favour the
    middle action via the >= comparisons.
    """
    a_observed = [0, 0, 0]  # A's past actions seen by B
    b_observed = [0, 0, 0]  # B's past actions seen by A
    for idx in range(len(memoryA)):
        a_observed[idx] = memoryB[idx][0]
        b_observed[idx] = memoryA[idx][1]
    # Empirical distribution of B's actions 2/3/4 and A's actions 1/2/3.
    dist_b = [b_observed.count(action) / 3 for action in (2, 3, 4)]
    dist_a = [a_observed.count(action) / 3 for action in (1, 2, 3)]

    # Expected payoffs of A's three actions against dist_b.
    pay_a1 = 0.1 * dist_b[0] + 0.1 * dist_b[1] + 0.1 * dist_b[2]
    pay_a2 = 1.2 * dist_b[0] + 0.2 * dist_b[1] + 0.2 * dist_b[2]
    pay_a3 = 0 * dist_b[0] + 1 * dist_b[1] + 0 * dist_b[2]
    if pay_a1 > pay_a2 and pay_a1 > pay_a3:
        n = 1
    if pay_a2 >= pay_a1 and pay_a2 >= pay_a3:  # >= implements the tie-breaker
        n = 2
    if pay_a3 > pay_a1 and pay_a3 > pay_a2:
        n = 3

    # Expected payoffs of B's three actions against dist_a.
    pay_b2 = 0 * dist_a[0] + 1 * dist_a[1] + 0 * dist_a[2]
    pay_b3 = 0.2 * dist_a[0] + 0.2 * dist_a[1] + 1.2 * dist_a[2]
    pay_b4 = 0.1 * dist_a[0] + 0.1 * dist_a[1] + 0.1 * dist_a[2]
    if pay_b2 > pay_b3 and pay_b2 > pay_b4:
        m = 2
    if pay_b3 >= pay_b2 and pay_b3 >= pay_b4:
        m = 3
    if pay_b4 > pay_b2 and pay_b4 > pay_b3:
        m = 4
    return [n, m]
# testing a little bit
#memoryA = [[1, 2], [1, 2], [1, 3]]
#memoryB = [[1, 4], [1, 4], [1, 4]]
#print(calculate_strategy_f(memoryA, memoryB))
def random_pick_f():
    """Return a uniformly random strategy pair: A in {1,2,3}, B in {2,3,4}."""
    return [random.randint(1, 3), random.randint(2, 4)]
# Seed the history with 5 random strategy pairs, then play the remaining
# rounds: each round best-replies to 3 randomly sampled past pairs, except
# at the pre-drawn perturbation positions, where play is random.
f_his = []
temp = [[0 for col in range(2)] for row in range(5)]
for i in range(5):
    temp[i][0] = random.randint(1, 3)
    temp[i][1] = random.randint(2, 4)
for item in temp:
    f_his.append(item)
#print(b_his)
memoryA = []
memoryB = []
#form history iteratively
for i in range(5, 178):
    memoryA = random.sample(f_his,3)
    memoryB = random.sample(f_his,3)
    if i not in perturbation_pos_f:
        k = calculate_strategy_f(memoryA, memoryB)
        f_his.append(k)
    else:
        k = random_pick_f()
        f_his.append(k)
if len(f_his)!= 178:
print('ERROR')
else:
print(f_his) | testpayoff.py |
import random
print("This simulates f-pairs, L2-L3.")
f_his = []
f = 178  # total number of simulated rounds
# Draw ~5% of the post-seed rounds as random "perturbation" rounds.
perturbation_pos_f = random.sample(range(6, f), round(f * 0.05))
perturbation_pos_f = sorted(perturbation_pos_f)
print(perturbation_pos_f)
def calculate_strategy_f(memoryA, memoryB):
    """Return the best-reply strategy pair [n, m] from sampled memories.

    Player A best-replies to the empirical distribution of B's past
    actions (second components of memoryA); player B best-replies to
    A's past actions (first components of memoryB).  Ties favour the
    middle action via the >= comparisons.
    """
    a_observed = [0, 0, 0]  # A's past actions seen by B
    b_observed = [0, 0, 0]  # B's past actions seen by A
    for idx in range(len(memoryA)):
        a_observed[idx] = memoryB[idx][0]
        b_observed[idx] = memoryA[idx][1]
    # Empirical distribution of B's actions 2/3/4 and A's actions 1/2/3.
    dist_b = [b_observed.count(action) / 3 for action in (2, 3, 4)]
    dist_a = [a_observed.count(action) / 3 for action in (1, 2, 3)]

    # Expected payoffs of A's three actions against dist_b.
    pay_a1 = 0.1 * dist_b[0] + 0.1 * dist_b[1] + 0.1 * dist_b[2]
    pay_a2 = 1.2 * dist_b[0] + 0.2 * dist_b[1] + 0.2 * dist_b[2]
    pay_a3 = 0 * dist_b[0] + 1 * dist_b[1] + 0 * dist_b[2]
    if pay_a1 > pay_a2 and pay_a1 > pay_a3:
        n = 1
    if pay_a2 >= pay_a1 and pay_a2 >= pay_a3:  # >= implements the tie-breaker
        n = 2
    if pay_a3 > pay_a1 and pay_a3 > pay_a2:
        n = 3

    # Expected payoffs of B's three actions against dist_a.
    pay_b2 = 0 * dist_a[0] + 1 * dist_a[1] + 0 * dist_a[2]
    pay_b3 = 0.2 * dist_a[0] + 0.2 * dist_a[1] + 1.2 * dist_a[2]
    pay_b4 = 0.1 * dist_a[0] + 0.1 * dist_a[1] + 0.1 * dist_a[2]
    if pay_b2 > pay_b3 and pay_b2 > pay_b4:
        m = 2
    if pay_b3 >= pay_b2 and pay_b3 >= pay_b4:
        m = 3
    if pay_b4 > pay_b2 and pay_b4 > pay_b3:
        m = 4
    return [n, m]
# testing a little bit
#memoryA = [[1, 2], [1, 2], [1, 3]]
#memoryB = [[1, 4], [1, 4], [1, 4]]
#print(calculate_strategy_f(memoryA, memoryB))
def random_pick_f():
    """Draw a uniformly random f-pair action: n in 1..3, m in 2..4."""
    return [random.randint(1, 3), random.randint(2, 4)]
# Seed the history with 5 uniformly random action pairs [a_move, b_move].
f_his = []
temp = [[0 for col in range(2)] for row in range(5)]
for i in range(5):
    temp[i][0] = random.randint(1, 3)
    temp[i][1] = random.randint(2, 4)
for item in temp:
    f_his.append(item)
#print(b_his)
memoryA = []
memoryB = []
#form history iteratively
# Rounds 5..177: each player samples 3 past pairs from the shared history;
# on perturbation rounds the pair plays randomly instead of best-responding.
for i in range(5, 178):
    memoryA = random.sample(f_his,3)
    memoryB = random.sample(f_his,3)
    if i not in perturbation_pos_f:
        k = calculate_strategy_f(memoryA, memoryB)
        f_his.append(k)
    else:
        k = random_pick_f()
        f_his.append(k)
# Sanity check: 5 seed pairs + 173 generated pairs == 178 rounds.
if len(f_his)!= 178:
    print('ERROR')
else:
print(f_his) | 0.173288 | 0.515986 |
# Advent of Code 2015, day 19 ("Medicine for Rudolph").
from collections import defaultdict
rules = defaultdict(list)      # element -> possible replacement strings
rev_rules = defaultdict(list)  # replacement -> source elements (built but unused below)
with open('input') as f:
    rep_text = f.read().splitlines(keepends=False)
# The target molecule is the last line; the replacement rules precede it.
calibration_molecule = rep_text[-1]
rep_text = rep_text[:-1]
for line in rep_text:
    if len(line.strip()) > 0:
        k, v = line.split(' => ')
        rules[k].append(v)
        rev_rules[v].append(k)
# Part 1: every distinct molecule reachable with exactly one replacement.
products = set()
for n in range(len(calibration_molecule)):
    a, b = calibration_molecule[:n], calibration_molecule[n:]
    for key, rep in rules.items():
        if b.startswith(key):
            c = b[len(key):]
            for r in rep:
                products.add(f'{a}{r}{c}')
print(f'Part 1: {len(products)}')
# Part 1: 576
# Part 2 (closed form; see the rule table in the string literal below):
# every reduction step removes one element, except that Rn/Ar act as free
# parentheses and each Y (a comma) removes two extra elements; starting from
# the single element 'e' gives: elements - Rn - Ar - 2*Y - 1 steps.
total_elements = sum(1 for c in calibration_molecule if c.isupper())
total_open_brackets = calibration_molecule.count('Rn')
total_close_brackets = calibration_molecule.count('Ar')
total_commas = calibration_molecule.count('Y')
print(f'Part 2: {total_elements - total_open_brackets - total_close_brackets - total_commas * 2 - 1}')
'''
Al => ThF ThF A => RF
Al => ThRnFAr Th(F) A => R(F)
B => BCa BCa B => BD
B => TiB TiB B => TB
B => TiRnFAr Ti(F) B => T(F)
Ca => CaCa CaCa D => DD
Ca => PB PB D => PB
Ca => PRnFAr P(F) D => P(F)
Ca => SiRnFYFAr Si(F,F) D => S(F,F)
Ca => SiRnMgAr Si(Mg) D => S(M)
Ca => SiTh SiTh D => SR
F => CaF CaF F => DF
F => PMg PMg F => PM
F => SiAl SiAl F => SA
H => CRnAlAr C(Al) H => C(A)
H => CRnFYFYFAr C(F,F,F) H => C(F,F,F)
H => CRnFYMgAr C(F,Mg) H => C(F,M)
H => CRnMgYFAr C(Mg,F) H => C(M,F)
H => HCa HCa H => HD
H => NRnFYFAr N(F,F) H => N(F,F)
H => NRnMgAr N(Mg) H => N(M)
H => NTh NTh H => NR
H => OB OB H => OB
H => ORnFAr O(F) H => O(F)
Mg => BF BF M => BF
Mg => TiMg TiMg M => TM
N => CRnFAr C(F) N => C(F)
N => HSi HSi N => HS
O => CRnFYFAr C(F,F) O => C(F,F)
O => CRnMgAr C(Mg) O => C(M)
O => HP HP O => HP
O => NRnFAr N(F) O => N(F)
O => OTi OTi O => OT
P => CaP CaP P => DP
P => PTi PTi P => PT
P => SiRnFAr Si(F) P => S(F)
Si => CaSi CaSi S => DS
Th => ThCa ThCa R => RD
Ti => BP BP T => BP
Ti => TiTi TiTi T => TT
e => HF HF e => HF
e => NAl NAl e => NA
e => OMg OMg e => OM
ORnPBPMgArCaCaCaSiThCaCaSiThCaCaPBSiRnFArRnFArCaCaSiThCaCaSiThCaCaCaCaCaCaSiRnFYFArSiRnMgArCaSiRnPTiTiBFYPBFArSiRnCaSiRnTiRnFArSiAlArPTiBPTiRnCaSiAlArCaPTiTiBPMgYFArPTiRnFArSiRnCaCaFArRnCaFArCaSiRnSiRnMgArFYCaSiRnMgArCaCaSiThPRnFArPBCaSiRnMgArCaCaSiThCaSiRnTiMgArFArSiThSiThCaCaSiRnMgArCaCaSiRnFArTiBPTiRnCaSiAlArCaPTiRnFArPBPBCaCaSiThCaPBSiThPRnFArSiThCaSiThCaSiThCaPTiBSiRnFYFArCaCaPRnFArPBCaCaPBSiRnTiRnFArCaPRnFArSiRnCaCaCaSiThCaRnCaFArYCaSiRnFArBCaCaCaSiThFArPBFArCaSiRnFArRnCaCaCaFArSiRnFArTiRnPMgArF
O(PBPM)DDDSRDDSRDDPBS(F)(F)DDSRDDSRDDDDDDS(F,F)S(M)DS(PTTBF,PBF(S)DS(T(F)SA)PTBPT(DSA)DPTTBPM,F)PT(F)S(DDF)(DF)DS(S(M)F,DS(M)DDSRP(F)PBDS(M)DDSRDS(TM)F)SRSRDDS(M)DDS(F)TBPT(DSA)DPT(F)PBPBDDSRDPBSRP(F)SRDSRDSRDPTBS(F,F)DDP(F)PBDDPBS(T(F)DP(F)S(DDDSRD(DF),DS(F)BDDDSRF)PBF)DS(F)(DDDF)S(F)T(PM)F
''' | 2015/19/rednose_reactor.py | from collections import defaultdict
# Advent of Code 2015, day 19 ("Medicine for Rudolph").
rules = defaultdict(list)      # element -> possible replacement strings
rev_rules = defaultdict(list)  # replacement -> source elements (built but unused below)
with open('input') as f:
    rep_text = f.read().splitlines(keepends=False)
# The target molecule is the last line; the replacement rules precede it.
calibration_molecule = rep_text[-1]
rep_text = rep_text[:-1]
for line in rep_text:
    if len(line.strip()) > 0:
        k, v = line.split(' => ')
        rules[k].append(v)
        rev_rules[v].append(k)
# Part 1: every distinct molecule reachable with exactly one replacement.
products = set()
for n in range(len(calibration_molecule)):
    a, b = calibration_molecule[:n], calibration_molecule[n:]
    for key, rep in rules.items():
        if b.startswith(key):
            c = b[len(key):]
            for r in rep:
                products.add(f'{a}{r}{c}')
print(f'Part 1: {len(products)}')
# Part 1: 576
# Part 2 (closed form; see the rule table in the string literal below):
# every reduction step removes one element, except that Rn/Ar act as free
# parentheses and each Y (a comma) removes two extra elements; starting from
# the single element 'e' gives: elements - Rn - Ar - 2*Y - 1 steps.
total_elements = sum(1 for c in calibration_molecule if c.isupper())
total_open_brackets = calibration_molecule.count('Rn')
total_close_brackets = calibration_molecule.count('Ar')
total_commas = calibration_molecule.count('Y')
print(f'Part 2: {total_elements - total_open_brackets - total_close_brackets - total_commas * 2 - 1}')
'''
Al => ThF ThF A => RF
Al => ThRnFAr Th(F) A => R(F)
B => BCa BCa B => BD
B => TiB TiB B => TB
B => TiRnFAr Ti(F) B => T(F)
Ca => CaCa CaCa D => DD
Ca => PB PB D => PB
Ca => PRnFAr P(F) D => P(F)
Ca => SiRnFYFAr Si(F,F) D => S(F,F)
Ca => SiRnMgAr Si(Mg) D => S(M)
Ca => SiTh SiTh D => SR
F => CaF CaF F => DF
F => PMg PMg F => PM
F => SiAl SiAl F => SA
H => CRnAlAr C(Al) H => C(A)
H => CRnFYFYFAr C(F,F,F) H => C(F,F,F)
H => CRnFYMgAr C(F,Mg) H => C(F,M)
H => CRnMgYFAr C(Mg,F) H => C(M,F)
H => HCa HCa H => HD
H => NRnFYFAr N(F,F) H => N(F,F)
H => NRnMgAr N(Mg) H => N(M)
H => NTh NTh H => NR
H => OB OB H => OB
H => ORnFAr O(F) H => O(F)
Mg => BF BF M => BF
Mg => TiMg TiMg M => TM
N => CRnFAr C(F) N => C(F)
N => HSi HSi N => HS
O => CRnFYFAr C(F,F) O => C(F,F)
O => CRnMgAr C(Mg) O => C(M)
O => HP HP O => HP
O => NRnFAr N(F) O => N(F)
O => OTi OTi O => OT
P => CaP CaP P => DP
P => PTi PTi P => PT
P => SiRnFAr Si(F) P => S(F)
Si => CaSi CaSi S => DS
Th => ThCa ThCa R => RD
Ti => BP BP T => BP
Ti => TiTi TiTi T => TT
e => HF HF e => HF
e => NAl NAl e => NA
e => OMg OMg e => OM
ORnPBPMgArCaCaCaSiThCaCaSiThCaCaPBSiRnFArRnFArCaCaSiThCaCaSiThCaCaCaCaCaCaSiRnFYFArSiRnMgArCaSiRnPTiTiBFYPBFArSiRnCaSiRnTiRnFArSiAlArPTiBPTiRnCaSiAlArCaPTiTiBPMgYFArPTiRnFArSiRnCaCaFArRnCaFArCaSiRnSiRnMgArFYCaSiRnMgArCaCaSiThPRnFArPBCaSiRnMgArCaCaSiThCaSiRnTiMgArFArSiThSiThCaCaSiRnMgArCaCaSiRnFArTiBPTiRnCaSiAlArCaPTiRnFArPBPBCaCaSiThCaPBSiThPRnFArSiThCaSiThCaSiThCaPTiBSiRnFYFArCaCaPRnFArPBCaCaPBSiRnTiRnFArCaPRnFArSiRnCaCaCaSiThCaRnCaFArYCaSiRnFArBCaCaCaSiThFArPBFArCaSiRnFArRnCaCaCaFArSiRnFArTiRnPMgArF
O(PBPM)DDDSRDDSRDDPBS(F)(F)DDSRDDSRDDDDDDS(F,F)S(M)DS(PTTBF,PBF(S)DS(T(F)SA)PTBPT(DSA)DPTTBPM,F)PT(F)S(DDF)(DF)DS(S(M)F,DS(M)DDSRP(F)PBDS(M)DDSRDS(TM)F)SRSRDDS(M)DDS(F)TBPT(DSA)DPT(F)PBPBDDSRDPBSRP(F)SRDSRDSRDPTBS(F,F)DDP(F)PBDDPBS(T(F)DP(F)S(DDDSRD(DF),DS(F)BDDDSRF)PBF)DS(F)(DDDF)S(F)T(PM)F
''' | 0.252845 | 0.140661 |
from airflow.models.baseoperator import BaseOperator
from airflow.hooks.postgres_hook import PostgresHook
from airflow.utils.decorators import apply_defaults
from airflow.contrib.hooks.aws_hook import AwsHook
class StageTablesOperator(BaseOperator):
    """
    @description:
        This operator copies data from a specified S3 bucket to Amazon Redshift
        using the COPY command.
    @params:
        redshift_conn_id (STR): Redshift Connection ID created in Airflow.
        aws_connection_id (STR): AWS connection ID created in Airflow.
        table (STR): The name of the table the data in S3 should be copied to.
        s3_bucket (STR): Created S3 bucket name
        s3_key (STR): Folder in the S3 bucket that contains data to be transferred
            to Redshift. May contain str.format placeholders rendered from the
            task context.
        ignore_headers (INT): Number of header rows to skip (CSV only).
            1 for True. 0 for False
        delimeter (CHAR): Dataset separator. Applicable with files in CSV format.
        data_format (STR): Format the data is saved in S3. E.g. CSV, PARQUET, JSON.
    """
    def __init__(
        self,
        redshift_conn_id = "redshift_conn_id",
        aws_connection_id = "aws_conn_id",
        table = "",
        s3_bucket = "",
        s3_key = "",
        ignore_headers = 1,
        delimeter = ",",
        data_format = "csv",
        *args, **kwargs
    ):
        super(StageTablesOperator, self).__init__(*args, **kwargs)
        self.redshift_conn_id = redshift_conn_id
        self.aws_connection_id = aws_connection_id
        self.table = table
        self.s3_bucket = s3_bucket
        self.s3_key = s3_key
        self.ignore_headers = ignore_headers
        self.delimeter = delimeter
        self.data_format = data_format
    def execute(self, context):
        """Build and run the Redshift COPY statement for the configured table."""
        self.log.info("Fetching credentials")
        aws_hook = AwsHook(self.aws_connection_id, client_type='s3')
        aws_credentials = aws_hook.get_credentials()
        redshift_conn = PostgresHook(postgres_conn_id=self.redshift_conn_id)
        # Render str.format placeholders in the key from the task context.
        rendered_key = self.s3_key.format(**context)
        s3_bucket_uri = f"s3://{self.s3_bucket}/{rendered_key}"
        # Bug fix: ignore_headers and delimeter were documented and stored but
        # never used.  Both are CSV-specific COPY options, so only emit them
        # for CSV input.
        csv_options = ""
        if self.data_format.lower() == "csv":
            csv_options = f"IGNOREHEADER {self.ignore_headers} DELIMITER '{self.delimeter}'"
        # NOTE(review): the trailing '/' after the URI is kept from the
        # original; Redshift treats the FROM path as a key prefix.
        formatted_sql = f"""
            COPY {self.table}
            FROM '{s3_bucket_uri}/'
            ACCESS_KEY_ID '{aws_credentials.access_key}'
            SECRET_ACCESS_KEY '{aws_credentials.secret_key}'
            FORMAT AS {self.data_format}
            {csv_options}
        """
        self.log.info(f"Copying {self.table} data from s3 to redshift")
        redshift_conn.run(formatted_sql)
        return 'Done'
from airflow.hooks.postgres_hook import PostgresHook
from airflow.utils.decorators import apply_defaults
from airflow.contrib.hooks.aws_hook import AwsHook
class StageTablesOperator(BaseOperator):
    """
    @description:
        This operator copies data from a specified S3 bucket to Amazon Redshift
        using the COPY command.
    @params:
        redshift_conn_id (STR): Redshift Connection ID created in Airflow.
        aws_connection_id (STR): AWS connection ID created in Airflow.
        table (STR): The name of the table the data in S3 should be copied to.
        s3_bucket (STR): Created S3 bucket name
        s3_key (STR): Folder in the S3 bucket that contains data to be transferred to Redshift
        ignore_headers (INT): Specifies if these datasets contain headers. 1 for True. 0 for False
        delimeter (CHAR): Dataset separator. Applicable with files in CSV format.
        data_format (STR): Format the data is saved in S3. E.g. CSV, PARQUET, JSON.
    """
    def __init__(
        self,
        redshift_conn_id = "redshift_conn_id",
        aws_connection_id = "aws_conn_id",
        table = "",
        s3_bucket = "",
        s3_key = "",
        ignore_headers = 1,
        delimeter = ",",
        data_format = "csv",
        *args, **kwargs
    ):
        super(StageTablesOperator, self).__init__(*args, **kwargs)
        self.redshift_conn_id = redshift_conn_id
        self.aws_connection_id = aws_connection_id
        self.table = table
        self.s3_bucket = s3_bucket
        self.s3_key = s3_key
        # NOTE(review): ignore_headers and delimeter are stored but never used
        # in the COPY statement built by execute() -- confirm whether the CSV
        # options IGNOREHEADER/DELIMITER should be emitted for csv input.
        self.ignore_headers = ignore_headers
        self.delimeter = delimeter
        self.data_format = data_format
    def execute(self, context):
        """Build and run the Redshift COPY statement for the configured table."""
        self.log.info("Fetching credentials")
        aws_hook = AwsHook(self.aws_connection_id, client_type='s3')
        aws_credentials = aws_hook.get_credentials()
        redshift_conn = PostgresHook(postgres_conn_id=self.redshift_conn_id)
        # Render str.format placeholders in the key from the task context.
        rendered_key = self.s3_key.format(**context)
        s3_bucket_uri = f"s3://{self.s3_bucket}/{rendered_key}"
        # NOTE(review): a '/' is appended after the URI below; Redshift treats
        # the FROM path as a key prefix -- confirm the trailing slash is intended.
        formatted_sql = f"""
            COPY {self.table}
            FROM '{s3_bucket_uri}/'
            ACCESS_KEY_ID '{aws_credentials.access_key}'
            SECRET_ACCESS_KEY '{aws_credentials.secret_key}'
            FORMAT AS {self.data_format}
        """
        self.log.info(f"Copying {self.table} data from s3 to redshift")
        redshift_conn.run(formatted_sql)
        return 'Done'
import inspect
import logging
import os
import requests
from requests import HTTPError
from requests.adapters import HTTPAdapter
from urllib3 import Retry
DEFAULT_TIMEOUT = 60 # seconds
class TimeoutHTTPAdapter(HTTPAdapter):
    """HTTPAdapter that applies a default timeout to every outgoing request."""

    def __init__(self, *args, **kwargs):
        # Pop our custom keyword before delegating so HTTPAdapter never sees it.
        self.timeout = kwargs.pop("timeout", DEFAULT_TIMEOUT)
        super().__init__(*args, **kwargs)

    def send(self, request, **kwargs):
        # Fill in the adapter's timeout only when the caller did not choose one.
        if kwargs.get("timeout") is None:
            kwargs["timeout"] = self.timeout
        return super().send(request, **kwargs)
class Util:
    """Static helpers for robust HTTP sessions and script-location discovery."""

    # Copied from the google library.
    @staticmethod
    def raise_detailed_error(request_object):
        """Call raise_for_status(), re-raising HTTPError with the response body.

        The default HTTPError message drops the body, which usually contains
        the service's real error description.
        """
        try:
            if request_object.status_code not in [200, 201]:
                print(request_object.text)
            request_object.raise_for_status()
        except HTTPError as e:
            raise HTTPError(e, request_object.text)

    @staticmethod
    def mount_standard_session(session: requests.Session, retry_post=False):
        """Mount retrying, timeout-aware adapters on *session* and return it.

        Retries up to 5 times (no backoff) on the listed status codes for
        idempotent methods; pass retry_post=True to also retry POST.
        """
        # Remove previously mounted sessions.
        session.close()
        logging.basicConfig(level=logging.INFO)
        # NOTE: We often use POST for "READ" operations. Can we retry on those specifically?
        methods = ['HEAD', 'GET', 'OPTIONS', 'TRACE', 'PUT', 'PATCH', 'DELETE']
        if retry_post:
            methods.append('POST')
        # NOTE(review): the 1xx entries in status_forcelist are informational
        # statuses that never appear as final responses -- confirm intent.
        retry_kwargs = dict(total=5,
                            backoff_factor=0,
                            status_forcelist=[
                                100, 101, 102, 103, 104,
                                404, 408, 429,
                                500, 502, 503, 504
                            ],
                            connect=5,
                            read=5)
        try:
            # Compat fix: urllib3 >= 1.26 renamed method_whitelist to
            # allowed_methods and removed the old name in 2.0; try the new
            # spelling first and fall back for old urllib3 versions.
            retries = Retry(allowed_methods=methods, **retry_kwargs)
        except TypeError:
            retries = Retry(method_whitelist=methods, **retry_kwargs)
        # https://findwork.dev/blog/advanced-usage-python-requests-timeouts-retries-hooks/
        session.mount('http://', TimeoutHTTPAdapter(max_retries=retries))
        session.mount('https://', TimeoutHTTPAdapter(max_retries=retries))
        return session

    @staticmethod
    def get_executed_file_location():
        """Return the directory containing this source file.

        @see https://stackoverflow.com/a/44592299
        """
        filename = inspect.getframeinfo(inspect.currentframe()).filename
        return os.path.dirname(os.path.abspath(filename))
import logging
import os
import requests
from requests import HTTPError
from requests.adapters import HTTPAdapter
from urllib3 import Retry
DEFAULT_TIMEOUT = 60 # seconds
class TimeoutHTTPAdapter(HTTPAdapter):
    # Applies DEFAULT_TIMEOUT to every request unless the caller supplies one.
    def __init__(self, *args, **kwargs):
        """Accept an optional ``timeout`` kwarg; strip it before HTTPAdapter sees it."""
        self.timeout = DEFAULT_TIMEOUT
        if "timeout" in kwargs:
            self.timeout = kwargs["timeout"]
            del kwargs["timeout"]
        super().__init__(*args, **kwargs)
    def send(self, request, **kwargs):
        """Send the request, filling in the default timeout when none was given."""
        timeout = kwargs.get("timeout")
        if timeout is None:
            kwargs["timeout"] = self.timeout
        return super().send(request, **kwargs)
class Util:
    """Static helpers for robust HTTP sessions and script-location discovery."""
    # Copied from the google library.
    @staticmethod
    def raise_detailed_error(request_object):
        """Call raise_for_status(), re-raising HTTPError with the response body.

        The default HTTPError message drops the body, which usually contains
        the service's real error description.
        """
        try:
            if request_object.status_code not in [200, 201]:
                print(request_object.text)
            request_object.raise_for_status()
        except HTTPError as e:
            raise HTTPError(e, request_object.text)
    @staticmethod
    def mount_standard_session(session: requests.Session, retry_post=False):
        """Mount retrying, timeout-aware adapters on *session* and return it."""
        # Remove previously mounted sessions.
        session.close()
        logging.basicConfig(level=logging.INFO)
        # NOTE: We often use POST for "READ" operations. Can we retry on those specifically?
        methods = ['HEAD', 'GET', 'OPTIONS', 'TRACE', 'PUT', 'PATCH', 'DELETE']
        if retry_post:
            methods.append('POST')
        # NOTE(review): 1xx codes in status_forcelist never appear as final
        # responses -- confirm intent.  `method_whitelist` was renamed
        # `allowed_methods` in urllib3 1.26 and removed in 2.0, so this code
        # pins an older urllib3.
        retries = Retry(total=5,
                        backoff_factor=0,
                        status_forcelist=[
                            100, 101, 102, 103, 104,
                            404, 408, 429,
                            500, 502, 503, 504
                        ],
                        connect=5,
                        read=5,
                        method_whitelist=methods
                        )
        # https://findwork.dev/blog/advanced-usage-python-requests-timeouts-retries-hooks/
        session.mount('http://', TimeoutHTTPAdapter(max_retries=retries))
        session.mount('https://', TimeoutHTTPAdapter(max_retries=retries))
        return session
    @staticmethod
    def get_executed_file_location():
        """Return the directory containing this source file."""
        # @see https://stackoverflow.com/a/44592299
        filename = inspect.getframeinfo(inspect.currentframe()).filename
        return os.path.dirname(os.path.abspath(filename))
import numpy as np
BATCHSIZE = 1000
class Evaluator(object):
    """Link-prediction evaluator: MRR and Hits@N, raw and filtered.

    Every triple is scored in both directions: object prediction via
    ``model.cal_scores(subs, rels)`` and subject prediction via
    ``model.cal_scores_inv(rels, objs)``.  The "filtered" protocol masks the
    scores of other known-true entities (collected by ``prepare_valid``)
    before ranking.
    """
    def __init__(self, metric, nbest=None, filtered=False, whole_graph=None):
        # NOTE(review): 'all' passes this assert but run() raises
        # NotImplementedError for it; use run_all_matric() instead.
        assert metric in ['mrr', 'hits', 'all'], 'Invalid metric: {}'.format(metric)
        if metric == 'hits':
            assert nbest, 'Please indicate n-best in using hits'
        if filtered:
            assert whole_graph, 'If use filtered metric, Please indicate whole graph'
            self.all_graph = whole_graph
        self.metric = metric
        self.nbest = nbest
        self.filtered = filtered
        self.batchsize = BATCHSIZE
        self.ress = []            # result of every run(); consumed by get_best_info()
        self.id2sub_list = []     # sample index -> true subject ids (from prepare_valid)
        self.id2obj_list = []     # sample index -> true object ids (from prepare_valid)
        self.sr2o = {}            # (subject, relation) -> true object ids
        self.ro2s = {}            # (relation, object) -> true subject ids
    def run(self, model, dataset):
        """Evaluate *model* on *dataset* with the configured metric; record and return it."""
        if self.metric == 'mrr':
            res = self.cal_mrr(model, dataset)
        elif self.metric == 'hits':
            res = self.cal_hits(model, dataset, self.nbest)
        else:
            raise NotImplementedError
        self.ress.append(res)
        return res
    def run_all_matric(self, model, dataset):
        """
        calculating MRR, Hits@1,3,10 (raw and filter)
        """
        n_sample = len(dataset)
        sum_rr_raw = 0.
        sum_rr_flt = 0.
        n_corr_h1_raw = 0
        n_corr_h1_flt = 0
        n_corr_h3_raw = 0
        n_corr_h3_flt = 0
        n_corr_h10_raw = 0
        n_corr_h10_flt = 0
        start_id = 0
        for samples in dataset.batch_iter(self.batchsize, rand_flg=False):
            subs, rels, objs = samples[:, 0], samples[:, 1], samples[:, 2]
            # Global sample indices; keys into id2obj_list / id2sub_list.
            ids = np.arange(start_id, start_id+len(samples))
            # TODO: partitioned calculation
            # search objects
            raw_scores = model.cal_scores(subs, rels)
            raw_ranks = self.cal_rank(raw_scores, objs)
            sum_rr_raw += sum(float(1/rank) for rank in raw_ranks)
            n_corr_h1_raw += sum(1 for rank in raw_ranks if rank <= 1)
            n_corr_h3_raw += sum(1 for rank in raw_ranks if rank <= 3)
            n_corr_h10_raw += sum(1 for rank in raw_ranks if rank <= 10)
            # filter
            if self.filtered:
                flt_scores = self.cal_filtered_score_fast(subs, rels, objs, ids, raw_scores)
                flt_ranks = self.cal_rank(flt_scores, objs)
                sum_rr_flt += sum(float(1/rank) for rank in flt_ranks)
                n_corr_h1_flt += sum(1 for rank in flt_ranks if rank <=1)
                n_corr_h3_flt += sum(1 for rank in flt_ranks if rank <=3)
                n_corr_h10_flt += sum(1 for rank in flt_ranks if rank <=10)
            # search subjects
            raw_scores_inv = model.cal_scores_inv(rels, objs)
            raw_ranks_inv = self.cal_rank(raw_scores_inv, subs)
            sum_rr_raw += sum(float(1/rank) for rank in raw_ranks_inv)
            n_corr_h1_raw += sum(1 for rank in raw_ranks_inv if rank <= 1)
            n_corr_h3_raw += sum(1 for rank in raw_ranks_inv if rank <= 3)
            n_corr_h10_raw += sum(1 for rank in raw_ranks_inv if rank <= 10)
            # filter
            if self.filtered:
                flt_scores_inv = self.cal_filtered_score_inv_fast(subs, rels, objs, ids, raw_scores_inv)
                flt_ranks_inv = self.cal_rank(flt_scores_inv, subs)
                sum_rr_flt += sum(float(1/rank) for rank in flt_ranks_inv)
                n_corr_h1_flt += sum(1 for rank in flt_ranks_inv if rank <= 1)
                n_corr_h3_flt += sum(1 for rank in flt_ranks_inv if rank <= 3)
                n_corr_h10_flt += sum(1 for rank in flt_ranks_inv if rank <= 10)
            start_id += len(samples)
        # Each triple is ranked twice (object side + subject side), hence /2.
        return {'MRR': sum_rr_raw/n_sample/2,
                'Hits@1': n_corr_h1_raw/n_sample/2,
                'Hits@3': n_corr_h3_raw/n_sample/2,
                'Hits@10': n_corr_h10_raw/n_sample/2,
                'MRR(filter)': sum_rr_flt/n_sample/2,
                'Hits@1(filter)': n_corr_h1_flt/n_sample/2,
                'Hits@3(filter)': n_corr_h3_flt/n_sample/2,
                'Hits@10(filter)': n_corr_h10_flt/n_sample/2}
    def cal_mrr(self, model, dataset):
        """Mean reciprocal rank over both prediction directions (filtered if enabled)."""
        n_sample = len(dataset)
        sum_rr = 0.
        start_id = 0
        for samples in dataset.batch_iter(self.batchsize, rand_flg=False):
            subs, rels, objs = samples[:, 0], samples[:, 1], samples[:, 2]
            ids = np.arange(start_id, start_id+len(samples))
            scores = model.cal_scores(subs, rels)
            if self.filtered:
                scores = self.cal_filtered_score_fast(subs, rels, objs, ids, scores)
            ranks1 = self.cal_rank(scores, objs)
            scores = model.cal_scores_inv(rels, objs)
            if self.filtered:
                scores = self.cal_filtered_score_inv_fast(subs, rels, objs, ids, scores)
            ranks2 = self.cal_rank(scores, subs)
            sum_rr += sum(float(1/rank) for rank in ranks1 + ranks2)
            start_id += len(samples)
        return float(sum_rr/n_sample/2)
    def cal_hits(self, model, dataset, nbest):
        """Fraction of triples whose gold entity appears in the top *nbest* predictions."""
        n_sample = len(dataset)
        n_corr = 0
        start_id = 0
        for samples in dataset.batch_iter(self.batchsize, rand_flg=False):
            subs, rels, objs = samples[:, 0], samples[:, 1], samples[:, 2]
            ids = np.arange(start_id, start_id+len(samples))
            scores = model.cal_scores(subs, rels)
            if self.filtered:
                scores = self.cal_filtered_score_fast(subs, rels, objs, ids, scores)
            # Sort descending and keep the nbest highest-scoring candidates.
            res = np.flip(np.argsort(scores), 1)[:, :nbest]
            n_corr += sum(1 for i in range(len(objs)) if objs[i] in res[i])
            scores = model.cal_scores_inv(rels, objs)
            if self.filtered:
                scores = self.cal_filtered_score_inv_fast(subs, rels, objs, ids, scores)
            # NOTE(review): the subject direction is not truncated to nbest
            # here (no "[:, :nbest]"), so every subject counts as a hit --
            # confirm whether that is intentional.
            res = np.flip(np.argsort(scores), 1)
            n_corr += sum(1 for i in range(len(subs)) if subs[i] in res[i])
            start_id += len(samples)
        return float(n_corr/n_sample/2)
    def cal_filtered_score_fast(self, subs, rels, objs, ids, raw_scores, metric='sim'):
        """Mask other known-true objects so they cannot outrank the gold object.

        For 'sim' (higher is better) masked scores become -inf; for 'dist'
        (lower is better) they become +inf.  The gold object itself stays.
        """
        assert metric in ['sim', 'dist']
        new_scores = []
        for s, r, o, i, score in zip(subs, rels, objs, ids, raw_scores):
            true_os = self.id2obj_list[i]
            true_os_rm_o = np.delete(true_os, np.where(true_os == o))
            if metric == 'sim':
                score[true_os_rm_o] = -np.inf
            else:
                score[true_os_rm_o] = np.inf
            new_scores.append(score)
        return new_scores
    def cal_filtered_score_inv_fast(self, subs, rels, objs, ids, raw_scores, metric='sim'):
        """Subject-side counterpart of cal_filtered_score_fast."""
        assert metric in ['sim', 'dist']
        new_scores = []
        for s, r, o, i, score in zip(subs, rels, objs, ids, raw_scores):
            true_ss = self.id2sub_list[i]
            true_ss_rm_s = np.delete(true_ss, np.where(true_ss==s))
            if metric == 'sim':
                score[true_ss_rm_s] = -np.inf
            else:
                score[true_ss_rm_s] = np.inf
            new_scores.append(score)
        return new_scores
    def cal_rank(self, score_mat, ents):
        """Competition rank of each gold entity: count of scores >= its own score.

        Rank starts at 1 (the gold score counts itself); ties get the
        pessimistic (worst) rank.
        """
        return [np.sum(score >= score[e]) for score, e in zip(score_mat, ents)]
    def get_best_info(self):
        """Return (best_epoch, best_value) over all recorded run() results."""
        # NOTE(review): 'acc' and 'mr' are handled here but are not accepted
        # by __init__'s assert -- confirm which metrics are really supported.
        if self.metric == 'mrr' or self.metric == 'hits' or self.metric == 'acc': # higher value is better
            best_val = max(self.ress)
        elif self.metric == 'mr':
            best_val = min(self.ress)
        else:
            raise ValueError('Invalid')
        # Epochs are 1-based; ress[0] is the result after epoch 1.
        best_epoch = self.ress.index(best_val) + 1
        return best_epoch, best_val
    def prepare_valid(self, dataset):
        """Precompute the true subjects/objects of every sample from the whole graph.

        Must be called before any filtered evaluation; fills id2obj_list,
        id2sub_list, sr2o and ro2s.
        """
        for i in range(len(dataset)):
            s, r, o = dataset[i]
            os = self.all_graph.search_obj_id(s, r)
            ss = self.all_graph.search_sub_id(r, o)
            self.id2obj_list.append(os)
            self.id2sub_list.append(ss)
            self.sr2o[(s, r)] = os
self.ro2s[(r, o)] = ss | src/processors/evaluator.py | import numpy as np
BATCHSIZE = 1000
class Evaluator(object):
    """Link-prediction evaluator: MRR and Hits@N, raw and filtered.

    Every triple is scored in both directions: object prediction via
    ``model.cal_scores(subs, rels)`` and subject prediction via
    ``model.cal_scores_inv(rels, objs)``.  The "filtered" protocol masks the
    scores of other known-true entities (collected by ``prepare_valid``)
    before ranking.
    """
    def __init__(self, metric, nbest=None, filtered=False, whole_graph=None):
        # NOTE(review): 'all' passes this assert but run() raises
        # NotImplementedError for it; use run_all_matric() instead.
        assert metric in ['mrr', 'hits', 'all'], 'Invalid metric: {}'.format(metric)
        if metric == 'hits':
            assert nbest, 'Please indicate n-best in using hits'
        if filtered:
            assert whole_graph, 'If use filtered metric, Please indicate whole graph'
            self.all_graph = whole_graph
        self.metric = metric
        self.nbest = nbest
        self.filtered = filtered
        self.batchsize = BATCHSIZE
        self.ress = []            # result of every run(); consumed by get_best_info()
        self.id2sub_list = []     # sample index -> true subject ids (from prepare_valid)
        self.id2obj_list = []     # sample index -> true object ids (from prepare_valid)
        self.sr2o = {}            # (subject, relation) -> true object ids
        self.ro2s = {}            # (relation, object) -> true subject ids
    def run(self, model, dataset):
        """Evaluate *model* on *dataset* with the configured metric; record and return it."""
        if self.metric == 'mrr':
            res = self.cal_mrr(model, dataset)
        elif self.metric == 'hits':
            res = self.cal_hits(model, dataset, self.nbest)
        else:
            raise NotImplementedError
        self.ress.append(res)
        return res
    def run_all_matric(self, model, dataset):
        """
        calculating MRR, Hits@1,3,10 (raw and filter)
        """
        n_sample = len(dataset)
        sum_rr_raw = 0.
        sum_rr_flt = 0.
        n_corr_h1_raw = 0
        n_corr_h1_flt = 0
        n_corr_h3_raw = 0
        n_corr_h3_flt = 0
        n_corr_h10_raw = 0
        n_corr_h10_flt = 0
        start_id = 0
        for samples in dataset.batch_iter(self.batchsize, rand_flg=False):
            subs, rels, objs = samples[:, 0], samples[:, 1], samples[:, 2]
            # Global sample indices; keys into id2obj_list / id2sub_list.
            ids = np.arange(start_id, start_id+len(samples))
            # TODO: partitioned calculation
            # search objects
            raw_scores = model.cal_scores(subs, rels)
            raw_ranks = self.cal_rank(raw_scores, objs)
            sum_rr_raw += sum(float(1/rank) for rank in raw_ranks)
            n_corr_h1_raw += sum(1 for rank in raw_ranks if rank <= 1)
            n_corr_h3_raw += sum(1 for rank in raw_ranks if rank <= 3)
            n_corr_h10_raw += sum(1 for rank in raw_ranks if rank <= 10)
            # filter
            if self.filtered:
                flt_scores = self.cal_filtered_score_fast(subs, rels, objs, ids, raw_scores)
                flt_ranks = self.cal_rank(flt_scores, objs)
                sum_rr_flt += sum(float(1/rank) for rank in flt_ranks)
                n_corr_h1_flt += sum(1 for rank in flt_ranks if rank <=1)
                n_corr_h3_flt += sum(1 for rank in flt_ranks if rank <=3)
                n_corr_h10_flt += sum(1 for rank in flt_ranks if rank <=10)
            # search subjects
            raw_scores_inv = model.cal_scores_inv(rels, objs)
            raw_ranks_inv = self.cal_rank(raw_scores_inv, subs)
            sum_rr_raw += sum(float(1/rank) for rank in raw_ranks_inv)
            n_corr_h1_raw += sum(1 for rank in raw_ranks_inv if rank <= 1)
            n_corr_h3_raw += sum(1 for rank in raw_ranks_inv if rank <= 3)
            n_corr_h10_raw += sum(1 for rank in raw_ranks_inv if rank <= 10)
            # filter
            if self.filtered:
                flt_scores_inv = self.cal_filtered_score_inv_fast(subs, rels, objs, ids, raw_scores_inv)
                flt_ranks_inv = self.cal_rank(flt_scores_inv, subs)
                sum_rr_flt += sum(float(1/rank) for rank in flt_ranks_inv)
                n_corr_h1_flt += sum(1 for rank in flt_ranks_inv if rank <= 1)
                n_corr_h3_flt += sum(1 for rank in flt_ranks_inv if rank <= 3)
                n_corr_h10_flt += sum(1 for rank in flt_ranks_inv if rank <= 10)
            start_id += len(samples)
        # Each triple is ranked twice (object side + subject side), hence /2.
        return {'MRR': sum_rr_raw/n_sample/2,
                'Hits@1': n_corr_h1_raw/n_sample/2,
                'Hits@3': n_corr_h3_raw/n_sample/2,
                'Hits@10': n_corr_h10_raw/n_sample/2,
                'MRR(filter)': sum_rr_flt/n_sample/2,
                'Hits@1(filter)': n_corr_h1_flt/n_sample/2,
                'Hits@3(filter)': n_corr_h3_flt/n_sample/2,
                'Hits@10(filter)': n_corr_h10_flt/n_sample/2}
    def cal_mrr(self, model, dataset):
        """Mean reciprocal rank over both prediction directions (filtered if enabled)."""
        n_sample = len(dataset)
        sum_rr = 0.
        start_id = 0
        for samples in dataset.batch_iter(self.batchsize, rand_flg=False):
            subs, rels, objs = samples[:, 0], samples[:, 1], samples[:, 2]
            ids = np.arange(start_id, start_id+len(samples))
            scores = model.cal_scores(subs, rels)
            if self.filtered:
                scores = self.cal_filtered_score_fast(subs, rels, objs, ids, scores)
            ranks1 = self.cal_rank(scores, objs)
            scores = model.cal_scores_inv(rels, objs)
            if self.filtered:
                scores = self.cal_filtered_score_inv_fast(subs, rels, objs, ids, scores)
            ranks2 = self.cal_rank(scores, subs)
            sum_rr += sum(float(1/rank) for rank in ranks1 + ranks2)
            start_id += len(samples)
        return float(sum_rr/n_sample/2)
    def cal_hits(self, model, dataset, nbest):
        """Fraction of triples whose gold entity appears in the top *nbest* predictions."""
        n_sample = len(dataset)
        n_corr = 0
        start_id = 0
        for samples in dataset.batch_iter(self.batchsize, rand_flg=False):
            subs, rels, objs = samples[:, 0], samples[:, 1], samples[:, 2]
            ids = np.arange(start_id, start_id+len(samples))
            scores = model.cal_scores(subs, rels)
            if self.filtered:
                scores = self.cal_filtered_score_fast(subs, rels, objs, ids, scores)
            # Sort descending and keep the nbest highest-scoring candidates.
            res = np.flip(np.argsort(scores), 1)[:, :nbest]
            n_corr += sum(1 for i in range(len(objs)) if objs[i] in res[i])
            scores = model.cal_scores_inv(rels, objs)
            if self.filtered:
                scores = self.cal_filtered_score_inv_fast(subs, rels, objs, ids, scores)
            # NOTE(review): the subject direction is not truncated to nbest
            # here (no "[:, :nbest]"), so every subject counts as a hit --
            # confirm whether that is intentional.
            res = np.flip(np.argsort(scores), 1)
            n_corr += sum(1 for i in range(len(subs)) if subs[i] in res[i])
            start_id += len(samples)
        return float(n_corr/n_sample/2)
    def cal_filtered_score_fast(self, subs, rels, objs, ids, raw_scores, metric='sim'):
        """Mask other known-true objects so they cannot outrank the gold object.

        For 'sim' (higher is better) masked scores become -inf; for 'dist'
        (lower is better) they become +inf.  The gold object itself stays.
        """
        assert metric in ['sim', 'dist']
        new_scores = []
        for s, r, o, i, score in zip(subs, rels, objs, ids, raw_scores):
            true_os = self.id2obj_list[i]
            true_os_rm_o = np.delete(true_os, np.where(true_os == o))
            if metric == 'sim':
                score[true_os_rm_o] = -np.inf
            else:
                score[true_os_rm_o] = np.inf
            new_scores.append(score)
        return new_scores
    def cal_filtered_score_inv_fast(self, subs, rels, objs, ids, raw_scores, metric='sim'):
        """Subject-side counterpart of cal_filtered_score_fast."""
        assert metric in ['sim', 'dist']
        new_scores = []
        for s, r, o, i, score in zip(subs, rels, objs, ids, raw_scores):
            true_ss = self.id2sub_list[i]
            true_ss_rm_s = np.delete(true_ss, np.where(true_ss==s))
            if metric == 'sim':
                score[true_ss_rm_s] = -np.inf
            else:
                score[true_ss_rm_s] = np.inf
            new_scores.append(score)
        return new_scores
    def cal_rank(self, score_mat, ents):
        """Competition rank of each gold entity: count of scores >= its own score.

        Rank starts at 1 (the gold score counts itself); ties get the
        pessimistic (worst) rank.
        """
        return [np.sum(score >= score[e]) for score, e in zip(score_mat, ents)]
    def get_best_info(self):
        """Return (best_epoch, best_value) over all recorded run() results."""
        # NOTE(review): 'acc' and 'mr' are handled here but are not accepted
        # by __init__'s assert -- confirm which metrics are really supported.
        if self.metric == 'mrr' or self.metric == 'hits' or self.metric == 'acc': # higher value is better
            best_val = max(self.ress)
        elif self.metric == 'mr':
            best_val = min(self.ress)
        else:
            raise ValueError('Invalid')
        # Epochs are 1-based; ress[0] is the result after epoch 1.
        best_epoch = self.ress.index(best_val) + 1
        return best_epoch, best_val
    def prepare_valid(self, dataset):
        """Precompute the true subjects/objects of every sample from the whole graph.

        Must be called before any filtered evaluation; fills id2obj_list,
        id2sub_list, sr2o and ro2s.
        """
        for i in range(len(dataset)):
            s, r, o = dataset[i]
            os = self.all_graph.search_obj_id(s, r)
            ss = self.all_graph.search_sub_id(r, o)
            self.id2obj_list.append(os)
            self.id2sub_list.append(ss)
            self.sr2o[(s, r)] = os
            self.ro2s[(r, o)] = ss
import logging
from ignition.service.framework import ServiceRegistration
from ignition.boot.config import BootProperties
from ignition.boot.configurators.utils import validate_no_service_with_capability_exists
from ignition.service.messaging import MessagingProperties, InboxCapability, DeliveryCapability, PostalCapability, PostalService, KafkaDeliveryService, KafkaInboxService
logger = logging.getLogger(__name__)
class MessagingConfigurator():
    """Bootstraps the messaging services (postal, delivery, inbox) according
    to the BootProperties flags found in the application configuration."""
    def __init__(self):
        pass
    def configure(self, configuration, service_register):
        # Register each messaging service that is enabled by configuration.
        self.__configure_postal(configuration, service_register)
        self.__configure_delivery(configuration, service_register)
        self.__configure_inbox(configuration, service_register)
    def __configure_postal(self, configuration, service_register):
        # The postal service wraps the delivery capability; it needs no
        # broker connection address of its own.
        auto_config = configuration.property_groups.get_property_group(BootProperties)
        if auto_config.messaging.postal_enabled is True:
            logger.debug('Bootstrapping Messaging Postal Service')
            # Fail fast if another service already claims this capability.
            validate_no_service_with_capability_exists(service_register, PostalCapability, 'Postal Service', 'bootstrap.messaging.postal_enabled')
            service_register.add_service(ServiceRegistration(PostalService, delivery_service=DeliveryCapability))
        else:
            logger.debug('Disabled: bootstrapped Messaging Postal Service')
    def __configure_delivery(self, configuration, service_register):
        auto_config = configuration.property_groups.get_property_group(BootProperties)
        if auto_config.messaging.delivery_enabled is True:
            logger.debug('Bootstrapping Messaging Delivery Service')
            messaging_config = configuration.property_groups.get_property_group(MessagingProperties)
            # A broker connection address is mandatory for the delivery service.
            if messaging_config.connection_address is None:
                raise ValueError('messaging.connection_address must be set when bootstrap.messaging.delivery_enabled is True')
            validate_no_service_with_capability_exists(service_register, DeliveryCapability, 'Delivery Service', 'bootstrap.messaging.delivery_enabled')
            service_register.add_service(ServiceRegistration(KafkaDeliveryService, messaging_properties=MessagingProperties))
        else:
            logger.debug('Disabled: bootstrapped Messaging Delivery Service')
    def __configure_inbox(self, configuration, service_register):
        auto_config = configuration.property_groups.get_property_group(BootProperties)
        if auto_config.messaging.inbox_enabled is True:
            logger.debug('Bootstrapping Messaging Inbox Service')
            messaging_config = configuration.property_groups.get_property_group(MessagingProperties)
            # A broker connection address is mandatory for the inbox service.
            if messaging_config.connection_address is None:
                raise ValueError('messaging.connection_address must be set when bootstrap.messaging.inbox_enabled is True')
            validate_no_service_with_capability_exists(service_register, InboxCapability, 'Inbox Service', 'bootstrap.messaging.inbox_enabled')
            service_register.add_service(ServiceRegistration(KafkaInboxService, messaging_properties=MessagingProperties))
        else:
logger.debug('Disabled: bootstrapped Messaging Inbox Service') | ignition/boot/configurators/messaging.py | import logging
from ignition.service.framework import ServiceRegistration
from ignition.boot.config import BootProperties
from ignition.boot.configurators.utils import validate_no_service_with_capability_exists
from ignition.service.messaging import MessagingProperties, InboxCapability, DeliveryCapability, PostalCapability, PostalService, KafkaDeliveryService, KafkaInboxService
logger = logging.getLogger(__name__)
class MessagingConfigurator():
    """Bootstraps the messaging services (postal, delivery, inbox) according
    to the BootProperties flags found in the application configuration."""

    def __init__(self):
        pass

    def configure(self, configuration, service_register):
        """Register each messaging service that is enabled by configuration."""
        self.__configure_postal(configuration, service_register)
        self.__configure_delivery(configuration, service_register)
        self.__configure_inbox(configuration, service_register)

    def __configure_postal(self, configuration, service_register):
        # The postal service wraps the delivery capability; no broker address needed.
        boot_config = configuration.property_groups.get_property_group(BootProperties)
        if boot_config.messaging.postal_enabled is not True:
            logger.debug('Disabled: bootstrapped Messaging Postal Service')
            return
        logger.debug('Bootstrapping Messaging Postal Service')
        validate_no_service_with_capability_exists(service_register, PostalCapability, 'Postal Service', 'bootstrap.messaging.postal_enabled')
        service_register.add_service(ServiceRegistration(PostalService, delivery_service=DeliveryCapability))

    def __configure_delivery(self, configuration, service_register):
        boot_config = configuration.property_groups.get_property_group(BootProperties)
        if boot_config.messaging.delivery_enabled is not True:
            logger.debug('Disabled: bootstrapped Messaging Delivery Service')
            return
        logger.debug('Bootstrapping Messaging Delivery Service')
        messaging_config = configuration.property_groups.get_property_group(MessagingProperties)
        # A broker connection address is mandatory for the delivery service.
        if messaging_config.connection_address is None:
            raise ValueError('messaging.connection_address must be set when bootstrap.messaging.delivery_enabled is True')
        validate_no_service_with_capability_exists(service_register, DeliveryCapability, 'Delivery Service', 'bootstrap.messaging.delivery_enabled')
        service_register.add_service(ServiceRegistration(KafkaDeliveryService, messaging_properties=MessagingProperties))

    def __configure_inbox(self, configuration, service_register):
        boot_config = configuration.property_groups.get_property_group(BootProperties)
        if boot_config.messaging.inbox_enabled is not True:
            logger.debug('Disabled: bootstrapped Messaging Inbox Service')
            return
        logger.debug('Bootstrapping Messaging Inbox Service')
        messaging_config = configuration.property_groups.get_property_group(MessagingProperties)
        # A broker connection address is mandatory for the inbox service.
        if messaging_config.connection_address is None:
            raise ValueError('messaging.connection_address must be set when bootstrap.messaging.inbox_enabled is True')
        validate_no_service_with_capability_exists(service_register, InboxCapability, 'Inbox Service', 'bootstrap.messaging.inbox_enabled')
        service_register.add_service(ServiceRegistration(KafkaInboxService, messaging_properties=MessagingProperties))
from morepath.request import Response
from onegov.core.security import Public
from onegov.election_day import _
from onegov.election_day import ElectionDayApp
from onegov.election_day.collections import EmailSubscriberCollection
from onegov.election_day.collections import SmsSubscriberCollection
from onegov.election_day.forms import EmailSubscriptionForm
from onegov.election_day.forms import SmsSubscriptionForm
from onegov.election_day.layouts import DefaultLayout
from onegov.election_day.models import Principal
@ElectionDayApp.form(
    model=Principal,
    name='subscribe-email',
    template='form.pt',
    form=EmailSubscriptionForm,
    permission=Public
)
def subscribe_email(self, request, form):
    """ Adds the given email address to the email subscribers. """
    layout = DefaultLayout(self, request)
    callout = None
    if form.submitted(request):
        EmailSubscriberCollection(request.session).subscribe(
            form.email.data, request
        )
        callout = _(
            "Successfully subscribed to the email service. You will receive "
            "an email every time new results are published."
        )
    return {
        'layout': layout,
        'form': form,
        'title': _("Get email alerts"),
        'message': _(
            "You will receive an email as soon as new results have been "
            "published. You can unsubscribe at any time."
        ),
        'cancel': layout.homepage_link,
        'callout': callout,
        # hide the form once the confirmation callout is shown
        'show_form': not callout
    }
@ElectionDayApp.form(
    model=Principal,
    name='unsubscribe-email',
    template='form.pt',
    form=EmailSubscriptionForm,
    permission=Public
)
def unsubscribe_email(self, request, form):
    """ Removes the given email address from the email subscribers.
    Allows one-click unsubscription as defined by RFC-8058:
    curl -X POST http://localhost:8080/xx/zg/unsubscribe-oneclick?opaque=yy
    """
    layout = DefaultLayout(self, request)
    subscribers = EmailSubscriberCollection(request.session)
    # Decode the address from the opaque token, if one was supplied.
    try:
        token = request.params.get('opaque')
        email = request.load_url_safe_token(token).get('address')
    except (AttributeError, TypeError):
        email = None
    if email and request.method == 'POST':
        # one-click unsubscribe (RFC-8058)
        subscribers.unsubscribe(email)
        return Response()
    # regular unsubscribe through the form
    callout = None
    if form.submitted(request):
        subscribers.unsubscribe(form.email.data)
        callout = _(
            "Successfully unsubscribed from the email services. You will no "
            "longer receive an email when new results are published."
        )
    if email and not form.email.data:
        form.email.data = email
    return {
        'layout': layout,
        'form': form,
        'title': _("Stop email subscription"),
        'cancel': layout.homepage_link,
        'callout': callout,
        'show_form': not callout
    }
@ElectionDayApp.form(
    model=Principal,
    name='subscribe-sms',
    template='form.pt',
    form=SmsSubscriptionForm,
    permission=Public
)
def subscribe_sms(self, request, form):
    """ Adds the given phone number to the SMS subscribers. """
    layout = DefaultLayout(self, request)
    callout = None
    if form.submitted(request):
        SmsSubscriberCollection(request.session).subscribe(
            form.phone_number.formatted_data, request
        )
        callout = _(
            "Successfully subscribed to the SMS service. You will receive a "
            "SMS every time new results are published."
        )
    return {
        'layout': layout,
        'form': form,
        'title': _("Get SMS alerts"),
        'message': _(
            "You will receive a SMS as soon as new results have been "
            "published. The SMS service is free of charge. You can "
            "unsubscribe at any time."
        ),
        'cancel': layout.homepage_link,
        'callout': callout,
        # hide the form once the confirmation callout is shown
        'show_form': not callout
    }
@ElectionDayApp.form(
    model=Principal,
    name='unsubscribe-sms',
    template='form.pt',
    form=SmsSubscriptionForm,
    permission=Public
)
def unsubscribe_sms(self, request, form):
    """ Removes the given phone number from the SMS subscribers. """
    layout = DefaultLayout(self, request)
    callout = None
    if form.submitted(request):
        SmsSubscriberCollection(request.session).unsubscribe(
            form.phone_number.formatted_data
        )
        callout = _(
            "Successfully unsubscribed from the SMS services. You will no "
            "longer receive SMS when new results are published."
        )
    return {
        'layout': layout,
        'form': form,
        'title': _("Stop SMS subscription"),
        'cancel': layout.homepage_link,
        'callout': callout,
        'show_form': not callout
    }
from onegov.core.security import Public
from onegov.election_day import _
from onegov.election_day import ElectionDayApp
from onegov.election_day.collections import EmailSubscriberCollection
from onegov.election_day.collections import SmsSubscriberCollection
from onegov.election_day.forms import EmailSubscriptionForm
from onegov.election_day.forms import SmsSubscriptionForm
from onegov.election_day.layouts import DefaultLayout
from onegov.election_day.models import Principal
@ElectionDayApp.form(
    model=Principal,
    name='subscribe-email',
    template='form.pt',
    form=EmailSubscriptionForm,
    permission=Public
)
def subscribe_email(self, request, form):
    """ Adds the given email address to the email subscribers. """
    layout = DefaultLayout(self, request)
    callout = None
    if form.submitted(request):
        EmailSubscriberCollection(request.session).subscribe(
            form.email.data, request
        )
        callout = _(
            "Successfully subscribed to the email service. You will receive "
            "an email every time new results are published."
        )
    return {
        'layout': layout,
        'form': form,
        'title': _("Get email alerts"),
        'message': _(
            "You will receive an email as soon as new results have been "
            "published. You can unsubscribe at any time."
        ),
        'cancel': layout.homepage_link,
        'callout': callout,
        # hide the form once the confirmation callout is shown
        'show_form': not callout
    }
@ElectionDayApp.form(
    model=Principal,
    name='unsubscribe-email',
    template='form.pt',
    form=EmailSubscriptionForm,
    permission=Public
)
def unsubscribe_email(self, request, form):
    """ Removes the given email address from the email subscribers.
    Allows one-click unsubscription as defined by RFC-8058:
    curl -X POST http://localhost:8080/xx/zg/unsubscribe-oneclick?opaque=yy
    """
    layout = DefaultLayout(self, request)
    subscribers = EmailSubscriberCollection(request.session)
    # Decode the address from the opaque token, if one was supplied.
    try:
        token = request.params.get('opaque')
        email = request.load_url_safe_token(token).get('address')
    except (AttributeError, TypeError):
        email = None
    if email and request.method == 'POST':
        # one-click unsubscribe (RFC-8058)
        subscribers.unsubscribe(email)
        return Response()
    # regular unsubscribe through the form
    callout = None
    if form.submitted(request):
        subscribers.unsubscribe(form.email.data)
        callout = _(
            "Successfully unsubscribed from the email services. You will no "
            "longer receive an email when new results are published."
        )
    if email and not form.email.data:
        form.email.data = email
    return {
        'layout': layout,
        'form': form,
        'title': _("Stop email subscription"),
        'cancel': layout.homepage_link,
        'callout': callout,
        'show_form': not callout
    }
@ElectionDayApp.form(
    model=Principal,
    name='subscribe-sms',
    template='form.pt',
    form=SmsSubscriptionForm,
    permission=Public
)
def subscribe_sms(self, request, form):
    """ Adds the given phone number to the SMS subscribers. """
    layout = DefaultLayout(self, request)
    callout = None
    if form.submitted(request):
        SmsSubscriberCollection(request.session).subscribe(
            form.phone_number.formatted_data, request
        )
        callout = _(
            "Successfully subscribed to the SMS service. You will receive a "
            "SMS every time new results are published."
        )
    return {
        'layout': layout,
        'form': form,
        'title': _("Get SMS alerts"),
        'message': _(
            "You will receive a SMS as soon as new results have been "
            "published. The SMS service is free of charge. You can "
            "unsubscribe at any time."
        ),
        'cancel': layout.homepage_link,
        'callout': callout,
        # hide the form once the confirmation callout is shown
        'show_form': not callout
    }
@ElectionDayApp.form(
    model=Principal,
    name='unsubscribe-sms',
    template='form.pt',
    form=SmsSubscriptionForm,
    permission=Public
)
def unsubscribe_sms(self, request, form):
    """ Removes the given phone number from the SMS subscribers. """
    layout = DefaultLayout(self, request)
    callout = None
    if form.submitted(request):
        SmsSubscriberCollection(request.session).unsubscribe(
            form.phone_number.formatted_data
        )
        callout = _(
            "Successfully unsubscribed from the SMS services. You will no "
            "longer receive SMS when new results are published."
        )
    return {
        'layout': layout,
        'form': form,
        'title': _("Stop SMS subscription"),
        'cancel': layout.homepage_link,
        'callout': callout,
        'show_form': not callout
    }
import PyIgnition, pygame, sys, math, random
# --- Demo setup: window, fonts and the four particle effects. ---
pygame.font.init()
screen = pygame.display.set_mode((800, 600))
pygame.display.set_caption("PyIgnition 'Controlled Eruption' demo")
clock = pygame.time.Clock()
# Frame counter driving every timed stage of the demo; incremented in Update().
curframe = 0
started = False
# 'Press space to start' text
starttextfont = pygame.font.Font("courbd.ttf", 50)
starttext = starttextfont.render("Press space to start", True, (255, 255, 255), (0, 0, 0))
starttextpos = ((400 - (starttext.get_width() / 2)), (300 - (starttext.get_height() / 2)))
# Background
# Two corner streams of scaled-line particles that cycle through colours.
background = PyIgnition.ParticleEffect(screen, (0, 0), (800, 600))
backgroundsource = background.CreateSource((10, 10), initspeed = 5.0, initdirection = 2.35619449, initspeedrandrange = 2.0, initdirectionrandrange = 1.0, particlesperframe = 5, particlelife = 125, drawtype = PyIgnition.DRAWTYPE_SCALELINE, colour = (255, 255, 255), length = 10.0)
backgroundsource.CreateParticleKeyframe(50, colour = (0, 255, 0), length = 10.0)
backgroundsource.CreateParticleKeyframe(75, colour = (255, 255, 0), length = 10.0)
backgroundsource.CreateParticleKeyframe(100, colour = (0, 255, 255), length = 10.0)
backgroundsource.CreateParticleKeyframe(125, colour = (0, 0, 0), length = 10.0)
# Second stream starts silent; Update() enables it at frame 100.
backgroundsource2 = background.CreateSource((790, 10), initspeed = 5.0, initdirection = -2.35619449, initspeedrandrange = 2.0, initdirectionrandrange = 1.0, particlesperframe = 0, particlelife = 125, drawtype = PyIgnition.DRAWTYPE_SCALELINE, colour = (255, 255, 255), length = 10.0)
backgroundsource2.CreateParticleKeyframe(50, colour = (0, 255, 0), length = 10.0)
backgroundsource2.CreateParticleKeyframe(75, colour = (255, 255, 0), length = 10.0)
backgroundsource2.CreateParticleKeyframe(100, colour = (0, 255, 255), length = 10.0)
backgroundsource2.CreateParticleKeyframe(125, colour = (0, 0, 0), length = 10.0)
# Periodic firework
# The source orbits the screen centre (radius fireworkdist) and bursts
# every 50 frames once the demo passes frame 200.
fireworkcounter = 0.0
fireworkdist = 200.0
firework = PyIgnition.ParticleEffect(screen, (0, 0), (800, 600))
firework.CreateDirectedGravity(strength = 0.2, direction = [0, 1])
fireworksource = firework.CreateSource((10, 10), initspeed = 8.0, initdirection = 0.0, initspeedrandrange = 2.0, initdirectionrandrange = math.pi, particlesperframe = 0, particlelife = 150, drawtype = PyIgnition.DRAWTYPE_IMAGE, imagepath = "Spark.png")
# NOTE(review): the blast circle is attached to the *background* effect, so it
# pushes the background particles when a firework bursts — confirm intentional.
fireworkblast = background.CreateCircle(pos = (1000, 1000), colour = (0, 0, 0), bounce = 1.5, radius = 100.0)
# Ground-level bubbles
bubbles = PyIgnition.ParticleEffect(screen, (0, 0), (800, 600))
bubblesource = bubbles.CreateSource(initspeed = 1.0, initdirection = 0.0, initspeedrandrange = 0.5, initdirectionrandrange = math.pi, particlesperframe = 0, particlelife = 200, colour = (200, 255, 200), drawtype = PyIgnition.DRAWTYPE_BUBBLE, radius = 5.0, genspacing = 5)
# NOTE(review): frame 500 breaks the 50/75/100/125 keyframe pattern used by
# the other sources — possibly a typo for 50; confirm intent.
bubblesource.CreateParticleKeyframe(500, colour = (250, 100, 250))
bubblesource.CreateParticleKeyframe(75, colour = (190, 190, 200))
bubblesource.CreateParticleKeyframe(100, colour = (50, 250, 252))
bubblesource.CreateParticleKeyframe(125, colour = (250, 250, 255))
bubbles.CreateDirectedGravity(strength = 0.04, direction = [0, -1])
# Fire, just for laughs
fire = PyIgnition.ParticleEffect(screen, (0, 0), (800, 600))
gravity = fire.CreateDirectedGravity(strength = 0.07, direction = [0, -1])
wind = fire.CreateDirectedGravity(strength = 0.05, direction = [1, 0])
source = fire.CreateSource((150, 500), initspeed = 2.0, initdirection = 0.0, initspeedrandrange = 1.0, initdirectionrandrange = 0.5, particlesperframe = 10, particlelife = 100, drawtype = PyIgnition.DRAWTYPE_CIRCLE, colour = (255, 200, 100), radius = 3.0)
source.CreateParticleKeyframe(10, colour = (200, 50, 20), radius = 4.0)
source.CreateParticleKeyframe(30, colour = (150, 0, 0), radius = 6.0)
source.CreateParticleKeyframe(60, colour = (50, 20, 20), radius = 20.0)
source.CreateParticleKeyframe(80, colour = (0, 0, 0), radius = 50.0)
# Text
# Title surfaces: opaque copies for the fade (per-surface alpha needs no
# per-pixel alpha) and alpha-rendered versions for the final state.
font = pygame.font.Font("euphemia.ttf", 70)
font2 = pygame.font.Font("euphemia.ttf", 40)
text = font.render("PyIgnition", True, (255, 255, 255), (0, 0, 0))
text2 = font2.render("ExeSoft", True, (200, 200, 200), (0, 0, 0))
textalpha = font.render("PyIgnition", True, (255, 255, 255))
text2alpha = font2.render("ExeSoft", True, (200, 200, 200))
# Start fully transparent; Update() fades them in between frames 400 and 500.
temptext = text.copy()
temptext2 = text2.copy()
temptext.set_alpha(0)
temptext2.set_alpha(0)
textpos = ((400 - (text.get_width() / 2)), 250)
textpos2 = (textpos[0] + 110, textpos[1] - 30)
font3 = pygame.font.Font("courbd.ttf", 20)
text3 = font3.render("Version 1.0", True, (200, 200, 255), (0, 0, 0))
textpos3 = ((800 - text3.get_width()) - 5, (600 - text3.get_height()))
def Update():
    """Advance the demo by one frame.

    Stages (by curframe): frame 100 enables the second background stream;
    after 200 the orbiting firework bursts every 50 frames; 400 starts the
    bubbles and the title fade-in; past 500 the mouse-driven fire runs and
    the title is fully visible.
    """
    global curframe, fireworkcounter, temptext, temptext2
    background.Update()
    if curframe == 100:
        backgroundsource2.SetParticlesPerFrame(5)
    # Orbit the firework source around the screen centre.
    fireworksource.SetPos((400 + fireworkdist * math.cos(fireworkcounter), 300 + fireworkdist * math.sin(fireworkcounter)))
    if (curframe > 200) and (curframe % 50 == 0):
        # Burst: emit for 4 frames, then fall silent again.
        fireworksource.CreateKeyframe(fireworksource.curframe, particlesperframe = 10)
        fireworksource.CreateKeyframe(fireworksource.curframe + 4, particlesperframe = 0)
        firework.Update()
        # Park the blast circle on the burst so it shoves nearby particles.
        fireworkblast.SetPos(fireworksource.pos)
        fireworksource.ConsolidateKeyframes()
        #fireworkblast.ConsolidateKeyframes()
    else:
        if curframe % 30 == 0:
            fireworkblast.ConsolidateKeyframes()
        firework.Update()
        # Move the blast off-screen between bursts.
        fireworkblast.SetPos((1000, 1000))
    fireworkcounter = fireworkcounter + 0.1
    # NOTE(review): reseeding from system entropy every frame is redundant —
    # a single seed at startup would suffice.
    random.seed()
    if curframe == 400:
        bubblesource.SetParticlesPerFrame(1)
    bubbles.Update()
    # Bubbles spawn at a random x along the bottom edge each frame.
    bubblesource.SetPos((random.randint(0, 800), 600))
    if curframe % 30 == 0:
        bubblesource.ConsolidateKeyframes()
    if curframe > 500:
        fire.Update()
        source.SetPos(pygame.mouse.get_pos())
        if curframe % 30 == 0:
            source.ConsolidateKeyframes()
    if curframe > 400:
        if curframe > 500:
            # Fade finished: switch to the per-pixel-alpha renderings.
            temptext = textalpha.copy()
            temptext2 = text2alpha.copy()
        else:
            # Linear fade-in over frames 400..500.
            factor = (float(curframe) - 400.0) / 100.0
            if factor > 1.0:
                factor = 1.0
            alpha = int(factor * 255.0)
            temptext = text.copy()
            temptext.set_alpha(alpha)
            temptext2 = text2.copy()
            temptext2.set_alpha(alpha)
    curframe = curframe + 1
def Redraw():
    """Draw the current frame; the version tag and fire only appear after frame 500."""
    if curframe > 500:
        screen.blit(text3, textpos3)
        fire.Redraw()
    # Title surfaces start at alpha 0, so blitting them every frame is safe.
    screen.blit(temptext, textpos)
    screen.blit(temptext2, textpos2)
    background.Redraw()
    firework.Redraw()
    bubbles.Redraw()
# Main loop: wait for SPACE to start the demo, then run at 30 FPS.
while True:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            sys.exit()
        elif event.type == pygame.KEYDOWN:
            if event.key == pygame.K_SPACE:
                started = True
    screen.fill((0, 0, 0))
    if started:
        Update()
        Redraw()
    else:
        # Idle screen until the user presses space.
        screen.blit(starttext, starttextpos)
    pygame.display.update()
    clock.tick(30)
import PyIgnition, pygame, sys, math, random
# --- Demo setup: window, fonts and the four particle effects. ---
pygame.font.init()
screen = pygame.display.set_mode((800, 600))
pygame.display.set_caption("PyIgnition 'Controlled Eruption' demo")
clock = pygame.time.Clock()
# Frame counter driving every timed stage of the demo; incremented in Update().
curframe = 0
started = False
# 'Press space to start' text
starttextfont = pygame.font.Font("courbd.ttf", 50)
starttext = starttextfont.render("Press space to start", True, (255, 255, 255), (0, 0, 0))
starttextpos = ((400 - (starttext.get_width() / 2)), (300 - (starttext.get_height() / 2)))
# Background
# Two corner streams of scaled-line particles that cycle through colours.
background = PyIgnition.ParticleEffect(screen, (0, 0), (800, 600))
backgroundsource = background.CreateSource((10, 10), initspeed = 5.0, initdirection = 2.35619449, initspeedrandrange = 2.0, initdirectionrandrange = 1.0, particlesperframe = 5, particlelife = 125, drawtype = PyIgnition.DRAWTYPE_SCALELINE, colour = (255, 255, 255), length = 10.0)
backgroundsource.CreateParticleKeyframe(50, colour = (0, 255, 0), length = 10.0)
backgroundsource.CreateParticleKeyframe(75, colour = (255, 255, 0), length = 10.0)
backgroundsource.CreateParticleKeyframe(100, colour = (0, 255, 255), length = 10.0)
backgroundsource.CreateParticleKeyframe(125, colour = (0, 0, 0), length = 10.0)
# Second stream starts silent; Update() enables it at frame 100.
backgroundsource2 = background.CreateSource((790, 10), initspeed = 5.0, initdirection = -2.35619449, initspeedrandrange = 2.0, initdirectionrandrange = 1.0, particlesperframe = 0, particlelife = 125, drawtype = PyIgnition.DRAWTYPE_SCALELINE, colour = (255, 255, 255), length = 10.0)
backgroundsource2.CreateParticleKeyframe(50, colour = (0, 255, 0), length = 10.0)
backgroundsource2.CreateParticleKeyframe(75, colour = (255, 255, 0), length = 10.0)
backgroundsource2.CreateParticleKeyframe(100, colour = (0, 255, 255), length = 10.0)
backgroundsource2.CreateParticleKeyframe(125, colour = (0, 0, 0), length = 10.0)
# Periodic firework
# The source orbits the screen centre (radius fireworkdist) and bursts
# every 50 frames once the demo passes frame 200.
fireworkcounter = 0.0
fireworkdist = 200.0
firework = PyIgnition.ParticleEffect(screen, (0, 0), (800, 600))
firework.CreateDirectedGravity(strength = 0.2, direction = [0, 1])
fireworksource = firework.CreateSource((10, 10), initspeed = 8.0, initdirection = 0.0, initspeedrandrange = 2.0, initdirectionrandrange = math.pi, particlesperframe = 0, particlelife = 150, drawtype = PyIgnition.DRAWTYPE_IMAGE, imagepath = "Spark.png")
# NOTE(review): the blast circle is attached to the *background* effect, so it
# pushes the background particles when a firework bursts — confirm intentional.
fireworkblast = background.CreateCircle(pos = (1000, 1000), colour = (0, 0, 0), bounce = 1.5, radius = 100.0)
# Ground-level bubbles
bubbles = PyIgnition.ParticleEffect(screen, (0, 0), (800, 600))
bubblesource = bubbles.CreateSource(initspeed = 1.0, initdirection = 0.0, initspeedrandrange = 0.5, initdirectionrandrange = math.pi, particlesperframe = 0, particlelife = 200, colour = (200, 255, 200), drawtype = PyIgnition.DRAWTYPE_BUBBLE, radius = 5.0, genspacing = 5)
# NOTE(review): frame 500 breaks the 50/75/100/125 keyframe pattern used by
# the other sources — possibly a typo for 50; confirm intent.
bubblesource.CreateParticleKeyframe(500, colour = (250, 100, 250))
bubblesource.CreateParticleKeyframe(75, colour = (190, 190, 200))
bubblesource.CreateParticleKeyframe(100, colour = (50, 250, 252))
bubblesource.CreateParticleKeyframe(125, colour = (250, 250, 255))
bubbles.CreateDirectedGravity(strength = 0.04, direction = [0, -1])
# Fire, just for laughs
fire = PyIgnition.ParticleEffect(screen, (0, 0), (800, 600))
gravity = fire.CreateDirectedGravity(strength = 0.07, direction = [0, -1])
wind = fire.CreateDirectedGravity(strength = 0.05, direction = [1, 0])
source = fire.CreateSource((150, 500), initspeed = 2.0, initdirection = 0.0, initspeedrandrange = 1.0, initdirectionrandrange = 0.5, particlesperframe = 10, particlelife = 100, drawtype = PyIgnition.DRAWTYPE_CIRCLE, colour = (255, 200, 100), radius = 3.0)
source.CreateParticleKeyframe(10, colour = (200, 50, 20), radius = 4.0)
source.CreateParticleKeyframe(30, colour = (150, 0, 0), radius = 6.0)
source.CreateParticleKeyframe(60, colour = (50, 20, 20), radius = 20.0)
source.CreateParticleKeyframe(80, colour = (0, 0, 0), radius = 50.0)
# Text
# Title surfaces: opaque copies for the fade (per-surface alpha needs no
# per-pixel alpha) and alpha-rendered versions for the final state.
font = pygame.font.Font("euphemia.ttf", 70)
font2 = pygame.font.Font("euphemia.ttf", 40)
text = font.render("PyIgnition", True, (255, 255, 255), (0, 0, 0))
text2 = font2.render("ExeSoft", True, (200, 200, 200), (0, 0, 0))
textalpha = font.render("PyIgnition", True, (255, 255, 255))
text2alpha = font2.render("ExeSoft", True, (200, 200, 200))
# Start fully transparent; Update() fades them in between frames 400 and 500.
temptext = text.copy()
temptext2 = text2.copy()
temptext.set_alpha(0)
temptext2.set_alpha(0)
textpos = ((400 - (text.get_width() / 2)), 250)
textpos2 = (textpos[0] + 110, textpos[1] - 30)
font3 = pygame.font.Font("courbd.ttf", 20)
text3 = font3.render("Version 1.0", True, (200, 200, 255), (0, 0, 0))
textpos3 = ((800 - text3.get_width()) - 5, (600 - text3.get_height()))
def Update():
    """Advance the demo by one frame.

    Stages (by curframe): frame 100 enables the second background stream;
    after 200 the orbiting firework bursts every 50 frames; 400 starts the
    bubbles and the title fade-in; past 500 the mouse-driven fire runs and
    the title is fully visible.
    """
    global curframe, fireworkcounter, temptext, temptext2
    background.Update()
    if curframe == 100:
        backgroundsource2.SetParticlesPerFrame(5)
    # Orbit the firework source around the screen centre.
    fireworksource.SetPos((400 + fireworkdist * math.cos(fireworkcounter), 300 + fireworkdist * math.sin(fireworkcounter)))
    if (curframe > 200) and (curframe % 50 == 0):
        # Burst: emit for 4 frames, then fall silent again.
        fireworksource.CreateKeyframe(fireworksource.curframe, particlesperframe = 10)
        fireworksource.CreateKeyframe(fireworksource.curframe + 4, particlesperframe = 0)
        firework.Update()
        # Park the blast circle on the burst so it shoves nearby particles.
        fireworkblast.SetPos(fireworksource.pos)
        fireworksource.ConsolidateKeyframes()
        #fireworkblast.ConsolidateKeyframes()
    else:
        if curframe % 30 == 0:
            fireworkblast.ConsolidateKeyframes()
        firework.Update()
        # Move the blast off-screen between bursts.
        fireworkblast.SetPos((1000, 1000))
    fireworkcounter = fireworkcounter + 0.1
    # NOTE(review): reseeding from system entropy every frame is redundant —
    # a single seed at startup would suffice.
    random.seed()
    if curframe == 400:
        bubblesource.SetParticlesPerFrame(1)
    bubbles.Update()
    # Bubbles spawn at a random x along the bottom edge each frame.
    bubblesource.SetPos((random.randint(0, 800), 600))
    if curframe % 30 == 0:
        bubblesource.ConsolidateKeyframes()
    if curframe > 500:
        fire.Update()
        source.SetPos(pygame.mouse.get_pos())
        if curframe % 30 == 0:
            source.ConsolidateKeyframes()
    if curframe > 400:
        if curframe > 500:
            # Fade finished: switch to the per-pixel-alpha renderings.
            temptext = textalpha.copy()
            temptext2 = text2alpha.copy()
        else:
            # Linear fade-in over frames 400..500.
            factor = (float(curframe) - 400.0) / 100.0
            if factor > 1.0:
                factor = 1.0
            alpha = int(factor * 255.0)
            temptext = text.copy()
            temptext.set_alpha(alpha)
            temptext2 = text2.copy()
            temptext2.set_alpha(alpha)
    curframe = curframe + 1
def Redraw():
    """Draw the current frame; the version tag and fire only appear after frame 500."""
    if curframe > 500:
        screen.blit(text3, textpos3)
        fire.Redraw()
    # Title surfaces start at alpha 0, so blitting them every frame is safe.
    screen.blit(temptext, textpos)
    screen.blit(temptext2, textpos2)
    background.Redraw()
    firework.Redraw()
    bubbles.Redraw()
# Main loop: wait for SPACE to start the demo, then run at 30 FPS.
while True:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            sys.exit()
        elif event.type == pygame.KEYDOWN:
            if event.key == pygame.K_SPACE:
                started = True
    screen.fill((0, 0, 0))
    if started:
        Update()
        Redraw()
    else:
        # Idle screen until the user presses space.
        screen.blit(starttext, starttextpos)
    pygame.display.update()
    clock.tick(30)
import random
from environment.selfplay import SelfPlay
from environment.mazebase_wrapper import MazebaseWrapper
from environment.observation import ObservationTuple, Observation
from utils.constant import *
from copy import deepcopy
import numpy as np
class SelfPlayTarget(SelfPlay):
    """
    Wrapper class over self play environment for the TARGET task.

    Only Bob plays: each episode starts from a freshly reset environment, and
    Bob's "start" observation is a copy of the initial observation with its
    state zeroed out. The Alice-related hooks are stubs kept for interface
    parity with the other self-play variants.
    """

    def __init__(self, environment, task=TARGET):
        super(SelfPlayTarget, self).__init__(environment=environment, task=task)
        self.name = SELFPLAY + "_" + TARGET + "_" + self.environment.name
        self.alice_start_environment = None
        self.agent_id = 1
        # BUGFIX: `(BOB)` is just BOB itself — parentheses alone do not make a
        # tuple; a one-element tuple needs the trailing comma. Code that
        # iterates `self.agents` now sees the agent rather than its characters.
        self.agents = (BOB,)
        self.observation = Observation()
        self.alice_observations = None
        self.bob_observations = ObservationTuple()
        _all_possible_actions = self.environment.all_possible_actions()
        self.stop_action = None
        self.actions = _all_possible_actions
        # NOTE(review): this boolean attribute shadows the is_over() method
        # defined below on every instance — see the note on that method.
        self.is_over = None
        self.task = task

    def observe(self):
        """Return the most recent observation."""
        return self.observation

    def reset(self):
        """Reset the wrapped environment and rebuild Bob's start observation."""
        self.observation = self.environment.reset()
        self.bob_observations.start = deepcopy(self.observation)
        # Bob gets the layout of the start observation, but its state is hidden.
        self.bob_observations.start.state = np.zeros_like(self.bob_observations.start.state)
        self.is_over = False
        self.agent_id = 1
        return self.observe()

    def alice_observe(self):
        # Alice does not act in the TARGET task.
        return None

    def bob_observe(self):
        """Return (current observation, Bob's start observation)."""
        observation = self.observe()
        return (observation, self.bob_observations.start)

    def alice_start(self):
        # Alice does not act in the TARGET task.
        return None

    def alice_stop(self):
        # Alice does not act in the TARGET task.
        return None

    def bob_start(self):
        """Begin Bob's turn: mark the episode as not over."""
        self.agent_id = 1
        self.is_over = False

    def bob_stop(self):
        return None

    def agent_stop(self):
        return None

    def display(self):
        """Delegate rendering to the wrapped environment."""
        return self.environment.display()

    def is_over(self):
        # NOTE(review): unreachable through instances — __init__/reset bind a
        # boolean instance attribute named `is_over` that shadows this method.
        # Kept to preserve the class-level interface.
        return self.is_over

    def act(self, action):
        """Apply `action` to the environment and return Bob's observation pair."""
        self.observation = self.environment.act(action=action)
        return self.bob_observe()
if __name__ == "__main__":
    # Smoke test: drive the base SelfPlay environment with random actions
    # and print the observed rewards.
    play = SelfPlay(environment=MazebaseWrapper(), task=COPY)
    # env.display()
    actions = play.all_possible_actions()
    print(actions)
    for i in range(100):
        print("==============")
        _action = random.choice(actions)
        print(_action)
        play.act(_action)
        print((play.observe()).reward)
from environment.selfplay import SelfPlay
from environment.mazebase_wrapper import MazebaseWrapper
from environment.observation import ObservationTuple, Observation
from utils.constant import *
from copy import deepcopy
import numpy as np
class SelfPlayTarget(SelfPlay):
    """
    Wrapper class over self play environment for the TARGET task.

    Only Bob plays: each episode starts from a freshly reset environment, and
    Bob's "start" observation is a copy of the initial observation with its
    state zeroed out. The Alice-related hooks are stubs kept for interface
    parity with the other self-play variants.
    """

    def __init__(self, environment, task=TARGET):
        super(SelfPlayTarget, self).__init__(environment=environment, task=task)
        self.name = SELFPLAY + "_" + TARGET + "_" + self.environment.name
        self.alice_start_environment = None
        self.agent_id = 1
        # BUGFIX: `(BOB)` is just BOB itself — parentheses alone do not make a
        # tuple; a one-element tuple needs the trailing comma. Code that
        # iterates `self.agents` now sees the agent rather than its characters.
        self.agents = (BOB,)
        self.observation = Observation()
        self.alice_observations = None
        self.bob_observations = ObservationTuple()
        _all_possible_actions = self.environment.all_possible_actions()
        self.stop_action = None
        self.actions = _all_possible_actions
        # NOTE(review): this boolean attribute shadows the is_over() method
        # defined below on every instance — see the note on that method.
        self.is_over = None
        self.task = task

    def observe(self):
        """Return the most recent observation."""
        return self.observation

    def reset(self):
        """Reset the wrapped environment and rebuild Bob's start observation."""
        self.observation = self.environment.reset()
        self.bob_observations.start = deepcopy(self.observation)
        # Bob gets the layout of the start observation, but its state is hidden.
        self.bob_observations.start.state = np.zeros_like(self.bob_observations.start.state)
        self.is_over = False
        self.agent_id = 1
        return self.observe()

    def alice_observe(self):
        # Alice does not act in the TARGET task.
        return None

    def bob_observe(self):
        """Return (current observation, Bob's start observation)."""
        observation = self.observe()
        return (observation, self.bob_observations.start)

    def alice_start(self):
        # Alice does not act in the TARGET task.
        return None

    def alice_stop(self):
        # Alice does not act in the TARGET task.
        return None

    def bob_start(self):
        """Begin Bob's turn: mark the episode as not over."""
        self.agent_id = 1
        self.is_over = False

    def bob_stop(self):
        return None

    def agent_stop(self):
        return None

    def display(self):
        """Delegate rendering to the wrapped environment."""
        return self.environment.display()

    def is_over(self):
        # NOTE(review): unreachable through instances — __init__/reset bind a
        # boolean instance attribute named `is_over` that shadows this method.
        # Kept to preserve the class-level interface.
        return self.is_over

    def act(self, action):
        """Apply `action` to the environment and return Bob's observation pair."""
        self.observation = self.environment.act(action=action)
        return self.bob_observe()
if __name__ == "__main__":
    # Smoke test: drive the base SelfPlay environment with random actions
    # and print the observed rewards.
    play = SelfPlay(environment=MazebaseWrapper(), task=COPY)
    # env.display()
    actions = play.all_possible_actions()
    print(actions)
    for i in range(100):
        print("==============")
        _action = random.choice(actions)
        print(_action)
        play.act(_action)
        print((play.observe()).reward)
import botCore
import parserCore
from aiogram import utils
from modules import adblocker, database, rating
async def send_post(message):
    """Fan a channel post out to every user observing the source channel.

    Text posts, stickers and (as a caption-only placeholder) media posts are
    forwarded with a rating keyboard; users who blocked the bot are purged
    from the subscriber table.
    """
    # NOTE(review): given the function name, this gate looks inverted (posts
    # would only be forwarded when they *contain* an ad) — confirm the return
    # convention of adblocker.post_contains_ad before changing it.
    if adblocker.post_contains_ad(message):
        for user in database.User.select() \
                .where(database.User.observing_channels.contains(str(message.sender_chat.id))):
            try:
                # NOTE(review): Telegram chat ids are integers; passing
                # float(...) here looks odd — confirm what
                # parserCore.get_chat_info_by_link expects.
                channel_info = await parserCore.get_chat_info_by_link(float(message.sender_chat.id))
                if message.text is not None:
                    # Plain text post: forward as one message with rating buttons.
                    text = f'Переслано из "{channel_info.title}"📢\n\n {message.text}'
                    await botCore.send_message(chat_id=user.user_id, text=text,
                                               reply_markup=rating.generate_rate_keyboard(
                                                   f"{message.sender_chat.id}_{message.message_id}"),
                                               disable_notifications=user.silent_mode)
                elif message.sticker is not None:
                    # Sticker post: header message first, then the sticker itself.
                    await botCore.send_message(user.user_id, f'Переслано из "{channel_info.title}"📢'
                                               f"\n\nПросмотров:{message.views}",
                                               disable_notifications=user.silent_mode,
                                               reply_markup=None)
                    await botCore.bot.send_sticker(user.user_id, message.sticker.file_id,
                                                   reply_markup=rating.generate_rate_keyboard(
                                                       f"{message.sender_chat.id}_{message.message_id}"))
                elif message.media:
                    # Media post: only the caption is forwarded for now.
                    text = f'Переслано (медиаконтент) из "{channel_info.title}"📢' \
                           f'\n\n {message.caption}\n\nПоддержка медиаконтента будет добавлена в ближайшее время'
                    await botCore.send_message(chat_id=user.user_id, text=text,
                                               reply_markup=rating.generate_rate_keyboard(
                                                   f"{message.sender_chat.id}_{message.message_id}"),
                                               disable_notifications=user.silent_mode)
            except utils.exceptions.BotBlocked as e:
                # The user blocked the bot: drop them from the subscriber table.
                print(f"User {user.user_id} was deleted. Cause: {e.text}")
                database.User.delete().where(database.User.user_id == user.user_id).execute()
import parserCore
from aiogram import utils
from modules import adblocker, database, rating
async def send_post(message):
    """Fan a channel post out to every user observing the source channel.

    Text posts, stickers and (as a caption-only placeholder) media posts are
    forwarded with a rating keyboard; users who blocked the bot are purged
    from the subscriber table.
    """
    # NOTE(review): given the function name, this gate looks inverted (posts
    # would only be forwarded when they *contain* an ad) — confirm the return
    # convention of adblocker.post_contains_ad before changing it.
    if adblocker.post_contains_ad(message):
        for user in database.User.select() \
                .where(database.User.observing_channels.contains(str(message.sender_chat.id))):
            try:
                # NOTE(review): Telegram chat ids are integers; passing
                # float(...) here looks odd — confirm what
                # parserCore.get_chat_info_by_link expects.
                channel_info = await parserCore.get_chat_info_by_link(float(message.sender_chat.id))
                if message.text is not None:
                    # Plain text post: forward as one message with rating buttons.
                    text = f'Переслано из "{channel_info.title}"📢\n\n {message.text}'
                    await botCore.send_message(chat_id=user.user_id, text=text,
                                               reply_markup=rating.generate_rate_keyboard(
                                                   f"{message.sender_chat.id}_{message.message_id}"),
                                               disable_notifications=user.silent_mode)
                elif message.sticker is not None:
                    # Sticker post: header message first, then the sticker itself.
                    await botCore.send_message(user.user_id, f'Переслано из "{channel_info.title}"📢'
                                               f"\n\nПросмотров:{message.views}",
                                               disable_notifications=user.silent_mode,
                                               reply_markup=None)
                    await botCore.bot.send_sticker(user.user_id, message.sticker.file_id,
                                                   reply_markup=rating.generate_rate_keyboard(
                                                       f"{message.sender_chat.id}_{message.message_id}"))
                elif message.media:
                    # Media post: only the caption is forwarded for now.
                    text = f'Переслано (медиаконтент) из "{channel_info.title}"📢' \
                           f'\n\n {message.caption}\n\nПоддержка медиаконтента будет добавлена в ближайшее время'
                    await botCore.send_message(chat_id=user.user_id, text=text,
                                               reply_markup=rating.generate_rate_keyboard(
                                                   f"{message.sender_chat.id}_{message.message_id}"),
                                               disable_notifications=user.silent_mode)
            except utils.exceptions.BotBlocked as e:
                # The user blocked the bot: drop them from the subscriber table.
                print(f"User {user.user_id} was deleted. Cause: {e.text}")
                database.User.delete().where(database.User.user_id == user.user_id).execute()
from module import *
from models import *
from flask import request, session
class Viper(object):
    """CRUD helper around the Vip table: add, edit, delete, list and
    pre-check members keyed by an 11-digit phone number.

    All methods return API payloads built by return_json/return_jsons;
    status 1 means success, -1 means failure.
    """

    def __init__(self, vphone="", vname="", vrank="0", vid=""):
        # Fields default to empty strings so each method validates only
        # what it actually needs.
        self.phone = vphone
        self.name = vname
        self.rank = vrank
        self.vid = vid

    def addVip(self):
        """Create a member after validating name/phone and phone uniqueness."""
        if self.name == "" or self.phone == "":
            return return_json(-1, "请填写完整信息")
        if len(self.phone) != 11:
            return return_json(-1, "手机号必须为11位数字")
        if not self.phone.isdigit():
            return return_json(-1, "手机号必须为纯数字")
        # NOTE(review): check-then-insert is racy unless Vip.vphone carries a
        # unique constraint — confirm the schema enforces uniqueness.
        rs = db_session.query(Vip).filter(Vip.vphone == self.phone).count()
        if rs != 0:
            return return_json(-1, '用户已存在')
        try:
            db_session.add(Vip(vphone=self.phone, vname=self.name, vrank=self.rank))
            db_session.commit()
            return return_json(1, "添加成功")
        except Exception:  # BUGFIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt
            db_session.rollback()
            return return_json(-1, "添加失败")
        finally:
            db_session.remove()

    def editVip(self):
        """Update a member's name/phone; a changed phone must stay unique."""
        if self.vid == "":
            return return_json(-1, "err")
        if self.name == "" or self.phone == "":
            return return_json(-1, "请填写完整信息")
        if len(self.phone) != 11:
            return return_json(-1, "手机号必须为11位数字")
        if not self.phone.isdigit():
            return return_json(-1, "手机号必须为纯数字")
        rs = db_session.query(Vip).filter(Vip.vid == self.vid).first()
        if rs.vphone != self.phone:
            # Phone number changed: make sure no other member already uses it.
            rs = db_session.query(Vip).filter(Vip.vphone == self.phone).count()
            if rs != 0:
                return return_json(-1, '用户已存在')
        try:
            rs = db_session.query(Vip).filter(Vip.vid == self.vid).first()
            rs.vphone = self.phone
            rs.vname = self.name
            db_session.commit()
            return return_json(1, "修改成功")
        except Exception:  # BUGFIX: was a bare `except:`
            db_session.rollback()
            return return_json(-1, "修改失败")
        finally:
            db_session.remove()

    def delVip(self):
        """Delete the member identified by vid."""
        if self.vid == "":
            return return_json(-1, "err")
        try:
            db_session.query(Vip).filter(Vip.vid == self.vid).delete()
            db_session.commit()
            return return_json(1, "删除成功")
        except Exception:  # BUGFIX: was a bare `except:`
            db_session.rollback()
            return return_json(-1, "删除失败")
        finally:
            db_session.remove()

    def listVip(self):
        """List members, optionally filtered by a phone-number substring."""
        if self.phone != "":
            rs = db_session.query(Vip).filter(Vip.vphone.like('%' + self.phone + '%')).all()
        else:
            rs = db_session.query(Vip).all()
        jsons = []
        for i in rs:
            jsons.append({'vid': i.vid, 'vname': i.vname, 'vphone': i.vphone, 'vrank': i.vrank})
        return return_jsons(1, jsons)

    def checkVip(self):
        """Report whether the phone number is still free to register."""
        rs = db_session.query(Vip).filter(Vip.vphone == self.phone).count()
        if rs != 0:
            return return_json(-1, '用户已存在')
        else:
            return return_json(1, "手机号输入正确")
from models import *
from flask import request, session
class Viper(object):
    """CRUD helper around the Vip table: add, edit, delete, list and
    pre-check members keyed by an 11-digit phone number.

    All methods return API payloads built by return_json/return_jsons;
    status 1 means success, -1 means failure.
    """

    def __init__(self, vphone="", vname="", vrank="0", vid=""):
        # Fields default to empty strings so each method validates only
        # what it actually needs.
        self.phone = vphone
        self.name = vname
        self.rank = vrank
        self.vid = vid

    def addVip(self):
        """Create a member after validating name/phone and phone uniqueness."""
        if self.name == "" or self.phone == "":
            return return_json(-1, "请填写完整信息")
        if len(self.phone) != 11:
            return return_json(-1, "手机号必须为11位数字")
        if not self.phone.isdigit():
            return return_json(-1, "手机号必须为纯数字")
        # NOTE(review): check-then-insert is racy unless Vip.vphone carries a
        # unique constraint — confirm the schema enforces uniqueness.
        rs = db_session.query(Vip).filter(Vip.vphone == self.phone).count()
        if rs != 0:
            return return_json(-1, '用户已存在')
        try:
            db_session.add(Vip(vphone=self.phone, vname=self.name, vrank=self.rank))
            db_session.commit()
            return return_json(1, "添加成功")
        except Exception:  # BUGFIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt
            db_session.rollback()
            return return_json(-1, "添加失败")
        finally:
            db_session.remove()

    def editVip(self):
        """Update a member's name/phone; a changed phone must stay unique."""
        if self.vid == "":
            return return_json(-1, "err")
        if self.name == "" or self.phone == "":
            return return_json(-1, "请填写完整信息")
        if len(self.phone) != 11:
            return return_json(-1, "手机号必须为11位数字")
        if not self.phone.isdigit():
            return return_json(-1, "手机号必须为纯数字")
        rs = db_session.query(Vip).filter(Vip.vid == self.vid).first()
        if rs.vphone != self.phone:
            # Phone number changed: make sure no other member already uses it.
            rs = db_session.query(Vip).filter(Vip.vphone == self.phone).count()
            if rs != 0:
                return return_json(-1, '用户已存在')
        try:
            rs = db_session.query(Vip).filter(Vip.vid == self.vid).first()
            rs.vphone = self.phone
            rs.vname = self.name
            db_session.commit()
            return return_json(1, "修改成功")
        except Exception:  # BUGFIX: was a bare `except:`
            db_session.rollback()
            return return_json(-1, "修改失败")
        finally:
            db_session.remove()

    def delVip(self):
        """Delete the member identified by vid."""
        if self.vid == "":
            return return_json(-1, "err")
        try:
            db_session.query(Vip).filter(Vip.vid == self.vid).delete()
            db_session.commit()
            return return_json(1, "删除成功")
        except Exception:  # BUGFIX: was a bare `except:`
            db_session.rollback()
            return return_json(-1, "删除失败")
        finally:
            db_session.remove()

    def listVip(self):
        """List members, optionally filtered by a phone-number substring."""
        if self.phone != "":
            rs = db_session.query(Vip).filter(Vip.vphone.like('%' + self.phone + '%')).all()
        else:
            rs = db_session.query(Vip).all()
        jsons = []
        for i in rs:
            jsons.append({'vid': i.vid, 'vname': i.vname, 'vphone': i.vphone, 'vrank': i.vrank})
        return return_jsons(1, jsons)

    def checkVip(self):
        """Report whether the phone number is still free to register."""
        rs = db_session.query(Vip).filter(Vip.vphone == self.phone).count()
        if rs != 0:
            return return_json(-1, '用户已存在')
        else:
            return return_json(1, "手机号输入正确")
def binary_search_base(nums: list, target: int) -> int:
    """Classic binary search over an ascending sorted list.

    Time complexity: O(log n).

    :param nums: sorted (ascending) list of integers
    :param target: value to look for
    :return: an index i with nums[i] == target (any one of them when the
        target occurs multiple times), or -1 if the target is absent
    """
    if not nums:
        return -1
    left, right = 0, len(nums) - 1
    # Use <= so the loop still inspects the last candidate when the window
    # shrinks to a single element (left == right).  The original
    # `left < right` skipped that check, so e.g. binary_search_base([1], 1)
    # and binary_search_base([1, 3, 5, 7], 7) wrongly returned -1.
    while left <= right:
        mid = left + (right - left) // 2
        if nums[mid] == target:
            return mid
        elif nums[mid] < target:
            left = mid + 1
        else:
            right = mid - 1
    return -1
def lower_bound(nums: list, target: int) -> int:
    """Return the first index i such that nums[i] >= target.

    Mirrors C++ std::lower_bound; nums must be sorted ascending.
    Returns len(nums) when every element is smaller than target.
    """
    lo, hi = 0, len(nums)
    while lo < hi:
        mid = (lo + hi) // 2
        if target <= nums[mid]:
            # nums[mid] is a valid candidate: keep it in the window.
            hi = mid
        else:
            lo = mid + 1
    return lo
def upper_bound(nums: list, target: int) -> int:
    """Return the first index i such that nums[i] > target.

    Mirrors C++ std::upper_bound; nums must be sorted ascending.
    Returns len(nums) when no element is strictly greater than target.
    """
    lo, hi = 0, len(nums)
    while lo < hi:
        mid = (lo + hi) // 2
        if target < nums[mid]:
            hi = mid
        else:
            # Equal elements are skipped too: we want strictly greater.
            lo = mid + 1
    return lo
def left_bound(nums: list, target: int) -> int:
    """Return the index of the first occurrence of target in sorted nums.

    Returns -1 when target is not present.

    e.g. left_bound([1, 2, 2, 3, 3, 3, 4], 2) -> 1
    """
    if not nums:
        return -1
    lo, hi = 0, len(nums)
    while lo < hi:
        mid = (lo + hi) // 2
        if nums[mid] < target:
            lo = mid + 1
        else:
            # nums[mid] >= target: the answer is at mid or to its left.
            hi = mid
    if lo < len(nums) and nums[lo] == target:
        return lo
    return -1
def right_bound(nums: list, target: int) -> int:
    """Return the index of the last occurrence of target in sorted nums.

    Returns -1 when target is not present.

    e.g. right_bound([1, 2, 2, 3, 3, 3, 4], 2) -> 2
    """
    if not nums:
        return -1
    lo, hi = 0, len(nums)
    while lo < hi:
        mid = (lo + hi) // 2
        if nums[mid] <= target:
            # Equal values still move right: we want the last occurrence.
            lo = mid + 1
        else:
            hi = mid
    idx = lo - 1
    if 0 <= idx and nums[idx] == target:
        return idx
    return -1
if __name__ == '__main__':
    # Smoke test: run every search variant against the same sorted list.
    nums = [1, 2, 2, 3, 3, 3, 4, 5, 6]
    target = 3
    print(binary_search_base(nums, target))  # some index of a 3
    print(left_bound(nums, target))          # 3: first occurrence
    print(right_bound(nums, target))         # 5: last occurrence
    print(lower_bound(nums, target))         # 3: first index with nums[i] >= 3
print(upper_bound(nums, target)) | search and sort/binary_search.py | def binary_search_base(nums: list, target: int) -> int:
"""
Time complexi O(logn)
The basic binary search
nums is a sorted list
if multi targets in nums, return one target index
else return -1
"""
if not nums:
return -1
left, right = 0, len(nums) - 1
while left < right:
mid = left + (right - left) // 2
if nums[mid] == target:
return mid
elif nums[mid] < target:
left = mid + 1
elif nums[mid] > target:
right = mid - 1
return -1
def lower_bound(nums: list, target: int) -> int:
'''
return the target lower bound index in nums
c++ algorithms
'''
first, last = 0, len(nums)
while first < last:
mid = first + (last - first) // 2
if nums[mid] < target:
first = mid + 1
else:
last = mid
return first
def upper_bound(nums: list, target: int) -> int:
'''
return the first idx in nums when nums[idx] > target
'''
first, last = 0, len(nums)
while first < last:
mid = first + (last - first) // 2
if nums[mid] <= target:
first = mid + 1
else:
last = mid
return first
def left_bound(nums: list, target: int) -> int:
'''
return the target left_bound index in nums
if target not in nums, return -1
e.g.,
nums = [1, 2, 2, 3, 3, 3, 4], target = 2
return 1
'''
if not nums:
return -1
left, right = 0, len(nums)
while left < right:
mid = (left + right) // 2
if nums[mid] == target:
right = mid
elif nums[mid] < target:
left = mid + 1
elif nums[mid] > target:
right = mid
if -1 < left < len(nums) and nums[left] == target:
return left
return -1
def right_bound(nums: list, target: int) -> int:
'''
return the target left_bound index in nums
if target not in nums, return -1
e.g.,
nums = [1, 2, 2, 3, 3, 3, 4], target = 2
return 2
'''
if not nums:
return -1
left, right = 0, len(nums)
while left < right:
mid = (left + right) // 2
if nums[mid] == target:
left = mid + 1
elif nums[mid] < target:
left = mid + 1
elif nums[mid] > target:
right = mid
res = left - 1
if -1 < res < len(nums) and nums[res] == target:
return res
return -1
if __name__ == '__main__':
nums = [1, 2, 2, 3, 3, 3, 4, 5, 6]
target = 3
print(binary_search_base(nums, target))
print(left_bound(nums, target))
print(right_bound(nums, target))
print(lower_bound(nums, target))
print(upper_bound(nums, target)) | 0.703448 | 0.722796 |
import os
import random
from string import ascii_uppercase, digits
from Bio import Seq, SeqUtils, SeqIO, SeqRecord
from Bio.Alphabet import IUPAC
from Bio.Blast import NCBIWWW, NCBIXML
from matplotlib import pylab
__author__ = '<NAME>'
__license__ = "MIT"
__version__ = "1.0"
__status__ = "Production"
class Sequence:
    """Random biological sequence (DNA/RNA/protein) with FASTA and BLAST helpers."""
    # Alphabets drawn from when generating random sequences of each type.
    dna_bases = IUPAC.IUPACUnambiguousDNA.letters
    rna_bases = IUPAC.IUPACUnambiguousRNA.letters
    amino_acids = IUPAC.IUPACProtein.letters
def __init__(self, size: int = 100, seq_type: str = 'D', id: str = None, seq=None):
    """Creates a random Sequence of the given size and type.

    :param size: Desired length; rounded up to the next multiple of 3.
    :param seq_type: Sequence type, D = DNA, R = RNA, P = Protein.
    :param id: ID of the sequence; a random accession-like ID when None.
    :param seq: Pre-built Bio.Seq object; a random one is generated when falsy.
    """
    # One-letter code -> full type name; any other code raises KeyError.
    self.s_type = {'D': 'DNA', 'R': 'RNA', 'P': 'PROTEIN'}[str(seq_type)]
    # If size is not a multiple of 3, round it up (whole codons only).
    self.size = size if not size % 3 else size + (3 - size % 3)
    # Use the caller-supplied sequence when truthy, otherwise generate one.
    self.seq = seq if seq else self.generate_sequence()
    # Random ID shaped like "XY_12345": two letters, '_', 4-7 digits.
    self.id = id if id else ''.join(random.choice(ascii_uppercase) for i in range(2)) + '_' \
        + ''.join(random.choice(digits) for i in range(random.randint(4, 7)))
    self.record = SeqRecord.SeqRecord(self.seq, id=self.id)
def show(self):
    """Prints the sequence of the object and its ID to stdout.

    :return: None
    """
    print('Sequence: {}\nID: {}'.format(self.seq, self.id))
def generate_sequence(self):
    """Generates a random sequence matching self.s_type and self.size.

    :return: Bio.Seq object of self.size random letters.
    :raises TypeError: if self.s_type is not one of DNA/RNA/PROTEIN.
    """
    if self.s_type not in {'DNA', 'RNA', 'PROTEIN'}:
        raise TypeError('Wrong type of sequence')
    else:
        if self.s_type == 'DNA':
            seq = Seq.Seq(''.join(random.choice(Sequence.dna_bases) for i in range(self.size)))
        elif self.s_type == 'RNA':
            seq = Seq.Seq(''.join(random.choice(Sequence.rna_bases) for i in range(self.size)))
        else:
            seq = Seq.Seq(''.join(random.choice(Sequence.amino_acids) for i in range(self.size)))
        return seq
def calculate_gc(self):
    """Calculates the GC percentage of the sequence.

    :return: Float number - GC percent.
    :raises TypeError: for protein sequences (GC content is undefined).
    """
    if self.s_type == 'PROTEIN':
        raise TypeError('GC are not in {} sequence'.format(self.s_type))
    return SeqUtils.GC(self.seq)
def transcribe(self):
    """Transcribes to an RNA sequence; only valid when this is DNA.

    :return: Seq object of type RNA.
    :raises TypeError: if this sequence is not DNA.
    """
    if self.s_type != 'DNA':
        raise TypeError('Sequence type {} can not be transcribed.'.format(self.s_type))
    return Seq.Seq.transcribe(self.seq)
def translate(self):
    """Translates to a protein sequence; only valid when this is RNA.

    :return: Seq object of type Protein.
    :raises TypeError: if this sequence is not RNA.
    """
    if self.s_type != 'RNA':
        raise TypeError('Sequence type {} can not be translated.'.format(self.s_type))
    return Seq.Seq.translate(self.seq)
def reversed_transcription(self):
    """Back-transcribes an RNA sequence to DNA.

    :return: Seq object of type DNA.
    :raises TypeError: if this sequence is not RNA.
    """
    if self.s_type != 'RNA':
        raise TypeError('Sequence type {} can not be transcribed in reverse.'.format(self.s_type))
    # NOTE(review): calls the module-level Seq.back_transcribe, unlike the
    # Seq.Seq.* calls above — confirm this exists in the pinned Biopython.
    return Seq.back_transcribe(self.seq)
def get_complement(self):
    """Return the reverse complement of the sequence.

    NOTE(review): despite the method name and the old docstring, this
    returns a Seq (the reverse complement), not an iterator and not the
    plain complement.

    :return: Seq with the reverse-complemented sequence.
    """
    return Seq.reverse_complement(self.seq)
def get_sequence_elems(self):
    """Yield each base/residue of the sequence in order.

    :return: generator over the sequence's elements.
    """
    for base in self.seq:
        yield base
def get_complement_elems(self):
    """Yield each base of the reverse-complement strand in order.

    :return: generator over the reverse complement's elements.
    """
    for base in Seq.reverse_complement(self.seq):
        yield base
def save_to_fasta(self, fn=None, description='None'):
    """Saves the record to a file in FASTA format.

    :param fn: Filename; defaults to '<record id>.fasta'.
    :param description: Record description written to the FASTA header.
    :return: None
    """
    if fn is None:
        fn = '{}.fasta'.format(self.record.id)
    self.record.description = description
    try:
        # The with-statement closes the file on exit; the explicit
        # fl.close() the original also called inside the block was
        # redundant and has been removed.
        with open(fn, 'w') as fl:
            SeqIO.write(self.record, handle=fl, format='fasta')
    except OSError as exc:
        print(exc)
    else:
        print('File {} saved!'.format(fn))
def read_from_fasta(self, fn=None):
    """Reads a SeqRecord from a FASTA file into this object.

    When fn is None, the first *.fasta file found in the current
    directory is loaded instead.

    :param fn: Filename of the FASTA file, or None to auto-discover.
    :return: True if a file was loaded, else False.
    """
    if fn is None:
        for fn in os.listdir(os.curdir):
            if not fn.endswith('.fasta'):
                continue
            # with closes the file; the original's explicit fl.close()
            # inside the block was redundant and has been removed.
            with open(fn, 'r') as fl:
                self.record = SeqIO.read(fl, 'fasta')
                self.seq = self.record.seq
                self.id = self.record.id
                print('File {} loaded!'.format(fn))
                return True
    else:
        self.record = SeqIO.read(fn, 'fasta')
        self.seq = self.record.seq
        self.id = self.record.id
        print('File {} loaded!'.format(fn))
        return True
    # Reached only when auto-discovery found no .fasta file.
    return False
def blast_search(self, fn=None, dot_plot=False, window=8):
    """Runs an online BLAST search for this record via NCBI.

    blastn against 'nt' is used for DNA/RNA, blastp against 'pdb'
    for proteins.

    :param fn: File in which the raw XML results are saved; defaults to
        'results/<id>_blast_results.xml'.
    :param dot_plot: True/False - show a dot plot of the two sequences.
    :param window: Dot-plot window size (match threshold), e.g. 5.
    :return: None
    :raises ValueError: when the BLAST query produced no result handle.
    """
    if self.s_type == 'DNA' or self.s_type == 'RNA':
        try:
            print('Task running...')
            income = NCBIWWW.qblast('blastn', 'nt', self.record.format('fasta'))
        except ValueError:
            income = None
    else:
        try:
            print('Task running...')
            income = NCBIWWW.qblast('blastp', 'pdb', self.record.format('fasta'))
            print('Got results!')
        except ValueError:
            income = None
    if income is not None:
        if fn is None:
            # NOTE(review): assumes a 'results/' directory exists — confirm.
            with open('results/{}_blast_results.xml'.format(self.id), 'w') as of:
                of.write(income.read())
                of.close()
        else:
            with open(fn, 'w') as of:
                of.write(income.read())
                of.close()
        # NOTE(review): income was just exhausted by .read() above, so this
        # parse likely sees an empty stream; re-opening the saved file is
        # the usual fix — confirm against a live run.
        result = NCBIXML.read(income)
        # NOTE(review): Biopython BLAST records expose 'alignments' (plural)
        # and 'length'; 'alignment' / 'lenght' below look like typos that
        # would raise AttributeError at runtime — confirm.
        align = result.alignment[0]
        print(align.title)
        print(align.lenght)
        print(align.hsps[0].expect)
        print(align.hsps[0].query[0:70] + '...')
        print(align.hsps[0].match[0:70] + '...')
        print(align.hsps[0].sbjct[0:70] + '...')
        if dot_plot:
            seq_one = str(align.hsps[0].query).upper()
            seq_two = str(align.hsps[0].match).upper()
            # True (plotted) where the two windows differ at this offset.
            data = [[(seq_one[i:i + window] != seq_two[j:j + window]) for j in range(len(seq_one) - window)] for i
                    in range(len(seq_two) - window)]
            pylab.gray()
            pylab.imshow(data)
            pylab.xlabel('{} (length {} bp)'.format(align.hsps[0].query, len(align.hsps[0].query)))
            pylab.ylabel('{} (length {} bp)'.format(align.hsps[0].match, len(align.hsps[0].match)))
            pylab.title('Dot plot using window size {}\n(allowing no mis-matches)'.format(window))
            pylab.show()
    else:
        # The raise ValueError("No sequence found!") body follows on the
        # next source line, which is fused with dataset separators and is
        # intentionally left untouched.
raise ValueError("No sequence found!") | Sequence.py | import os
import random
from string import ascii_uppercase, digits
from Bio import Seq, SeqUtils, SeqIO, SeqRecord
from Bio.Alphabet import IUPAC
from Bio.Blast import NCBIWWW, NCBIXML
from matplotlib import pylab
__author__ = '<NAME>'
__license__ = "MIT"
__version__ = "1.0"
__status__ = "Production"
class Sequence:
dna_bases = IUPAC.IUPACUnambiguousDNA.letters
rna_bases = IUPAC.IUPACUnambiguousRNA.letters
amino_acids = IUPAC.IUPACProtein.letters
def __init__(self, size: int = 100, seq_type: str = 'D', id: str = None, seq=None):
"""Creates random Sequence of given size and type.
:param size: Size of sequence.
:param seq_type: Sequence type, D = DNA, R = RNA, P = Protein.
:param id: ID of sequence.
:param seq: Ready Sequence object.
"""
self.s_type = {'D': 'DNA', 'R': 'RNA', 'P': 'PROTEIN'}[str(seq_type)]
# If size is not multiple of 3, then make it bigger
self.size = size if not size % 3 else size + (3 - size % 3)
# If sequence is not none and if it's instance of Sequence class
self.seq = seq if seq else self.generate_sequence()
self.id = id if id else ''.join(random.choice(ascii_uppercase) for i in range(2)) + '_' \
+ ''.join(random.choice(digits) for i in range(random.randint(4, 7)))
self.record = SeqRecord.SeqRecord(self.seq, id=self.id)
def show(self):
"""Prints sequence of object and it's ID.
:return: None
"""
print('Sequence: {}\nID: {}'.format(self.seq, self.id))
def generate_sequence(self):
"""Generates random sequence based on type.
:return: Bio.Seq object.
"""
if self.s_type not in {'DNA', 'RNA', 'PROTEIN'}:
raise TypeError('Wrong type of sequence')
else:
if self.s_type == 'DNA':
seq = Seq.Seq(''.join(random.choice(Sequence.dna_bases) for i in range(self.size)))
elif self.s_type == 'RNA':
seq = Seq.Seq(''.join(random.choice(Sequence.rna_bases) for i in range(self.size)))
else:
seq = Seq.Seq(''.join(random.choice(Sequence.amino_acids) for i in range(self.size)))
return seq
def calculate_gc(self):
"""Calculates the GC percent in sequence.
:return: Float number - GC percent.
"""
if self.s_type == 'PROTEIN':
raise TypeError('GC are not in {} sequence'.format(self.s_type))
return SeqUtils.GC(self.seq)
def transcribe(self):
"""Transcribes to RNA sequence if sequence is type D (DNA).
:return: Seq object of type RNA.
"""
if self.s_type != 'DNA':
raise TypeError('Sequence type {} can not be transcribed.'.format(self.s_type))
return Seq.Seq.transcribe(self.seq)
def translate(self):
"""Translates to Protein sequence if sequence type is R (RNA).
:return: Seq object of type Protein.
"""
if self.s_type != 'RNA':
raise TypeError('Sequence type {} can not be translated.'.format(self.s_type))
return Seq.Seq.translate(self.seq)
def reversed_transcription(self):
"""Given the seq of type RNA transcribes it to DNA.
:return: Seq object of type DNA.
"""
if self.s_type != 'RNA':
raise TypeError('Sequence type {} can not be transcribed in reverse.'.format(self.s_type))
return Seq.back_transcribe(self.seq)
def get_complement(self):
"""Creates iterator of all bases in complement sequence.
:return: Complement sequence iterator.
"""
return Seq.reverse_complement(self.seq)
def get_sequence_elems(self):
"""Creates iterator of all bases in sequence.
:return: Sequence iterator.
"""
for base in self.seq:
yield base
def get_complement_elems(self):
"""Gives the complement strand of sequence.
:return: Complement Seq iterator.
"""
for base in Seq.reverse_complement(self.seq):
yield base
def save_to_fasta(self, fn=None, description='None'):
"""Saves sequence to file in fasta format.
:param fn: Filename.
:param description: Record description
:return: None
"""
if fn is None:
fn = '{}.fasta'.format(self.record.id)
self.record.description = description
try:
with open(fn, 'w') as fl:
SeqIO.write(self.record, handle=fl, format='fasta')
fl.close()
except OSError as exc:
print(exc)
else:
print('File {} saved!'.format(fn))
def read_from_fasta(self, fn=None):
"""Reads SeqRecord from file.
If given file doesn't exists, the method takes first file in current directory.
:param fn: Filename of fasta file.
:return: True if file was loaded, else False
"""
if fn is None:
for fn in os.listdir(os.curdir):
if not fn.endswith('.fasta'):
continue
with open(fn, 'r') as fl:
self.record = SeqIO.read(fl, 'fasta')
self.seq = self.record.seq
self.id = self.record.id
fl.close()
print('File {} loaded!'.format(fn))
return True
else:
self.record = SeqIO.read(fn, 'fasta')
self.seq = self.record.seq
self.id = self.record.id
print('File {} loaded!'.format(fn))
return True
return False
def blast_search(self, fn=None, dot_plot=False, window=8):
"""Makes a blast search.
:param fn: File in which results will be saved.
:param dot_plot: True/False - show dot plot of two sequences or not.
:param window: Threshold, for example (5)
:return: None
"""
if self.s_type == 'DNA' or self.s_type == 'RNA':
try:
print('Task running...')
income = NCBIWWW.qblast('blastn', 'nt', self.record.format('fasta'))
except ValueError:
income = None
else:
try:
print('Task running...')
income = NCBIWWW.qblast('blastp', 'pdb', self.record.format('fasta'))
print('Got results!')
except ValueError:
income = None
if income is not None:
if fn is None:
with open('results/{}_blast_results.xml'.format(self.id), 'w') as of:
of.write(income.read())
of.close()
else:
with open(fn, 'w') as of:
of.write(income.read())
of.close()
result = NCBIXML.read(income)
align = result.alignment[0]
print(align.title)
print(align.lenght)
print(align.hsps[0].expect)
print(align.hsps[0].query[0:70] + '...')
print(align.hsps[0].match[0:70] + '...')
print(align.hsps[0].sbjct[0:70] + '...')
if dot_plot:
seq_one = str(align.hsps[0].query).upper()
seq_two = str(align.hsps[0].match).upper()
data = [[(seq_one[i:i + window] != seq_two[j:j + window]) for j in range(len(seq_one) - window)] for i
in range(len(seq_two) - window)]
pylab.gray()
pylab.imshow(data)
pylab.xlabel('{} (length {} bp)'.format(align.hsps[0].query, len(align.hsps[0].query)))
pylab.ylabel('{} (length {} bp)'.format(align.hsps[0].match, len(align.hsps[0].match)))
pylab.title('Dot plot using window size {}\n(allowing no mis-matches)'.format(window))
pylab.show()
else:
raise ValueError("No sequence found!") | 0.585931 | 0.25247 |
import kivy
from kivy.app import App
from kivy.config import Config
kivy.require("1.10.0")
Config.read("mitc.ini")
from kivy.core.window import Window
from kivy.uix.textinput import TextInput
from kivy.uix.button import Button
from kivy.uix.stacklayout import StackLayout
INPUT_REFERENCE = 0
class Input(TextInput):
    """Read-only text display of the calculator.

    pontuation  : guards re-entrancy while the text is being re-punctuated
    text_float  : numeric value used by the math operations
    first_float : True until the first number in the input has been parsed
    """
    pontuation = False
    first_float = True
    text_float = 0

    def __init__(self, **kwargs):
        """Configure the display widget and publish the global reference."""
        super(Input, self).__init__(**kwargs)
        global INPUT_REFERENCE
        INPUT_REFERENCE = self
        self.size_hint = (1,0.25)
        # Vertical padding that centers the single text line in the widget.
        padding_height = (self.height - self.line_height)/2
        self.multiline = False
        self.font_size = "32sp"
        self.text = ""
        self.background_normal = ""
        self.background_active = ""
        self.background_color = (1, 1, 1, 1)
        self.cursor_color = (0, 0, 0, 1)
        self.padding = (5, padding_height)
        self.readonly = True

    def on_text(self, instance, text):
        """Handle every change of the input text: parse the numeric value
        and re-insert thousands separators ('.') using ',' as the decimal
        mark (Brazilian number formatting).
        """
        # Parse the displayed text into text_float.
        if self.first_float and instance.text != "":
            self.text_float = float(instance.text)
            self.first_float = False
        elif not self.first_float and instance.text != "":
            # Strip thousands dots; convert the decimal comma to a dot.
            self.text_float = float(instance.text.replace(".", "").replace(",", "."))
        else:
            self.text_float = float("0")
        # Re-punctuate unless this call was triggered by our own update.
        if not self.pontuation:
            text = str(self.text_float).replace(".", ",")
            if len(text) > 3:
                pre_text = ""
                count = 0
                before_virg = text.find(",")
                # Walk the integer part right-to-left, inserting a dot
                # after every third digit.
                for number in list(text)[:before_virg][::-1]:
                    pre_text += number
                    count += 1
                    if count == 3:
                        pre_text += "."
                        count = 0
                # Assigning instance.text below re-triggers on_text; this
                # flag makes the nested call skip re-punctuation.
                self.pontuation = True
                if text[before_virg:] != ",0":
                    pre_text = "{}{}".format(pre_text[::-1], text[before_virg:])
                else:
                    pre_text = pre_text[::-1]
                instance.text = pre_text[1:] if pre_text[0] == "." else pre_text
                self.pontuation = False
class CalcButton(Button):
    """Base class for every calculator button (shared look and feel)."""

    def __init__(self, **kwargs):
        super(CalcButton, self).__init__(**kwargs)
        self.background_normal = ""
        self.size_hint_y = 0.15
        self.font_size = "27sp"
        self.background_color = (41/255, 128/255, 185/255,1.0)
        self.background_down = "images/texture_press.png"
class Main_layout(StackLayout):
    """Main calculator layout; assembles the whole basic widget structure.

    buttons_instances       : dict of every button instance in use
    key_operation_reference : maps a button label to its keyboard shortcut
    class_operation         : instance of the class performing the operations
    """
    buttons_instances = {}
    # label -> (key name or raw keycode, required modifier name).
    key_operation_reference = {
        "C" : ("backspace", ""),
        "+" : ("=", "shift"),
        "-" : ("-", ""),
        "*" : ("8", "shift"),
        "÷" : ("q", "alt"),
        "%" : ("5", "shift"),
        "^" : (1073741824, "shift"),
        "=" : ("enter", "")
    }

    def __init__(self, **kwargs):
        """Build display, operation buttons and the numeric pad."""
        super(Main_layout, self).__init__(**kwargs)
        my_keyboard = Window.request_keyboard(None, None)
        my_keyboard.bind(on_key_down=self.on_key_down)
        self.add_widget(Input())
        self.class_operation = OperationsFunctions()
        # Top row of operation buttons.
        operations_button = {
            "C": self.class_operation.clean,
            "^" : self.class_operation.potentiation,
            "%" : self.class_operation.percentage,
            "+" : self.class_operation.sum
        }
        self.add_buttons(operations_button, orientation="horizontal")
        # Numeric pad, laid out right-to-left / top-to-bottom.
        alfa_numbers_layout = StackLayout(orientation="rl-tb")
        alfa_numbers_layout.size_hint_x = 0.75
        for number in range(9, 0, -1):
            btn_alfa = CalcButton()
            btn_alfa.size_hint_x = 1/3
            btn_alfa.text = str(number)
            self.buttons_instances[btn_alfa.text] = btn_alfa
            btn_alfa.bind(on_press=self.numbers_insert)
            alfa_numbers_layout.add_widget(btn_alfa)
        alfa_virg = CalcButton(
            text = ",",
            size_hint_x=1/3,
            on_press=self.numbers_insert)
        self.buttons_instances[","] = alfa_virg
        alfa_numbers_layout.add_widget(alfa_virg)
        alfa_zero = CalcButton(
            text = "0",
            size_hint_x=2/3,
            on_press=self.numbers_insert)
        self.buttons_instances["0"] = alfa_zero
        alfa_numbers_layout.add_widget(alfa_zero)
        self.add_widget(alfa_numbers_layout)
        # Right-hand column of operation buttons.
        operations_button = {
            "-": self.class_operation.decrease,
            "*": self.class_operation.multiplication,
            "÷": self.class_operation.division,
            "=": self.class_operation.equal
        }
        self.add_buttons(operations_button, orientation="vertical")

    def add_buttons(self, operations_dict={}, orientation="horizontal"):
        """Add several buttons at once, in the requested orientation.

        Horizontal buttons are added straight to this layout; vertical
        buttons go into a dedicated column StackLayout.
        NOTE(review): mutable default {} is shared across calls; left
        as-is because every caller passes the dict explicitly.
        """
        if operations_dict != {} and orientation == "horizontal":
            layout = False
            width_button = 1/len(operations_dict)
        elif orientation == "vertical":
            layout = StackLayout()
            layout.size_hint_x = 0.25
            width_button = 1
        for text_put, function in operations_dict.items():
            button = CalcButton(text=str(text_put))
            self.buttons_instances[button.text] = button
            button.bind(on_press=function)
            button.size_hint_x = width_button
            if layout != False: layout.add_widget(button)
            else : self.add_widget(button)
        else:
            # for/else: once every button is added, attach the column
            # layout itself (vertical mode only).
            if layout != False: self.add_widget(layout)

    def numbers_insert(self, button_instance):
        """Insert the pressed numeric button's label into the input.

        button_instance: the button that was pressed
        """
        self.class_operation.insert_anything(button_instance.text)

    def on_key_down(self, instance, keycode, text, modifiers):
        """Map pressed keyboard keys to their calculator buttons.

        instance  : keyboard instance
        keycode   : (ansii_key, key_name)
        text      : text of the key
        modifiers : active modifier keys
        """
        # Ensure modifiers[0] exists even when no modifier key is held.
        modifiers.append("")
        for btn_text, key in self.key_operation_reference.items():
            if key[0] in keycode and modifiers[0].find(key[1]) != -1:
                self.buttons_instances[btn_text].trigger_action(duration=0.15)
                return True
        # Plain digit / comma keys map 1:1 onto button labels.
        if keycode[1] in self.buttons_instances:
            self.buttons_instances[keycode[1]].trigger_action(duration=0.15)
        self.class_operation.local_input.do_cursor_movement("cursor_end")
class OperationsFunctions:
    """All math operations available in the calculator.

    newest_operation : the most recently requested operation (bound method)
    local_input      : local reference to the Input widget
    ans              : pending left-hand operand (empty list when none)
    """
    newest_operation = None

    def __init__(self):
        global INPUT_REFERENCE
        self.local_input = INPUT_REFERENCE
        self.ans = []

    def clean(self, *args):
        """Clear the input display."""
        self.local_input.text = ""

    def generic_operation(self, operation_newest):
        """Shared two-step dispatcher between a request and its operation.

        First call: stash the current value and the operation, clear input.
        Second call: apply the stashed operation to (stashed, current) and
        show the result.

        operation_newest : bound method of the operation that called this
        """
        if self.ans == []:
            self.ans.append(self.local_input.text_float)
            self.newest_operation = operation_newest
            self.clean()
        else:
            self.local_input.first_float = True
            self.local_input.text = self.newest_operation(
                self.ans[0],
                self.local_input.text_float)
            self.ans = []

    def potentiation(self, *args):
        # Called with a button instance -> dispatch; with floats -> compute.
        if type(args[0]) != float:
            self.generic_operation(self.potentiation)
        else:
            return str(args[0] ** args[1])

    def percentage(self, *args):
        if type(args[0]) != float:
            self.generic_operation(self.percentage)
        else:
            # NOTE(review): floor division truncates the result — confirm
            # that '/' was not intended here.
            return str((args[0] * args[1]) // 100)

    def sum(self, *args):
        if type(args[0]) != float:
            self.generic_operation(self.sum)
        else:
            return str(args[0] + args[1])

    def decrease(self, *args):
        if type(args[0]) != float:
            self.generic_operation(self.decrease)
        else:
            return str(args[0] - args[1])

    def multiplication(self, *args):
        if type(args[0]) != float:
            self.generic_operation(self.multiplication)
        else:
            return str(args[0] * args[1])

    def division(self, *args):
        if type(args[0]) != float:
            self.generic_operation(self.division)
        else:
            return str(args[0] / args[1])

    def equal(self, *args):
        # Re-invoke the pending operation; None is a non-float sentinel that
        # routes it through generic_operation's "second call" branch.
        if self.newest_operation != None: self.newest_operation(None)

    def insert_anything(self, text):
        """Insert digits at the correct position in the display.

        text: text to insert
        """
        if self.local_input.text.find("e") != -1:
            # Scientific notation: insert just before the decimal comma.
            virg = self.local_input.text.find(",")
            self.local_input.text = self.local_input.text[:virg] + text + self.local_input.text[virg:]
        else:
            self.local_input.text += text
class MitC(App):
    """Kivy application entry point for the calculator."""

    def build(self):
        # Root widget of the app; also sets the window icon.
        self.icon = "images/icon.png"
        return Main_layout()
if __name__ == "__main__":
MitC().run() | src/MitC.py | import kivy
from kivy.app import App
from kivy.config import Config
kivy.require("1.10.0")
Config.read("mitc.ini")
from kivy.core.window import Window
from kivy.uix.textinput import TextInput
from kivy.uix.button import Button
from kivy.uix.stacklayout import StackLayout
INPUT_REFERENCE = 0
class Input(TextInput):
"""
Class responsável pelo Input e suas caracteristicas.
pontuation : sinaliza se o texto já foi pontuado ou não
text_float : variável usada nas operações matemáticas
allowed_characters : caracteres permitidos no software
first_float : boolean que sinaliza se o número do input já foi tratado
"""
pontuation = False
first_float = True
text_float = 0
def __init__(self, **kwargs):
super(Input, self).__init__(**kwargs)
global INPUT_REFERENCE
INPUT_REFERENCE = self
self.size_hint = (1,0.25)
padding_height = (self.height - self.line_height)/2
self.multiline = False
self.font_size = "32sp"
self.text = ""
self.background_normal = ""
self.background_active = ""
self.background_color = (1, 1, 1, 1)
self.cursor_color = (0, 0, 0, 1)
self.padding = (5, padding_height)
self.readonly = True
def on_text(self, instance, text):
"""
Evento responsável por qualquer modificação do texto do input, além
de ser responsável pelas pontuações e verificações
"""
if self.first_float and instance.text != "":
self.text_float = float(instance.text)
self.first_float = False
elif not self.first_float and instance.text != "":
self.text_float = float(instance.text.replace(".", "").replace(",", "."))
else:
self.text_float = float("0")
if not self.pontuation:
text = str(self.text_float).replace(".", ",")
if len(text) > 3:
pre_text = ""
count = 0
before_virg = text.find(",")
for number in list(text)[:before_virg][::-1]:
pre_text += number
count += 1
if count == 3:
pre_text += "."
count = 0
self.pontuation = True
if text[before_virg:] != ",0":
pre_text = "{}{}".format(pre_text[::-1], text[before_virg:])
else:
pre_text = pre_text[::-1]
instance.text = pre_text[1:] if pre_text[0] == "." else pre_text
self.pontuation = False
class CalcButton(Button):
"""
Classe padrão para todos os botões da calculadora
"""
def __init__(self, **kwargs):
super(CalcButton, self).__init__(**kwargs)
self.background_normal = ""
self.size_hint_y = 0.15
self.font_size = "27sp"
self.background_color = (41/255, 128/255, 185/255,1.0)
self.background_down = "images/texture_press.png"
class Main_layout(StackLayout):
"""
Layout principal da calculadora, organiza toda a estrutura básica
da calculadora
buttons_instances : dicionário de todas as instâncias dos botões usados
key_operation_reference : dicionário que relaciona um botão a uma tecla
class_operation : instância da classe que irá tratar as operações
"""
buttons_instances = {}
key_operation_reference = {
"C" : ("backspace", ""),
"+" : ("=", "shift"),
"-" : ("-", ""),
"*" : ("8", "shift"),
"÷" : ("q", "alt"),
"%" : ("5", "shift"),
"^" : (1073741824, "shift"),
"=" : ("enter", "")
}
def __init__(self, **kwargs):
super(Main_layout, self).__init__(**kwargs)
my_keyboard = Window.request_keyboard(None, None)
my_keyboard.bind(on_key_down=self.on_key_down)
self.add_widget(Input())
self.class_operation = OperationsFunctions()
operations_button = {
"C": self.class_operation.clean,
"^" : self.class_operation.potentiation,
"%" : self.class_operation.percentage,
"+" : self.class_operation.sum
}
self.add_buttons(operations_button, orientation="horizontal")
alfa_numbers_layout = StackLayout(orientation="rl-tb")
alfa_numbers_layout.size_hint_x = 0.75
for number in range(9, 0, -1):
btn_alfa = CalcButton()
btn_alfa.size_hint_x = 1/3
btn_alfa.text = str(number)
self.buttons_instances[btn_alfa.text] = btn_alfa
btn_alfa.bind(on_press=self.numbers_insert)
alfa_numbers_layout.add_widget(btn_alfa)
alfa_virg = CalcButton(
text = ",",
size_hint_x=1/3,
on_press=self.numbers_insert)
self.buttons_instances[","] = alfa_virg
alfa_numbers_layout.add_widget(alfa_virg)
alfa_zero = CalcButton(
text = "0",
size_hint_x=2/3,
on_press=self.numbers_insert)
self.buttons_instances["0"] = alfa_zero
alfa_numbers_layout.add_widget(alfa_zero)
self.add_widget(alfa_numbers_layout)
operations_button = {
"-": self.class_operation.decrease,
"*": self.class_operation.multiplication,
"÷": self.class_operation.division,
"=": self.class_operation.equal
}
self.add_buttons(operations_button, orientation="vertical")
def add_buttons(self, operations_dict={}, orientation="horizontal"):
"""
Metódo que adiciona vários botões de uma vez, baseado na orientação desejada
"""
if operations_dict != {} and orientation == "horizontal":
layout = False
width_button = 1/len(operations_dict)
elif orientation == "vertical":
layout = StackLayout()
layout.size_hint_x = 0.25
width_button = 1
for text_put, function in operations_dict.items():
button = CalcButton(text=str(text_put))
self.buttons_instances[button.text] = button
button.bind(on_press=function)
button.size_hint_x = width_button
if layout != False: layout.add_widget(button)
else : self.add_widget(button)
else:
if layout != False: self.add_widget(layout)
    def numbers_insert(self, button_instance):
        """
        Append the value of a numeric button to the calculator input.
        button_instance: instance of the pressed button (its ``text`` is inserted)
        """
        self.class_operation.insert_anything(button_instance.text)
def on_key_down(self, instance, keycode, text, modifiers):
"""
Evento responsável pela checagem das teclas pressionadas,
relacionando-as seus respectivos botões
instance : instância do teclado
keycode : (ansii_key, key)
text : text_key
modifiers : sub_keys
"""
modifiers.append("")
for btn_text, key in self.key_operation_reference.items():
if key[0] in keycode and modifiers[0].find(key[1]) != -1:
self.buttons_instances[btn_text].trigger_action(duration=0.15)
return True
if keycode[1] in self.buttons_instances:
self.buttons_instances[keycode[1]].trigger_action(duration=0.15)
self.class_operation.local_input.do_cursor_movement("cursor_end")
class OperationsFunctions:
    """All arithmetic operations available to the calculator.

    newest_operation : the pending operation awaiting its second operand
    local_input      : reference to the calculator's input widget
    ans              : one-element list holding the first operand, [] if none
    """
    # Class-level default; a pending operation is stored on the instance by
    # ``generic_operation`` once the first operand is entered.
    newest_operation = None

    def __init__(self):
        global INPUT_REFERENCE
        self.local_input = INPUT_REFERENCE
        self.ans = []

    def clean(self, *args):
        """Clear the input widget."""
        self.local_input.text = ""

    def generic_operation(self, operation_newest):
        """Two-phase dispatcher shared by every binary operation.

        First call: remember the current input as the left operand and
        stash the requested operation.  Second call: apply the stashed
        operation to (stored operand, current input) and show the result.

        operation_newest : bound method of the operation that requested this
        """
        if self.ans == []:
            self.ans.append(self.local_input.text_float)
            self.newest_operation = operation_newest
            self.clean()
        else:
            self.local_input.first_float = True
            self.local_input.text = self.newest_operation(
                self.ans[0],
                self.local_input.text_float)
            self.ans = []

    # Each operation below is called two ways: from a button press (args[0]
    # is a widget, so it defers to ``generic_operation``) and from
    # ``generic_operation`` itself with two floats, returning the result
    # string.  ``isinstance`` replaces the old ``type(x) != float`` checks.
    def potentiation(self, *args):
        if not isinstance(args[0], float):
            self.generic_operation(self.potentiation)
        else:
            return str(args[0] ** args[1])

    def percentage(self, *args):
        if not isinstance(args[0], float):
            self.generic_operation(self.percentage)
        else:
            # NOTE(review): ``//`` floors the result, so e.g. 7% of 50 shows
            # 3.0 instead of 3.5 — confirm whether flooring is intended.
            return str((args[0] * args[1]) // 100)

    def sum(self, *args):
        if not isinstance(args[0], float):
            self.generic_operation(self.sum)
        else:
            return str(args[0] + args[1])

    def decrease(self, *args):
        if not isinstance(args[0], float):
            self.generic_operation(self.decrease)
        else:
            return str(args[0] - args[1])

    def multiplication(self, *args):
        if not isinstance(args[0], float):
            self.generic_operation(self.multiplication)
        else:
            return str(args[0] * args[1])

    def division(self, *args):
        if not isinstance(args[0], float):
            self.generic_operation(self.division)
        else:
            return str(args[0] / args[1])

    def equal(self, *args):
        """Resolve the pending operation, if any (the "=" button)."""
        if self.newest_operation is not None:
            self.newest_operation(None)

    def insert_anything(self, text):
        """Insert ``text`` at the correct position of the input.

        When the display is in scientific notation ("e" present) the digit
        is inserted just before the comma; otherwise it is appended.
        """
        if self.local_input.text.find("e") != -1:
            virg = self.local_input.text.find(",")
            self.local_input.text = self.local_input.text[:virg] + text + self.local_input.text[virg:]
        else:
            self.local_input.text += text
class MitC(App):
    """Kivy application entry point for the calculator."""
    def build(self):
        # Set the window icon, then return the root widget.
        self.icon = "images/icon.png"
        return Main_layout()
if __name__ == "__main__":
MitC().run() | 0.411347 | 0.1692 |
import numpy as np
import torch
import torch.nn as nn
from gnn_cnn_model.modules import *
class MultiHeadAttention(nn.Module):
    """Multi-head attention: project, attend per head, merge, add & norm.

    n_head  : number of attention heads
    d_model : model (embedding) dimension of inputs and outputs
    d_k     : per-head query/key dimension
    d_v     : per-head value dimension
    """

    def __init__(self, n_head, d_model, d_k, d_v, dropout=0.1):
        super(MultiHeadAttention, self).__init__()
        self.n_head = n_head
        self.d_k = d_k
        self.d_v = d_v
        # All heads' projections are fused into single linear layers.
        self.w_qs = nn.Linear(d_model, n_head * d_k, bias=False)
        self.w_ks = nn.Linear(d_model, n_head * d_k, bias=False)
        self.w_vs = nn.Linear(d_model, n_head * d_v, bias=False)
        self.fc = nn.Linear(n_head * d_v, d_model, bias=False)
        # Scaled dot-product attention with temperature sqrt(d_k).
        self.attention = ScaledDotProductAttention(temperature=d_k ** 0.5)
        self.dropout = nn.Dropout(dropout)
        self.layer_norm = nn.LayerNorm(d_model, eps=1e-6)

    def forward(self, q, k, v):
        """Return (output, attention weights); output is b x lq x d_model."""
        b = q.size(0)
        lq, lk, lv = q.size(1), k.size(1), v.size(1)
        shortcut = q
        # b x len x (head*dim)  ->  b x head x len x dim
        heads_q = self.w_qs(q).view(b, lq, self.n_head, self.d_k).transpose(1, 2)
        heads_k = self.w_ks(k).view(b, lk, self.n_head, self.d_k).transpose(1, 2)
        heads_v = self.w_vs(v).view(b, lv, self.n_head, self.d_v).transpose(1, 2)
        out, attn = self.attention(heads_q, heads_k, heads_v)
        # b x head x len x dim  ->  b x len x (head*dim)
        out = out.transpose(1, 2).contiguous().view(b, lq, -1)
        out = self.dropout(self.fc(out))
        out = out + shortcut  # residual connection
        out = self.layer_norm(out)
        return out, attn
class PositionwiseFeedForward(nn.Module):
    ''' A two-feed-forward-layer module (applied identically at every position). '''
    def __init__(self, d_in, d_hid, dropout=0.1):
        super().__init__()
        self.w_1 = nn.Linear(d_in, d_hid) # position-wise
        self.w_2 = nn.Linear(d_hid, d_in) # position-wise
        self.layer_norm = nn.LayerNorm(d_in, eps=1e-6)
        self.dropout = nn.Dropout(dropout)
    def forward(self, x):
        # out = LayerNorm(x + Dropout(W2(relu(W1(x)))))
        residual = x
        # NOTE(review): ``F`` is not among this file's visible imports —
        # presumably re-exported by ``gnn_cnn_model.modules``; confirm.
        x = self.w_2(F.relu(self.w_1(x)))
        x = self.dropout(x)
        x += residual
        x = self.layer_norm(x)
return x | gnn_cnn_model/sublayers.py | import numpy as np
import torch
import torch.nn as nn
from gnn_cnn_model.modules import *
class MultiHeadAttention(nn.Module):
    """Standard multi-head attention with residual connection and LayerNorm.

    n_head  : number of attention heads
    d_model : model (embedding) dimension of inputs and outputs
    d_k     : per-head query/key dimension
    d_v     : per-head value dimension
    """
    def __init__(self, n_head, d_model, d_k, d_v, dropout=0.1):
        super(MultiHeadAttention, self).__init__()
        self.n_head = n_head
        self.d_k = d_k
        self.d_v = d_v
        # All heads' projections are fused into single linear layers.
        self.w_qs = nn.Linear(d_model, n_head*d_k, bias=False)
        self.w_ks = nn.Linear(d_model, n_head*d_k, bias=False)
        self.w_vs = nn.Linear(d_model, n_head*d_v, bias=False)
        self.fc = nn.Linear(n_head*d_v, d_model, bias=False)
        # Scaled dot-product attention with temperature sqrt(d_k).
        self.attention = ScaledDotProductAttention(temperature=d_k ** 0.5)
        self.dropout = nn.Dropout(dropout)
        self.layer_norm = nn.LayerNorm(d_model, eps=1e-6)
    def forward(self, q, k, v):
        """Return (output, attention weights); output is b x lq x d_model."""
        d_k, d_v, n_head = self.d_k, self.d_v, self.n_head
        batch_size, len_q, len_k, len_v = q.size(0), q.size(1), k.size(1), v.size(1)
        residual = q
        # Pass through the pre-attention projection: b x lq x (n*dv)
        # Separate different heads: b x lq x n x dv
        q = self.w_qs(q).view(batch_size, len_q, n_head, d_k)
        k = self.w_ks(k).view(batch_size, len_k, n_head, d_k)
        v = self.w_vs(v).view(batch_size, len_v, n_head, d_v)
        # Transpose for attention dot product: b x n x lq x dv
        q, k, v = q.transpose(1, 2), k.transpose(1, 2), v.transpose(1, 2)
        # query x key x value
        q, attn = self.attention(q, k, v)
        # Transpose to move the head dimension back: b x lq x n x dv
        # Combine the last two dimensions to concatenate all the heads together: b x lq x (n*dv)
        q = q.transpose(1, 2).contiguous().view(batch_size, len_q, -1)
        q = self.dropout(self.fc(q))
        q += residual
        q = self.layer_norm(q)
        return q, attn
class PositionwiseFeedForward(nn.Module):
    ''' A two-feed-forward-layer module (applied identically at every position). '''
    def __init__(self, d_in, d_hid, dropout=0.1):
        super().__init__()
        self.w_1 = nn.Linear(d_in, d_hid) # position-wise
        self.w_2 = nn.Linear(d_hid, d_in) # position-wise
        self.layer_norm = nn.LayerNorm(d_in, eps=1e-6)
        self.dropout = nn.Dropout(dropout)
    def forward(self, x):
        # out = LayerNorm(x + Dropout(W2(relu(W1(x)))))
        residual = x
        # NOTE(review): ``F`` is not among this file's visible imports —
        # presumably re-exported by ``gnn_cnn_model.modules``; confirm.
        x = self.w_2(F.relu(self.w_1(x)))
        x = self.dropout(x)
        x += residual
        x = self.layer_norm(x)
return x | 0.941196 | 0.328375 |
import struct
import re
import zipfile
import os
import logging
from io import BytesIO
from collections import OrderedDict
from urllib.parse import urlparse
import requests
from ckan.lib import uploader, formatters
log = logging.getLogger(__name__)
ALLOWED_FMTS = ('zip', 'application/zip', 'application/x-zip-compressed')
def get_zip_list(rsc):
    """Return the ZipInfo list for a resource's zip archive.

    For uploaded resources the local file is tried first; on failure (or an
    empty listing) we fall back to fetching the archive over HTTP.  Remote
    resources always go through :func:`get_ziplist_from_url`.

    rsc: CKAN resource dict.
    Returns a list of ``zipfile.ZipInfo`` or None when the listing fails.
    """
    if rsc.get('url_type') != 'upload':
        return get_ziplist_from_url(rsc.get('url'))
    upload = uploader.ResourceUpload(rsc)
    value = None
    try:
        zf = zipfile.ZipFile(upload.get_path(rsc['id']), 'r')
        value = zf.filelist
    except Exception:
        # Best effort: a missing/corrupt local file is not fatal — we retry
        # over HTTP below.  Log instead of hiding the failure completely.
        log.debug('Could not read local zip for resource %s', rsc.get('id'))
    if value:
        return value
    # Fall back to the uploader's public URL for this file.
    upload = uploader.get_resource_uploader(rsc)
    url = urlparse(rsc['url'])
    filename = os.path.basename(url.path)
    URL = upload.get_url_from_filename(rsc['id'], filename, '')
    return get_ziplist_from_url(URL)
def get_ziplist_from_url(url):
    """List a remote zip's contents without downloading the whole file.

    Fast path: read only the last 64 KiB (which normally contains the
    central directory) via an HTTP Range request.  If the server reports no
    size, or the range trick fails, fall back to walking the local file
    headers from the start of the archive.

    Returns a list of ``zipfile.ZipInfo`` or None on failure.
    """
    try:
        head = requests.head(url)
        end = None
        if 'content-length' in head.headers:
            end = int(head.headers['content-length'])
        if 'content-range' in head.headers:
            end = int(head.headers['content-range'].split("/")[1])
        # The old code left ``end`` unbound here and relied on the resulting
        # NameError to reach the fallback; make that explicit instead.
        if end is not None:
            return _get_list(url, end - 65536, end)
    except Exception:
        pass
    try:
        return _get_list_advanced(url)
    except Exception:
        return
def _get_list(url, start, end):
    """Fetch bytes [start, end] of ``url`` and parse them as a zip archive."""
    byte_range = 'bytes={}-{}'.format(start, end)
    resp = requests.get(url, headers={'Range': byte_range})
    return zipfile.ZipFile(BytesIO(resp.content)).filelist
def _get_list_advanced(url):
    """Walk a remote zip's local file headers via HTTP Range requests.

    Reads each 30-byte local file header, extracts name and sizes, then
    skips ahead by the compressed length to the next entry.  Stops at the
    first non-local-header signature (e.g. the central directory).
    https://superuser.com/questions/981301/is-there-a-way-to-download-parts-of-the-content-of-a-zip-file

    NOTE(review): entries written with data descriptors (compressed size 0
    in the local header) are not handled, same as the original code.
    """
    offset = 0
    file_list = []
    while True:
        # Stream the response so we can read exact byte counts.  The old
        # code called ``.read()`` on a requests.Response (no such method)
        # and compared bytes against a str literal, which is always False
        # on Python 3.
        resp = requests.get(
            url, headers={'Range': 'bytes={}-'.format(offset)}, stream=True)
        fp = resp.raw
        header = fp.read(30)
        if header[:4] != b'PK\x03\x04':
            resp.close()
            break
        # Local file header layout: sizes at bytes 18..26, name/extra
        # lengths at bytes 26..30.
        compressed_len, uncompressed_len = struct.unpack('<II', header[18:26])
        filename_len, extra_len = struct.unpack('<HH', header[26:30])
        header_len = 30 + filename_len + extra_len
        total_len = header_len + compressed_len
        filename = fp.read(filename_len).decode('utf-8', 'replace')
        resp.close()
        zi = zipfile.ZipInfo(filename)
        zi.file_size = uncompressed_len
        file_list.append(zi)
        # Skip over the compressed payload to the next local header.
        offset += total_len
    return file_list
def _open_remote_zip(url, offset=0):
    # Issues a ranged GET starting at ``offset``.
    # NOTE(review): this returns a requests.Response, not a file-like
    # object; callers that expect ``.read()`` will fail.  A streaming
    # request's ``.raw`` attribute would provide one — confirm usage.
    return requests.get(url, headers={'Range': 'bytes={}-'.format(offset)})
def get_zip_tree(rsc):
    """Group a resource's zip listing into top-level entries with children.

    Returns a values view of node dicts in archive order, or None when the
    archive could not be listed.
    """
    zip_list = get_zip_list(rsc)
    if not zip_list:
        return
    tree = OrderedDict()
    for compressed_file in zip_list:
        name = compressed_file.filename
        if "/" not in name:
            # Plain top-level file.
            tree[name] = _prepare_file_data(compressed_file)
            continue
        parts = name.split("/")
        if parts[-1] == "":
            # Directory entry itself — nothing to record.
            continue
        parent_filename = '/'.join(parts[:-1])
        if parent_filename not in tree:
            tree[parent_filename] = _prepare_parent_data(parent_filename)
        tree[parent_filename]['children'].append(
            _prepare_child_data(compressed_file))
    return tree.values()
def _prepare_file_data(zip_info):
    """Tree node for a top-level file entry of the archive."""
    name = zip_info.filename
    return {
        "title": name,
        "file_size": formatters.localised_filesize(zip_info.file_size),
        "children": [],
        "icon": _get_file_icon(name),
    }
def _prepare_child_data(zip_info):
    """Tree node for a file nested inside a folder (non-ASCII stripped).

    Stripping non-ASCII before splitting is equivalent to the reverse
    order, since "/" is ASCII and therefore preserved by the substitution.
    """
    ascii_name = re.sub(r'[^\x00-\x7f]', r'', zip_info.filename)
    return {
        "title": ascii_name.split("/")[-1],
        "file_size": formatters.localised_filesize(zip_info.file_size),
        "children": [],
        "icon": _get_file_icon(ascii_name),
    }
def _prepare_parent_data(file_name):
return {
"title": file_name,
"children": [],
"icon": 'folder-open'
}
def _get_file_icon(item):
"""returns icon class based on file format"""
extension = item.split('.')[-1].lower()
if extension in ['xml', 'txt', 'json']:
return "file-text"
if extension in ['csv', 'xls']:
return "bar-chart-o"
if extension in ['shp', 'geojson', 'kml', 'kmz']:
return "globe"
return "file"
def is_resource_supported(res):
    """Check if resource format is in allowed formats"""
    # Prefer the declared format; fall back to the URL's file extension.
    res_fmt = res.get('format', '').lower()
    if not res_fmt:
        splitted_url = os.path.splitext(res['url'])
        res_fmt = splitted_url[1][1:].lower()
return True if res_fmt in ALLOWED_FMTS else False | ckanext/zippreview/utils.py | import struct
import re
import zipfile
import os
import logging
from io import BytesIO
from collections import OrderedDict
from urllib.parse import urlparse
import requests
from ckan.lib import uploader, formatters
log = logging.getLogger(__name__)
ALLOWED_FMTS = ('zip', 'application/zip', 'application/x-zip-compressed')
def get_zip_list(rsc):
    """Return the ZipInfo list for a resource's zip archive, or None.

    Uploaded resources are read from disk first, falling back to HTTP;
    remote resources are always listed via ``get_ziplist_from_url``.
    """
    if rsc.get('url_type') == 'upload':
        upload = uploader.ResourceUpload(rsc)
        value = None
        try:
            zf = zipfile.ZipFile(upload.get_path(rsc['id']), 'r')
            value = zf.filelist
        except Exception:
            # Best effort: if the local file is unreadable we fall back to
            # fetching the archive over HTTP below.
            pass
        if value:
            return value
        upload = uploader.get_resource_uploader(rsc)
        url = urlparse(rsc['url'])
        filename = os.path.basename(url.path)
        URL = upload.get_url_from_filename(rsc['id'], filename, '')
        return get_ziplist_from_url(URL)
    else:
        return get_ziplist_from_url(rsc.get('url'))
    return  # NOTE(review): unreachable — both branches above return.
def get_ziplist_from_url(url):
    """List a remote zip via a ranged read of its last 64 KiB.

    Falls back to ``_get_list_advanced`` when the fast path fails.
    Returns a list of ``zipfile.ZipInfo`` or None on failure.
    """
    try:
        head = requests.head(url)
        if 'content-length' in head.headers:
            end = int(head.headers['content-length'])
        if 'content-range' in head.headers:
            end = int(head.headers['content-range'].split("/")[1])
        # NOTE(review): when neither header is present ``end`` is unbound
        # and the resulting NameError is (ab)used to reach the fallback.
        return _get_list(url, end-65536, end)
    except Exception:
        pass
    try:
        return _get_list_advanced(url)
    except Exception:
        return
def _get_list(url, start, end):
    """Fetch bytes [start, end] of ``url`` and parse them as a zip archive."""
    resp = requests.get(
        url, headers={'Range': 'bytes={}-{}'.format(start, end)})
    fp = BytesIO(resp.content)
    return zipfile.ZipFile(fp).filelist
def _get_list_advanced(url):
    """Walk a remote zip's local file headers via HTTP Range requests.

    NOTE(review): Python-2 era code — ``header`` is bytes but is compared
    to a str literal (always False on Py3), and ``_open_remote_zip``
    returns a requests.Response, which has no ``.read()``.  A streaming
    request (``resp.raw``) and a ``b'PK\\x03\\x04'`` literal are needed
    for this to work.
    """
    # https://superuser.com/questions/981301/is-there-a-way-to-download-parts-of-the-content-of-a-zip-file
    offset = 0
    fp = _open_remote_zip(url)
    header = fp.read(30)
    file_list = []
    while header[:4] == 'PK\x03\x04':
        # Local file header layout: sizes at 18..26, name/extra lengths at 26..30.
        compressed_len, uncompressed_len = struct.unpack('<II', header[18:26])
        filename_len, extra_len = struct.unpack('<HH', header[26:30])
        header_len = 30 + filename_len + extra_len
        total_len = header_len + compressed_len
        filename = fp.read(filename_len)
        zi = zipfile.ZipInfo(filename)
        zi.file_size = uncompressed_len
        file_list.append(zi)
        fp.close()
        # Skip over the compressed payload to the next local header.
        offset += total_len
        fp = _open_remote_zip(url, offset)
        header = fp.read(30)
    fp.close()
    return file_list
def _open_remote_zip(url, offset=0):
    # Ranged GET starting at ``offset``; see the NOTE above about the
    # missing ``stream=True`` / ``.raw``.
    return requests.get(url, headers={'Range': 'bytes={}-'.format(offset)})
def get_zip_tree(rsc):
    """Group a resource's zip listing into top-level entries with children.

    Returns a values view of node dicts in archive order, or None when the
    archive could not be listed.
    """
    zip_list = get_zip_list(rsc)
    if not zip_list:
        return
    tree = OrderedDict()
    for compressed_file in zip_list:
        if "/" not in compressed_file.filename:
            # Plain top-level file.
            tree[compressed_file.filename] = _prepare_file_data(
                compressed_file)
        else:
            parts = compressed_file.filename.split("/")
            # Skip directory entries themselves (they end in "/").
            if parts[-1] != "":
                child = _prepare_child_data(compressed_file)
                parent_filename = '/'.join(parts[:-1])
                if parent_filename not in tree:
                    tree[parent_filename] = _prepare_parent_data(
                        parent_filename)
                tree[parent_filename]['children'].append(child)
    return tree.values()
def _prepare_file_data(zip_info):
    """Tree node for a top-level file entry of the archive."""
    return {
        "title": zip_info.filename,
        "file_size": formatters.localised_filesize(zip_info.file_size),
        "children": [],
        "icon": _get_file_icon(zip_info.filename)
    }
def _prepare_child_data(zip_info):
    """Tree node for a file nested inside a folder (non-ASCII stripped)."""
    file_title = zip_info.filename.split("/").pop()
    return {
        "title": re.sub(r'[^\x00-\x7f]', r'', file_title),
        "file_size": formatters.localised_filesize(zip_info.file_size),
        "children": [],
        "icon": _get_file_icon(re.sub(r'[^\x00-\x7f]', r'', zip_info.filename))
    }
def _prepare_parent_data(file_name):
    """Tree node for a folder that groups nested files."""
    return {
        "title": file_name,
        "children": [],
        "icon": 'folder-open'
    }
def _get_file_icon(item):
"""returns icon class based on file format"""
extension = item.split('.')[-1].lower()
if extension in ['xml', 'txt', 'json']:
return "file-text"
if extension in ['csv', 'xls']:
return "bar-chart-o"
if extension in ['shp', 'geojson', 'kml', 'kmz']:
return "globe"
return "file"
def is_resource_supported(res):
    """Check if resource format is in allowed formats"""
    # Prefer the declared format; fall back to the URL's file extension.
    res_fmt = res.get('format', '').lower()
    if not res_fmt:
        splitted_url = os.path.splitext(res['url'])
        res_fmt = splitted_url[1][1:].lower()
return True if res_fmt in ALLOWED_FMTS else False | 0.302082 | 0.101947 |
"""---------------- Importing libraries ----------------
"""
# System tools
import sys
import os
sys.path.append(os.path.join(".."))
# Import pandas for working with dataframes
import pandas as pd
# Neural networks with numpy
import numpy as np
from utils.neuralnetwork import NeuralNetwork
# Machine learning tools
from sklearn.preprocessing import LabelBinarizer
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report
from sklearn import datasets
from sklearn.datasets import fetch_openml
# Command-line interface
import argparse
"""---------------- Main script ----------------
"""
def main():
    """Train and evaluate a feed-forward neural network on MNIST.

    Network depth is controlled by the --hidden_layer_{1,2,3} arguments (a
    layer size of 0 disables that layer).  A classification report is
    printed and saved as CSV under ../out.
    """
    # ------ Argparse parameters ------
    parser = argparse.ArgumentParser()
    # "%" must be escaped as "%%" in argparse help strings, otherwise
    # `--help` raises a formatting error.
    parser.add_argument("-trs", "--train_size", default = 0.8, type = float, help = "The size of the training data as a percentage. Default = 0.8 (80%%)")
    parser.add_argument("-tes", "--test_size", default = 0.2, type = float, help = "The size of the test data as a percentage. Default = 0.2 (20%%)")
    parser.add_argument("-hl1", "--hidden_layer_1", default = 32, type = int, help="Size of the hidden layer 1. Default = 32")
    parser.add_argument("-hl2", "--hidden_layer_2", default = 16, type = int, help="Size of the hidden layer 2. Default = 16")
    parser.add_argument("-hl3", "--hidden_layer_3", default = 0, type = int, help="Size of the hidden layer 3. Default = 0")
    parser.add_argument("-ep", "--epochs", default = 500, type = int, help = "Defines how many times the learning algorithm will work through the entire training dataset. Default = 500")
    parser.add_argument("-n", "--name", default = "NN_report", help="Name of the classification report to be saved as .csv file")
    args = parser.parse_args()

    # ------ Loading full data and preprocessing ------
    print("[INFO] Loading and preprocessing data...")
    X, y = fetch_openml("mnist_784", version=1, return_X_y=True)
    X = np.array(X)  # pixel data
    y = np.array(y)  # labels
    # MinMax regularization (rescaling from 0-255 to 0-1)
    X = (X - X.min())/(X.max() - X.min())
    X_train, X_test, y_train, y_test = train_test_split(X,
                                                        y,
                                                        train_size = args.train_size,
                                                        test_size = args.test_size)
    # Converting labels from integers to one-hot vectors
    y_train = LabelBinarizer().fit_transform(y_train)
    y_test = LabelBinarizer().fit_transform(y_test)

    # ------ Training the network ------
    # Build the layer-size list from the CLI arguments: input layer, the
    # enabled hidden layers, then 10 output classes.  Constructing the list
    # once replaces the old copy-pasted if/elif blocks, and the explicit
    # error replaces the NameError the old code raised when
    # --hidden_layer_1 was 0.
    hidden_sizes = [h for h in (args.hidden_layer_1,
                                args.hidden_layer_2,
                                args.hidden_layer_3) if h > 0]
    if not hidden_sizes:
        parser.error("at least one hidden layer size must be greater than 0")
    print("[INFO] training Neural Network...")
    nn = NeuralNetwork([X_train.shape[1]] + hidden_sizes + [10])
    print("[INFO] {}".format(nn))
    nn.fit(X_train, y_train, epochs = args.epochs)

    # ------ Evaluating the network ------
    # (The old code wrapped this message in a one-element list by mistake.)
    print("[INFO] evaluating Neural Network...")
    predictions = nn.predict(X_test)
    predictions = predictions.argmax(axis=1)
    print(classification_report(y_test.argmax(axis=1), predictions))

    # ------ Saving classification report as .csv file (optional) ------
    if args.name:
        # Create the output folder if it does not exist already.
        if not os.path.exists("../out"):
            os.makedirs("../out")
        report_df = pd.DataFrame(classification_report(y_test.argmax(axis=1), predictions, output_dict = True)).transpose()
        outfile = os.path.join("..", "out", args.name)
        report_df.to_csv(outfile)
        print(f"\n[INFO] classification report is saved in directory {outfile}")

    # ------ Final messages ------
    print("The script was executed successfully. Have a nice day!")
# Define behaviour when called from command line
if __name__=="__main__":
main() | src/Neural_Network.py | """---------------- Importing libraries ----------------
"""
# System tools
import sys
import os
sys.path.append(os.path.join(".."))
# Import pandas for working with dataframes
import pandas as pd
# Neural networks with numpy
import numpy as np
from utils.neuralnetwork import NeuralNetwork
# Machine learning tools
from sklearn.preprocessing import LabelBinarizer
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report
from sklearn import datasets
from sklearn.datasets import fetch_openml
# Command-line interface
import argparse
"""---------------- Main script ----------------
"""
def main():
    """Train and evaluate a feed-forward neural network on MNIST.

    Depth is controlled by --hidden_layer_{1,2,3}; a classification report
    is printed and saved as CSV under ../out.
    """
    """------ Argparse parameters ------
    """
    # instantiating the ArgumentParser object as parser
    parser = argparse.ArgumentParser()
    # adding optional arguments
    # NOTE(review): a literal "%" in an argparse help string must be escaped
    # as "%%"; "(80%)" below makes `--help` raise a formatting error.
    parser.add_argument("-trs", "--train_size", default = 0.8, type = float, help = "The size of the training data as a percentage. Default = 0.8 (80%)")
    parser.add_argument("-tes", "--test_size", default = 0.2, type = float, help = "The size of the training data as a percentage. Default = 0.2 (20%)")
    parser.add_argument("-hl1", "--hidden_layer_1", default = 32, type = int, help="Size of the hidden layer 1. Default = 32")
    parser.add_argument("-hl2", "--hidden_layer_2", default = 16, type = int, help="Size of the hidden layer 2. Default = 16")
    parser.add_argument("-hl3", "--hidden_layer_3", default = 0, type = int, help="Size of the hidden layer 3. Default = 0")
    parser.add_argument("-ep", "--epochs", default = 500, type = int, help = "Defines how many times the learning algorithm will work through the entire training dataset. Default = 500")
    parser.add_argument("-n", "--name", default = "NN_report", help="Name of the classification report to be saved as .csv file")
    # parsing the arguments
    args = parser.parse_args()
    """------ Loading full data and preprocessing ------
    """
    print("[INFO] Loading and preprocessing data...")
    # Fetching data
    X, y = fetch_openml("mnist_784", version=1, return_X_y=True)
    # Converting to numpy arrays
    X = np.array(X) #data
    y = np.array(y) #labels
    # MinMax regularization (rescaling from 0-255 to 0-1)
    X = (X - X.min())/(X.max() - X.min())
    # Creating training data and test dataset
    X_train, X_test, y_train, y_test = train_test_split(X,
                                                        y,
                                                        train_size = args.train_size,
                                                        test_size = args.test_size)
    # Converting labels from integers to vectors (binary)
    y_train = LabelBinarizer().fit_transform(y_train)
    y_test = LabelBinarizer().fit_transform(y_test)
    """------ Loading sample data and preprocessing ------
    """
    # Load sample data
    ###digits = datasets.load_digits()
    # Convert to floats
    ###data = digits.data.astype("float")
    # MinMax regularization (rescaling from 0-255 to 0-1)
    ###data = (data - data.min())/(data.max() - data.min())
    # Creating train and test datasets
    ###X_train, X_test, y_train, y_test = train_test_split(data,
    #digits.target,
    #train_size = args.train_size,
    #test_size = args.test_size)
    # Converting labels from integers to vectors
    ###y_train = LabelBinarizer().fit_transform(y_train)
    ###y_test = LabelBinarizer().fit_transform(y_test)
    """------ Training the network (behavior with optional hidden layers) ------
    """
    # NOTE(review): if --hidden_layer_1 is 0 none of the branches below
    # runs and `nn` stays undefined, so the evaluation step raises
    # NameError.  Consider building the layer list programmatically.
    # If user inputs 1 hidden layer:
    if args.hidden_layer_1 > 0 and args.hidden_layer_2 == 0 and args.hidden_layer_3 == 0:
        # Training a neural network
        print("[INFO] training Neural Network...")
        nn = NeuralNetwork([X_train.shape[1], args.hidden_layer_1, 10])
        print("[INFO] {}".format(nn))
        nn.fit(X_train, y_train, epochs = args.epochs)
    # If user inputs 2 hidden layers:
    elif args.hidden_layer_1 > 0 and args.hidden_layer_2 > 0 and args.hidden_layer_3 == 0:
        ## Training a neural network
        print("[INFO] training Neural Network...")
        nn = NeuralNetwork([X_train.shape[1], args.hidden_layer_1, args.hidden_layer_2, 10])
        print("[INFO] {}".format(nn))
        nn.fit(X_train, y_train, epochs = args.epochs)
    # If user inputs 3 hidden layers:
    elif args.hidden_layer_1 > 0 and args.hidden_layer_2 > 0 and args.hidden_layer_3 > 0:
        ## Training a neural network
        print("[INFO] training Neural Network...")
        nn = NeuralNetwork([X_train.shape[1], args.hidden_layer_1, args.hidden_layer_2, args.hidden_layer_3, 10])
        print("[INFO] {}".format(nn))
        nn.fit(X_train, y_train, epochs = args.epochs)
    """------ Evaluating the network ------
    """
    # Evaluating the network
    # NOTE(review): this prints a one-element list rather than the string.
    print(["[INFO] evaluating Neural Network..."])
    predictions = nn.predict(X_test)
    predictions = predictions.argmax(axis=1)
    print(classification_report(y_test.argmax(axis=1), predictions))
    """------ Saving classification report as .csv file (optional) ------
    """
    # If user inputs optional argument 'name', save as .csv file:
    if args.name:
        # Create output folder for the classification report if it does not exist already
        if not os.path.exists("../out"):
            os.makedirs("../out")
        # Turning classification report into a dataframe
        report_df = pd.DataFrame(classification_report(y_test.argmax(axis=1), predictions, output_dict = True)).transpose()
        # Defining full filepath to save csv file
        outfile = os.path.join("..", "out", args.name)
        # Saving a dataframe as .csv
        report_df.to_csv(outfile)
        # Printing that .csv file has been saved
        print(f"\n[INFO] classification report is saved in directory {outfile}")
    """------ Final messages ------
    """
    # Printing a message to the user
    print("The script was executed successfully. Have a nice day!")
# Define behaviour when called from command line
if __name__=="__main__":
main() | 0.508544 | 0.637285 |
import secrets
import time
import asyncio
from typing import (
Any,
cast,
Dict,
List,
)
from eth_utils import encode_hex
from hp2p.constants import PEER_STAKE_GONE_STALE_TIME_PERIOD
from hvm.exceptions import (
CanonicalHeadNotFound,
)
from hp2p.exceptions import HandshakeFailure
from hp2p.p2p_proto import DisconnectReason, Disconnect
from hp2p.protocol import (
Command,
_DecodedMsgType,
)
from hp2p.utils import (
extract_wallet_verification_sender,
create_wallet_verification_signature,
validate_transaction_signature,
)
from hp2p.kademlia import Node
from helios.protocol.common.peer import (
BaseChainPeer,
BaseChainPeerFactory,
BaseChainPeerPool,
)
from hvm.types import Timestamp
from .commands import (
Status,
WalletAddressVerification,
)
from .constants import MAX_HEADERS_FETCH
from .proto import HLSProtocol
from .handlers import HLSExchangeHandler
from eth_typing import Address
from helios.protocol.common.datastructures import HashFragmentRequestHistory
class HLSPeer(BaseChainPeer):
    """A peer speaking the HLS (Helios) chain sub-protocol.

    Extends the base chain peer with the HLS handshake — which includes a
    signed wallet-address verification round trip — and with a cached
    mature-stake lookup.
    """
    max_headers_fetch = MAX_HEADERS_FETCH
    _supported_sub_protocols = [HLSProtocol]
    sub_proto: HLSProtocol = None
    _requests: HLSExchangeHandler = None
    # Timestamp of the last stake refresh; 0 forces a refresh on first access.
    _last_stake_check_time: Timestamp = 0
    _stake: int = None
    # Handshake-derived state, populated while the connection is set up.
    wallet_address = None
    local_salt = None
    peer_salt = None
    chain_head_root_hashes = None
    node_type = None
    hash_fragment_request_history_type_1: HashFragmentRequestHistory = None
    hash_fragment_request_history_type_2: HashFragmentRequestHistory = None
    def get_extra_stats(self) -> List[str]:
        # One "command: stats" line per tracked exchange command.
        stats_pairs = self.requests.get_stats().items()
        return ['%s: %s' % (cmd_name, stats) for cmd_name, stats in stats_pairs]
    @property
    async def stake(self) -> int:
        """Mature stake of this peer's wallet, cached for a staleness period.

        NOTE(review): an async @property is unusual — callers must write
        ``await peer.stake``.  Returns None when the canonical head is not
        found.
        """
        # Refresh only when the cached value has gone stale.
        if self._last_stake_check_time < (int(time.time()) - PEER_STAKE_GONE_STALE_TIME_PERIOD):
            try:
                self._stake = await self.chains[0].coro_get_mature_stake(self.wallet_address, raise_canonical_head_not_found_error = True)
                # coin_mature_time_for_staking = self.chains[0].get_vm(timestamp=Timestamp(int(time.time()))).consensus_db.coin_mature_time_for_staking
                # self._stake = await self.chaindb.coro_get_mature_stake(Address(self.wallet_address), coin_mature_time_for_staking, raise_canonical_head_not_found_error = True)
            except CanonicalHeadNotFound:
                self._stake = None
            self._last_stake_check_time = int(time.time())
        return self._stake
    @property
    def requests(self) -> HLSExchangeHandler:
        # Lazily created exchange handler for request/response commands.
        if self._requests is None:
            self._requests = HLSExchangeHandler(self)
        return self._requests
    def handle_sub_proto_msg(self, cmd: Command, msg: _DecodedMsgType) -> None:
        # No HLS-specific handling yet; delegate to the base implementation.
        super().handle_sub_proto_msg(cmd, msg)
    async def send_sub_proto_handshake(self) -> None:
        # Send our chain info plus a random salt the peer must sign back.
        local_salt = secrets.token_bytes(32)
        chain_info = await self._local_chain_info
        self.sub_proto.send_handshake(chain_info, local_salt)
        self.local_salt = local_salt
    async def process_sub_proto_handshake(
            self, cmd: Command, msg: _DecodedMsgType) -> None:
        """Validate the peer's Status message and run wallet verification.

        Raises HandshakeFailure (after disconnecting) when the message
        type, network id or genesis hash does not match ours.
        """
        if not isinstance(cmd, Status):
            await self.disconnect(DisconnectReason.other)
            raise HandshakeFailure(
                "Expected a HLS Status msg, got {}, disconnecting".format(cmd))
        msg = cast(Dict[str, Any], msg)
        if msg['network_id'] != self.network_id:
            await self.disconnect(DisconnectReason.useless_peer)
            raise HandshakeFailure(
                "{} network ({}) does not match ours ({}), disconnecting".format(
                    self, msg['network_id'], self.network_id))
        chain_info = await self._local_chain_info
        genesis_block_hash = chain_info.genesis_block_hash
        if msg['genesis_block_hash'] != genesis_block_hash:
            await self.disconnect(DisconnectReason.useless_peer)
            raise HandshakeFailure(
                "{} genesis ({}) does not match ours ({}), disconnecting".format(
                    self, encode_hex(msg['genesis_block_hash']), encode_hex(genesis_block_hash)))
        self.node_type = msg['node_type']
        self.send_wallet_address_verification(self.local_salt, msg['salt'])
        # After the sub_proto handshake, the peer will send back a signed message containing the wallet address
        cmd, msg = await self.read_msg()
        if isinstance(cmd, Disconnect):
            # Peers sometimes send a disconnect msg before they send the sub-proto handshake.
            raise HandshakeFailure(
                "{} disconnected before completing wallet address verification: {}".format(
                    self, msg['reason_name']))
        await self.process_sub_proto_wallet_address_verification(cmd, msg)
    async def process_sub_proto_wallet_address_verification(
            self, cmd: Command, msg: _DecodedMsgType) -> None:
        """Recover and store the peer's wallet address from its signature."""
        if not isinstance(cmd, WalletAddressVerification):
            await self.disconnect(DisconnectReason.other)
            raise HandshakeFailure(
                "Expected a HLS WalletAddressVerification msg, got {}, disconnecting".format(cmd))
        msg = cast(Dict[str, Any], msg)
        # make sure the salt they replied with is the salt we sent:
        if msg['peer_salt'] != self.local_salt:
            raise HandshakeFailure("The peer replied with a signed message using the wrong salt")
        salt = msg['local_salt'] + msg['peer_salt']
        validate_transaction_signature(salt, msg['v'], msg['r'], msg['s'])
        self.wallet_address = extract_wallet_verification_sender(salt, msg['v'], msg['r'], msg['s'])
    def send_wallet_address_verification(self, local_salt, peer_salt):
        """Sign the concatenated salts and send the verification message."""
        salt = local_salt + peer_salt
        v, r, s = create_wallet_verification_signature(salt, self.chain_config.node_private_helios_key)
        self.sub_proto.send_wallet_address_verification(local_salt, peer_salt, v, r, s)
        # NOTE(review): this stores the *combined* salt under ``peer_salt``
        # — confirm whether the bare peer salt was intended.
        self.peer_salt = salt
        # self.logger.debug("sending wallet address verification for wallet {}".format(self.chain_config.node_wallet_address))
class HLSPeerFactory(BaseChainPeerFactory):
    # Factory that produces HLSPeer instances for the peer pool.
    peer_class = HLSPeer
class HLSPeerPool(BaseChainPeerPool):
    # Pool of currently connected HLS peers.
    connected_nodes: Dict[Node, HLSPeer]  # type: ignore
    peer_factory_class = HLSPeerFactory
    @property
    def peers(self, min_stake: int = 0) -> List[HLSPeer]:
        # NOTE(review): a @property getter cannot receive arguments, so
        # ``min_stake`` is always 0 here and callers can never pass a
        # value.  Consider a plain method if stake filtering is needed.
return cast(List[HLSPeer], self.get_peers(min_stake)) | helios/protocol/hls/peer.py | import secrets
import time
import asyncio
from typing import (
Any,
cast,
Dict,
List,
)
from eth_utils import encode_hex
from hp2p.constants import PEER_STAKE_GONE_STALE_TIME_PERIOD
from hvm.exceptions import (
CanonicalHeadNotFound,
)
from hp2p.exceptions import HandshakeFailure
from hp2p.p2p_proto import DisconnectReason, Disconnect
from hp2p.protocol import (
Command,
_DecodedMsgType,
)
from hp2p.utils import (
extract_wallet_verification_sender,
create_wallet_verification_signature,
validate_transaction_signature,
)
from hp2p.kademlia import Node
from helios.protocol.common.peer import (
BaseChainPeer,
BaseChainPeerFactory,
BaseChainPeerPool,
)
from hvm.types import Timestamp
from .commands import (
Status,
WalletAddressVerification,
)
from .constants import MAX_HEADERS_FETCH
from .proto import HLSProtocol
from .handlers import HLSExchangeHandler
from eth_typing import Address
from helios.protocol.common.datastructures import HashFragmentRequestHistory
class HLSPeer(BaseChainPeer):
max_headers_fetch = MAX_HEADERS_FETCH
_supported_sub_protocols = [HLSProtocol]
sub_proto: HLSProtocol = None
_requests: HLSExchangeHandler = None
_last_stake_check_time: Timestamp = 0
_stake: int = None
wallet_address = None
local_salt = None
peer_salt = None
chain_head_root_hashes = None
node_type = None
hash_fragment_request_history_type_1: HashFragmentRequestHistory = None
hash_fragment_request_history_type_2: HashFragmentRequestHistory = None
def get_extra_stats(self) -> List[str]:
stats_pairs = self.requests.get_stats().items()
return ['%s: %s' % (cmd_name, stats) for cmd_name, stats in stats_pairs]
@property
async def stake(self) -> int:
if self._last_stake_check_time < (int(time.time()) - PEER_STAKE_GONE_STALE_TIME_PERIOD):
try:
self._stake = await self.chains[0].coro_get_mature_stake(self.wallet_address, raise_canonical_head_not_found_error = True)
# coin_mature_time_for_staking = self.chains[0].get_vm(timestamp=Timestamp(int(time.time()))).consensus_db.coin_mature_time_for_staking
# self._stake = await self.chaindb.coro_get_mature_stake(Address(self.wallet_address), coin_mature_time_for_staking, raise_canonical_head_not_found_error = True)
except CanonicalHeadNotFound:
self._stake = None
self._last_stake_check_time = int(time.time())
return self._stake
@property
def requests(self) -> HLSExchangeHandler:
if self._requests is None:
self._requests = HLSExchangeHandler(self)
return self._requests
def handle_sub_proto_msg(self, cmd: Command, msg: _DecodedMsgType) -> None:
super().handle_sub_proto_msg(cmd, msg)
async def send_sub_proto_handshake(self) -> None:
local_salt = secrets.token_bytes(32)
chain_info = await self._local_chain_info
self.sub_proto.send_handshake(chain_info, local_salt)
self.local_salt = local_salt
async def process_sub_proto_handshake(
self, cmd: Command, msg: _DecodedMsgType) -> None:
if not isinstance(cmd, Status):
await self.disconnect(DisconnectReason.other)
raise HandshakeFailure(
"Expected a HLS Status msg, got {}, disconnecting".format(cmd))
msg = cast(Dict[str, Any], msg)
if msg['network_id'] != self.network_id:
await self.disconnect(DisconnectReason.useless_peer)
raise HandshakeFailure(
"{} network ({}) does not match ours ({}), disconnecting".format(
self, msg['network_id'], self.network_id))
chain_info = await self._local_chain_info
genesis_block_hash = chain_info.genesis_block_hash
if msg['genesis_block_hash'] != genesis_block_hash:
await self.disconnect(DisconnectReason.useless_peer)
raise HandshakeFailure(
"{} genesis ({}) does not match ours ({}), disconnecting".format(
self, encode_hex(msg['genesis_block_hash']), encode_hex(genesis_block_hash)))
self.node_type = msg['node_type']
self.send_wallet_address_verification(self.local_salt, msg['salt'])
# After the sub_proto handshake, the peer will send back a signed message containing the wallet address
cmd, msg = await self.read_msg()
if isinstance(cmd, Disconnect):
# Peers sometimes send a disconnect msg before they send the sub-proto handshake.
raise HandshakeFailure(
"{} disconnected before completing wallet address verification: {}".format(
self, msg['reason_name']))
await self.process_sub_proto_wallet_address_verification(cmd, msg)
async def process_sub_proto_wallet_address_verification(
self, cmd: Command, msg: _DecodedMsgType) -> None:
if not isinstance(cmd, WalletAddressVerification):
await self.disconnect(DisconnectReason.other)
raise HandshakeFailure(
"Expected a HLS WalletAddressVerification msg, got {}, disconnecting".format(cmd))
msg = cast(Dict[str, Any], msg)
# make sure the salt they replied with is the salt we sent:
if msg['peer_salt'] != self.local_salt:
raise HandshakeFailure("The peer replied with a signed message using the wrong salt")
salt = msg['local_salt'] + msg['peer_salt']
validate_transaction_signature(salt, msg['v'], msg['r'], msg['s'])
self.wallet_address = extract_wallet_verification_sender(salt, msg['v'], msg['r'], msg['s'])
def send_wallet_address_verification(self, local_salt, peer_salt):
salt = local_salt + peer_salt
v, r, s = create_wallet_verification_signature(salt, self.chain_config.node_private_helios_key)
self.sub_proto.send_wallet_address_verification(local_salt, peer_salt, v, r, s)
self.peer_salt = salt
# self.logger.debug("sending wallet address verification for wallet {}".format(self.chain_config.node_wallet_address))
class HLSPeerFactory(BaseChainPeerFactory):
peer_class = HLSPeer
class HLSPeerPool(BaseChainPeerPool):
connected_nodes: Dict[Node, HLSPeer] # type: ignore
peer_factory_class = HLSPeerFactory
@property
def peers(self, min_stake: int = 0) -> List[HLSPeer]:
return cast(List[HLSPeer], self.get_peers(min_stake)) | 0.603348 | 0.136983 |
import py
from rpython.flowspace.argument import (ArgumentsForTranslation, rawshape,
Signature)
class TestSignature(object):
def test_helpers(self):
sig = Signature(["a", "b", "c"], None, None)
assert sig.num_argnames() == 3
assert not sig.has_vararg()
assert not sig.has_kwarg()
assert sig.scope_length() == 3
assert sig.getallvarnames() == ["a", "b", "c"]
sig = Signature(["a", "b", "c"], "c", None)
assert sig.num_argnames() == 3
assert sig.has_vararg()
assert not sig.has_kwarg()
assert sig.scope_length() == 4
assert sig.getallvarnames() == ["a", "b", "c", "c"]
sig = Signature(["a", "b", "c"], None, "c")
assert sig.num_argnames() == 3
assert not sig.has_vararg()
assert sig.has_kwarg()
assert sig.scope_length() == 4
assert sig.getallvarnames() == ["a", "b", "c", "c"]
sig = Signature(["a", "b", "c"], "d", "c")
assert sig.num_argnames() == 3
assert sig.has_vararg()
assert sig.has_kwarg()
assert sig.scope_length() == 5
assert sig.getallvarnames() == ["a", "b", "c", "d", "c"]
def test_eq(self):
sig1 = Signature(["a", "b", "c"], "d", "c")
sig2 = Signature(["a", "b", "c"], "d", "c")
assert sig1 == sig2
def test_find_argname(self):
sig = Signature(["a", "b", "c"], None, None)
assert sig.find_argname("a") == 0
assert sig.find_argname("b") == 1
assert sig.find_argname("c") == 2
assert sig.find_argname("d") == -1
def test_tuply(self):
sig = Signature(["a", "b", "c"], "d", "e")
x, y, z = sig
assert x == ["a", "b", "c"]
assert y == "d"
assert z == "e"
class dummy_wrapped_dict(dict):
def __nonzero__(self):
raise NotImplementedError
class kwargsdict(dict):
pass
class DummySpace(object):
def newtuple(self, items):
return tuple(items)
def is_true(self, obj):
if isinstance(obj, dummy_wrapped_dict):
return bool(dict(obj))
return bool(obj)
def fixedview(self, it):
return list(it)
def listview(self, it):
return list(it)
def unpackiterable(self, it):
return list(it)
def view_as_kwargs(self, x):
if len(x) == 0:
return [], []
return None, None
def newdict(self):
return {}
def newlist(self, l=[]):
return l
def setitem(self, obj, key, value):
obj[key] = value
def getitem(self, obj, key):
return obj[key]
def wrap(self, obj):
return obj
def str_w(self, s):
return str(s)
def len(self, x):
return len(x)
def int_w(self, x):
return x
def eq_w(self, x, y):
return x == y
def isinstance(self, obj, cls):
return isinstance(obj, cls)
isinstance_w = isinstance
def exception_match(self, w_type1, w_type2):
return issubclass(w_type1, w_type2)
def call_method(self, obj, name, *args):
method = getattr(obj, name)
return method(*args)
def type(self, obj):
class Type:
def getname(self, space, default='?'):
return type(obj).__name__
return Type()
w_TypeError = TypeError
w_AttributeError = AttributeError
w_UnicodeEncodeError = UnicodeEncodeError
w_dict = dict
w_str = str
def make_arguments_for_translation(space, args_w, keywords_w={},
w_stararg=None, w_starstararg=None):
return ArgumentsForTranslation(space, args_w, keywords_w.keys(),
keywords_w.values(), w_stararg,
w_starstararg)
class TestArgumentsForTranslation(object):
def test_prepend(self):
space = DummySpace()
args = ArgumentsForTranslation(space, ["0"])
args1 = args.prepend("thingy")
assert args1 is not args
assert args1.arguments_w == ["thingy", "0"]
assert args1.keywords is args.keywords
assert args1.keywords_w is args.keywords_w
def test_fixedunpacked(self):
space = DummySpace()
args = ArgumentsForTranslation(space, [], ["k"], [1])
py.test.raises(ValueError, args.fixedunpack, 1)
args = ArgumentsForTranslation(space, ["a", "b"])
py.test.raises(ValueError, args.fixedunpack, 0)
py.test.raises(ValueError, args.fixedunpack, 1)
py.test.raises(ValueError, args.fixedunpack, 3)
py.test.raises(ValueError, args.fixedunpack, 4)
assert args.fixedunpack(2) == ['a', 'b']
def test_unmatch_signature(self):
space = DummySpace()
args = make_arguments_for_translation(space, [1,2,3])
sig = Signature(['a', 'b', 'c'], None, None)
data = args.match_signature(sig, [])
new_args = args.unmatch_signature(sig, data)
assert args.unpack() == new_args.unpack()
args = make_arguments_for_translation(space, [1])
sig = Signature(['a', 'b', 'c'], None, None)
data = args.match_signature(sig, [2, 3])
new_args = args.unmatch_signature(sig, data)
assert args.unpack() == new_args.unpack()
args = make_arguments_for_translation(space, [1,2,3,4,5])
sig = Signature(['a', 'b', 'c'], 'r', None)
data = args.match_signature(sig, [])
new_args = args.unmatch_signature(sig, data)
assert args.unpack() == new_args.unpack()
args = make_arguments_for_translation(space, [1], {'c': 3, 'b': 2})
sig = Signature(['a', 'b', 'c'], None, None)
data = args.match_signature(sig, [])
new_args = args.unmatch_signature(sig, data)
assert args.unpack() == new_args.unpack()
args = make_arguments_for_translation(space, [1], {'c': 5})
sig = Signature(['a', 'b', 'c'], None, None)
data = args.match_signature(sig, [2, 3])
new_args = args.unmatch_signature(sig, data)
assert args.unpack() == new_args.unpack()
args = make_arguments_for_translation(space, [1], {'c': 5, 'd': 7})
sig = Signature(['a', 'b', 'c'], None, 'kw')
py.test.raises(TypeError, args.match_signature, sig, [2, 3])
def test_rawshape(self):
space = DummySpace()
args = make_arguments_for_translation(space, [1,2,3])
assert rawshape(args) == (3, (), False, False)
args = make_arguments_for_translation(space, [1])
assert rawshape(args, 2) == (3, (), False, False)
args = make_arguments_for_translation(space, [1,2,3,4,5])
assert rawshape(args) == (5, (), False, False)
args = make_arguments_for_translation(space, [1], {'c': 3, 'b': 2})
assert rawshape(args) == (1, ('b', 'c'), False, False)
args = make_arguments_for_translation(space, [1], {'c': 5})
assert rawshape(args) == (1, ('c', ), False, False)
args = make_arguments_for_translation(space, [1], {'c': 5, 'd': 7})
assert rawshape(args) == (1, ('c', 'd'), False, False)
args = make_arguments_for_translation(space, [1,2,3,4,5], {'e': 5, 'd': 7})
assert rawshape(args) == (5, ('d', 'e'), False, False)
args = make_arguments_for_translation(space, [], {},
w_stararg=[1],
w_starstararg={'c': 5, 'd': 7})
assert rawshape(args) == (0, (), True, True)
args = make_arguments_for_translation(space, [1,2], {'g': 9},
w_stararg=[3,4,5],
w_starstararg={'e': 5, 'd': 7})
assert rawshape(args) == (2, ('g', ), True, True)
def test_copy_and_shape(self):
space = DummySpace()
args = ArgumentsForTranslation(space, ['a'], ['x'], [1],
['w1'], {'y': 'w2'})
args1 = args.copy()
args.combine_if_necessary()
assert rawshape(args1) == (1, ('x',), True, True)
def test_flatten(self):
space = DummySpace()
args = make_arguments_for_translation(space, [1,2,3])
assert args.flatten() == ((3, (), False, False), [1, 2, 3])
args = make_arguments_for_translation(space, [1])
assert args.flatten() == ((1, (), False, False), [1])
args = make_arguments_for_translation(space, [1,2,3,4,5])
assert args.flatten() == ((5, (), False, False), [1,2,3,4,5])
args = make_arguments_for_translation(space, [1], {'c': 3, 'b': 2})
assert args.flatten() == ((1, ('b', 'c'), False, False), [1, 2, 3])
args = make_arguments_for_translation(space, [1], {'c': 5})
assert args.flatten() == ((1, ('c', ), False, False), [1, 5])
args = make_arguments_for_translation(space, [1], {'c': 5, 'd': 7})
assert args.flatten() == ((1, ('c', 'd'), False, False), [1, 5, 7])
args = make_arguments_for_translation(space, [1,2,3,4,5], {'e': 5, 'd': 7})
assert args.flatten() == ((5, ('d', 'e'), False, False), [1, 2, 3, 4, 5, 7, 5])
args = make_arguments_for_translation(space, [], {},
w_stararg=[1],
w_starstararg={'c': 5, 'd': 7})
assert args.flatten() == ((0, (), True, True), [[1], {'c': 5, 'd': 7}])
args = make_arguments_for_translation(space, [1,2], {'g': 9},
w_stararg=[3,4,5],
w_starstararg={'e': 5, 'd': 7})
assert args.flatten() == ((2, ('g', ), True, True), [1, 2, 9, [3, 4, 5], {'e': 5, 'd': 7}])
def test_stararg_flowspace_variable(self):
space = DummySpace()
var = object()
shape = ((2, ('g', ), True, False), [1, 2, 9, var])
args = make_arguments_for_translation(space, [1,2], {'g': 9},
w_stararg=var)
assert args.flatten() == shape
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
def test_fromshape(self):
space = DummySpace()
shape = ((3, (), False, False), [1, 2, 3])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
shape = ((1, (), False, False), [1])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
shape = ((5, (), False, False), [1,2,3,4,5])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
shape = ((1, ('b', 'c'), False, False), [1, 2, 3])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
shape = ((1, ('c', ), False, False), [1, 5])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
shape = ((1, ('c', 'd'), False, False), [1, 5, 7])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
shape = ((5, ('d', 'e'), False, False), [1, 2, 3, 4, 5, 7, 5])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
shape = ((0, (), True, True), [[1], {'c': 5, 'd': 7}])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
shape = ((2, ('g', ), True, True), [1, 2, 9, [3, 4, 5], {'e': 5, 'd': 7}])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape | rpython/flowspace/test/test_argument.py | import py
from rpython.flowspace.argument import (ArgumentsForTranslation, rawshape,
Signature)
class TestSignature(object):
def test_helpers(self):
sig = Signature(["a", "b", "c"], None, None)
assert sig.num_argnames() == 3
assert not sig.has_vararg()
assert not sig.has_kwarg()
assert sig.scope_length() == 3
assert sig.getallvarnames() == ["a", "b", "c"]
sig = Signature(["a", "b", "c"], "c", None)
assert sig.num_argnames() == 3
assert sig.has_vararg()
assert not sig.has_kwarg()
assert sig.scope_length() == 4
assert sig.getallvarnames() == ["a", "b", "c", "c"]
sig = Signature(["a", "b", "c"], None, "c")
assert sig.num_argnames() == 3
assert not sig.has_vararg()
assert sig.has_kwarg()
assert sig.scope_length() == 4
assert sig.getallvarnames() == ["a", "b", "c", "c"]
sig = Signature(["a", "b", "c"], "d", "c")
assert sig.num_argnames() == 3
assert sig.has_vararg()
assert sig.has_kwarg()
assert sig.scope_length() == 5
assert sig.getallvarnames() == ["a", "b", "c", "d", "c"]
def test_eq(self):
sig1 = Signature(["a", "b", "c"], "d", "c")
sig2 = Signature(["a", "b", "c"], "d", "c")
assert sig1 == sig2
def test_find_argname(self):
sig = Signature(["a", "b", "c"], None, None)
assert sig.find_argname("a") == 0
assert sig.find_argname("b") == 1
assert sig.find_argname("c") == 2
assert sig.find_argname("d") == -1
def test_tuply(self):
sig = Signature(["a", "b", "c"], "d", "e")
x, y, z = sig
assert x == ["a", "b", "c"]
assert y == "d"
assert z == "e"
class dummy_wrapped_dict(dict):
def __nonzero__(self):
raise NotImplementedError
class kwargsdict(dict):
pass
class DummySpace(object):
def newtuple(self, items):
return tuple(items)
def is_true(self, obj):
if isinstance(obj, dummy_wrapped_dict):
return bool(dict(obj))
return bool(obj)
def fixedview(self, it):
return list(it)
def listview(self, it):
return list(it)
def unpackiterable(self, it):
return list(it)
def view_as_kwargs(self, x):
if len(x) == 0:
return [], []
return None, None
def newdict(self):
return {}
def newlist(self, l=[]):
return l
def setitem(self, obj, key, value):
obj[key] = value
def getitem(self, obj, key):
return obj[key]
def wrap(self, obj):
return obj
def str_w(self, s):
return str(s)
def len(self, x):
return len(x)
def int_w(self, x):
return x
def eq_w(self, x, y):
return x == y
def isinstance(self, obj, cls):
return isinstance(obj, cls)
isinstance_w = isinstance
def exception_match(self, w_type1, w_type2):
return issubclass(w_type1, w_type2)
def call_method(self, obj, name, *args):
method = getattr(obj, name)
return method(*args)
def type(self, obj):
class Type:
def getname(self, space, default='?'):
return type(obj).__name__
return Type()
w_TypeError = TypeError
w_AttributeError = AttributeError
w_UnicodeEncodeError = UnicodeEncodeError
w_dict = dict
w_str = str
def make_arguments_for_translation(space, args_w, keywords_w={},
w_stararg=None, w_starstararg=None):
return ArgumentsForTranslation(space, args_w, keywords_w.keys(),
keywords_w.values(), w_stararg,
w_starstararg)
class TestArgumentsForTranslation(object):
def test_prepend(self):
space = DummySpace()
args = ArgumentsForTranslation(space, ["0"])
args1 = args.prepend("thingy")
assert args1 is not args
assert args1.arguments_w == ["thingy", "0"]
assert args1.keywords is args.keywords
assert args1.keywords_w is args.keywords_w
def test_fixedunpacked(self):
space = DummySpace()
args = ArgumentsForTranslation(space, [], ["k"], [1])
py.test.raises(ValueError, args.fixedunpack, 1)
args = ArgumentsForTranslation(space, ["a", "b"])
py.test.raises(ValueError, args.fixedunpack, 0)
py.test.raises(ValueError, args.fixedunpack, 1)
py.test.raises(ValueError, args.fixedunpack, 3)
py.test.raises(ValueError, args.fixedunpack, 4)
assert args.fixedunpack(2) == ['a', 'b']
def test_unmatch_signature(self):
space = DummySpace()
args = make_arguments_for_translation(space, [1,2,3])
sig = Signature(['a', 'b', 'c'], None, None)
data = args.match_signature(sig, [])
new_args = args.unmatch_signature(sig, data)
assert args.unpack() == new_args.unpack()
args = make_arguments_for_translation(space, [1])
sig = Signature(['a', 'b', 'c'], None, None)
data = args.match_signature(sig, [2, 3])
new_args = args.unmatch_signature(sig, data)
assert args.unpack() == new_args.unpack()
args = make_arguments_for_translation(space, [1,2,3,4,5])
sig = Signature(['a', 'b', 'c'], 'r', None)
data = args.match_signature(sig, [])
new_args = args.unmatch_signature(sig, data)
assert args.unpack() == new_args.unpack()
args = make_arguments_for_translation(space, [1], {'c': 3, 'b': 2})
sig = Signature(['a', 'b', 'c'], None, None)
data = args.match_signature(sig, [])
new_args = args.unmatch_signature(sig, data)
assert args.unpack() == new_args.unpack()
args = make_arguments_for_translation(space, [1], {'c': 5})
sig = Signature(['a', 'b', 'c'], None, None)
data = args.match_signature(sig, [2, 3])
new_args = args.unmatch_signature(sig, data)
assert args.unpack() == new_args.unpack()
args = make_arguments_for_translation(space, [1], {'c': 5, 'd': 7})
sig = Signature(['a', 'b', 'c'], None, 'kw')
py.test.raises(TypeError, args.match_signature, sig, [2, 3])
def test_rawshape(self):
space = DummySpace()
args = make_arguments_for_translation(space, [1,2,3])
assert rawshape(args) == (3, (), False, False)
args = make_arguments_for_translation(space, [1])
assert rawshape(args, 2) == (3, (), False, False)
args = make_arguments_for_translation(space, [1,2,3,4,5])
assert rawshape(args) == (5, (), False, False)
args = make_arguments_for_translation(space, [1], {'c': 3, 'b': 2})
assert rawshape(args) == (1, ('b', 'c'), False, False)
args = make_arguments_for_translation(space, [1], {'c': 5})
assert rawshape(args) == (1, ('c', ), False, False)
args = make_arguments_for_translation(space, [1], {'c': 5, 'd': 7})
assert rawshape(args) == (1, ('c', 'd'), False, False)
args = make_arguments_for_translation(space, [1,2,3,4,5], {'e': 5, 'd': 7})
assert rawshape(args) == (5, ('d', 'e'), False, False)
args = make_arguments_for_translation(space, [], {},
w_stararg=[1],
w_starstararg={'c': 5, 'd': 7})
assert rawshape(args) == (0, (), True, True)
args = make_arguments_for_translation(space, [1,2], {'g': 9},
w_stararg=[3,4,5],
w_starstararg={'e': 5, 'd': 7})
assert rawshape(args) == (2, ('g', ), True, True)
def test_copy_and_shape(self):
space = DummySpace()
args = ArgumentsForTranslation(space, ['a'], ['x'], [1],
['w1'], {'y': 'w2'})
args1 = args.copy()
args.combine_if_necessary()
assert rawshape(args1) == (1, ('x',), True, True)
def test_flatten(self):
space = DummySpace()
args = make_arguments_for_translation(space, [1,2,3])
assert args.flatten() == ((3, (), False, False), [1, 2, 3])
args = make_arguments_for_translation(space, [1])
assert args.flatten() == ((1, (), False, False), [1])
args = make_arguments_for_translation(space, [1,2,3,4,5])
assert args.flatten() == ((5, (), False, False), [1,2,3,4,5])
args = make_arguments_for_translation(space, [1], {'c': 3, 'b': 2})
assert args.flatten() == ((1, ('b', 'c'), False, False), [1, 2, 3])
args = make_arguments_for_translation(space, [1], {'c': 5})
assert args.flatten() == ((1, ('c', ), False, False), [1, 5])
args = make_arguments_for_translation(space, [1], {'c': 5, 'd': 7})
assert args.flatten() == ((1, ('c', 'd'), False, False), [1, 5, 7])
args = make_arguments_for_translation(space, [1,2,3,4,5], {'e': 5, 'd': 7})
assert args.flatten() == ((5, ('d', 'e'), False, False), [1, 2, 3, 4, 5, 7, 5])
args = make_arguments_for_translation(space, [], {},
w_stararg=[1],
w_starstararg={'c': 5, 'd': 7})
assert args.flatten() == ((0, (), True, True), [[1], {'c': 5, 'd': 7}])
args = make_arguments_for_translation(space, [1,2], {'g': 9},
w_stararg=[3,4,5],
w_starstararg={'e': 5, 'd': 7})
assert args.flatten() == ((2, ('g', ), True, True), [1, 2, 9, [3, 4, 5], {'e': 5, 'd': 7}])
def test_stararg_flowspace_variable(self):
space = DummySpace()
var = object()
shape = ((2, ('g', ), True, False), [1, 2, 9, var])
args = make_arguments_for_translation(space, [1,2], {'g': 9},
w_stararg=var)
assert args.flatten() == shape
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
def test_fromshape(self):
space = DummySpace()
shape = ((3, (), False, False), [1, 2, 3])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
shape = ((1, (), False, False), [1])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
shape = ((5, (), False, False), [1,2,3,4,5])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
shape = ((1, ('b', 'c'), False, False), [1, 2, 3])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
shape = ((1, ('c', ), False, False), [1, 5])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
shape = ((1, ('c', 'd'), False, False), [1, 5, 7])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
shape = ((5, ('d', 'e'), False, False), [1, 2, 3, 4, 5, 7, 5])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
shape = ((0, (), True, True), [[1], {'c': 5, 'd': 7}])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape
shape = ((2, ('g', ), True, True), [1, 2, 9, [3, 4, 5], {'e': 5, 'd': 7}])
args = ArgumentsForTranslation.fromshape(space, *shape)
assert args.flatten() == shape | 0.5083 | 0.522202 |
import datetime
import nose.tools
from nose.tools import with_setup
from billy.models import db
def setup_func():
    """Reset every test collection and load the fixture documents that
    the ``context_role`` tests below depend on.

    Inserts: one legislator (CAL000104) with roles spread over the
    20092010, 20112012 and 'fake-term' terms; California metadata
    describing those terms/sessions; one current-session (20112012)
    bill and one prior-session (20092010) bill; and one vote on each
    bill.  Runs before each test via nose's ``with_setup``.
    """
    # Safety guard: refuse to drop collections unless we are pointed at
    # a database whose name marks it as a test database.
    assert db.name.endswith('_test')
    db.metadata.drop()
    db.bills.drop()
    db.votes.drop()
    db.legislators.drop()
    db.document_ids.drop()
    db.vote_ids.drop()
    db.committees.drop()

    # Template vote document; reused below with per-bill bill_id/date
    # overrides via dict(vote, ...).
    vote = {
        u'+threshold': u'2/3',
        u'_type': u'vote',
        u'chamber': u'lower',
        u'date': datetime.datetime(2010, 6, 21, 21, 6),
        u'motion': u'Assembly Third Reading',
        u'no_count': 27,
        u'no_votes': [],
        u'other_count': 5,
        u'other_votes': [],
        u'passed': True,
        u'sources': [],
        u'type': u'passage',
        u'vote_id': u'CAV00032373',
        u'yes_count': 47,
        u'yes_votes': [
            {u'leg_id': u'CAL000104', u'name': u'Ammiano'},
        ]
    }

    # Add a vote for the current session bill.
    db.votes.insert(dict(vote, bill_id='CAB00007468',
                         date=datetime.datetime(2011, 12, 6, 0, 0)))

    # Add a vote for the prior session bill.
    db.votes.insert(dict(vote, bill_id='CAB00005131',
                         date=datetime.datetime(2009, 12, 6, 0, 0)))

    # Insert some test records.
    # Legislator with two old roles for 20092010, two current roles for
    # 20112012 (districts 13 then 14), and a single 'fake-term' role.
    db.legislators.insert({
        "_all_ids": ["CAL000104"],
        "_id": "CAL000104",
        "_type": "person",
        "active": True,
        "district": "13",
        "leg_id": "CAL000104",
        "old_roles": {
            "20092010": [
                {
                    "+active": True,
                    "chamber": "lower",
                    "country": "us",
                    "district": "1",
                    "end_date": datetime.datetime(2010, 1, 1, 0, 0),
                    "level": "state",
                    "party": "Democratic",
                    "start_date": datetime.datetime(2009, 1, 1, 0, 0),
                    "state": "ca",
                    "term": "20092010",
                    "type": "member"
                },
                {
                    "+active": True,
                    "chamber": "lower",
                    "country": "us",
                    "district": "2",
                    "end_date": datetime.datetime(2010, 12, 1, 0, 0),
                    "level": "state",
                    "party": "Democratic",
                    "start_date": datetime.datetime(2010, 1, 2, 0, 0),
                    "state": "ca",
                    "term": "20092010",
                    "type": "member"
                },
            ],
            'fake-session': [{
                "state": "ca",
                "chamber": "joint",
                "district": "13",
                "end_date": None,
                "party": "Democratic",
                "start_date": None,
                "term": "fake-term",
                "type": "member"
            }]
        },
        "party": "Democratic",
        "roles": [
            # Earlier role from 2011 to 2012.
            {
                "chamber": "lower",
                "district": "13",
                "start_date": datetime.datetime(2011, 1, 1, 0, 0),
                "party": "Democratic",
                "end_date": datetime.datetime(2012, 1, 1, 0, 0),
                "state": "ca",
                "term": "20112012",
                "type": "member"
            },
            # Later role from 2012-2013.
            {
                "chamber": "lower",
                "district": "14",
                "start_date": datetime.datetime(2012, 1, 2, 0, 0),
                "party": "Democratic",
                "end_date": datetime.datetime(2012, 12, 1, 0, 0),
                "state": "ca",
                "term": "20112012",
                "type": "member"
            },
            # Sole role for 'fake-term' (no dates).
            {
                "state": "ca",
                "chamber": "joint",
                "district": "13",
                "end_date": None,
                "party": "Democratic",
                "start_date": None,
                "term": "fake-term",
                "type": "member"
            }
        ],
        "state": "ca",
    })

    # State metadata: defines the terms/sessions referenced by the bills
    # and roles above.  Note 'fake-term2'/'fake-session2' deliberately
    # have no matching role on the legislator.
    db.metadata.insert({
        u'_id': u'ca',
        u'_type': u'metadata',
        u'abbreviation': u'ca',
        u'legislature_name': u'California State Legislature',
        u'name': u'California',
        u'session_details': {
            u'20092010': {
                u'display_name': u'2009-2010 Regular Session',
                u'start_date': datetime.datetime(2008, 12, 1, 0, 0),
                u'type': u'primary'},
            u'20092010 Special Session 1': {
                u'display_name': u'2009-2010, 1st Special Session',
                u'type': u'special'},
            u'20092010 Special Session 2': {
                u'display_name': u'2009-2010, 2nd Special Session',
                u'type': u'special'},
            u'20092010 Special Session 3': {
                u'display_name': u'2009-2010, 3rd Special Session',
                u'type': u'special'},
            u'20092010 Special Session 4': {
                u'display_name': u'2009-2010, 4th Special Session',
                u'type': u'special'},
            u'20092010 Special Session 5': {
                u'display_name': u'2009-2010, 5th Special Session',
                u'type': u'special'},
            u'20092010 Special Session 6': {
                u'display_name': u'2009-2010, 6th Special Session',
                u'type': u'special'},
            u'20092010 Special Session 7': {
                u'display_name': u'2009-2010, 7th Special Session',
                u'type': u'special'},
            u'20092010 Special Session 8': {
                u'display_name': u'2009-2010, 8th Special Session',
                u'type': u'special'},
            u'20112012': {
                u'display_name': u'2011-2012 Regular Session',
                u'start_date': datetime.datetime(2010, 12, 6, 0, 0),
                u'type': u'primary'},
            u'fake-session': {
                u'display_name': u'2011-2012 Regular Session',
                u'start_date': datetime.datetime(2010, 12, 6, 0, 0),
                u'type': u'primary'},
            u'fake-session2': {
                u'display_name': u'2011-2012 Regular Session',
                u'start_date': datetime.datetime(2010, 12, 6, 0, 0),
                u'type': u'primary'},
            u'20112012 Special Session 1': {
                u'display_name': u'2011-2012, 1st Special Session',
                u'type': u'special'}},
        u'terms': [
            {
                u'+start_date': datetime.datetime(2008, 12, 1, 0, 0),
                u'end_year': 2010,
                u'name': u'20092010',
                u'sessions': [u'20092010', u'20092010 Special Session 1',
                    u'20092010 Special Session 2', u'20092010 Special Session 3',
                    u'20092010 Special Session 4', u'20092010 Special Session 5',
                    u'20092010 Special Session 6', u'20092010 Special Session 7',
                    u'20092010 Special Session 8'],
                u'start_year': 2009
            },
            {
                u'+start_date': datetime.datetime(2010, 12, 6, 0, 0),
                u'end_year': 2012,
                u'name': u'20112012',
                u'sessions': [u'20112012 Special Session 1', u'20112012'],
                u'start_year': 2011
            },
            {
                u'+start_date': datetime.datetime(2010, 12, 6, 0, 0),
                u'end_year': 2012,
                u'name': u'fake-term',
                u'sessions': [u'fake-session'],
                u'start_year': 2011
            },
            {
                u'+start_date': datetime.datetime(2010, 12, 6, 0, 0),
                u'end_year': 2012,
                u'name': u'fake-term2',
                u'sessions': [u'fake-session2'],
                u'start_year': 2011
            },
        ]
    })

    # A current session bill, where current session is 20112012.
    db.bills.insert({
        u'_all_ids': [u'CAB00007468'],
        u'_id': u'CAB00007468',
        u'_term': u'20112012',
        u'_type': u'bill',
        u'action_dates': {
            u'first': datetime.datetime(2011, 2, 17, 0, 0),
            u'last': datetime.datetime(2011, 8, 25, 0, 0),
            u'passed_lower': datetime.datetime(2011, 6, 2, 0, 0),
            u'passed_upper': None,
            u'signed': None},
        u'alternate_titles': [],
        u'bill_id': u'AB 889',
        u'chamber': u'lower',
        u'country': u'us',
        u'created_at': datetime.datetime(2011, 3, 24, 20, 45, 24, 16000),
        u'documents': [],
        u'level': u'state',
        u'session': u'20112012',
        u'sources': [{u'url': u'http://leginfo.legislature.ca.gov/fake'}],
        u'sponsors': [
            {u'leg_id': u'CAL000104',
             u'name': u'Ammiano',
             u'official_type': u'LEAD_AUTHOR',
             u'type': u'primary'},
        ],
        u'state': u'ca',
    })

    # A prior session bill, where prior is 20092010.
    db.bills.insert({
        u'_all_ids': [u'CAB00005131'],
        u'_id': u'CAB00005131',
        u'_term': u'20092010',
        u'_type': u'bill',
        u'action_dates': {
            u'first': datetime.datetime(2009, 2, 17, 0, 0),
            u'last': datetime.datetime(2009, 8, 25, 0, 0),
            u'passed_lower': datetime.datetime(2009, 6, 2, 0, 0),
            u'passed_upper': None,
            u'signed': None},
        u'chamber': u'lower',
        u'country': u'us',
        u'session': u'20092010 Special Session 4',
        u'sponsors': [
            {u'leg_id': u'CAL000104', u'name': u'Ammiano',
             u'type': u'cosponsor'}
        ],
        u'state': u'ca',
    })
'''
Need to test context_role with:
- no related bill or vote (returns '')
- related bill, single role for term
- related vote, single role for term
- related bill, multiple roles for term, one that fits
- related vote, multiple roles for term, one that fits
- related bill, multiple roles for term, none that fits
- related vote, multiple roles for term, none that fits
- passed-in term, bill, vote, session
'''
# Test context_role for current term, session, bill, vote.
@with_setup(setup_func)
def test_current_using_bill():
    # The bill's first action was in 2011, so context_role should pick
    # the legislator's 2011-2012 role (first entry in leg['roles']).
    legislator = db.legislators.find_one('CAL000104')
    expected = legislator['roles'][0]
    current_bill = db.bills.find_one({'_term': '20112012'})
    nose.tools.eq_(expected, legislator.context_role(bill=current_bill))
@with_setup(setup_func)
def test_current_using_vote():
    # Resolving via a vote on the current-session bill should likewise
    # yield the legislator's 2011-2012 role.
    legislator = db.legislators.find_one()
    expected = legislator['roles'][0]
    current_bill = db.bills.find_one({'_term': '20112012'})
    first_vote = next(current_bill.votes_manager())
    nose.tools.eq_(expected, legislator.context_role(vote=first_vote))
@with_setup(setup_func)
def test_current_using_session_multiple_roles():
leg = db.legislators.find_one('CAL000104')
correct_role = leg['roles'][0]
nose.tools.eq_(correct_role, leg.context_role(session='20112012'))
@with_setup(setup_func)
def test_current_using_session_single_role():
leg = db.legislators.find_one('CAL000104')
correct_role = leg['roles'][2]
nose.tools.eq_(correct_role, leg.context_role(session='fake-session'))
@with_setup(setup_func)
def test_current_using_term_multiple_roles():
# If there're multiple roles for a term, return the first role in the list.
leg = db.legislators.find_one('CAL000104')
correct_role = leg['roles'][0]
nose.tools.eq_(correct_role, leg.context_role(term='20112012'))
@with_setup(setup_func)
def test_current_using_term_single_role():
# If there'only one role for a term, return it.
leg = db.legislators.find_one('CAL000104')
correct_role = leg['roles'][2]
nose.tools.eq_(correct_role, leg.context_role(term='fake-term'))
@with_setup(setup_func)
def test_current_using_related_bill():
bill = db.bills.find_one({'_term': '20112012'})
leg = next(iter(bill.sponsors_manager))
correct_role = leg['roles'][0]
nose.tools.eq_(correct_role, leg.context_role(bill=bill))
@with_setup(setup_func)
def test_current_using_related_vote():
bill = db.bills.find_one({'_term': '20112012'})
vote = next(bill.votes_manager())
leg = db.legislators.find_one('CAL000104')
correct_role = leg['roles'][0]
nose.tools.eq_(correct_role, leg.context_role(vote=vote))
@with_setup(setup_func)
def test_current_using_term_no_matching_roles():
# If there're multiple roles for a term, return the
leg = db.legislators.find_one('CAL000104')
correct_role = ''
nose.tools.eq_(correct_role, leg.context_role(term='fake-term2'))
@with_setup(setup_func)
def test_current_using_session_no_matching_roles():
# If there're multiple roles for a term, return the first role in the list.
leg = db.legislators.find_one('CAL000104')
correct_role = ''
nose.tools.eq_(correct_role, leg.context_role(session='fake-session2'))
# Test context_role with for old term, session, bill, vote.
@with_setup(setup_func)
def test_old_using_bill():
leg = db.legislators.find_one('CAL000104')
correct_role = leg['old_roles']['20092010'][0]
bill = db.bills.find_one({'_term': '20092010'})
nose.tools.eq_(correct_role, leg.context_role(bill=bill))
@with_setup(setup_func)
def test_old_using_vote():
leg = db.legislators.find_one()
correct_role = leg['old_roles']['20092010'][0]
bill = db.bills.find_one({'_term': '20092010'})
vote = next(bill.votes_manager())
nose.tools.eq_(correct_role, leg.context_role(vote=vote))
@with_setup(setup_func)
def test_old_using_session_multiple_roles():
leg = db.legislators.find_one('CAL000104')
correct_role = leg['old_roles']['20092010'][0]
nose.tools.eq_(correct_role, leg.context_role(session='20092010'))
@with_setup(setup_func)
def test_old_using_session_single_role():
leg = db.legislators.find_one('CAL000104')
correct_role = leg['old_roles']['fake-session'][0]
nose.tools.eq_(correct_role, leg.context_role(session='fake-session'))
@with_setup(setup_func)
def test_old_using_term_multiple_roles():
# If there're multiple roles for a term, return the first role in the list.
leg = db.legislators.find_one('CAL000104')
correct_role = leg['old_roles']['20092010'][0]
nose.tools.eq_(correct_role, leg.context_role(term='20092010'))
@with_setup(setup_func)
def test_old_using_term_single_role():
# If there's only one role for a term, return it.
leg = db.legislators.find_one('CAL000104')
correct_role = leg['old_roles']['fake-session'][0]
nose.tools.eq_(correct_role, leg.context_role(term='fake-term'))
@with_setup(setup_func)
def test_old_using_related_bill():
bill = db.bills.find_one({'_term': '20092010'})
leg = next(iter(bill.sponsors_manager))
correct_role = leg['old_roles']['20092010'][0]
nose.tools.eq_(correct_role, leg.context_role(bill=bill))
@with_setup(setup_func)
def test_old_using_related_vote():
bill = db.bills.find_one({'_term': '20092010'})
vote = next(bill.votes_manager())
leg = db.legislators.find_one('CAL000104')
correct_role = leg['old_roles']['20092010'][0]
nose.tools.eq_(correct_role, leg.context_role(vote=vote)) | billy/tests/models/legislator_test_context_role.py | import datetime
import nose.tools
from nose.tools import with_setup
from billy.models import db
def setup_func():
assert db.name.endswith('_test')
db.metadata.drop()
db.bills.drop()
db.votes.drop()
db.legislators.drop()
db.document_ids.drop()
db.vote_ids.drop()
db.committees.drop()
vote = {
u'+threshold': u'2/3',
u'_type': u'vote',
u'chamber': u'lower',
u'date': datetime.datetime(2010, 6, 21, 21, 6),
u'motion': u'Assembly Third Reading',
u'no_count': 27,
u'no_votes': [],
u'other_count': 5,
u'other_votes': [],
u'passed': True,
u'sources': [],
u'type': u'passage',
u'vote_id': u'CAV00032373',
u'yes_count': 47,
u'yes_votes': [
{u'leg_id': u'CAL000104', u'name': u'Ammiano'},
]
}
# Add a vote for the current session bill.
db.votes.insert(dict(vote, bill_id='CAB00007468',
date=datetime.datetime(2011, 12, 6, 0, 0)))
# Add a vote for the prior session bill.
db.votes.insert(dict(vote, bill_id='CAB00005131',
date=datetime.datetime(2009, 12, 6, 0, 0)))
# Insert some test records.
db.legislators.insert({
"_all_ids": ["CAL000104"],
"_id": "CAL000104",
"_type": "person",
"active": True,
"district": "13",
"leg_id": "CAL000104",
"old_roles": {
"20092010": [
{
"+active": True,
"chamber": "lower",
"country": "us",
"district": "1",
"end_date": datetime.datetime(2010, 1, 1, 0, 0),
"level": "state",
"party": "Democratic",
"start_date": datetime.datetime(2009, 1, 1, 0, 0),
"state": "ca",
"term": "20092010",
"type": "member"
},
{
"+active": True,
"chamber": "lower",
"country": "us",
"district": "2",
"end_date": datetime.datetime(2010, 12, 1, 0, 0),
"level": "state",
"party": "Democratic",
"start_date": datetime.datetime(2010, 1, 2, 0, 0),
"state": "ca",
"term": "20092010",
"type": "member"
},
],
'fake-session': [{
"state": "ca",
"chamber": "joint",
"district": "13",
"end_date": None,
"party": "Democratic",
"start_date": None,
"term": "fake-term",
"type": "member"
}]
},
"party": "Democratic",
"roles": [
# Earlier role from 2011 to 2012.
{
"chamber": "lower",
"district": "13",
"start_date": datetime.datetime(2011, 1, 1, 0, 0),
"party": "Democratic",
"end_date": datetime.datetime(2012, 1, 1, 0, 0),
"state": "ca",
"term": "20112012",
"type": "member"
},
# Later role from 2012-2013.
{
"chamber": "lower",
"district": "14",
"start_date": datetime.datetime(2012, 1, 2, 0, 0),
"party": "Democratic",
"end_date": datetime.datetime(2012, 12, 1, 0, 0),
"state": "ca",
"term": "20112012",
"type": "member"
},
{
"state": "ca",
"chamber": "joint",
"district": "13",
"end_date": None,
"party": "Democratic",
"start_date": None,
"term": "fake-term",
"type": "member"
}
],
"state": "ca",
})
db.metadata.insert({
u'_id': u'ca',
u'_type': u'metadata',
u'abbreviation': u'ca',
u'legislature_name': u'California State Legislature',
u'name': u'California',
u'session_details': {
u'20092010': {
u'display_name': u'2009-2010 Regular Session',
u'start_date': datetime.datetime(2008, 12, 1, 0, 0),
u'type': u'primary'},
u'20092010 Special Session 1': {
u'display_name': u'2009-2010, 1st Special Session',
u'type': u'special'},
u'20092010 Special Session 2': {
u'display_name': u'2009-2010, 2nd Special Session',
u'type': u'special'},
u'20092010 Special Session 3': {
u'display_name': u'2009-2010, 3rd Special Session',
u'type': u'special'},
u'20092010 Special Session 4': {
u'display_name': u'2009-2010, 4th Special Session',
u'type': u'special'},
u'20092010 Special Session 5': {
u'display_name': u'2009-2010, 5th Special Session',
u'type': u'special'},
u'20092010 Special Session 6': {
u'display_name': u'2009-2010, 6th Special Session',
u'type': u'special'},
u'20092010 Special Session 7': {
u'display_name': u'2009-2010, 7th Special Session',
u'type': u'special'},
u'20092010 Special Session 8': {
u'display_name': u'2009-2010, 8th Special Session',
u'type': u'special'},
u'20112012': {
u'display_name': u'2011-2012 Regular Session',
u'start_date': datetime.datetime(2010, 12, 6, 0, 0),
u'type': u'primary'},
u'fake-session': {
u'display_name': u'2011-2012 Regular Session',
u'start_date': datetime.datetime(2010, 12, 6, 0, 0),
u'type': u'primary'},
u'fake-session2': {
u'display_name': u'2011-2012 Regular Session',
u'start_date': datetime.datetime(2010, 12, 6, 0, 0),
u'type': u'primary'},
u'20112012 Special Session 1': {
u'display_name': u'2011-2012, 1st Special Session',
u'type': u'special'}},
u'terms': [
{
u'+start_date': datetime.datetime(2008, 12, 1, 0, 0),
u'end_year': 2010,
u'name': u'20092010',
u'sessions': [u'20092010', u'20092010 Special Session 1',
u'20092010 Special Session 2', u'20092010 Special Session 3',
u'20092010 Special Session 4', u'20092010 Special Session 5',
u'20092010 Special Session 6', u'20092010 Special Session 7',
u'20092010 Special Session 8'],
u'start_year': 2009
},
{
u'+start_date': datetime.datetime(2010, 12, 6, 0, 0),
u'end_year': 2012,
u'name': u'20112012',
u'sessions': [u'20112012 Special Session 1', u'20112012'],
u'start_year': 2011
},
{
u'+start_date': datetime.datetime(2010, 12, 6, 0, 0),
u'end_year': 2012,
u'name': u'fake-term',
u'sessions': [u'fake-session'],
u'start_year': 2011
},
{
u'+start_date': datetime.datetime(2010, 12, 6, 0, 0),
u'end_year': 2012,
u'name': u'fake-term2',
u'sessions': [u'fake-session2'],
u'start_year': 2011
},
]
})
# A current session bill, where current session is 20112012.
db.bills.insert({
u'_all_ids': [u'CAB00007468'],
u'_id': u'CAB00007468',
u'_term': u'20112012',
u'_type': u'bill',
u'action_dates': {
u'first': datetime.datetime(2011, 2, 17, 0, 0),
u'last': datetime.datetime(2011, 8, 25, 0, 0),
u'passed_lower': datetime.datetime(2011, 6, 2, 0, 0),
u'passed_upper': None,
u'signed': None},
u'alternate_titles': [],
u'bill_id': u'AB 889',
u'chamber': u'lower',
u'country': u'us',
u'created_at': datetime.datetime(2011, 3, 24, 20, 45, 24, 16000),
u'documents': [],
u'level': u'state',
u'session': u'20112012',
u'sources': [{u'url': u'http://leginfo.legislature.ca.gov/fake'}],
u'sponsors': [
{u'leg_id': u'CAL000104',
u'name': u'Ammiano',
u'official_type': u'LEAD_AUTHOR',
u'type': u'primary'},
],
u'state': u'ca',
})
# A prior session bill, where prior is 20092010.
db.bills.insert({
u'_all_ids': [u'CAB00005131'],
u'_id': u'CAB00005131',
u'_term': u'20092010',
u'_type': u'bill',
u'action_dates': {
u'first': datetime.datetime(2009, 2, 17, 0, 0),
u'last': datetime.datetime(2009, 8, 25, 0, 0),
u'passed_lower': datetime.datetime(2009, 6, 2, 0, 0),
u'passed_upper': None,
u'signed': None},
u'chamber': u'lower',
u'country': u'us',
u'session': u'20092010 Special Session 4',
u'sponsors': [
{u'leg_id': u'CAL000104', u'name': u'Ammiano',
u'type': u'cosponsor'}
],
u'state': u'ca',
})
'''
Need to test context_role with:
- no related bill or vote (returns '')
- related bill, single role for term
- related vote, single role for term
- related bill, multiple roles for term, one that fits
- related vote, multiple roles for term, one that fits
- related bill, multiple roles for term, none that fits
- related vote, multiple roles for term, none that fits
- passed-in term, bill, vote, session
'''
# Test context_role for current term, session, bill, vote.
@with_setup(setup_func)
def test_current_using_bill():
# The bill's first action was in 2011, so the correct role is the first
# one in leg['roles'], which lasts from 2011 to 2012.
leg = db.legislators.find_one('CAL000104')
correct_role = leg['roles'][0]
bill = db.bills.find_one({'_term': '20112012'})
nose.tools.eq_(correct_role, leg.context_role(bill=bill))
@with_setup(setup_func)
def test_current_using_vote():
leg = db.legislators.find_one()
correct_role = leg['roles'][0]
bill = db.bills.find_one({'_term': '20112012'})
vote = next(bill.votes_manager())
nose.tools.eq_(correct_role, leg.context_role(vote=vote))
@with_setup(setup_func)
def test_current_using_session_multiple_roles():
leg = db.legislators.find_one('CAL000104')
correct_role = leg['roles'][0]
nose.tools.eq_(correct_role, leg.context_role(session='20112012'))
@with_setup(setup_func)
def test_current_using_session_single_role():
leg = db.legislators.find_one('CAL000104')
correct_role = leg['roles'][2]
nose.tools.eq_(correct_role, leg.context_role(session='fake-session'))
@with_setup(setup_func)
def test_current_using_term_multiple_roles():
# If there're multiple roles for a term, return the first role in the list.
leg = db.legislators.find_one('CAL000104')
correct_role = leg['roles'][0]
nose.tools.eq_(correct_role, leg.context_role(term='20112012'))
@with_setup(setup_func)
def test_current_using_term_single_role():
# If there'only one role for a term, return it.
leg = db.legislators.find_one('CAL000104')
correct_role = leg['roles'][2]
nose.tools.eq_(correct_role, leg.context_role(term='fake-term'))
@with_setup(setup_func)
def test_current_using_related_bill():
bill = db.bills.find_one({'_term': '20112012'})
leg = next(iter(bill.sponsors_manager))
correct_role = leg['roles'][0]
nose.tools.eq_(correct_role, leg.context_role(bill=bill))
@with_setup(setup_func)
def test_current_using_related_vote():
bill = db.bills.find_one({'_term': '20112012'})
vote = next(bill.votes_manager())
leg = db.legislators.find_one('CAL000104')
correct_role = leg['roles'][0]
nose.tools.eq_(correct_role, leg.context_role(vote=vote))
@with_setup(setup_func)
def test_current_using_term_no_matching_roles():
# If there're multiple roles for a term, return the
leg = db.legislators.find_one('CAL000104')
correct_role = ''
nose.tools.eq_(correct_role, leg.context_role(term='fake-term2'))
@with_setup(setup_func)
def test_current_using_session_no_matching_roles():
# If there're multiple roles for a term, return the first role in the list.
leg = db.legislators.find_one('CAL000104')
correct_role = ''
nose.tools.eq_(correct_role, leg.context_role(session='fake-session2'))
# Test context_role with for old term, session, bill, vote.
@with_setup(setup_func)
def test_old_using_bill():
leg = db.legislators.find_one('CAL000104')
correct_role = leg['old_roles']['20092010'][0]
bill = db.bills.find_one({'_term': '20092010'})
nose.tools.eq_(correct_role, leg.context_role(bill=bill))
@with_setup(setup_func)
def test_old_using_vote():
leg = db.legislators.find_one()
correct_role = leg['old_roles']['20092010'][0]
bill = db.bills.find_one({'_term': '20092010'})
vote = next(bill.votes_manager())
nose.tools.eq_(correct_role, leg.context_role(vote=vote))
@with_setup(setup_func)
def test_old_using_session_multiple_roles():
leg = db.legislators.find_one('CAL000104')
correct_role = leg['old_roles']['20092010'][0]
nose.tools.eq_(correct_role, leg.context_role(session='20092010'))
@with_setup(setup_func)
def test_old_using_session_single_role():
leg = db.legislators.find_one('CAL000104')
correct_role = leg['old_roles']['fake-session'][0]
nose.tools.eq_(correct_role, leg.context_role(session='fake-session'))
@with_setup(setup_func)
def test_old_using_term_multiple_roles():
# If there're multiple roles for a term, return the first role in the list.
leg = db.legislators.find_one('CAL000104')
correct_role = leg['old_roles']['20092010'][0]
nose.tools.eq_(correct_role, leg.context_role(term='20092010'))
@with_setup(setup_func)
def test_old_using_term_single_role():
# If there's only one role for a term, return it.
leg = db.legislators.find_one('CAL000104')
correct_role = leg['old_roles']['fake-session'][0]
nose.tools.eq_(correct_role, leg.context_role(term='fake-term'))
@with_setup(setup_func)
def test_old_using_related_bill():
bill = db.bills.find_one({'_term': '20092010'})
leg = next(iter(bill.sponsors_manager))
correct_role = leg['old_roles']['20092010'][0]
nose.tools.eq_(correct_role, leg.context_role(bill=bill))
@with_setup(setup_func)
def test_old_using_related_vote():
bill = db.bills.find_one({'_term': '20092010'})
vote = next(bill.votes_manager())
leg = db.legislators.find_one('CAL000104')
correct_role = leg['old_roles']['20092010'][0]
nose.tools.eq_(correct_role, leg.context_role(vote=vote)) | 0.46563 | 0.260125 |
from setuptools import setup
import os
import seam_erasure
here = os.path.abspath(os.path.dirname(__file__))
# Get the long description from the README file
with open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
long_description = long_description.replace(
"static/img/",
"https://raw.githubusercontent.com/zfergus/seam-erasure/master/static/img/"
)
setup(
name=seam_erasure.__name__,
packages=[seam_erasure.__name__],
version=seam_erasure.__version__,
license=seam_erasure.__license__,
description="Seamlessly erase seams from your favorite 3D models.",
long_description=long_description,
long_description_content_type='text/markdown',
author=seam_erasure.__author__,
author_email=seam_erasure.__email__,
url="https://github.com/zfergus/seam-erasure",
keywords=["3D Modeling", "Textures", "Computer Graphics"],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Science/Research",
"Topic :: Multimedia :: Graphics :: 3D Modeling",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
python_requires=">= 2.7",
install_requires=[
"numpy",
"scipy",
"recordclass",
"pillow",
"pathlib; python_version < '3.4'",
"tqdm",
],
extras_require={
"cholmod": ["cvxopt"],
"web-ui": ["flask"],
},
entry_points={
"console_scripts": [
"seam-erasure=seam_erasure.cli:main",
# "seam-erasure-webui=server:main",
],
},
project_urls={
"Bug Reports": "https://github.com/zfergus/seam-erasure/issues",
"Research Project Page": "https://cragl.cs.gmu.edu/seamless/",
"Paper": "https://goo.gl/1LwB3Z",
"Video": "https://youtu.be/kCryf9n82Y8",
"Source": "https://github.com/zfergus/seam-erasure/",
},
) | setup.py |
from setuptools import setup
import os
import seam_erasure
here = os.path.abspath(os.path.dirname(__file__))
# Get the long description from the README file
with open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
long_description = long_description.replace(
"static/img/",
"https://raw.githubusercontent.com/zfergus/seam-erasure/master/static/img/"
)
setup(
name=seam_erasure.__name__,
packages=[seam_erasure.__name__],
version=seam_erasure.__version__,
license=seam_erasure.__license__,
description="Seamlessly erase seams from your favorite 3D models.",
long_description=long_description,
long_description_content_type='text/markdown',
author=seam_erasure.__author__,
author_email=seam_erasure.__email__,
url="https://github.com/zfergus/seam-erasure",
keywords=["3D Modeling", "Textures", "Computer Graphics"],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Science/Research",
"Topic :: Multimedia :: Graphics :: 3D Modeling",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
python_requires=">= 2.7",
install_requires=[
"numpy",
"scipy",
"recordclass",
"pillow",
"pathlib; python_version < '3.4'",
"tqdm",
],
extras_require={
"cholmod": ["cvxopt"],
"web-ui": ["flask"],
},
entry_points={
"console_scripts": [
"seam-erasure=seam_erasure.cli:main",
# "seam-erasure-webui=server:main",
],
},
project_urls={
"Bug Reports": "https://github.com/zfergus/seam-erasure/issues",
"Research Project Page": "https://cragl.cs.gmu.edu/seamless/",
"Paper": "https://goo.gl/1LwB3Z",
"Video": "https://youtu.be/kCryf9n82Y8",
"Source": "https://github.com/zfergus/seam-erasure/",
},
) | 0.444806 | 0.200127 |
import sys
import os
import argparse
import src.common as commonutils
"""
PyDashing CLI Parser.
---------------------
"""
class PyDashingCli(object):
"""
PyDashing CLI Parser.
"""
def __init__(self):
"""
Initialize PyDashing CLI.
"""
# Check for atleast one argument
if len(sys.argv) <= 1:
print ""
print "%s requires arguments. Use -h to seee Usage help." % \
sys.argv[0]
print ""
sys.exit()
self.namespace = self.__parse_arguments()
self.validate_arguments()
self.cliobj = self.__generate_cli_obj()
def __generate_cli_obj(self):
"""
Create a CLI dictionary object
"""
cliobj = {}
cliobj['config_file'] = self.namespace.config_file
cliobj['render_path'] = self.namespace.render_path
return cliobj
def __parse_arguments(self):
"""
Parse Arguments and return the namespace object.
"""
parser = argparse.ArgumentParser(
prog="pydashing",
description=self.show_help(),
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument("--config",
dest="config_file",
required=True,
help="Path to the configuration file.")
parser.add_argument("--render-path",
dest="render_path",
required=False,
default=commonutils.get_absolute_path_for_file(
"../rendered_dashboards"),
help="Directory where to render dashing templates.")
namespace = parser.parse_args()
return namespace
def show_help(self):
"""
Display help message with the -h option.
"""
msg = "PyDashing : A simple python utility to generate nice" + "\n" \
+ "dashboards" + "\n" \
+ "" + "\n" \
+ "Example Usage: ./pydashing.py --config ../config/simple.yaml" \
+ "\n"
return msg
def validate_arguments(self):
config_file = self.namespace.config_file
render_path = self.namespace.render_path
if not os.path.exists(config_file):
print "Invalid config file [%s]. Does not exist!" % config_file
sys.exit()
if not os.path.exists(render_path):
print "Invalid staging/render path [%s]. Does not exist!" % \
render_path
sys.exit() | testdash/src/pydashing_cli.py |
import sys
import os
import argparse
import src.common as commonutils
"""
PyDashing CLI Parser.
---------------------
"""
class PyDashingCli(object):
"""
PyDashing CLI Parser.
"""
def __init__(self):
"""
Initialize PyDashing CLI.
"""
# Check for atleast one argument
if len(sys.argv) <= 1:
print ""
print "%s requires arguments. Use -h to seee Usage help." % \
sys.argv[0]
print ""
sys.exit()
self.namespace = self.__parse_arguments()
self.validate_arguments()
self.cliobj = self.__generate_cli_obj()
def __generate_cli_obj(self):
"""
Create a CLI dictionary object
"""
cliobj = {}
cliobj['config_file'] = self.namespace.config_file
cliobj['render_path'] = self.namespace.render_path
return cliobj
def __parse_arguments(self):
"""
Parse Arguments and return the namespace object.
"""
parser = argparse.ArgumentParser(
prog="pydashing",
description=self.show_help(),
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument("--config",
dest="config_file",
required=True,
help="Path to the configuration file.")
parser.add_argument("--render-path",
dest="render_path",
required=False,
default=commonutils.get_absolute_path_for_file(
"../rendered_dashboards"),
help="Directory where to render dashing templates.")
namespace = parser.parse_args()
return namespace
def show_help(self):
"""
Display help message with the -h option.
"""
msg = "PyDashing : A simple python utility to generate nice" + "\n" \
+ "dashboards" + "\n" \
+ "" + "\n" \
+ "Example Usage: ./pydashing.py --config ../config/simple.yaml" \
+ "\n"
return msg
def validate_arguments(self):
config_file = self.namespace.config_file
render_path = self.namespace.render_path
if not os.path.exists(config_file):
print "Invalid config file [%s]. Does not exist!" % config_file
sys.exit()
if not os.path.exists(render_path):
print "Invalid staging/render path [%s]. Does not exist!" % \
render_path
sys.exit() | 0.376509 | 0.197561 |
"Entities relating to the datamap."
from enum import Enum, auto
from pathlib import Path
# pylint: disable=R0903,R0913;
from typing import IO, Dict, Optional, Union
from engine.exceptions import DatamapNotCSVException
class DatamapLineValueType(Enum):
"""A representation of a data type for us in validating data from the spreadsheet.
These are used in datamap processing and spreadsheet parsing to represent the
type of data being extracted.
"""
NUMBER = auto()
TEXT = auto()
DATE = auto()
class DatamapLine:
"""The core data structure that is configured by the user datamap.csv.
Data structure representing all cell data extracted from templates/spreadsheets.
"""
def __init__(
self,
key: str,
sheet: str,
cellref: str,
data_type: Optional[str],
filename: str,
) -> None:
self.key = key
self.sheet = sheet
self.cellref = cellref
self.data_type = data_type
self.filename = filename
def to_dict(self) -> Dict[str, Optional[str]]:
"Return the attributes as a dictionary."
return {
"key": self.key,
"sheet": self.sheet,
"cellref": self.cellref,
"data_type": self.data_type,
"filename": self.filename,
}
class DatamapFile:
"""A context manager that represents the datamap file.
Having a context manager means we can more elegantly capture the
exception with the file isn't found.
"""
def __init__(self, filepath: Union[Path, str]) -> None:
"Create the context manager"
self.filepath = filepath
def __enter__(self) -> IO[str]:
try:
# first check - is it a CSV file?
# Note: that you are able to pass in Path objects here as well as str paths.
try:
_ext = f".{self.filepath.rpartition('.')[-1]}" # type: ignore
except AttributeError:
_ext = self.filepath.suffix # type: ignore
if _ext != ".csv":
raise DatamapNotCSVException("Given datamap file is not in CSV format.")
self.f_obj = open(self.filepath, "r", encoding="utf-8")
self.f_obj.read()
self.f_obj.seek(0)
return self.f_obj
except DatamapNotCSVException:
raise
except FileNotFoundError:
raise FileNotFoundError("Cannot find {}".format(self.filepath))
except UnicodeDecodeError:
self.f_obj = open(self.filepath, "r", encoding="latin1")
return self.f_obj
def __exit__(self, mytype, value, traceback): # type: ignore
self.f_obj.close() | engine/domain/datamap.py | "Entities relating to the datamap."
from enum import Enum, auto
from pathlib import Path
# pylint: disable=R0903,R0913;
from typing import IO, Dict, Optional, Union
from engine.exceptions import DatamapNotCSVException
class DatamapLineValueType(Enum):
"""A representation of a data type for us in validating data from the spreadsheet.
These are used in datamap processing and spreadsheet parsing to represent the
type of data being extracted.
"""
NUMBER = auto()
TEXT = auto()
DATE = auto()
class DatamapLine:
"""The core data structure that is configured by the user datamap.csv.
Data structure representing all cell data extracted from templates/spreadsheets.
"""
def __init__(
self,
key: str,
sheet: str,
cellref: str,
data_type: Optional[str],
filename: str,
) -> None:
self.key = key
self.sheet = sheet
self.cellref = cellref
self.data_type = data_type
self.filename = filename
def to_dict(self) -> Dict[str, Optional[str]]:
"Return the attributes as a dictionary."
return {
"key": self.key,
"sheet": self.sheet,
"cellref": self.cellref,
"data_type": self.data_type,
"filename": self.filename,
}
class DatamapFile:
"""A context manager that represents the datamap file.
Having a context manager means we can more elegantly capture the
exception with the file isn't found.
"""
def __init__(self, filepath: Union[Path, str]) -> None:
"Create the context manager"
self.filepath = filepath
def __enter__(self) -> IO[str]:
try:
# first check - is it a CSV file?
# Note: that you are able to pass in Path objects here as well as str paths.
try:
_ext = f".{self.filepath.rpartition('.')[-1]}" # type: ignore
except AttributeError:
_ext = self.filepath.suffix # type: ignore
if _ext != ".csv":
raise DatamapNotCSVException("Given datamap file is not in CSV format.")
self.f_obj = open(self.filepath, "r", encoding="utf-8")
self.f_obj.read()
self.f_obj.seek(0)
return self.f_obj
except DatamapNotCSVException:
raise
except FileNotFoundError:
raise FileNotFoundError("Cannot find {}".format(self.filepath))
except UnicodeDecodeError:
self.f_obj = open(self.filepath, "r", encoding="latin1")
return self.f_obj
def __exit__(self, mytype, value, traceback): # type: ignore
self.f_obj.close() | 0.89706 | 0.508117 |
from collections import OrderedDict
import numpy as np
from astropy.modeling import models
from astropy.modeling.core import Model
from astropy.utils.misc import isiterable
from asdf.tags.core.ndarray import NDArrayType
from asdf_astropy.converters.transform.core import TransformConverterBase
__all__ = ['LabelMapperConverter', 'RegionsSelectorConverter']
class LabelMapperConverter(TransformConverterBase):
tags = ["tag:stsci.edu:gwcs/label_mapper-*"]
types = ["gwcs.selector.LabelMapperArray", "gwcs.selector.LabelMapperDict",
"gwcs.selector.LabelMapperRange", "gwcs.selector.LabelMapper"]
def from_yaml_tree_transform(self, node, tag, ctx):
from ..selector import (LabelMapperArray, LabelMapperDict,
LabelMapperRange, LabelMapper)
inputs_mapping = node.get('inputs_mapping', None)
if inputs_mapping is not None and not isinstance(inputs_mapping, models.Mapping):
raise TypeError("inputs_mapping must be an instance"
"of astropy.modeling.models.Mapping.")
mapper = node['mapper']
atol = node.get('atol', 1e-8)
no_label = node.get('no_label', np.nan)
if isinstance(mapper, NDArrayType):
if mapper.ndim != 2:
raise NotImplementedError("GWCS currently only supports 2D masks.")
return LabelMapperArray(mapper, inputs_mapping)
elif isinstance(mapper, Model):
inputs = node.get('inputs')
return LabelMapper(inputs, mapper, inputs_mapping=inputs_mapping, no_label=no_label)
else:
inputs = node.get('inputs', None)
if inputs is not None:
inputs = tuple(inputs)
labels = mapper.get('labels')
transforms = mapper.get('models')
if isiterable(labels[0]):
labels = [tuple(l) for l in labels]
dict_mapper = dict(zip(labels, transforms))
return LabelMapperRange(inputs, dict_mapper, inputs_mapping)
else:
dict_mapper = dict(zip(labels, transforms))
return LabelMapperDict(inputs, dict_mapper, inputs_mapping, atol=atol)
def to_yaml_tree_transform(self, model, tag, ctx):
from ..selector import (LabelMapperArray, LabelMapperDict,
LabelMapperRange, LabelMapper)
node = OrderedDict()
node['no_label'] = model.no_label
if model.inputs_mapping is not None:
node['inputs_mapping'] = model.inputs_mapping
if isinstance(model, LabelMapperArray):
node['mapper'] = model.mapper
elif isinstance(model, LabelMapper):
node['mapper'] = model.mapper
node['inputs'] = list(model.inputs)
elif isinstance(model, (LabelMapperDict, LabelMapperRange)):
if hasattr(model, 'atol'):
node['atol'] = model.atol
mapper = OrderedDict()
labels = list(model.mapper)
transforms = []
for k in labels:
transforms.append(model.mapper[k])
if isiterable(labels[0]):
labels = [list(l) for l in labels]
mapper['labels'] = labels
mapper['models'] = transforms
node['mapper'] = mapper
node['inputs'] = list(model.inputs)
else:
raise TypeError("Unrecognized type of LabelMapper - {0}".format(model))
return node
class RegionsSelectorConverter(TransformConverterBase):
tags = ["tag:stsci.edu:gwcs/regions_selector-*"]
types = ["gwcs.selector.RegionsSelector"]
def from_yaml_tree_transform(self, node, tag, ctx):
from ..selector import RegionsSelector
inputs = node['inputs']
outputs = node['outputs']
label_mapper = node['label_mapper']
undefined_transform_value = node['undefined_transform_value']
sel = node['selector']
sel = dict(zip(sel['labels'], sel['transforms']))
return RegionsSelector(inputs, outputs,
sel, label_mapper, undefined_transform_value)
def to_yaml_tree_transform(self, model, tag, ctx):
selector = OrderedDict()
node = OrderedDict()
labels = list(model.selector)
values = []
for l in labels:
values.append(model.selector[l])
selector['labels'] = labels
selector['transforms'] = values
node['inputs'] = list(model.inputs)
node['outputs'] = list(model.outputs)
node['selector'] = selector
node['label_mapper'] = model.label_mapper
node['undefined_transform_value'] = model.undefined_transform_value
return node | gwcs/converters/selector.py |
from collections import OrderedDict
import numpy as np
from astropy.modeling import models
from astropy.modeling.core import Model
from astropy.utils.misc import isiterable
from asdf.tags.core.ndarray import NDArrayType
from asdf_astropy.converters.transform.core import TransformConverterBase
__all__ = ['LabelMapperConverter', 'RegionsSelectorConverter']
class LabelMapperConverter(TransformConverterBase):
tags = ["tag:stsci.edu:gwcs/label_mapper-*"]
types = ["gwcs.selector.LabelMapperArray", "gwcs.selector.LabelMapperDict",
"gwcs.selector.LabelMapperRange", "gwcs.selector.LabelMapper"]
def from_yaml_tree_transform(self, node, tag, ctx):
from ..selector import (LabelMapperArray, LabelMapperDict,
LabelMapperRange, LabelMapper)
inputs_mapping = node.get('inputs_mapping', None)
if inputs_mapping is not None and not isinstance(inputs_mapping, models.Mapping):
raise TypeError("inputs_mapping must be an instance"
"of astropy.modeling.models.Mapping.")
mapper = node['mapper']
atol = node.get('atol', 1e-8)
no_label = node.get('no_label', np.nan)
if isinstance(mapper, NDArrayType):
if mapper.ndim != 2:
raise NotImplementedError("GWCS currently only supports 2D masks.")
return LabelMapperArray(mapper, inputs_mapping)
elif isinstance(mapper, Model):
inputs = node.get('inputs')
return LabelMapper(inputs, mapper, inputs_mapping=inputs_mapping, no_label=no_label)
else:
inputs = node.get('inputs', None)
if inputs is not None:
inputs = tuple(inputs)
labels = mapper.get('labels')
transforms = mapper.get('models')
if isiterable(labels[0]):
labels = [tuple(l) for l in labels]
dict_mapper = dict(zip(labels, transforms))
return LabelMapperRange(inputs, dict_mapper, inputs_mapping)
else:
dict_mapper = dict(zip(labels, transforms))
return LabelMapperDict(inputs, dict_mapper, inputs_mapping, atol=atol)
def to_yaml_tree_transform(self, model, tag, ctx):
from ..selector import (LabelMapperArray, LabelMapperDict,
LabelMapperRange, LabelMapper)
node = OrderedDict()
node['no_label'] = model.no_label
if model.inputs_mapping is not None:
node['inputs_mapping'] = model.inputs_mapping
if isinstance(model, LabelMapperArray):
node['mapper'] = model.mapper
elif isinstance(model, LabelMapper):
node['mapper'] = model.mapper
node['inputs'] = list(model.inputs)
elif isinstance(model, (LabelMapperDict, LabelMapperRange)):
if hasattr(model, 'atol'):
node['atol'] = model.atol
mapper = OrderedDict()
labels = list(model.mapper)
transforms = []
for k in labels:
transforms.append(model.mapper[k])
if isiterable(labels[0]):
labels = [list(l) for l in labels]
mapper['labels'] = labels
mapper['models'] = transforms
node['mapper'] = mapper
node['inputs'] = list(model.inputs)
else:
raise TypeError("Unrecognized type of LabelMapper - {0}".format(model))
return node
class RegionsSelectorConverter(TransformConverterBase):
tags = ["tag:stsci.edu:gwcs/regions_selector-*"]
types = ["gwcs.selector.RegionsSelector"]
def from_yaml_tree_transform(self, node, tag, ctx):
from ..selector import RegionsSelector
inputs = node['inputs']
outputs = node['outputs']
label_mapper = node['label_mapper']
undefined_transform_value = node['undefined_transform_value']
sel = node['selector']
sel = dict(zip(sel['labels'], sel['transforms']))
return RegionsSelector(inputs, outputs,
sel, label_mapper, undefined_transform_value)
def to_yaml_tree_transform(self, model, tag, ctx):
selector = OrderedDict()
node = OrderedDict()
labels = list(model.selector)
values = []
for l in labels:
values.append(model.selector[l])
selector['labels'] = labels
selector['transforms'] = values
node['inputs'] = list(model.inputs)
node['outputs'] = list(model.outputs)
node['selector'] = selector
node['label_mapper'] = model.label_mapper
node['undefined_transform_value'] = model.undefined_transform_value
return node | 0.699357 | 0.437643 |
import cv2 as cv
import mediapipe as mp
import time
mpDraw = mp.solutions.drawing_utils
mpFaceDetection = mp.solutions.face_detection
faceDetection = mpFaceDetection.FaceDetection(0.30)
pTime = 0
cap = cv.VideoCapture('Videos/mkbhd.mp4')
def Rescale(frame, scale=0.50):
# FOR PICTURES,VIDEO,LIVE JUST T0 MAKE IT FIT
width = int(frame.shape[1]*scale)
height = int(frame.shape[0]*scale)
dim = (width, height)
return cv.resize(frame, dim, interpolation=cv.INTER_AREA)
def drawBorders(image, bbox, len=30, thick=4):
x, y, w, h = bbox
x1, y1 = x+w, y+h
cv.line(image, (x, y), (x+len, y), (255, 0, 255), thick)
cv.line(image, (x, y), (x, y+len), (255, 0, 255), thick)
cv.line(image, (x1, y), (x1-len, y), (255, 0, 255), thick)
cv.line(image, (x1, y), (x1, y+len), (255, 0, 255), thick)
cv.line(image, (x, y1), (x+len, y1), (255, 0, 255), thick)
cv.line(image, (x, y1), (x, y1-len), (255, 0, 255), thick)
cv.line(image, (x1, y1), (x1-len, y1), (255, 0, 255), thick)
cv.line(image, (x1, y1), (x1, y1-len), (255, 0, 255), thick)
return image
while True:
istrue, frame = cap.read()
resize = Rescale(frame)
RGB = cv.cvtColor(resize, cv.COLOR_BGR2RGB)
results = faceDetection.process(RGB)
if results.detections:
for id, detection in enumerate(results.detections):
bboxc = detection.location_data.relative_bounding_box
height, width = resize.shape[:2]
bbox = int(bboxc.xmin * width), int(bboxc.ymin *height), int(bboxc.width * width), int(bboxc.height * height)
cv.rectangle(resize, bbox, (255, 0, 255), 1)
cv.putText(resize, f'{round(detection.score[0]*100)}%', (bbox[0],
bbox[1]-20), cv.FONT_HERSHEY_PLAIN, 1, (255, 0, 255), thickness=1)
resize = drawBorders(resize, bbox)
cTime = time.time()
fps = round(1/(cTime-pTime))
pTime = cTime
cv.putText(resize, f'FPS:{fps}', (20, 70),
cv.FONT_HERSHEY_PLAIN, 1, (0, 255, 0), thickness=1)
cv.imshow('video', resize)
if cv.waitKey(1) & 0xFF == ord('d'):
break | videofacedetect.py | import cv2 as cv
import mediapipe as mp
import time
mpDraw = mp.solutions.drawing_utils
mpFaceDetection = mp.solutions.face_detection
faceDetection = mpFaceDetection.FaceDetection(0.30)
pTime = 0
cap = cv.VideoCapture('Videos/mkbhd.mp4')
def Rescale(frame, scale=0.50):
# FOR PICTURES,VIDEO,LIVE JUST T0 MAKE IT FIT
width = int(frame.shape[1]*scale)
height = int(frame.shape[0]*scale)
dim = (width, height)
return cv.resize(frame, dim, interpolation=cv.INTER_AREA)
def drawBorders(image, bbox, len=30, thick=4):
x, y, w, h = bbox
x1, y1 = x+w, y+h
cv.line(image, (x, y), (x+len, y), (255, 0, 255), thick)
cv.line(image, (x, y), (x, y+len), (255, 0, 255), thick)
cv.line(image, (x1, y), (x1-len, y), (255, 0, 255), thick)
cv.line(image, (x1, y), (x1, y+len), (255, 0, 255), thick)
cv.line(image, (x, y1), (x+len, y1), (255, 0, 255), thick)
cv.line(image, (x, y1), (x, y1-len), (255, 0, 255), thick)
cv.line(image, (x1, y1), (x1-len, y1), (255, 0, 255), thick)
cv.line(image, (x1, y1), (x1, y1-len), (255, 0, 255), thick)
return image
while True:
istrue, frame = cap.read()
resize = Rescale(frame)
RGB = cv.cvtColor(resize, cv.COLOR_BGR2RGB)
results = faceDetection.process(RGB)
if results.detections:
for id, detection in enumerate(results.detections):
bboxc = detection.location_data.relative_bounding_box
height, width = resize.shape[:2]
bbox = int(bboxc.xmin * width), int(bboxc.ymin *height), int(bboxc.width * width), int(bboxc.height * height)
cv.rectangle(resize, bbox, (255, 0, 255), 1)
cv.putText(resize, f'{round(detection.score[0]*100)}%', (bbox[0],
bbox[1]-20), cv.FONT_HERSHEY_PLAIN, 1, (255, 0, 255), thickness=1)
resize = drawBorders(resize, bbox)
cTime = time.time()
fps = round(1/(cTime-pTime))
pTime = cTime
cv.putText(resize, f'FPS:{fps}', (20, 70),
cv.FONT_HERSHEY_PLAIN, 1, (0, 255, 0), thickness=1)
cv.imshow('video', resize)
if cv.waitKey(1) & 0xFF == ord('d'):
break | 0.404978 | 0.322846 |
from django.core.management.base import BaseCommand
from cantusdata.models.folio import Folio
from cantusdata.models.manuscript import Manuscript
from django.core.management import call_command
from optparse import make_option
import csv
class Command(BaseCommand):
"""
Import a folio mapping (CSV file)
Save that mapping to both django and Solr (through signals)
Usage: See 'help' below
"""
def add_arguments(self, parser):
parser.add_argument("args", nargs=2)
parser.add_argument(
"--no-refresh",
action="store_false",
dest="refresh",
default=True,
help="Do not refresh Solr after the import",
)
help = (
"Usage: ./manage.py import_folio_mapping <manuscript_id> <mapping_csv_file> [<manuscript2_id> <mapping_csv_file2> ...]"
'\n\tNote that csv files must be in the folder "data_dumps/folio_mapping/"'
)
def handle(self, *args, **options):
if len(args) == 0 or len(args) % 2 == 1:
self.stdout.write(self.help)
return
manuscripts = []
for index, arg in enumerate(args):
if index % 2 == 0:
temp_manuscript = {"id": arg}
else:
temp_manuscript["file"] = arg
manuscripts.append(temp_manuscript)
for manuscript in manuscripts:
manuscript_id = manuscript["id"]
input_file = manuscript["file"]
try:
manuscript = Manuscript.objects.get(id=manuscript_id)
except IOError:
raise IOError(
"Manuscript {0} does not exist".format(manuscript_id)
)
try:
mapping_csv = csv.DictReader(
open(
"data_dumps/folio_mapping/{0}".format(input_file), "rU"
)
)
except IOError:
raise IOError(
"File data_dumps/folio_mapping/{0} does not exist".format(
input_file
)
)
self.stdout.write(
"Starting import process for manuscript {0}".format(
manuscript_id
)
)
for index, row in enumerate(mapping_csv):
folio = row["folio"]
uri = row["uri"]
# Save in the Django DB
try:
folio_obj = Folio.objects.get(
number=folio, manuscript__id=manuscript_id
)
except Folio.DoesNotExist:
# If no folio is found, create one
folio_obj = Folio()
folio_obj.number = folio
folio_obj.manuscript = manuscript
folio_obj.image_uri = uri
folio_obj.save()
if index > 0 and index % 50 == 0:
self.stdout.write("Imported {0} folios".format(index))
self.stdout.write(
"All folios of manuscript {0} have been imported".format(
manuscript_id
)
)
# Refreshing Solr chants is necessary since chants have a field image_uri
# which is used when clicking on a search result
if options["refresh"]:
self.stdout.write("Refreshing Solr chants after folio import")
call_command(
"refresh_solr",
"chants",
*[str(man["id"]) for man in manuscripts]
)
else:
self.stdout.write(
"Import process completed. To refresh Solr,"
"use './manage.py refresh_solr chants [manuscript_id ...]'"
) | public/cantusdata/management/commands/import_folio_mapping.py | from django.core.management.base import BaseCommand
from cantusdata.models.folio import Folio
from cantusdata.models.manuscript import Manuscript
from django.core.management import call_command
from optparse import make_option
import csv
class Command(BaseCommand):
"""
Import a folio mapping (CSV file)
Save that mapping to both django and Solr (through signals)
Usage: See 'help' below
"""
def add_arguments(self, parser):
parser.add_argument("args", nargs=2)
parser.add_argument(
"--no-refresh",
action="store_false",
dest="refresh",
default=True,
help="Do not refresh Solr after the import",
)
help = (
"Usage: ./manage.py import_folio_mapping <manuscript_id> <mapping_csv_file> [<manuscript2_id> <mapping_csv_file2> ...]"
'\n\tNote that csv files must be in the folder "data_dumps/folio_mapping/"'
)
def handle(self, *args, **options):
if len(args) == 0 or len(args) % 2 == 1:
self.stdout.write(self.help)
return
manuscripts = []
for index, arg in enumerate(args):
if index % 2 == 0:
temp_manuscript = {"id": arg}
else:
temp_manuscript["file"] = arg
manuscripts.append(temp_manuscript)
for manuscript in manuscripts:
manuscript_id = manuscript["id"]
input_file = manuscript["file"]
try:
manuscript = Manuscript.objects.get(id=manuscript_id)
except IOError:
raise IOError(
"Manuscript {0} does not exist".format(manuscript_id)
)
try:
mapping_csv = csv.DictReader(
open(
"data_dumps/folio_mapping/{0}".format(input_file), "rU"
)
)
except IOError:
raise IOError(
"File data_dumps/folio_mapping/{0} does not exist".format(
input_file
)
)
self.stdout.write(
"Starting import process for manuscript {0}".format(
manuscript_id
)
)
for index, row in enumerate(mapping_csv):
folio = row["folio"]
uri = row["uri"]
# Save in the Django DB
try:
folio_obj = Folio.objects.get(
number=folio, manuscript__id=manuscript_id
)
except Folio.DoesNotExist:
# If no folio is found, create one
folio_obj = Folio()
folio_obj.number = folio
folio_obj.manuscript = manuscript
folio_obj.image_uri = uri
folio_obj.save()
if index > 0 and index % 50 == 0:
self.stdout.write("Imported {0} folios".format(index))
self.stdout.write(
"All folios of manuscript {0} have been imported".format(
manuscript_id
)
)
# Refreshing Solr chants is necessary since chants have a field image_uri
# which is used when clicking on a search result
if options["refresh"]:
self.stdout.write("Refreshing Solr chants after folio import")
call_command(
"refresh_solr",
"chants",
*[str(man["id"]) for man in manuscripts]
)
else:
self.stdout.write(
"Import process completed. To refresh Solr,"
"use './manage.py refresh_solr chants [manuscript_id ...]'"
) | 0.402744 | 0.184143 |
adjList=[
[26, 1],
[28, 0],
[29, 3],
[43, 2],
[31, 5],
[69, 6, 4],
[70, 5],
[97, 8],
[45, 9, 7],
[46, 8],
[58, 11],
[36, 10],
[37, 13],
[39, 14, 12],
[61, 15, 13],
[41, 14],
[71],
[47, 18],
[48, 17],
[20],
[50, 21, 19],
[20],
[52, 23],
[54, 24, 22],
[56, 25, 23],
[24],
[190, 0, 27],
[92, 26],
[1, 29],
[2, 28],
[31],
[4, 30],
[95, 33],
[68, 32],
[116, 35],
[76, 34],
[11, 37],
[12, 36],
[73, 39],
[13, 38],
[41],
[15, 40],
[74, 43],
[3, 42],
[77, 45],
[8, 44],
[9, 47],
[17, 46],
[18, 49],
[79, 48],
[20, 51],
[101, 52, 50],
[22, 51],
[82, 54],
[84, 23, 53],
[56],
[24, 55],
[86, 58],
[10, 59, 57],
[58],
[61],
[14, 60],
[89, 63],
[90, 62],
[93, 65],
[109, 64],
[112, 67],
[94, 66],
[33, 69],
[5, 68],
[115, 6, 71],
[16, 70],
[106, 73],
[38, 72],
[42, 75],
[111, 74],
[35, 77],
[44, 76],
[98],
[49, 80],
[126, 79],
[129, 82],
[53, 81],
[131, 84],
[54, 83],
[133, 86],
[57, 85],
[105],
[120, 89],
[62, 88],
[63, 91],
[123, 90],
[27, 93],
[64, 92],
[67, 95],
[32, 94],
[124, 97],
[7, 96],
[78, 99],
[148, 100, 98],
[151, 99],
[51, 102],
[101],
[119],
[136, 105],
[87, 106, 104],
[72, 105],
[173, 108],
[138, 107],
[65, 110],
[142, 109],
[75, 112],
[66, 111],
[146],
[147, 115],
[70, 114],
[34, 117],
[195, 116],
[135, 119],
[103, 118],
[139, 88, 121],
[120],
[152, 123],
[91, 122],
[96, 125],
[158, 124],
[80, 127],
[162, 126],
[163, 129],
[81, 128],
[165, 131],
[83, 130],
[167, 133],
[85, 132],
[135],
[169, 118, 134],
[104, 137],
[172, 136],
[108, 139],
[120, 138],
[191, 141],
[177, 140],
[110, 143],
[210, 142],
[181],
[182, 146],
[113, 147, 145],
[114, 146],
[99, 149],
[217, 148],
[196, 151],
[100, 150],
[122, 153],
[188, 152],
[228, 155],
[192, 156, 154],
[155],
[193, 158],
[125, 157],
[264, 160],
[194, 159],
[197, 162],
[127, 163, 161],
[128, 162],
[165],
[130, 166, 164],
[165],
[132, 168],
[203, 167],
[135, 170],
[205, 169],
[219, 172],
[137, 171],
[186, 107, 174],
[173],
[207, 176],
[222, 175],
[141, 178],
[225, 177],
[208, 180],
[179],
[144, 182],
[145, 181],
[213, 184],
[227, 183],
[221, 186],
[173, 185],
[223, 188],
[153, 189, 187],
[256, 188],
[26, 191],
[140, 190],
[155, 193],
[157, 192],
[160, 195],
[117, 194],
[150, 197],
[161, 196],
[246, 199],
[230, 198],
[231, 201],
[266, 200],
[233, 203],
[168, 202],
[235, 205],
[170, 204],
[236, 207],
[175, 206],
[238, 179, 209],
[239, 208],
[143, 211],
[241, 210],
[242, 213],
[226, 183, 212],
[262, 215],
[263, 214],
[244, 217],
[149, 216],
[249, 219],
[171, 218],
[251, 221],
[185, 220],
[176, 223],
[187, 222],
[257, 225],
[178, 224],
[213],
[184, 228],
[154, 229, 227],
[228],
[199, 231],
[247, 200, 230],
[267, 233],
[202, 232],
[269, 235],
[270, 204, 234],
[206, 237],
[272, 236],
[208],
[274, 209, 240],
[239],
[211, 242],
[212, 241],
[265, 244],
[216, 243],
[280, 246],
[198, 245],
[231, 248],
[315, 247],
[218, 250],
[284, 251, 249],
[220, 250],
[321, 253],
[286, 252],
[255],
[324, 256, 254],
[189, 255],
[224, 258],
[287, 257],
[290, 260],
[291, 259],
[292, 262],
[214, 261],
[294, 215, 264],
[159, 263],
[243],
[201, 267],
[232, 266],
[297, 269],
[234, 268],
[235, 271],
[270],
[237, 273],
[322, 272],
[304, 239, 275],
[307, 276, 274],
[308, 275],
[333, 278],
[309, 277],
[310, 280],
[245, 281, 279],
[280],
[314, 283],
[334, 282],
[250, 285],
[320, 284],
[253],
[258, 288],
[326, 287],
[339, 290],
[259, 289],
[260, 292],
[261, 291],
[329, 294],
[263, 295, 293],
[294],
[346, 297],
[268, 296],
[348, 299],
[349, 298],
[337, 301],
[351, 300],
[355, 303],
[302],
[274, 305],
[304],
[403, 307],
[275, 306],
[276],
[278, 310],
[341, 279, 309],
[378, 312],
[342, 311],
[344, 314],
[282, 313],
[248, 316],
[345, 315],
[350, 318],
[414, 317],
[320],
[285, 321, 319],
[252, 320],
[273, 323],
[353, 322],
[255, 325],
[324],
[288, 327],
[359, 326],
[329],
[293, 328],
[362, 331],
[374, 330],
[375, 333],
[277, 332],
[283, 335],
[364, 334],
[368, 337],
[300, 336],
[357],
[289, 340],
[372, 339],
[310],
[312, 343],
[380, 344, 342],
[313, 343],
[316, 346],
[296, 345],
[384, 348],
[298, 347],
[299, 350],
[317, 349],
[301, 352],
[402, 351],
[323, 354],
[388, 353],
[302, 356],
[390, 357, 355],
[338, 358, 356],
[391, 357],
[327, 360],
[393, 359],
[394, 362],
[330, 361],
[364],
[381, 335, 363],
[415, 366],
[437, 365],
[438, 368],
[336, 367],
[404, 370],
[463, 371, 369],
[370],
[340, 373],
[406, 372],
[331, 375],
[332, 374],
[427, 377],
[376],
[311, 379],
[432, 378],
[343],
[364, 382],
[408, 381],
[409, 384],
[347, 383],
[411, 386],
[412, 385],
[460, 388],
[354, 387],
[551, 390],
[356, 389],
[358],
[421, 393],
[360, 392],
[361, 395],
[424, 394],
[429, 397],
[430, 396],
[434, 399],
[435, 398],
[417],
[459, 402],
[352, 401],
[442, 306, 404],
[369, 403],
[444, 406],
[373, 407, 405],
[446, 406],
[382, 409],
[383, 408],
[450, 411],
[385, 410],
[386, 413],
[452, 412],
[318, 415],
[365, 414],
[457, 417],
[458, 400, 416],
[532, 419],
[461, 418],
[462, 421],
[392, 420],
[464, 423],
[502, 422],
[395, 425],
[466, 424],
[491, 427],
[376, 426],
[467, 429],
[396, 428],
[397, 431],
[447, 432, 430],
[379, 431],
[476, 434],
[398, 433],
[399, 436],
[470, 435],
[455, 366, 438],
[367, 437],
[518, 440],
[600, 439],
[442],
[403, 443, 441],
[521, 442],
[405, 445],
[473, 444],
[407],
[431],
[478, 449],
[512, 448],
[410, 451],
[480, 450],
[413, 453],
[482, 452],
[483, 455],
[437, 454],
[457],
[416, 456],
[417, 459],
[401, 460, 458],
[486, 387, 459],
[419, 462],
[420, 461],
[370, 464],
[422, 463],
[489, 466],
[425, 465],
[428, 468],
[508, 467],
[495, 470],
[436, 469],
[520, 472],
[499, 471],
[445, 474],
[506, 473],
[509, 476],
[433, 475],
[511, 478],
[448, 477],
[513, 480],
[451, 479],
[515, 482],
[453, 481],
[454, 484],
[496, 483],
[486],
[460, 485],
[533, 488],
[519, 487],
[465, 490],
[523, 489],
[524, 426, 492],
[525, 491],
[528, 494],
[542, 495, 493],
[469, 494],
[484],
[581, 498],
[530, 497],
[472, 500],
[535, 499],
[536, 502],
[423, 503, 501],
[538, 502],
[539, 505],
[591, 504],
[554, 474, 507],
[506],
[468, 509],
[475, 508],
[543, 511],
[477, 510],
[449, 513],
[479, 514, 512],
[513],
[481, 516],
[548, 515],
[583, 518],
[439, 517],
[488, 520],
[471, 519],
[443],
[555, 523],
[490, 522],
[491],
[574, 492, 526],
[556, 525],
[559, 528],
[493, 527],
[549],
[498, 531],
[565, 530],
[418, 533],
[487, 532],
[566, 535],
[500, 534],
[501, 537],
[569, 536],
[503, 539],
[504, 538],
[572, 541],
[573, 540],
[494],
[510, 544],
[575, 543],
[546],
[578, 545],
[609, 548],
[610, 516, 547],
[529, 550],
[580, 549],
[389, 552],
[586, 551],
[588],
[506, 555],
[522, 554],
[526, 557],
[593, 556],
[659],
[527, 560],
[595, 559],
[672, 562],
[608, 561],
[564],
[597, 565, 563],
[531, 564],
[534, 567],
[621, 566],
[601, 569],
[537, 568],
[654, 571],
[627, 572, 570],
[540, 571],
[541, 574],
[525, 573],
[544, 576],
[606, 575],
[607, 578],
[546, 579, 577],
[633, 578],
[550, 581],
[497, 580],
[583],
[635, 517, 582],
[599],
[614, 586],
[552, 585],
[616, 588],
[618, 553, 587],
[622],
[623, 591],
[505, 590],
[630, 593],
[557, 592],
[641, 595],
[560, 594],
[611],
[564, 598],
[634, 597],
[636, 584, 600],
[440, 599],
[568, 602],
[639, 601],
[625, 604],
[626, 603],
[642, 606],
[576, 607, 605],
[577, 606],
[562, 609],
[547, 608],
[548, 611],
[596, 612, 610],
[643, 611],
[644],
[699, 585, 615],
[645, 614],
[587, 617],
[646, 616],
[647, 588, 619],
[681, 618],
[648, 621],
[567, 620],
[650, 589, 623],
[590, 622],
[640, 625],
[652, 603, 624],
[604],
[571, 628],
[668, 627],
[630],
[592, 631, 629],
[657, 630],
[633],
[579, 632],
[598, 635],
[583, 634],
[599, 637],
[667, 636],
[684, 639],
[602, 638],
[624],
[594, 642],
[605, 641],
[612, 644],
[674, 613, 643],
[615, 646],
[617, 645],
[618],
[620, 649],
[683, 648],
[622, 651],
[687, 650],
[700, 625, 653],
[689, 652],
[570, 655],
[691, 654],
[657],
[631, 658, 656],
[724, 659, 657],
[558, 658],
[661],
[695, 660],
[663],
[761, 664, 662],
[663],
[675],
[697, 667],
[637, 666],
[628, 669],
[701, 668],
[707, 671],
[696, 672, 670],
[561, 671],
[712],
[644, 675],
[665, 676, 674],
[727, 675],
[678],
[713, 677],
[747, 680],
[715, 679],
[716, 619, 682],
[717, 681],
[649, 684],
[638, 683],
[686],
[720, 685],
[651, 688],
[722, 687],
[653, 690],
[731, 691, 689],
[655, 690],
[704],
[694],
[775, 695, 693],
[661, 694],
[671],
[666, 698],
[745, 697],
[614],
[652],
[669, 702],
[735, 701],
[723],
[737, 692, 705],
[738, 704],
[741, 707],
[759, 670, 706],
[743, 709],
[777, 708],
[762, 711],
[763, 712, 710],
[673, 711],
[765, 678, 714],
[713],
[680, 716],
[681, 715],
[682, 718],
[753, 717],
[839, 720],
[755, 686, 719],
[756, 722],
[688, 721],
[703, 724],
[736, 658, 723],
[764, 726],
[778, 725],
[676, 728],
[779, 727],
[769, 730],
[770, 729],
[690, 732],
[757, 733, 731],
[732],
[773, 735],
[702, 734],
[724, 737],
[704, 736],
[705, 739],
[774, 738],
[790, 741],
[706, 740],
[851, 743],
[708, 742],
[780, 745],
[698, 744],
[810, 747],
[679, 746],
[812, 749],
[782, 748],
[783, 751],
[784, 750],
[785, 753],
[718, 754, 752],
[753],
[720, 756],
[721, 755],
[732],
[786],
[707, 760],
[809, 759],
[663, 762],
[710, 761],
[794, 711, 764],
[725, 763],
[713, 766],
[797, 765],
[802, 768],
[816, 769, 767],
[729, 768],
[730, 771],
[804, 770],
[806, 773],
[734, 772],
[739, 775],
[807, 694, 774],
[825, 777],
[709, 776],
[726, 779],
[831, 728, 778],
[744, 781],
[835, 780],
[749, 783],
[750, 782],
[751, 785],
[752, 784],
[848, 758, 787],
[819, 786],
[820, 789],
[821, 788],
[740, 791],
[823, 790],
[826, 793],
[827, 792],
[763, 795],
[794],
[832, 797],
[833, 766, 796],
[837, 799],
[838, 798],
[840],
[841, 802],
[842, 767, 801],
[843, 804],
[771, 803],
[844, 806],
[772, 805],
[775, 808],
[849, 807],
[760],
[746, 811],
[854, 810],
[748, 813],
[855, 812],
[856, 815],
[814],
[768, 817],
[858, 816],
[847],
[787, 820],
[788, 819],
[789, 822],
[861, 821],
[791, 824],
[875, 823],
[776, 826],
[792, 825],
[793, 828],
[865, 827],
[866, 830],
[899, 829],
[779, 832],
[796, 831],
[797],
[867, 835],
[781, 834],
[886, 837],
[798, 836],
[799, 839],
[719, 838],
[871, 800, 841],
[801, 840],
[802],
[803, 844],
[805, 843],
[872, 846],
[845],
[892, 818, 848],
[786, 847],
[862, 808, 850],
[849],
[742, 852],
[878, 851],
[915, 854],
[811, 853],
[813, 856],
[814, 855],
[889, 858],
[817, 857],
[893, 860],
[907, 859],
[822, 862],
[849, 861],
[880, 864],
[957, 863],
[828, 866],
[829, 865],
[834, 868],
[928, 867],
[902],
[904, 871],
[840, 870],
[845, 873],
[934, 872],
[908],
[824, 876],
[911, 875],
[923, 878],
[852, 877],
[880],
[913, 863, 879],
[994, 882],
[900, 881],
[916, 884],
[917, 883],
[918, 886],
[836, 885],
[945],
[919, 889],
[857, 888],
[920, 891],
[922, 890],
[847, 893],
[859, 892],
[939, 895],
[986, 894],
[924, 897],
[940, 896],
[925, 899],
[830, 898],
[960, 882, 901],
[900],
[869, 903],
[967, 902],
[870, 905],
[944, 904],
[936, 907],
[860, 906],
[949, 874, 909],
[938, 908],
[951, 911],
[876, 910],
[941, 913],
[880, 912],
[975],
[942, 853, 916],
[883, 915],
[884, 918],
[885, 917],
[888, 920],
[890, 919],
[970, 922],
[891, 921],
[877, 924],
[896, 923],
[898, 926],
[958, 925],
[961, 928],
[868, 927],
[962],
[965, 931],
[966, 930],
[969],
[934],
[972, 873, 935, 933],
[974, 934],
[906, 937],
[981, 936],
[909, 939],
[894, 938],
[897, 941],
[912, 940],
[915, 943],
[999, 942],
[905, 945],
[887, 946, 944],
[978, 945],
[979, 948],
[1005, 947],
[982, 908, 950],
[983, 949],
[910, 952],
[988, 953, 951],
[989, 952],
[990, 955],
[954],
[992, 957],
[864, 956],
[926, 959],
[993, 958],
[900, 961],
[927, 960],
[1000, 929, 963],
[962],
[1002, 965],
[1015, 930, 964],
[931, 967],
[903, 968, 966],
[977, 969, 967],
[932, 968],
[921, 971],
[1017, 970],
[934],
[1008, 974],
[935, 973],
[1013, 914, 976],
[975],
[968],
[946, 979],
[947, 978],
[1019, 981],
[937, 980],
[949],
[950, 984],
[1021, 983],
[986],
[895, 985],
[1024, 988],
[952, 987],
[1011, 953, 990],
[954, 989],
[1026, 992],
[956, 991],
[959, 994],
[881, 993],
[1031, 996],
[1032, 995],
[1033],
[1035, 999],
[943, 998],
[962, 1001],
[1038, 1002, 1000],
[964, 1001],
[1072, 1004],
[1041, 1003],
[948, 1006],
[1045, 1005],
[1091, 1008],
[973, 1009, 1007],
[1049, 1008],
[1046, 1011],
[989, 1010],
[1028],
[1080, 975, 1014],
[1061, 1013],
[965, 1016],
[1070, 1015],
[971, 1018],
[1048, 1017],
[980, 1020],
[1051, 1019],
[984, 1022],
[1054, 1021],
[1055, 1024],
[987, 1023],
[1098, 1026],
[991, 1025],
[1028],
[1079, 1012, 1027],
[1062, 1030],
[1083, 1031, 1029],
[995, 1030],
[996, 1033],
[997, 1034, 1032],
[1064, 1033],
[998, 1036],
[1085, 1037, 1035],
[1036],
[1001, 1039],
[1066, 1038],
[1105, 1041],
[1073, 1004, 1040],
[1074, 1043],
[1075, 1042],
[1076, 1045],
[1006, 1044],
[1010],
[1088, 1048],
[1018, 1047],
[1009, 1050],
[1093, 1049],
[1020, 1052],
[1095, 1051],
[1096, 1054],
[1022, 1053],
[1023, 1056],
[1110, 1055],
[1144, 1058],
[1097, 1057],
[1132, 1060],
[1112, 1059],
[1014, 1062],
[1029, 1061],
[1120, 1064],
[1034, 1063],
[1087, 1066],
[1039, 1065],
[1100, 1068],
[1101, 1067],
[1102, 1070],
[1016, 1069],
[1104, 1072],
[1003, 1071],
[1041, 1074],
[1042, 1073],
[1043],
[1044, 1077],
[1108, 1076],
[1113, 1079],
[1114, 1028, 1078],
[1013, 1081],
[1116, 1080],
[1118, 1083],
[1030, 1082],
[1122],
[1036, 1086],
[1123, 1085],
[1065],
[1047, 1089],
[1124, 1088],
[1091],
[1007, 1092, 1090],
[1091],
[1050, 1094],
[1126, 1093],
[1052, 1096],
[1129, 1053, 1095],
[1058, 1098],
[1025, 1097],
[1156, 1100],
[1067, 1099],
[1068, 1102],
[1203, 1069, 1101],
[1133, 1104],
[1071, 1103],
[1040, 1106],
[1136, 1105],
[1170, 1108],
[1077, 1107],
[1130],
[1056, 1111],
[1143, 1110],
[1060, 1113],
[1078, 1112],
[1147, 1079, 1115],
[1114],
[1081, 1117],
[1150, 1116],
[1082, 1119],
[1164, 1118],
[1063, 1121],
[1153, 1120],
[1180, 1084, 1123],
[1086, 1122],
[1089, 1125],
[1160, 1124],
[1094, 1127],
[1254, 1128, 1126],
[1171, 1127],
[1096, 1130],
[1174, 1109, 1129],
[1163, 1132],
[1059, 1131],
[1103, 1134],
[1167, 1133],
[1247, 1136],
[1106, 1135],
[1187, 1138],
[1169, 1137],
[1140],
[1237, 1139],
[1189, 1142],
[1191, 1141],
[1111, 1144],
[1057, 1143],
[1146],
[1175, 1145],
[1114, 1148],
[1209, 1147],
[1177],
[1117, 1151],
[1197, 1150],
[1153],
[1166, 1121, 1152],
[1181, 1155],
[1183, 1154],
[1099, 1157],
[1185, 1156],
[1168],
[1219, 1160],
[1125, 1159],
[1192],
[1195, 1163],
[1131, 1162],
[1119, 1165],
[1199, 1164],
[1153],
[1134, 1168],
[1277, 1158, 1167],
[1138, 1170],
[1107, 1169],
[1128, 1172],
[1207, 1171],
[1208, 1174],
[1130, 1173],
[1262, 1146, 1176],
[1175],
[1149, 1178],
[1210, 1179, 1177],
[1265, 1178],
[1122, 1181],
[1154, 1180],
[1183],
[1201, 1155, 1182],
[1215, 1185],
[1157, 1184],
[1234, 1187],
[1137, 1186],
[1220, 1189],
[1141, 1188],
[1238, 1191],
[1142, 1190],
[1161, 1193],
[1223, 1194, 1192],
[1224, 1193],
[1162, 1196],
[1227, 1195],
[1151, 1198],
[1231, 1197],
[1165, 1200],
[1211, 1199],
[1183, 1202],
[1233, 1201],
[1102, 1204],
[1203],
[1218, 1206],
[1236, 1205],
[1172, 1208],
[1173, 1207],
[1148, 1210],
[1178, 1209],
[1242, 1200, 1212],
[1395, 1211],
[1214],
[1267, 1213],
[1184, 1216],
[1244, 1215],
[1218],
[1250, 1205, 1217],
[1159, 1220],
[1188, 1221, 1219],
[1271, 1220],
[1274, 1223],
[1193, 1222],
[1194, 1225],
[1259, 1224],
[1260, 1227],
[1196, 1226],
[1263, 1229],
[1264, 1228],
[1291, 1231],
[1198, 1230],
[1268, 1233],
[1202, 1232],
[1186, 1235],
[1279, 1234],
[1251, 1206, 1237],
[1140, 1236],
[1190, 1239],
[1273, 1238],
[1255],
[1292, 1242],
[1211, 1241],
[1312, 1244],
[1270, 1216, 1243],
[1330, 1246],
[1314, 1245],
[1135, 1248],
[1299, 1247],
[1280, 1250],
[1218, 1249],
[1236],
[1302, 1253],
[1303, 1252],
[1127, 1255],
[1240, 1256, 1254],
[1308, 1255],
[1309, 1258],
[1320, 1257],
[1225, 1260],
[1226, 1259],
[1262],
[1175, 1263, 1261],
[1228, 1262],
[1229, 1265],
[1179, 1264],
[1294, 1267],
[1214, 1266],
[1232, 1269],
[1297, 1268],
[1244],
[1221],
[1306, 1273],
[1239, 1272],
[1222, 1275],
[1339, 1274],
[1327],
[1168, 1278],
[1316, 1277],
[1235, 1280],
[1334, 1249, 1281, 1279],
[1280],
[1340, 1283],
[1322, 1282],
[1323, 1285],
[1324, 1284],
[1325, 1287],
[1341, 1286],
[1342, 1289],
[1343, 1288],
[1360, 1291],
[1230, 1290],
[1241, 1293],
[1329, 1292],
[1266, 1295],
[1346, 1294],
[1429, 1297],
[1269, 1296],
[1331],
[1248, 1300],
[1333, 1299],
[1336, 1302],
[1252, 1301],
[1317, 1253, 1304],
[1337, 1303],
[1338, 1306],
[1272, 1305],
[1319],
[1256, 1309],
[1257, 1308],
[1344, 1311],
[1359, 1310],
[1348, 1243, 1313],
[1376, 1312],
[1246, 1315],
[1349, 1316, 1314],
[1278, 1315],
[1303],
[1368, 1319],
[1353, 1307, 1318],
[1258, 1321],
[1370, 1320],
[1283, 1323],
[1284, 1322],
[1285, 1325],
[1286, 1324],
[1373, 1327],
[1276, 1326],
[1362, 1329],
[1293, 1328],
[1245, 1331],
[1377, 1298, 1332, 1330],
[1331],
[1300, 1334],
[1280, 1333],
[1364, 1336],
[1301, 1335],
[1304, 1338],
[1305, 1337],
[1401, 1275, 1340],
[1282, 1339],
[1287, 1342],
[1288, 1341],
[1289, 1344],
[1310, 1343],
[1407, 1346],
[1295, 1345],
[1374, 1348],
[1312, 1347],
[1315, 1350],
[1380, 1349],
[1398, 1352],
[1384, 1351],
[1319, 1354],
[1387, 1353],
[1388],
[1371, 1357],
[1404, 1358, 1356],
[1389, 1357],
[1311, 1360],
[1372, 1290, 1359],
[1392, 1362],
[1328, 1361],
[1381],
[1335, 1365],
[1397, 1364],
[1433, 1367],
[1399, 1368, 1366],
[1318, 1367],
[1420, 1370],
[1321, 1369],
[1356],
[1360, 1373],
[1326, 1372],
[1347, 1375],
[1396, 1374],
[1313, 1377],
[1331, 1376],
[1412, 1379],
[1452, 1378],
[1350, 1381],
[1363, 1382, 1380],
[1415, 1381],
[1416],
[1352, 1385],
[1418, 1384],
[1387],
[1354, 1388, 1386],
[1355, 1387],
[1358, 1390],
[1424, 1391, 1389],
[1390],
[1361, 1393],
[1427, 1392],
[1395],
[1514, 1212, 1394],
[1375],
[1365, 1398],
[1351, 1397],
[1367, 1400],
[1435, 1399],
[1339, 1402],
[1440, 1401],
[1441, 1404],
[1357, 1403],
[1462, 1406],
[1444, 1405],
[1345, 1408],
[1447, 1407],
[1449, 1410],
[1450, 1409],
[1481, 1412],
[1378, 1411],
[1453, 1414],
[1454, 1413],
[1382, 1416],
[1456, 1383, 1415],
[1463, 1418],
[1385, 1417],
[1468, 1420],
[1369, 1419],
[1438],
[1459],
[1460, 1424],
[1390, 1423],
[1473, 1426],
[1461, 1425],
[1393, 1428],
[1477, 1427],
[1296, 1430],
[1496, 1429],
[1432],
[1523, 1431],
[1366, 1434],
[1466, 1433],
[1400, 1436],
[1467, 1435],
[1470, 1438],
[1488, 1421, 1439, 1437],
[1438],
[1402, 1441],
[1403, 1440],
[1530, 1443],
[1472, 1442],
[1406, 1445],
[1476, 1444],
[1604, 1447],
[1408, 1446],
[1478, 1449],
[1409, 1448],
[1410, 1451],
[1480, 1450],
[1379, 1453],
[1413, 1452],
[1414, 1455],
[1501, 1454],
[1416, 1457],
[1484, 1456],
[1485],
[1493, 1422, 1460],
[1423, 1459],
[1426, 1462],
[1405, 1461],
[1417, 1464],
[1525, 1463],
[1502, 1466],
[1434, 1465],
[1436, 1468],
[1419, 1467],
[1508, 1470],
[1437, 1469],
[1492],
[1443, 1473],
[1425, 1472],
[1512, 1475],
[1513, 1474],
[1445, 1477],
[1428, 1476],
[1448, 1479],
[1516, 1478],
[1451, 1481],
[1411, 1480],
[1497],
[1521, 1484],
[1457, 1483],
[1458, 1486],
[1536, 1485],
[1505],
[1438, 1489],
[1560, 1490, 1488],
[1489],
[1528, 1492],
[1471, 1493, 1491],
[1459, 1494, 1492],
[1543, 1493],
[1532, 1496],
[1430, 1495],
[1482, 1498],
[1554, 1499, 1497],
[1498],
[1535, 1501],
[1455, 1500],
[1465, 1503],
[1558, 1502],
[1537, 1505],
[1487, 1506, 1504],
[1538, 1505],
[1539, 1508],
[1469, 1507],
[1566, 1510],
[1509],
[1568, 1512],
[1474, 1511],
[1475, 1514],
[1395, 1513],
[1549, 1516],
[1479, 1515],
[1518],
[1552, 1519, 1517],
[1572, 1520, 1518],
[1553, 1519],
[1483, 1522],
[1555, 1523, 1521],
[1432, 1524, 1522],
[1523],
[1464, 1526],
[1557, 1525],
[1561, 1528],
[1491, 1529, 1527],
[1528],
[1442, 1531],
[1565, 1530],
[1495, 1533],
[1571, 1532],
[1574, 1535],
[1500, 1534],
[1486, 1537],
[1504, 1536],
[1506, 1539],
[1507, 1540, 1538],
[1579, 1539],
[1580],
[1543],
[1494, 1542],
[1545],
[1583, 1544],
[1584, 1547],
[1585, 1548, 1546],
[1547],
[1515, 1550],
[1608, 1549],
[1636, 1552],
[1518, 1551],
[1520, 1554],
[1498, 1553],
[1522],
[1612, 1557],
[1526, 1556],
[1503, 1559],
[1593, 1558],
[1489, 1561],
[1581, 1527, 1560],
[1599, 1563],
[1654, 1562],
[1618, 1565],
[1531, 1564],
[1601, 1509, 1567],
[1657, 1566],
[1511, 1569],
[1603, 1568],
[1605, 1571],
[1533, 1570],
[1519, 1573],
[1624, 1572],
[1534, 1575],
[1626, 1574],
[1629, 1577],
[1611, 1576],
[1579],
[1540, 1580, 1578],
[1541, 1579],
[1561, 1582],
[1581],
[1545, 1584],
[1546, 1583],
[1547, 1586],
[1663, 1585],
[1625, 1588],
[1638, 1587],
[1627, 1590],
[1628, 1589],
[1643],
[1630, 1593],
[1559, 1592],
[1631, 1595],
[1645, 1594],
[1646, 1597],
[1647, 1596],
[1652, 1599],
[1562, 1598],
[1632, 1601],
[1566, 1600],
[1658, 1603],
[1569, 1602],
[1634, 1446, 1605],
[1570, 1604],
[1607],
[1635, 1608, 1606],
[1550, 1607],
[1639, 1610],
[1640, 1609],
[1577, 1612],
[1556, 1611],
[1648, 1614],
[1649, 1613],
[1650, 1616],
[1651, 1615],
[1655, 1618],
[1564, 1617],
[1659, 1620],
[1660, 1619],
[1661, 1622],
[1662, 1621],
[1637],
[1573, 1625],
[1587, 1624],
[1641, 1575, 1627],
[1589, 1626],
[1590, 1629],
[1576, 1628],
[1644, 1592, 1631],
[1594, 1630],
[1656, 1600, 1633],
[1632],
[1604, 1635],
[1607, 1634],
[1551, 1637],
[1623, 1636],
[1588, 1639],
[1609, 1638],
[1610, 1641],
[1626, 1640],
[1643],
[1591, 1644, 1642],
[1630, 1643],
[1595, 1646],
[1596, 1645],
[1597, 1648],
[1613, 1647],
[1614, 1650],
[1615, 1649],
[1616, 1652],
[1598, 1651],
[1654],
[1563, 1655, 1653],
[1617, 1654],
[1632],
[1567, 1658],
[1602, 1659, 1657],
[1619, 1658],
[1620, 1661],
[1621, 1660],
[1622, 1663],
[1586, 1662]]
# x coord, y coord
nodeData = [
(1, 1),
(7, 1),
(10, 1),
(13, 1),
(17, 1),
(26, 1),
(28, 1),
(33, 1),
(42, 1),
(45, 1),
(76, 1),
(81, 1),
(83, 1),
(88, 1),
(92, 1),
(100, 1),
(31, 2),
(48, 2),
(50, 2),
(54, 2),
(55, 2),
(56, 2),
(59, 2),
(68, 2),
(72, 2),
(73, 2),
(1, 3),
(4, 3),
(7, 3),
(10, 3),
(15, 3),
(17, 3),
(20, 3),
(23, 3),
(36, 3),
(38, 3),
(81, 3),
(83, 3),
(86, 3),
(88, 3),
(99, 3),
(100, 3),
(11, 4),
(13, 4),
(40, 4),
(42, 4),
(45, 4),
(48, 4),
(50, 4),
(52, 4),
(55, 4),
(57, 4),
(59, 4),
(63, 4),
(68, 4),
(71, 4),
(72, 4),
(74, 4),
(76, 4),
(78, 4),
(90, 4),
(92, 4),
(94, 4),
(96, 4),
(6, 5),
(8, 5),
(16, 5),
(18, 5),
(23, 5),
(26, 5),
(28, 5),
(31, 5),
(84, 5),
(86, 5),
(11, 6),
(14, 6),
(38, 6),
(40, 6),
(44, 6),
(52, 6),
(54, 6),
(61, 6),
(63, 6),
(66, 6),
(68, 6),
(71, 6),
(74, 6),
(82, 6),
(92, 6),
(94, 6),
(96, 6),
(100, 6),
(4, 7),
(6, 7),
(18, 7),
(20, 7),
(31, 7),
(33, 7),
(44, 7),
(45, 7),
(51, 7),
(57, 7),
(58, 7),
(77, 7),
(79, 7),
(82, 7),
(84, 7),
(87, 7),
(89, 7),
(8, 8),
(10, 8),
(14, 8),
(16, 8),
(22, 8),
(24, 8),
(28, 8),
(36, 8),
(43, 8),
(75, 8),
(77, 8),
(92, 8),
(93, 8),
(96, 8),
(100, 8),
(31, 9),
(35, 9),
(54, 9),
(56, 9),
(58, 9),
(61, 9),
(64, 9),
(66, 9),
(69, 9),
(71, 9),
(74, 9),
(75, 9),
(79, 9),
(84, 9),
(89, 9),
(92, 9),
(3, 10),
(6, 10),
(10, 10),
(15, 10),
(17, 10),
(19, 10),
(22, 10),
(24, 10),
(45, 10),
(47, 10),
(49, 10),
(51, 10),
(96, 10),
(99, 10),
(26, 11),
(28, 11),
(29, 11),
(32, 11),
(35, 11),
(38, 11),
(41, 11),
(53, 11),
(56, 11),
(58, 11),
(62, 11),
(64, 11),
(66, 11),
(69, 11),
(72, 11),
(75, 11),
(77, 11),
(80, 11),
(84, 11),
(87, 11),
(88, 11),
(92, 11),
(94, 11),
(6, 12),
(8, 12),
(10, 12),
(12, 12),
(17, 12),
(19, 12),
(21, 12),
(24, 12),
(85, 12),
(87, 12),
(97, 12),
(99, 12),
(100, 12),
(1, 13),
(3, 13),
(28, 13),
(32, 13),
(41, 13),
(43, 13),
(49, 13),
(53, 13),
(55, 13),
(57, 13),
(59, 13),
(65, 13),
(69, 13),
(72, 13),
(75, 13),
(77, 13),
(88, 13),
(92, 13),
(10, 14),
(12, 14),
(15, 14),
(17, 14),
(19, 14),
(21, 14),
(33, 14),
(36, 14),
(45, 14),
(47, 14),
(78, 14),
(80, 14),
(82, 14),
(85, 14),
(94, 14),
(97, 14),
(1, 15),
(8, 15),
(21, 15),
(24, 15),
(26, 15),
(29, 15),
(57, 15),
(59, 15),
(67, 15),
(69, 15),
(72, 15),
(75, 15),
(88, 15),
(90, 15),
(10, 16),
(12, 16),
(13, 16),
(17, 16),
(19, 16),
(40, 16),
(45, 16),
(49, 16),
(55, 16),
(59, 16),
(63, 16),
(78, 16),
(79, 16),
(82, 16),
(85, 16),
(87, 16),
(96, 16),
(98, 16),
(100, 16),
(1, 17),
(6, 17),
(25, 17),
(27, 17),
(29, 17),
(33, 17),
(36, 17),
(38, 17),
(40, 17),
(65, 17),
(67, 17),
(69, 17),
(72, 17),
(75, 17),
(76, 17),
(90, 17),
(94, 17),
(12, 18),
(18, 18),
(20, 18),
(42, 18),
(44, 18),
(46, 18),
(49, 18),
(55, 18),
(58, 18),
(60, 18),
(79, 18),
(82, 18),
(87, 18),
(6, 19),
(9, 19),
(22, 19),
(25, 19),
(27, 19),
(29, 19),
(31, 19),
(36, 19),
(37, 19),
(67, 19),
(69, 19),
(71, 19),
(73, 19),
(90, 19),
(92, 19),
(2, 20),
(3, 20),
(12, 20),
(14, 20),
(16, 20),
(18, 20),
(20, 20),
(44, 20),
(46, 20),
(49, 20),
(52, 20),
(56, 20),
(58, 20),
(63, 20),
(65, 20),
(76, 20),
(78, 20),
(81, 20),
(82, 20),
(85, 20),
(94, 20),
(96, 20),
(98, 20),
(100, 20),
(9, 21),
(11, 21),
(29, 21),
(31, 21),
(34, 21),
(36, 21),
(39, 21),
(42, 21),
(60, 21),
(62, 21),
(88, 21),
(90, 21),
(7, 22),
(22, 22),
(26, 22),
(46, 22),
(52, 22),
(55, 22),
(56, 22),
(65, 22),
(67, 22),
(69, 22),
(71, 22),
(73, 22),
(76, 22),
(92, 22),
(94, 22),
(96, 22),
(99, 22),
(2, 23),
(4, 23),
(7, 23),
(8, 23),
(11, 23),
(13, 23),
(32, 23),
(34, 23),
(59, 23),
(62, 23),
(80, 23),
(82, 23),
(85, 23),
(88, 23),
(18, 24),
(20, 24),
(22, 24),
(26, 24),
(29, 24),
(36, 24),
(39, 24),
(41, 24),
(42, 24),
(49, 24),
(52, 24),
(55, 24),
(62, 24),
(64, 24),
(67, 24),
(69, 24),
(72, 24),
(74, 24),
(96, 24),
(99, 24),
(2, 25),
(4, 25),
(8, 25),
(11, 25),
(13, 25),
(32, 25),
(34, 25),
(45, 25),
(48, 25),
(57, 25),
(59, 25),
(90, 25),
(92, 25),
(94, 25),
(16, 26),
(18, 26),
(26, 26),
(29, 26),
(31, 26),
(64, 26),
(67, 26),
(70, 26),
(72, 26),
(74, 26),
(76, 26),
(78, 26),
(80, 26),
(88, 26),
(90, 26),
(5, 27),
(7, 27),
(9, 27),
(11, 27),
(22, 27),
(24, 27),
(34, 27),
(36, 27),
(39, 27),
(41, 27),
(43, 27),
(45, 27),
(48, 27),
(51, 27),
(52, 27),
(54, 27),
(57, 27),
(59, 27),
(62, 27),
(82, 27),
(85, 27),
(98, 27),
(100, 27),
(14, 28),
(16, 28),
(18, 28),
(26, 28),
(28, 28),
(31, 28),
(51, 28),
(65, 28),
(67, 28),
(70, 28),
(72, 28),
(76, 28),
(78, 28),
(80, 28),
(82, 28),
(87, 28),
(88, 28),
(90, 28),
(92, 28),
(96, 28),
(7, 29),
(9, 29),
(20, 29),
(22, 29),
(34, 29),
(36, 29),
(43, 29),
(48, 29),
(58, 29),
(62, 29),
(11, 30),
(14, 30),
(28, 30),
(31, 30),
(52, 30),
(54, 30),
(63, 30),
(65, 30),
(70, 30),
(72, 30),
(74, 30),
(78, 30),
(80, 30),
(82, 30),
(92, 30),
(96, 30),
(7, 31),
(9, 31),
(34, 31),
(37, 31),
(39, 31),
(45, 31),
(55, 31),
(57, 31),
(58, 31),
(82, 31),
(85, 31),
(89, 31),
(14, 32),
(16, 32),
(20, 32),
(24, 32),
(25, 32),
(27, 32),
(29, 32),
(31, 32),
(32, 32),
(48, 32),
(52, 32),
(59, 32),
(63, 32),
(67, 32),
(70, 32),
(71, 32),
(74, 32),
(77, 32),
(95, 32),
(98, 32),
(9, 33),
(11, 33),
(18, 33),
(35, 33),
(37, 33),
(39, 33),
(45, 33),
(47, 33),
(53, 33),
(55, 33),
(79, 33),
(89, 33),
(92, 33),
(5, 34),
(7, 34),
(13, 34),
(16, 34),
(20, 34),
(22, 34),
(25, 34),
(27, 34),
(41, 34),
(43, 34),
(57, 34),
(59, 34),
(61, 34),
(65, 34),
(68, 34),
(75, 34),
(77, 34),
(79, 34),
(82, 34),
(2, 35),
(4, 35),
(10, 35),
(31, 35),
(35, 35),
(47, 35),
(49, 35),
(51, 35),
(53, 35),
(55, 35),
(71, 35),
(73, 35),
(87, 35),
(89, 35),
(92, 35),
(13, 36),
(17, 36),
(20, 36),
(22, 36),
(37, 36),
(39, 36),
(41, 36),
(43, 36),
(45, 36),
(61, 36),
(63, 36),
(65, 36),
(68, 36),
(69, 36),
(82, 36),
(85, 36),
(94, 36),
(95, 36),
(98, 36),
(2, 37),
(4, 37),
(6, 37),
(10, 37),
(24, 37),
(26, 37),
(29, 37),
(46, 37),
(49, 37),
(53, 37),
(55, 37),
(79, 37),
(89, 37),
(91, 37),
(98, 37),
(100, 37),
(20, 38),
(22, 38),
(31, 38),
(34, 38),
(56, 38),
(63, 38),
(65, 38),
(73, 38),
(75, 38),
(77, 38),
(79, 38),
(82, 38),
(86, 38),
(2, 39),
(4, 39),
(6, 39),
(8, 39),
(10, 39),
(12, 39),
(14, 39),
(17, 39),
(24, 39),
(26, 39),
(28, 39),
(31, 39),
(34, 39),
(39, 39),
(41, 39),
(44, 39),
(46, 39),
(48, 39),
(67, 39),
(69, 39),
(91, 39),
(95, 39),
(98, 39),
(100, 39),
(20, 40),
(22, 40),
(28, 40),
(53, 40),
(56, 40),
(82, 40),
(86, 40),
(4, 41),
(8, 41),
(10, 41),
(14, 41),
(17, 41),
(24, 41),
(26, 41),
(31, 41),
(33, 41),
(37, 41),
(39, 41),
(46, 41),
(48, 41),
(50, 41),
(51, 41),
(59, 41),
(63, 41),
(75, 41),
(78, 41),
(79, 41),
(89, 41),
(97, 41),
(100, 41),
(41, 42),
(43, 42),
(66, 42),
(69, 42),
(71, 42),
(84, 42),
(86, 42),
(89, 42),
(90, 42),
(93, 42),
(94, 42),
(4, 43),
(8, 43),
(12, 43),
(14, 43),
(17, 43),
(20, 43),
(23, 43),
(24, 43),
(26, 43),
(28, 43),
(33, 43),
(37, 43),
(39, 43),
(53, 43),
(57, 43),
(59, 43),
(63, 43),
(69, 43),
(97, 43),
(100, 43),
(2, 44),
(31, 44),
(43, 44),
(46, 44),
(48, 44),
(53, 44),
(55, 44),
(64, 44),
(66, 44),
(72, 44),
(76, 44),
(80, 44),
(82, 44),
(84, 44),
(94, 44),
(95, 44),
(8, 45),
(12, 45),
(14, 45),
(17, 45),
(21, 45),
(24, 45),
(26, 45),
(28, 45),
(48, 45),
(50, 45),
(85, 45),
(87, 45),
(90, 45),
(92, 45),
(32, 46),
(34, 46),
(37, 46),
(39, 46),
(41, 46),
(44, 46),
(46, 46),
(50, 46),
(53, 46),
(55, 46),
(57, 46),
(62, 46),
(64, 46),
(70, 46),
(72, 46),
(98, 46),
(100, 46),
(2, 47),
(4, 47),
(6, 47),
(8, 47),
(10, 47),
(13, 47),
(15, 47),
(17, 47),
(19, 47),
(24, 47),
(26, 47),
(39, 47),
(47, 47),
(66, 47),
(68, 47),
(78, 47),
(80, 47),
(82, 47),
(85, 47),
(94, 47),
(96, 47),
(27, 48),
(30, 48),
(32, 48),
(34, 48),
(36, 48),
(41, 48),
(44, 48),
(57, 48),
(59, 48),
(73, 48),
(76, 48),
(87, 48),
(92, 48),
(98, 48),
(100, 48),
(8, 49),
(10, 49),
(13, 49),
(15, 49),
(47, 49),
(49, 49),
(51, 49),
(54, 49),
(62, 49),
(64, 49),
(78, 49),
(80, 49),
(82, 49),
(83, 49),
(94, 49),
(96, 49),
(16, 50),
(18, 50),
(23, 50),
(25, 50),
(27, 50),
(34, 50),
(36, 50),
(38, 50),
(41, 50),
(59, 50),
(61, 50),
(68, 50),
(2, 51),
(4, 51),
(6, 51),
(8, 51),
(10, 51),
(12, 51),
(30, 51),
(32, 51),
(44, 51),
(49, 51),
(51, 51),
(54, 51),
(57, 51),
(64, 51),
(66, 51),
(73, 51),
(78, 51),
(80, 51),
(83, 51),
(86, 51),
(90, 51),
(92, 51),
(94, 51),
(96, 51),
(98, 51),
(100, 51),
(14, 52),
(16, 52),
(18, 52),
(21, 52),
(23, 52),
(25, 52),
(27, 52),
(34, 52),
(38, 52),
(40, 52),
(41, 52),
(44, 52),
(47, 52),
(61, 52),
(62, 52),
(70, 52),
(72, 52),
(2, 53),
(4, 53),
(8, 53),
(10, 53),
(30, 53),
(32, 53),
(49, 53),
(52, 53),
(57, 53),
(61, 53),
(79, 53),
(81, 53),
(83, 53),
(86, 53),
(98, 53),
(100, 53),
(16, 54),
(20, 54),
(23, 54),
(40, 54),
(42, 54),
(54, 54),
(66, 54),
(68, 54),
(70, 54),
(72, 54),
(77, 54),
(79, 54),
(93, 54),
(95, 54),
(4, 55),
(9, 55),
(12, 55),
(14, 55),
(26, 55),
(28, 55),
(30, 55),
(33, 55),
(39, 55),
(44, 55),
(49, 55),
(60, 55),
(62, 55),
(73, 55),
(75, 55),
(87, 55),
(90, 55),
(95, 55),
(96, 55),
(16, 56),
(18, 56),
(20, 56),
(24, 56),
(50, 56),
(52, 56),
(54, 56),
(57, 56),
(64, 56),
(68, 56),
(77, 56),
(79, 56),
(84, 56),
(2, 57),
(4, 57),
(9, 57),
(12, 57),
(28, 57),
(33, 57),
(36, 57),
(39, 57),
(70, 57),
(73, 57),
(87, 57),
(89, 57),
(97, 57),
(100, 57),
(7, 58),
(13, 58),
(16, 58),
(22, 58),
(41, 58),
(42, 58),
(48, 58),
(50, 58),
(52, 58),
(57, 58),
(60, 58),
(75, 58),
(77, 58),
(2, 59),
(5, 59),
(24, 59),
(26, 59),
(28, 59),
(32, 59),
(34, 59),
(54, 59),
(56, 59),
(64, 59),
(66, 59),
(69, 59),
(71, 59),
(73, 59),
(79, 59),
(81, 59),
(89, 59),
(91, 59),
(95, 59),
(97, 59),
(7, 60),
(8, 60),
(11, 60),
(13, 60),
(16, 60),
(18, 60),
(19, 60),
(22, 60),
(36, 60),
(39, 60),
(42, 60),
(45, 60),
(48, 60),
(84, 60),
(86, 60),
(19, 61),
(28, 61),
(32, 61),
(50, 61),
(52, 61),
(54, 61),
(56, 61),
(58, 61),
(61, 61),
(62, 61),
(64, 61),
(66, 61),
(69, 61),
(71, 61),
(75, 61),
(79, 61),
(91, 61),
(93, 61),
(95, 61),
(97, 61),
(99, 61),
(1, 62),
(5, 62),
(7, 62),
(8, 62),
(11, 62),
(22, 62),
(26, 62),
(34, 62),
(37, 62),
(44, 62),
(45, 62),
(47, 62),
(67, 62),
(69, 62),
(81, 62),
(84, 62),
(87, 62),
(13, 63),
(18, 63),
(39, 63),
(41, 63),
(50, 63),
(53, 63),
(58, 63),
(60, 63),
(62, 63),
(64, 63),
(72, 63),
(75, 63),
(78, 63),
(81, 63),
(89, 63),
(93, 63),
(95, 63),
(97, 63),
(99, 63),
(100, 63),
(1, 64),
(4, 64),
(5, 64),
(8, 64),
(10, 64),
(24, 64),
(26, 64),
(29, 64),
(31, 64),
(33, 64),
(37, 64),
(67, 64),
(38, 65),
(41, 65),
(47, 65),
(50, 65),
(53, 65),
(56, 65),
(58, 65),
(60, 65),
(62, 65),
(64, 65),
(68, 65),
(70, 65),
(74, 65),
(76, 65),
(87, 65),
(89, 65),
(96, 65),
(100, 65),
(8, 66),
(10, 66),
(12, 66),
(14, 66),
(16, 66),
(18, 66),
(20, 66),
(22, 66),
(26, 66),
(29, 66),
(31, 66),
(33, 66),
(36, 66),
(79, 66),
(81, 66),
(84, 66),
(86, 66),
(90, 66),
(93, 66),
(1, 67),
(4, 67),
(6, 67),
(8, 67),
(38, 67),
(40, 67),
(43, 67),
(44, 67),
(46, 67),
(50, 67),
(52, 67),
(56, 67),
(58, 67),
(70, 67),
(72, 67),
(10, 68),
(12, 68),
(14, 68),
(16, 68),
(18, 68),
(20, 68),
(24, 68),
(28, 68),
(34, 68),
(36, 68),
(61, 68),
(64, 68),
(66, 68),
(76, 68),
(79, 68),
(81, 68),
(82, 68),
(86, 68),
(88, 68),
(90, 68),
(93, 68),
(96, 68),
(99, 68),
(1, 69),
(6, 69),
(40, 69),
(43, 69),
(52, 69),
(53, 69),
(55, 69),
(58, 69),
(61, 69),
(71, 69),
(74, 69),
(18, 70),
(20, 70),
(25, 70),
(28, 70),
(30, 70),
(32, 70),
(36, 70),
(38, 70),
(45, 70),
(51, 70),
(66, 70),
(68, 70),
(75, 70),
(77, 70),
(81, 70),
(83, 70),
(85, 70),
(88, 70),
(91, 70),
(98, 70),
(99, 70),
(3, 71),
(7, 71),
(10, 71),
(13, 71),
(23, 71),
(41, 71),
(43, 71),
(63, 71),
(69, 71),
(71, 71),
(93, 71),
(95, 71),
(99, 71),
(20, 72),
(23, 72),
(32, 72),
(34, 72),
(55, 72),
(57, 72),
(59, 72),
(61, 72),
(77, 72),
(80, 72),
(85, 72),
(87, 72),
(89, 72),
(1, 73),
(3, 73),
(6, 73),
(7, 73),
(11, 73),
(13, 73),
(28, 73),
(30, 73),
(43, 73),
(45, 73),
(47, 73),
(51, 73),
(63, 73),
(65, 73),
(67, 73),
(69, 73),
(74, 73),
(91, 73),
(93, 73),
(95, 73),
(98, 73),
(7, 74),
(9, 74),
(16, 74),
(19, 74),
(34, 74),
(36, 74),
(57, 74),
(59, 74),
(83, 74),
(87, 74),
(98, 74),
(100, 74),
(2, 75),
(3, 75),
(11, 75),
(13, 75),
(32, 75),
(34, 75),
(41, 75),
(43, 75),
(45, 75),
(62, 75),
(65, 75),
(67, 75),
(69, 75),
(71, 75),
(74, 75),
(79, 75),
(82, 75),
(91, 75),
(93, 75),
(6, 76),
(9, 76),
(28, 76),
(30, 76),
(36, 76),
(38, 76),
(47, 76),
(51, 76),
(55, 76),
(96, 76),
(98, 76),
(10, 77),
(13, 77),
(15, 77),
(20, 77),
(25, 77),
(27, 77),
(32, 77),
(34, 77),
(36, 77),
(39, 77),
(42, 77),
(53, 77),
(55, 77),
(56, 77),
(58, 77),
(60, 77),
(69, 77),
(71, 77),
(75, 77),
(77, 77),
(79, 77),
(82, 77),
(89, 77),
(1, 78),
(3, 78),
(6, 78),
(8, 78),
(13, 78),
(45, 78),
(49, 78),
(51, 78),
(62, 78),
(65, 78),
(93, 78),
(23, 79),
(25, 79),
(30, 79),
(32, 79),
(33, 79),
(67, 79),
(69, 79),
(71, 79),
(73, 79),
(75, 79),
(77, 79),
(79, 79),
(81, 79),
(89, 79),
(91, 79),
(96, 79),
(98, 79),
(1, 80),
(4, 80),
(6, 80),
(8, 80),
(17, 80),
(27, 80),
(29, 80),
(36, 80),
(39, 80),
(42, 80),
(44, 80),
(46, 80),
(49, 80),
(53, 80),
(56, 80),
(58, 80),
(83, 80),
(86, 80),
(10, 81),
(12, 81),
(20, 81),
(21, 81),
(25, 81),
(42, 81),
(50, 81),
(53, 81),
(60, 81),
(63, 81),
(69, 81),
(71, 81),
(73, 81),
(75, 81),
(91, 81),
(93, 81),
(96, 81),
(98, 81),
(15, 82),
(17, 82),
(19, 82),
(29, 82),
(32, 82),
(34, 82),
(36, 82),
(44, 82),
(46, 82),
(65, 82),
(67, 82),
(77, 82),
(79, 82),
(81, 82),
(83, 82),
(2, 83),
(4, 83),
(8, 83),
(10, 83),
(21, 83),
(25, 83),
(38, 83),
(40, 83),
(53, 83),
(56, 83),
(58, 83),
(68, 83),
(72, 83),
(76, 83),
(86, 83),
(89, 83),
(93, 83),
(96, 83),
(28, 84),
(34, 84),
(36, 84),
(44, 84),
(47, 84),
(50, 84),
(60, 84),
(63, 84),
(68, 84),
(89, 84),
(91, 84),
(8, 85),
(10, 85),
(12, 85),
(17, 85),
(20, 85),
(22, 85),
(25, 85),
(28, 85),
(30, 85),
(32, 85),
(40, 85),
(42, 85),
(53, 85),
(56, 85),
(58, 85),
(76, 85),
(78, 85),
(86, 85),
(93, 85),
(95, 85),
(98, 85),
(100, 85),
(10, 86),
(36, 86),
(38, 86),
(47, 86),
(51, 86),
(65, 86),
(67, 86),
(70, 86),
(72, 86),
(89, 86),
(92, 86),
(2, 87),
(4, 87),
(12, 87),
(14, 87),
(18, 87),
(20, 87),
(26, 87),
(28, 87),
(30, 87),
(32, 87),
(39, 87),
(42, 87),
(57, 87),
(60, 87),
(63, 87),
(74, 87),
(76, 87),
(78, 87),
(84, 87),
(87, 87),
(95, 87),
(97, 87),
(6, 88),
(8, 88),
(36, 88),
(37, 88),
(44, 88),
(46, 88),
(51, 88),
(55, 88),
(61, 88),
(63, 88),
(64, 88),
(67, 88),
(70, 88),
(80, 88),
(82, 88),
(92, 88),
(94, 88),
(2, 89),
(4, 89),
(10, 89),
(12, 89),
(14, 89),
(16, 89),
(22, 89),
(26, 89),
(28, 89),
(30, 89),
(32, 89),
(34, 89),
(48, 89),
(74, 89),
(76, 89),
(87, 89),
(89, 89),
(39, 90),
(41, 90),
(44, 90),
(46, 90),
(55, 90),
(57, 90),
(59, 90),
(61, 90),
(72, 90),
(82, 90),
(84, 90),
(90, 90),
(92, 90),
(94, 90),
(97, 90),
(10, 91),
(12, 91),
(16, 91),
(18, 91),
(21, 91),
(32, 91),
(34, 91),
(48, 91),
(50, 91),
(53, 91),
(63, 91),
(65, 91),
(66, 91),
(70, 91),
(72, 91),
(74, 91),
(77, 91),
(4, 92),
(8, 92),
(21, 92),
(23, 92),
(24, 92),
(28, 92),
(30, 92),
(44, 92),
(46, 92),
(52, 92),
(53, 92),
(55, 92),
(57, 92),
(59, 92),
(84, 92),
(86, 92),
(88, 92),
(90, 92),
(92, 92),
(100, 92),
(9, 93),
(12, 93),
(14, 93),
(15, 93),
(17, 93),
(20, 93),
(32, 93),
(34, 93),
(37, 93),
(38, 93),
(41, 93),
(43, 93),
(67, 93),
(70, 93),
(71, 93),
(80, 93),
(82, 93),
(4, 94),
(7, 94),
(25, 94),
(28, 94),
(50, 94),
(52, 94),
(55, 94),
(57, 94),
(61, 94),
(63, 94),
(76, 94),
(77, 94),
(92, 94),
(93, 94),
(95, 94),
(97, 94),
(99, 94),
(9, 95),
(11, 95),
(13, 95),
(15, 95),
(20, 95),
(23, 95),
(34, 95),
(41, 95),
(43, 95),
(46, 95),
(48, 95),
(65, 95),
(67, 95),
(72, 95),
(74, 95),
(78, 95),
(82, 95),
(84, 95),
(86, 95),
(88, 95),
(90, 95),
(4, 96),
(7, 96),
(17, 96),
(19, 96),
(25, 96),
(31, 96),
(37, 96),
(39, 96),
(59, 96),
(61, 96),
(63, 96),
(67, 96),
(68, 96),
(93, 96),
(95, 96),
(97, 96),
(99, 96),
(21, 97),
(24, 97),
(33, 97),
(35, 97),
(44, 97),
(46, 97),
(48, 97),
(50, 97),
(52, 97),
(54, 97),
(56, 97),
(70, 97),
(72, 97),
(80, 97),
(84, 97),
(88, 97),
(90, 97),
(2, 98),
(4, 98),
(8, 98),
(9, 98),
(11, 98),
(26, 98),
(29, 98),
(39, 98),
(41, 98),
(59, 98),
(61, 98),
(64, 98),
(67, 98),
(76, 98),
(78, 98),
(91, 98),
(93, 98),
(95, 98),
(97, 98),
(16, 99),
(19, 99),
(21, 99),
(31, 99),
(33, 99),
(35, 99),
(37, 99),
(46, 99),
(50, 99),
(80, 99),
(82, 99),
(2, 100),
(9, 100),
(13, 100),
(16, 100),
(24, 100),
(26, 100),
(29, 100),
(31, 100),
(41, 100),
(44, 100),
(46, 100),
(52, 100),
(54, 100),
(56, 100),
(59, 100),
(61, 100),
(64, 100),
(67, 100),
(70, 100),
(73, 100),
(74, 100),
(76, 100),
(80, 100),
(86, 100),
(88, 100),
(91, 100),
(93, 100),
(95, 100),
(97, 100),
(99, 100)] | graph_datasets/mazes/pmaze9.py |
adjList=[
[26, 1],
[28, 0],
[29, 3],
[43, 2],
[31, 5],
[69, 6, 4],
[70, 5],
[97, 8],
[45, 9, 7],
[46, 8],
[58, 11],
[36, 10],
[37, 13],
[39, 14, 12],
[61, 15, 13],
[41, 14],
[71],
[47, 18],
[48, 17],
[20],
[50, 21, 19],
[20],
[52, 23],
[54, 24, 22],
[56, 25, 23],
[24],
[190, 0, 27],
[92, 26],
[1, 29],
[2, 28],
[31],
[4, 30],
[95, 33],
[68, 32],
[116, 35],
[76, 34],
[11, 37],
[12, 36],
[73, 39],
[13, 38],
[41],
[15, 40],
[74, 43],
[3, 42],
[77, 45],
[8, 44],
[9, 47],
[17, 46],
[18, 49],
[79, 48],
[20, 51],
[101, 52, 50],
[22, 51],
[82, 54],
[84, 23, 53],
[56],
[24, 55],
[86, 58],
[10, 59, 57],
[58],
[61],
[14, 60],
[89, 63],
[90, 62],
[93, 65],
[109, 64],
[112, 67],
[94, 66],
[33, 69],
[5, 68],
[115, 6, 71],
[16, 70],
[106, 73],
[38, 72],
[42, 75],
[111, 74],
[35, 77],
[44, 76],
[98],
[49, 80],
[126, 79],
[129, 82],
[53, 81],
[131, 84],
[54, 83],
[133, 86],
[57, 85],
[105],
[120, 89],
[62, 88],
[63, 91],
[123, 90],
[27, 93],
[64, 92],
[67, 95],
[32, 94],
[124, 97],
[7, 96],
[78, 99],
[148, 100, 98],
[151, 99],
[51, 102],
[101],
[119],
[136, 105],
[87, 106, 104],
[72, 105],
[173, 108],
[138, 107],
[65, 110],
[142, 109],
[75, 112],
[66, 111],
[146],
[147, 115],
[70, 114],
[34, 117],
[195, 116],
[135, 119],
[103, 118],
[139, 88, 121],
[120],
[152, 123],
[91, 122],
[96, 125],
[158, 124],
[80, 127],
[162, 126],
[163, 129],
[81, 128],
[165, 131],
[83, 130],
[167, 133],
[85, 132],
[135],
[169, 118, 134],
[104, 137],
[172, 136],
[108, 139],
[120, 138],
[191, 141],
[177, 140],
[110, 143],
[210, 142],
[181],
[182, 146],
[113, 147, 145],
[114, 146],
[99, 149],
[217, 148],
[196, 151],
[100, 150],
[122, 153],
[188, 152],
[228, 155],
[192, 156, 154],
[155],
[193, 158],
[125, 157],
[264, 160],
[194, 159],
[197, 162],
[127, 163, 161],
[128, 162],
[165],
[130, 166, 164],
[165],
[132, 168],
[203, 167],
[135, 170],
[205, 169],
[219, 172],
[137, 171],
[186, 107, 174],
[173],
[207, 176],
[222, 175],
[141, 178],
[225, 177],
[208, 180],
[179],
[144, 182],
[145, 181],
[213, 184],
[227, 183],
[221, 186],
[173, 185],
[223, 188],
[153, 189, 187],
[256, 188],
[26, 191],
[140, 190],
[155, 193],
[157, 192],
[160, 195],
[117, 194],
[150, 197],
[161, 196],
[246, 199],
[230, 198],
[231, 201],
[266, 200],
[233, 203],
[168, 202],
[235, 205],
[170, 204],
[236, 207],
[175, 206],
[238, 179, 209],
[239, 208],
[143, 211],
[241, 210],
[242, 213],
[226, 183, 212],
[262, 215],
[263, 214],
[244, 217],
[149, 216],
[249, 219],
[171, 218],
[251, 221],
[185, 220],
[176, 223],
[187, 222],
[257, 225],
[178, 224],
[213],
[184, 228],
[154, 229, 227],
[228],
[199, 231],
[247, 200, 230],
[267, 233],
[202, 232],
[269, 235],
[270, 204, 234],
[206, 237],
[272, 236],
[208],
[274, 209, 240],
[239],
[211, 242],
[212, 241],
[265, 244],
[216, 243],
[280, 246],
[198, 245],
[231, 248],
[315, 247],
[218, 250],
[284, 251, 249],
[220, 250],
[321, 253],
[286, 252],
[255],
[324, 256, 254],
[189, 255],
[224, 258],
[287, 257],
[290, 260],
[291, 259],
[292, 262],
[214, 261],
[294, 215, 264],
[159, 263],
[243],
[201, 267],
[232, 266],
[297, 269],
[234, 268],
[235, 271],
[270],
[237, 273],
[322, 272],
[304, 239, 275],
[307, 276, 274],
[308, 275],
[333, 278],
[309, 277],
[310, 280],
[245, 281, 279],
[280],
[314, 283],
[334, 282],
[250, 285],
[320, 284],
[253],
[258, 288],
[326, 287],
[339, 290],
[259, 289],
[260, 292],
[261, 291],
[329, 294],
[263, 295, 293],
[294],
[346, 297],
[268, 296],
[348, 299],
[349, 298],
[337, 301],
[351, 300],
[355, 303],
[302],
[274, 305],
[304],
[403, 307],
[275, 306],
[276],
[278, 310],
[341, 279, 309],
[378, 312],
[342, 311],
[344, 314],
[282, 313],
[248, 316],
[345, 315],
[350, 318],
[414, 317],
[320],
[285, 321, 319],
[252, 320],
[273, 323],
[353, 322],
[255, 325],
[324],
[288, 327],
[359, 326],
[329],
[293, 328],
[362, 331],
[374, 330],
[375, 333],
[277, 332],
[283, 335],
[364, 334],
[368, 337],
[300, 336],
[357],
[289, 340],
[372, 339],
[310],
[312, 343],
[380, 344, 342],
[313, 343],
[316, 346],
[296, 345],
[384, 348],
[298, 347],
[299, 350],
[317, 349],
[301, 352],
[402, 351],
[323, 354],
[388, 353],
[302, 356],
[390, 357, 355],
[338, 358, 356],
[391, 357],
[327, 360],
[393, 359],
[394, 362],
[330, 361],
[364],
[381, 335, 363],
[415, 366],
[437, 365],
[438, 368],
[336, 367],
[404, 370],
[463, 371, 369],
[370],
[340, 373],
[406, 372],
[331, 375],
[332, 374],
[427, 377],
[376],
[311, 379],
[432, 378],
[343],
[364, 382],
[408, 381],
[409, 384],
[347, 383],
[411, 386],
[412, 385],
[460, 388],
[354, 387],
[551, 390],
[356, 389],
[358],
[421, 393],
[360, 392],
[361, 395],
[424, 394],
[429, 397],
[430, 396],
[434, 399],
[435, 398],
[417],
[459, 402],
[352, 401],
[442, 306, 404],
[369, 403],
[444, 406],
[373, 407, 405],
[446, 406],
[382, 409],
[383, 408],
[450, 411],
[385, 410],
[386, 413],
[452, 412],
[318, 415],
[365, 414],
[457, 417],
[458, 400, 416],
[532, 419],
[461, 418],
[462, 421],
[392, 420],
[464, 423],
[502, 422],
[395, 425],
[466, 424],
[491, 427],
[376, 426],
[467, 429],
[396, 428],
[397, 431],
[447, 432, 430],
[379, 431],
[476, 434],
[398, 433],
[399, 436],
[470, 435],
[455, 366, 438],
[367, 437],
[518, 440],
[600, 439],
[442],
[403, 443, 441],
[521, 442],
[405, 445],
[473, 444],
[407],
[431],
[478, 449],
[512, 448],
[410, 451],
[480, 450],
[413, 453],
[482, 452],
[483, 455],
[437, 454],
[457],
[416, 456],
[417, 459],
[401, 460, 458],
[486, 387, 459],
[419, 462],
[420, 461],
[370, 464],
[422, 463],
[489, 466],
[425, 465],
[428, 468],
[508, 467],
[495, 470],
[436, 469],
[520, 472],
[499, 471],
[445, 474],
[506, 473],
[509, 476],
[433, 475],
[511, 478],
[448, 477],
[513, 480],
[451, 479],
[515, 482],
[453, 481],
[454, 484],
[496, 483],
[486],
[460, 485],
[533, 488],
[519, 487],
[465, 490],
[523, 489],
[524, 426, 492],
[525, 491],
[528, 494],
[542, 495, 493],
[469, 494],
[484],
[581, 498],
[530, 497],
[472, 500],
[535, 499],
[536, 502],
[423, 503, 501],
[538, 502],
[539, 505],
[591, 504],
[554, 474, 507],
[506],
[468, 509],
[475, 508],
[543, 511],
[477, 510],
[449, 513],
[479, 514, 512],
[513],
[481, 516],
[548, 515],
[583, 518],
[439, 517],
[488, 520],
[471, 519],
[443],
[555, 523],
[490, 522],
[491],
[574, 492, 526],
[556, 525],
[559, 528],
[493, 527],
[549],
[498, 531],
[565, 530],
[418, 533],
[487, 532],
[566, 535],
[500, 534],
[501, 537],
[569, 536],
[503, 539],
[504, 538],
[572, 541],
[573, 540],
[494],
[510, 544],
[575, 543],
[546],
[578, 545],
[609, 548],
[610, 516, 547],
[529, 550],
[580, 549],
[389, 552],
[586, 551],
[588],
[506, 555],
[522, 554],
[526, 557],
[593, 556],
[659],
[527, 560],
[595, 559],
[672, 562],
[608, 561],
[564],
[597, 565, 563],
[531, 564],
[534, 567],
[621, 566],
[601, 569],
[537, 568],
[654, 571],
[627, 572, 570],
[540, 571],
[541, 574],
[525, 573],
[544, 576],
[606, 575],
[607, 578],
[546, 579, 577],
[633, 578],
[550, 581],
[497, 580],
[583],
[635, 517, 582],
[599],
[614, 586],
[552, 585],
[616, 588],
[618, 553, 587],
[622],
[623, 591],
[505, 590],
[630, 593],
[557, 592],
[641, 595],
[560, 594],
[611],
[564, 598],
[634, 597],
[636, 584, 600],
[440, 599],
[568, 602],
[639, 601],
[625, 604],
[626, 603],
[642, 606],
[576, 607, 605],
[577, 606],
[562, 609],
[547, 608],
[548, 611],
[596, 612, 610],
[643, 611],
[644],
[699, 585, 615],
[645, 614],
[587, 617],
[646, 616],
[647, 588, 619],
[681, 618],
[648, 621],
[567, 620],
[650, 589, 623],
[590, 622],
[640, 625],
[652, 603, 624],
[604],
[571, 628],
[668, 627],
[630],
[592, 631, 629],
[657, 630],
[633],
[579, 632],
[598, 635],
[583, 634],
[599, 637],
[667, 636],
[684, 639],
[602, 638],
[624],
[594, 642],
[605, 641],
[612, 644],
[674, 613, 643],
[615, 646],
[617, 645],
[618],
[620, 649],
[683, 648],
[622, 651],
[687, 650],
[700, 625, 653],
[689, 652],
[570, 655],
[691, 654],
[657],
[631, 658, 656],
[724, 659, 657],
[558, 658],
[661],
[695, 660],
[663],
[761, 664, 662],
[663],
[675],
[697, 667],
[637, 666],
[628, 669],
[701, 668],
[707, 671],
[696, 672, 670],
[561, 671],
[712],
[644, 675],
[665, 676, 674],
[727, 675],
[678],
[713, 677],
[747, 680],
[715, 679],
[716, 619, 682],
[717, 681],
[649, 684],
[638, 683],
[686],
[720, 685],
[651, 688],
[722, 687],
[653, 690],
[731, 691, 689],
[655, 690],
[704],
[694],
[775, 695, 693],
[661, 694],
[671],
[666, 698],
[745, 697],
[614],
[652],
[669, 702],
[735, 701],
[723],
[737, 692, 705],
[738, 704],
[741, 707],
[759, 670, 706],
[743, 709],
[777, 708],
[762, 711],
[763, 712, 710],
[673, 711],
[765, 678, 714],
[713],
[680, 716],
[681, 715],
[682, 718],
[753, 717],
[839, 720],
[755, 686, 719],
[756, 722],
[688, 721],
[703, 724],
[736, 658, 723],
[764, 726],
[778, 725],
[676, 728],
[779, 727],
[769, 730],
[770, 729],
[690, 732],
[757, 733, 731],
[732],
[773, 735],
[702, 734],
[724, 737],
[704, 736],
[705, 739],
[774, 738],
[790, 741],
[706, 740],
[851, 743],
[708, 742],
[780, 745],
[698, 744],
[810, 747],
[679, 746],
[812, 749],
[782, 748],
[783, 751],
[784, 750],
[785, 753],
[718, 754, 752],
[753],
[720, 756],
[721, 755],
[732],
[786],
[707, 760],
[809, 759],
[663, 762],
[710, 761],
[794, 711, 764],
[725, 763],
[713, 766],
[797, 765],
[802, 768],
[816, 769, 767],
[729, 768],
[730, 771],
[804, 770],
[806, 773],
[734, 772],
[739, 775],
[807, 694, 774],
[825, 777],
[709, 776],
[726, 779],
[831, 728, 778],
[744, 781],
[835, 780],
[749, 783],
[750, 782],
[751, 785],
[752, 784],
[848, 758, 787],
[819, 786],
[820, 789],
[821, 788],
[740, 791],
[823, 790],
[826, 793],
[827, 792],
[763, 795],
[794],
[832, 797],
[833, 766, 796],
[837, 799],
[838, 798],
[840],
[841, 802],
[842, 767, 801],
[843, 804],
[771, 803],
[844, 806],
[772, 805],
[775, 808],
[849, 807],
[760],
[746, 811],
[854, 810],
[748, 813],
[855, 812],
[856, 815],
[814],
[768, 817],
[858, 816],
[847],
[787, 820],
[788, 819],
[789, 822],
[861, 821],
[791, 824],
[875, 823],
[776, 826],
[792, 825],
[793, 828],
[865, 827],
[866, 830],
[899, 829],
[779, 832],
[796, 831],
[797],
[867, 835],
[781, 834],
[886, 837],
[798, 836],
[799, 839],
[719, 838],
[871, 800, 841],
[801, 840],
[802],
[803, 844],
[805, 843],
[872, 846],
[845],
[892, 818, 848],
[786, 847],
[862, 808, 850],
[849],
[742, 852],
[878, 851],
[915, 854],
[811, 853],
[813, 856],
[814, 855],
[889, 858],
[817, 857],
[893, 860],
[907, 859],
[822, 862],
[849, 861],
[880, 864],
[957, 863],
[828, 866],
[829, 865],
[834, 868],
[928, 867],
[902],
[904, 871],
[840, 870],
[845, 873],
[934, 872],
[908],
[824, 876],
[911, 875],
[923, 878],
[852, 877],
[880],
[913, 863, 879],
[994, 882],
[900, 881],
[916, 884],
[917, 883],
[918, 886],
[836, 885],
[945],
[919, 889],
[857, 888],
[920, 891],
[922, 890],
[847, 893],
[859, 892],
[939, 895],
[986, 894],
[924, 897],
[940, 896],
[925, 899],
[830, 898],
[960, 882, 901],
[900],
[869, 903],
[967, 902],
[870, 905],
[944, 904],
[936, 907],
[860, 906],
[949, 874, 909],
[938, 908],
[951, 911],
[876, 910],
[941, 913],
[880, 912],
[975],
[942, 853, 916],
[883, 915],
[884, 918],
[885, 917],
[888, 920],
[890, 919],
[970, 922],
[891, 921],
[877, 924],
[896, 923],
[898, 926],
[958, 925],
[961, 928],
[868, 927],
[962],
[965, 931],
[966, 930],
[969],
[934],
[972, 873, 935, 933],
[974, 934],
[906, 937],
[981, 936],
[909, 939],
[894, 938],
[897, 941],
[912, 940],
[915, 943],
[999, 942],
[905, 945],
[887, 946, 944],
[978, 945],
[979, 948],
[1005, 947],
[982, 908, 950],
[983, 949],
[910, 952],
[988, 953, 951],
[989, 952],
[990, 955],
[954],
[992, 957],
[864, 956],
[926, 959],
[993, 958],
[900, 961],
[927, 960],
[1000, 929, 963],
[962],
[1002, 965],
[1015, 930, 964],
[931, 967],
[903, 968, 966],
[977, 969, 967],
[932, 968],
[921, 971],
[1017, 970],
[934],
[1008, 974],
[935, 973],
[1013, 914, 976],
[975],
[968],
[946, 979],
[947, 978],
[1019, 981],
[937, 980],
[949],
[950, 984],
[1021, 983],
[986],
[895, 985],
[1024, 988],
[952, 987],
[1011, 953, 990],
[954, 989],
[1026, 992],
[956, 991],
[959, 994],
[881, 993],
[1031, 996],
[1032, 995],
[1033],
[1035, 999],
[943, 998],
[962, 1001],
[1038, 1002, 1000],
[964, 1001],
[1072, 1004],
[1041, 1003],
[948, 1006],
[1045, 1005],
[1091, 1008],
[973, 1009, 1007],
[1049, 1008],
[1046, 1011],
[989, 1010],
[1028],
[1080, 975, 1014],
[1061, 1013],
[965, 1016],
[1070, 1015],
[971, 1018],
[1048, 1017],
[980, 1020],
[1051, 1019],
[984, 1022],
[1054, 1021],
[1055, 1024],
[987, 1023],
[1098, 1026],
[991, 1025],
[1028],
[1079, 1012, 1027],
[1062, 1030],
[1083, 1031, 1029],
[995, 1030],
[996, 1033],
[997, 1034, 1032],
[1064, 1033],
[998, 1036],
[1085, 1037, 1035],
[1036],
[1001, 1039],
[1066, 1038],
[1105, 1041],
[1073, 1004, 1040],
[1074, 1043],
[1075, 1042],
[1076, 1045],
[1006, 1044],
[1010],
[1088, 1048],
[1018, 1047],
[1009, 1050],
[1093, 1049],
[1020, 1052],
[1095, 1051],
[1096, 1054],
[1022, 1053],
[1023, 1056],
[1110, 1055],
[1144, 1058],
[1097, 1057],
[1132, 1060],
[1112, 1059],
[1014, 1062],
[1029, 1061],
[1120, 1064],
[1034, 1063],
[1087, 1066],
[1039, 1065],
[1100, 1068],
[1101, 1067],
[1102, 1070],
[1016, 1069],
[1104, 1072],
[1003, 1071],
[1041, 1074],
[1042, 1073],
[1043],
[1044, 1077],
[1108, 1076],
[1113, 1079],
[1114, 1028, 1078],
[1013, 1081],
[1116, 1080],
[1118, 1083],
[1030, 1082],
[1122],
[1036, 1086],
[1123, 1085],
[1065],
[1047, 1089],
[1124, 1088],
[1091],
[1007, 1092, 1090],
[1091],
[1050, 1094],
[1126, 1093],
[1052, 1096],
[1129, 1053, 1095],
[1058, 1098],
[1025, 1097],
[1156, 1100],
[1067, 1099],
[1068, 1102],
[1203, 1069, 1101],
[1133, 1104],
[1071, 1103],
[1040, 1106],
[1136, 1105],
[1170, 1108],
[1077, 1107],
[1130],
[1056, 1111],
[1143, 1110],
[1060, 1113],
[1078, 1112],
[1147, 1079, 1115],
[1114],
[1081, 1117],
[1150, 1116],
[1082, 1119],
[1164, 1118],
[1063, 1121],
[1153, 1120],
[1180, 1084, 1123],
[1086, 1122],
[1089, 1125],
[1160, 1124],
[1094, 1127],
[1254, 1128, 1126],
[1171, 1127],
[1096, 1130],
[1174, 1109, 1129],
[1163, 1132],
[1059, 1131],
[1103, 1134],
[1167, 1133],
[1247, 1136],
[1106, 1135],
[1187, 1138],
[1169, 1137],
[1140],
[1237, 1139],
[1189, 1142],
[1191, 1141],
[1111, 1144],
[1057, 1143],
[1146],
[1175, 1145],
[1114, 1148],
[1209, 1147],
[1177],
[1117, 1151],
[1197, 1150],
[1153],
[1166, 1121, 1152],
[1181, 1155],
[1183, 1154],
[1099, 1157],
[1185, 1156],
[1168],
[1219, 1160],
[1125, 1159],
[1192],
[1195, 1163],
[1131, 1162],
[1119, 1165],
[1199, 1164],
[1153],
[1134, 1168],
[1277, 1158, 1167],
[1138, 1170],
[1107, 1169],
[1128, 1172],
[1207, 1171],
[1208, 1174],
[1130, 1173],
[1262, 1146, 1176],
[1175],
[1149, 1178],
[1210, 1179, 1177],
[1265, 1178],
[1122, 1181],
[1154, 1180],
[1183],
[1201, 1155, 1182],
[1215, 1185],
[1157, 1184],
[1234, 1187],
[1137, 1186],
[1220, 1189],
[1141, 1188],
[1238, 1191],
[1142, 1190],
[1161, 1193],
[1223, 1194, 1192],
[1224, 1193],
[1162, 1196],
[1227, 1195],
[1151, 1198],
[1231, 1197],
[1165, 1200],
[1211, 1199],
[1183, 1202],
[1233, 1201],
[1102, 1204],
[1203],
[1218, 1206],
[1236, 1205],
[1172, 1208],
[1173, 1207],
[1148, 1210],
[1178, 1209],
[1242, 1200, 1212],
[1395, 1211],
[1214],
[1267, 1213],
[1184, 1216],
[1244, 1215],
[1218],
[1250, 1205, 1217],
[1159, 1220],
[1188, 1221, 1219],
[1271, 1220],
[1274, 1223],
[1193, 1222],
[1194, 1225],
[1259, 1224],
[1260, 1227],
[1196, 1226],
[1263, 1229],
[1264, 1228],
[1291, 1231],
[1198, 1230],
[1268, 1233],
[1202, 1232],
[1186, 1235],
[1279, 1234],
[1251, 1206, 1237],
[1140, 1236],
[1190, 1239],
[1273, 1238],
[1255],
[1292, 1242],
[1211, 1241],
[1312, 1244],
[1270, 1216, 1243],
[1330, 1246],
[1314, 1245],
[1135, 1248],
[1299, 1247],
[1280, 1250],
[1218, 1249],
[1236],
[1302, 1253],
[1303, 1252],
[1127, 1255],
[1240, 1256, 1254],
[1308, 1255],
[1309, 1258],
[1320, 1257],
[1225, 1260],
[1226, 1259],
[1262],
[1175, 1263, 1261],
[1228, 1262],
[1229, 1265],
[1179, 1264],
[1294, 1267],
[1214, 1266],
[1232, 1269],
[1297, 1268],
[1244],
[1221],
[1306, 1273],
[1239, 1272],
[1222, 1275],
[1339, 1274],
[1327],
[1168, 1278],
[1316, 1277],
[1235, 1280],
[1334, 1249, 1281, 1279],
[1280],
[1340, 1283],
[1322, 1282],
[1323, 1285],
[1324, 1284],
[1325, 1287],
[1341, 1286],
[1342, 1289],
[1343, 1288],
[1360, 1291],
[1230, 1290],
[1241, 1293],
[1329, 1292],
[1266, 1295],
[1346, 1294],
[1429, 1297],
[1269, 1296],
[1331],
[1248, 1300],
[1333, 1299],
[1336, 1302],
[1252, 1301],
[1317, 1253, 1304],
[1337, 1303],
[1338, 1306],
[1272, 1305],
[1319],
[1256, 1309],
[1257, 1308],
[1344, 1311],
[1359, 1310],
[1348, 1243, 1313],
[1376, 1312],
[1246, 1315],
[1349, 1316, 1314],
[1278, 1315],
[1303],
[1368, 1319],
[1353, 1307, 1318],
[1258, 1321],
[1370, 1320],
[1283, 1323],
[1284, 1322],
[1285, 1325],
[1286, 1324],
[1373, 1327],
[1276, 1326],
[1362, 1329],
[1293, 1328],
[1245, 1331],
[1377, 1298, 1332, 1330],
[1331],
[1300, 1334],
[1280, 1333],
[1364, 1336],
[1301, 1335],
[1304, 1338],
[1305, 1337],
[1401, 1275, 1340],
[1282, 1339],
[1287, 1342],
[1288, 1341],
[1289, 1344],
[1310, 1343],
[1407, 1346],
[1295, 1345],
[1374, 1348],
[1312, 1347],
[1315, 1350],
[1380, 1349],
[1398, 1352],
[1384, 1351],
[1319, 1354],
[1387, 1353],
[1388],
[1371, 1357],
[1404, 1358, 1356],
[1389, 1357],
[1311, 1360],
[1372, 1290, 1359],
[1392, 1362],
[1328, 1361],
[1381],
[1335, 1365],
[1397, 1364],
[1433, 1367],
[1399, 1368, 1366],
[1318, 1367],
[1420, 1370],
[1321, 1369],
[1356],
[1360, 1373],
[1326, 1372],
[1347, 1375],
[1396, 1374],
[1313, 1377],
[1331, 1376],
[1412, 1379],
[1452, 1378],
[1350, 1381],
[1363, 1382, 1380],
[1415, 1381],
[1416],
[1352, 1385],
[1418, 1384],
[1387],
[1354, 1388, 1386],
[1355, 1387],
[1358, 1390],
[1424, 1391, 1389],
[1390],
[1361, 1393],
[1427, 1392],
[1395],
[1514, 1212, 1394],
[1375],
[1365, 1398],
[1351, 1397],
[1367, 1400],
[1435, 1399],
[1339, 1402],
[1440, 1401],
[1441, 1404],
[1357, 1403],
[1462, 1406],
[1444, 1405],
[1345, 1408],
[1447, 1407],
[1449, 1410],
[1450, 1409],
[1481, 1412],
[1378, 1411],
[1453, 1414],
[1454, 1413],
[1382, 1416],
[1456, 1383, 1415],
[1463, 1418],
[1385, 1417],
[1468, 1420],
[1369, 1419],
[1438],
[1459],
[1460, 1424],
[1390, 1423],
[1473, 1426],
[1461, 1425],
[1393, 1428],
[1477, 1427],
[1296, 1430],
[1496, 1429],
[1432],
[1523, 1431],
[1366, 1434],
[1466, 1433],
[1400, 1436],
[1467, 1435],
[1470, 1438],
[1488, 1421, 1439, 1437],
[1438],
[1402, 1441],
[1403, 1440],
[1530, 1443],
[1472, 1442],
[1406, 1445],
[1476, 1444],
[1604, 1447],
[1408, 1446],
[1478, 1449],
[1409, 1448],
[1410, 1451],
[1480, 1450],
[1379, 1453],
[1413, 1452],
[1414, 1455],
[1501, 1454],
[1416, 1457],
[1484, 1456],
[1485],
[1493, 1422, 1460],
[1423, 1459],
[1426, 1462],
[1405, 1461],
[1417, 1464],
[1525, 1463],
[1502, 1466],
[1434, 1465],
[1436, 1468],
[1419, 1467],
[1508, 1470],
[1437, 1469],
[1492],
[1443, 1473],
[1425, 1472],
[1512, 1475],
[1513, 1474],
[1445, 1477],
[1428, 1476],
[1448, 1479],
[1516, 1478],
[1451, 1481],
[1411, 1480],
[1497],
[1521, 1484],
[1457, 1483],
[1458, 1486],
[1536, 1485],
[1505],
[1438, 1489],
[1560, 1490, 1488],
[1489],
[1528, 1492],
[1471, 1493, 1491],
[1459, 1494, 1492],
[1543, 1493],
[1532, 1496],
[1430, 1495],
[1482, 1498],
[1554, 1499, 1497],
[1498],
[1535, 1501],
[1455, 1500],
[1465, 1503],
[1558, 1502],
[1537, 1505],
[1487, 1506, 1504],
[1538, 1505],
[1539, 1508],
[1469, 1507],
[1566, 1510],
[1509],
[1568, 1512],
[1474, 1511],
[1475, 1514],
[1395, 1513],
[1549, 1516],
[1479, 1515],
[1518],
[1552, 1519, 1517],
[1572, 1520, 1518],
[1553, 1519],
[1483, 1522],
[1555, 1523, 1521],
[1432, 1524, 1522],
[1523],
[1464, 1526],
[1557, 1525],
[1561, 1528],
[1491, 1529, 1527],
[1528],
[1442, 1531],
[1565, 1530],
[1495, 1533],
[1571, 1532],
[1574, 1535],
[1500, 1534],
[1486, 1537],
[1504, 1536],
[1506, 1539],
[1507, 1540, 1538],
[1579, 1539],
[1580],
[1543],
[1494, 1542],
[1545],
[1583, 1544],
[1584, 1547],
[1585, 1548, 1546],
[1547],
[1515, 1550],
[1608, 1549],
[1636, 1552],
[1518, 1551],
[1520, 1554],
[1498, 1553],
[1522],
[1612, 1557],
[1526, 1556],
[1503, 1559],
[1593, 1558],
[1489, 1561],
[1581, 1527, 1560],
[1599, 1563],
[1654, 1562],
[1618, 1565],
[1531, 1564],
[1601, 1509, 1567],
[1657, 1566],
[1511, 1569],
[1603, 1568],
[1605, 1571],
[1533, 1570],
[1519, 1573],
[1624, 1572],
[1534, 1575],
[1626, 1574],
[1629, 1577],
[1611, 1576],
[1579],
[1540, 1580, 1578],
[1541, 1579],
[1561, 1582],
[1581],
[1545, 1584],
[1546, 1583],
[1547, 1586],
[1663, 1585],
[1625, 1588],
[1638, 1587],
[1627, 1590],
[1628, 1589],
[1643],
[1630, 1593],
[1559, 1592],
[1631, 1595],
[1645, 1594],
[1646, 1597],
[1647, 1596],
[1652, 1599],
[1562, 1598],
[1632, 1601],
[1566, 1600],
[1658, 1603],
[1569, 1602],
[1634, 1446, 1605],
[1570, 1604],
[1607],
[1635, 1608, 1606],
[1550, 1607],
[1639, 1610],
[1640, 1609],
[1577, 1612],
[1556, 1611],
[1648, 1614],
[1649, 1613],
[1650, 1616],
[1651, 1615],
[1655, 1618],
[1564, 1617],
[1659, 1620],
[1660, 1619],
[1661, 1622],
[1662, 1621],
[1637],
[1573, 1625],
[1587, 1624],
[1641, 1575, 1627],
[1589, 1626],
[1590, 1629],
[1576, 1628],
[1644, 1592, 1631],
[1594, 1630],
[1656, 1600, 1633],
[1632],
[1604, 1635],
[1607, 1634],
[1551, 1637],
[1623, 1636],
[1588, 1639],
[1609, 1638],
[1610, 1641],
[1626, 1640],
[1643],
[1591, 1644, 1642],
[1630, 1643],
[1595, 1646],
[1596, 1645],
[1597, 1648],
[1613, 1647],
[1614, 1650],
[1615, 1649],
[1616, 1652],
[1598, 1651],
[1654],
[1563, 1655, 1653],
[1617, 1654],
[1632],
[1567, 1658],
[1602, 1659, 1657],
[1619, 1658],
[1620, 1661],
[1621, 1660],
[1622, 1663],
[1586, 1662]]
# x coord, y coord
nodeData = [
(1, 1),
(7, 1),
(10, 1),
(13, 1),
(17, 1),
(26, 1),
(28, 1),
(33, 1),
(42, 1),
(45, 1),
(76, 1),
(81, 1),
(83, 1),
(88, 1),
(92, 1),
(100, 1),
(31, 2),
(48, 2),
(50, 2),
(54, 2),
(55, 2),
(56, 2),
(59, 2),
(68, 2),
(72, 2),
(73, 2),
(1, 3),
(4, 3),
(7, 3),
(10, 3),
(15, 3),
(17, 3),
(20, 3),
(23, 3),
(36, 3),
(38, 3),
(81, 3),
(83, 3),
(86, 3),
(88, 3),
(99, 3),
(100, 3),
(11, 4),
(13, 4),
(40, 4),
(42, 4),
(45, 4),
(48, 4),
(50, 4),
(52, 4),
(55, 4),
(57, 4),
(59, 4),
(63, 4),
(68, 4),
(71, 4),
(72, 4),
(74, 4),
(76, 4),
(78, 4),
(90, 4),
(92, 4),
(94, 4),
(96, 4),
(6, 5),
(8, 5),
(16, 5),
(18, 5),
(23, 5),
(26, 5),
(28, 5),
(31, 5),
(84, 5),
(86, 5),
(11, 6),
(14, 6),
(38, 6),
(40, 6),
(44, 6),
(52, 6),
(54, 6),
(61, 6),
(63, 6),
(66, 6),
(68, 6),
(71, 6),
(74, 6),
(82, 6),
(92, 6),
(94, 6),
(96, 6),
(100, 6),
(4, 7),
(6, 7),
(18, 7),
(20, 7),
(31, 7),
(33, 7),
(44, 7),
(45, 7),
(51, 7),
(57, 7),
(58, 7),
(77, 7),
(79, 7),
(82, 7),
(84, 7),
(87, 7),
(89, 7),
(8, 8),
(10, 8),
(14, 8),
(16, 8),
(22, 8),
(24, 8),
(28, 8),
(36, 8),
(43, 8),
(75, 8),
(77, 8),
(92, 8),
(93, 8),
(96, 8),
(100, 8),
(31, 9),
(35, 9),
(54, 9),
(56, 9),
(58, 9),
(61, 9),
(64, 9),
(66, 9),
(69, 9),
(71, 9),
(74, 9),
(75, 9),
(79, 9),
(84, 9),
(89, 9),
(92, 9),
(3, 10),
(6, 10),
(10, 10),
(15, 10),
(17, 10),
(19, 10),
(22, 10),
(24, 10),
(45, 10),
(47, 10),
(49, 10),
(51, 10),
(96, 10),
(99, 10),
(26, 11),
(28, 11),
(29, 11),
(32, 11),
(35, 11),
(38, 11),
(41, 11),
(53, 11),
(56, 11),
(58, 11),
(62, 11),
(64, 11),
(66, 11),
(69, 11),
(72, 11),
(75, 11),
(77, 11),
(80, 11),
(84, 11),
(87, 11),
(88, 11),
(92, 11),
(94, 11),
(6, 12),
(8, 12),
(10, 12),
(12, 12),
(17, 12),
(19, 12),
(21, 12),
(24, 12),
(85, 12),
(87, 12),
(97, 12),
(99, 12),
(100, 12),
(1, 13),
(3, 13),
(28, 13),
(32, 13),
(41, 13),
(43, 13),
(49, 13),
(53, 13),
(55, 13),
(57, 13),
(59, 13),
(65, 13),
(69, 13),
(72, 13),
(75, 13),
(77, 13),
(88, 13),
(92, 13),
(10, 14),
(12, 14),
(15, 14),
(17, 14),
(19, 14),
(21, 14),
(33, 14),
(36, 14),
(45, 14),
(47, 14),
(78, 14),
(80, 14),
(82, 14),
(85, 14),
(94, 14),
(97, 14),
(1, 15),
(8, 15),
(21, 15),
(24, 15),
(26, 15),
(29, 15),
(57, 15),
(59, 15),
(67, 15),
(69, 15),
(72, 15),
(75, 15),
(88, 15),
(90, 15),
(10, 16),
(12, 16),
(13, 16),
(17, 16),
(19, 16),
(40, 16),
(45, 16),
(49, 16),
(55, 16),
(59, 16),
(63, 16),
(78, 16),
(79, 16),
(82, 16),
(85, 16),
(87, 16),
(96, 16),
(98, 16),
(100, 16),
(1, 17),
(6, 17),
(25, 17),
(27, 17),
(29, 17),
(33, 17),
(36, 17),
(38, 17),
(40, 17),
(65, 17),
(67, 17),
(69, 17),
(72, 17),
(75, 17),
(76, 17),
(90, 17),
(94, 17),
(12, 18),
(18, 18),
(20, 18),
(42, 18),
(44, 18),
(46, 18),
(49, 18),
(55, 18),
(58, 18),
(60, 18),
(79, 18),
(82, 18),
(87, 18),
(6, 19),
(9, 19),
(22, 19),
(25, 19),
(27, 19),
(29, 19),
(31, 19),
(36, 19),
(37, 19),
(67, 19),
(69, 19),
(71, 19),
(73, 19),
(90, 19),
(92, 19),
(2, 20),
(3, 20),
(12, 20),
(14, 20),
(16, 20),
(18, 20),
(20, 20),
(44, 20),
(46, 20),
(49, 20),
(52, 20),
(56, 20),
(58, 20),
(63, 20),
(65, 20),
(76, 20),
(78, 20),
(81, 20),
(82, 20),
(85, 20),
(94, 20),
(96, 20),
(98, 20),
(100, 20),
(9, 21),
(11, 21),
(29, 21),
(31, 21),
(34, 21),
(36, 21),
(39, 21),
(42, 21),
(60, 21),
(62, 21),
(88, 21),
(90, 21),
(7, 22),
(22, 22),
(26, 22),
(46, 22),
(52, 22),
(55, 22),
(56, 22),
(65, 22),
(67, 22),
(69, 22),
(71, 22),
(73, 22),
(76, 22),
(92, 22),
(94, 22),
(96, 22),
(99, 22),
(2, 23),
(4, 23),
(7, 23),
(8, 23),
(11, 23),
(13, 23),
(32, 23),
(34, 23),
(59, 23),
(62, 23),
(80, 23),
(82, 23),
(85, 23),
(88, 23),
(18, 24),
(20, 24),
(22, 24),
(26, 24),
(29, 24),
(36, 24),
(39, 24),
(41, 24),
(42, 24),
(49, 24),
(52, 24),
(55, 24),
(62, 24),
(64, 24),
(67, 24),
(69, 24),
(72, 24),
(74, 24),
(96, 24),
(99, 24),
(2, 25),
(4, 25),
(8, 25),
(11, 25),
(13, 25),
(32, 25),
(34, 25),
(45, 25),
(48, 25),
(57, 25),
(59, 25),
(90, 25),
(92, 25),
(94, 25),
(16, 26),
(18, 26),
(26, 26),
(29, 26),
(31, 26),
(64, 26),
(67, 26),
(70, 26),
(72, 26),
(74, 26),
(76, 26),
(78, 26),
(80, 26),
(88, 26),
(90, 26),
(5, 27),
(7, 27),
(9, 27),
(11, 27),
(22, 27),
(24, 27),
(34, 27),
(36, 27),
(39, 27),
(41, 27),
(43, 27),
(45, 27),
(48, 27),
(51, 27),
(52, 27),
(54, 27),
(57, 27),
(59, 27),
(62, 27),
(82, 27),
(85, 27),
(98, 27),
(100, 27),
(14, 28),
(16, 28),
(18, 28),
(26, 28),
(28, 28),
(31, 28),
(51, 28),
(65, 28),
(67, 28),
(70, 28),
(72, 28),
(76, 28),
(78, 28),
(80, 28),
(82, 28),
(87, 28),
(88, 28),
(90, 28),
(92, 28),
(96, 28),
(7, 29),
(9, 29),
(20, 29),
(22, 29),
(34, 29),
(36, 29),
(43, 29),
(48, 29),
(58, 29),
(62, 29),
(11, 30),
(14, 30),
(28, 30),
(31, 30),
(52, 30),
(54, 30),
(63, 30),
(65, 30),
(70, 30),
(72, 30),
(74, 30),
(78, 30),
(80, 30),
(82, 30),
(92, 30),
(96, 30),
(7, 31),
(9, 31),
(34, 31),
(37, 31),
(39, 31),
(45, 31),
(55, 31),
(57, 31),
(58, 31),
(82, 31),
(85, 31),
(89, 31),
(14, 32),
(16, 32),
(20, 32),
(24, 32),
(25, 32),
(27, 32),
(29, 32),
(31, 32),
(32, 32),
(48, 32),
(52, 32),
(59, 32),
(63, 32),
(67, 32),
(70, 32),
(71, 32),
(74, 32),
(77, 32),
(95, 32),
(98, 32),
(9, 33),
(11, 33),
(18, 33),
(35, 33),
(37, 33),
(39, 33),
(45, 33),
(47, 33),
(53, 33),
(55, 33),
(79, 33),
(89, 33),
(92, 33),
(5, 34),
(7, 34),
(13, 34),
(16, 34),
(20, 34),
(22, 34),
(25, 34),
(27, 34),
(41, 34),
(43, 34),
(57, 34),
(59, 34),
(61, 34),
(65, 34),
(68, 34),
(75, 34),
(77, 34),
(79, 34),
(82, 34),
(2, 35),
(4, 35),
(10, 35),
(31, 35),
(35, 35),
(47, 35),
(49, 35),
(51, 35),
(53, 35),
(55, 35),
(71, 35),
(73, 35),
(87, 35),
(89, 35),
(92, 35),
(13, 36),
(17, 36),
(20, 36),
(22, 36),
(37, 36),
(39, 36),
(41, 36),
(43, 36),
(45, 36),
(61, 36),
(63, 36),
(65, 36),
(68, 36),
(69, 36),
(82, 36),
(85, 36),
(94, 36),
(95, 36),
(98, 36),
(2, 37),
(4, 37),
(6, 37),
(10, 37),
(24, 37),
(26, 37),
(29, 37),
(46, 37),
(49, 37),
(53, 37),
(55, 37),
(79, 37),
(89, 37),
(91, 37),
(98, 37),
(100, 37),
(20, 38),
(22, 38),
(31, 38),
(34, 38),
(56, 38),
(63, 38),
(65, 38),
(73, 38),
(75, 38),
(77, 38),
(79, 38),
(82, 38),
(86, 38),
(2, 39),
(4, 39),
(6, 39),
(8, 39),
(10, 39),
(12, 39),
(14, 39),
(17, 39),
(24, 39),
(26, 39),
(28, 39),
(31, 39),
(34, 39),
(39, 39),
(41, 39),
(44, 39),
(46, 39),
(48, 39),
(67, 39),
(69, 39),
(91, 39),
(95, 39),
(98, 39),
(100, 39),
(20, 40),
(22, 40),
(28, 40),
(53, 40),
(56, 40),
(82, 40),
(86, 40),
(4, 41),
(8, 41),
(10, 41),
(14, 41),
(17, 41),
(24, 41),
(26, 41),
(31, 41),
(33, 41),
(37, 41),
(39, 41),
(46, 41),
(48, 41),
(50, 41),
(51, 41),
(59, 41),
(63, 41),
(75, 41),
(78, 41),
(79, 41),
(89, 41),
(97, 41),
(100, 41),
(41, 42),
(43, 42),
(66, 42),
(69, 42),
(71, 42),
(84, 42),
(86, 42),
(89, 42),
(90, 42),
(93, 42),
(94, 42),
(4, 43),
(8, 43),
(12, 43),
(14, 43),
(17, 43),
(20, 43),
(23, 43),
(24, 43),
(26, 43),
(28, 43),
(33, 43),
(37, 43),
(39, 43),
(53, 43),
(57, 43),
(59, 43),
(63, 43),
(69, 43),
(97, 43),
(100, 43),
(2, 44),
(31, 44),
(43, 44),
(46, 44),
(48, 44),
(53, 44),
(55, 44),
(64, 44),
(66, 44),
(72, 44),
(76, 44),
(80, 44),
(82, 44),
(84, 44),
(94, 44),
(95, 44),
(8, 45),
(12, 45),
(14, 45),
(17, 45),
(21, 45),
(24, 45),
(26, 45),
(28, 45),
(48, 45),
(50, 45),
(85, 45),
(87, 45),
(90, 45),
(92, 45),
(32, 46),
(34, 46),
(37, 46),
(39, 46),
(41, 46),
(44, 46),
(46, 46),
(50, 46),
(53, 46),
(55, 46),
(57, 46),
(62, 46),
(64, 46),
(70, 46),
(72, 46),
(98, 46),
(100, 46),
(2, 47),
(4, 47),
(6, 47),
(8, 47),
(10, 47),
(13, 47),
(15, 47),
(17, 47),
(19, 47),
(24, 47),
(26, 47),
(39, 47),
(47, 47),
(66, 47),
(68, 47),
(78, 47),
(80, 47),
(82, 47),
(85, 47),
(94, 47),
(96, 47),
(27, 48),
(30, 48),
(32, 48),
(34, 48),
(36, 48),
(41, 48),
(44, 48),
(57, 48),
(59, 48),
(73, 48),
(76, 48),
(87, 48),
(92, 48),
(98, 48),
(100, 48),
(8, 49),
(10, 49),
(13, 49),
(15, 49),
(47, 49),
(49, 49),
(51, 49),
(54, 49),
(62, 49),
(64, 49),
(78, 49),
(80, 49),
(82, 49),
(83, 49),
(94, 49),
(96, 49),
(16, 50),
(18, 50),
(23, 50),
(25, 50),
(27, 50),
(34, 50),
(36, 50),
(38, 50),
(41, 50),
(59, 50),
(61, 50),
(68, 50),
(2, 51),
(4, 51),
(6, 51),
(8, 51),
(10, 51),
(12, 51),
(30, 51),
(32, 51),
(44, 51),
(49, 51),
(51, 51),
(54, 51),
(57, 51),
(64, 51),
(66, 51),
(73, 51),
(78, 51),
(80, 51),
(83, 51),
(86, 51),
(90, 51),
(92, 51),
(94, 51),
(96, 51),
(98, 51),
(100, 51),
(14, 52),
(16, 52),
(18, 52),
(21, 52),
(23, 52),
(25, 52),
(27, 52),
(34, 52),
(38, 52),
(40, 52),
(41, 52),
(44, 52),
(47, 52),
(61, 52),
(62, 52),
(70, 52),
(72, 52),
(2, 53),
(4, 53),
(8, 53),
(10, 53),
(30, 53),
(32, 53),
(49, 53),
(52, 53),
(57, 53),
(61, 53),
(79, 53),
(81, 53),
(83, 53),
(86, 53),
(98, 53),
(100, 53),
(16, 54),
(20, 54),
(23, 54),
(40, 54),
(42, 54),
(54, 54),
(66, 54),
(68, 54),
(70, 54),
(72, 54),
(77, 54),
(79, 54),
(93, 54),
(95, 54),
(4, 55),
(9, 55),
(12, 55),
(14, 55),
(26, 55),
(28, 55),
(30, 55),
(33, 55),
(39, 55),
(44, 55),
(49, 55),
(60, 55),
(62, 55),
(73, 55),
(75, 55),
(87, 55),
(90, 55),
(95, 55),
(96, 55),
(16, 56),
(18, 56),
(20, 56),
(24, 56),
(50, 56),
(52, 56),
(54, 56),
(57, 56),
(64, 56),
(68, 56),
(77, 56),
(79, 56),
(84, 56),
(2, 57),
(4, 57),
(9, 57),
(12, 57),
(28, 57),
(33, 57),
(36, 57),
(39, 57),
(70, 57),
(73, 57),
(87, 57),
(89, 57),
(97, 57),
(100, 57),
(7, 58),
(13, 58),
(16, 58),
(22, 58),
(41, 58),
(42, 58),
(48, 58),
(50, 58),
(52, 58),
(57, 58),
(60, 58),
(75, 58),
(77, 58),
(2, 59),
(5, 59),
(24, 59),
(26, 59),
(28, 59),
(32, 59),
(34, 59),
(54, 59),
(56, 59),
(64, 59),
(66, 59),
(69, 59),
(71, 59),
(73, 59),
(79, 59),
(81, 59),
(89, 59),
(91, 59),
(95, 59),
(97, 59),
(7, 60),
(8, 60),
(11, 60),
(13, 60),
(16, 60),
(18, 60),
(19, 60),
(22, 60),
(36, 60),
(39, 60),
(42, 60),
(45, 60),
(48, 60),
(84, 60),
(86, 60),
(19, 61),
(28, 61),
(32, 61),
(50, 61),
(52, 61),
(54, 61),
(56, 61),
(58, 61),
(61, 61),
(62, 61),
(64, 61),
(66, 61),
(69, 61),
(71, 61),
(75, 61),
(79, 61),
(91, 61),
(93, 61),
(95, 61),
(97, 61),
(99, 61),
(1, 62),
(5, 62),
(7, 62),
(8, 62),
(11, 62),
(22, 62),
(26, 62),
(34, 62),
(37, 62),
(44, 62),
(45, 62),
(47, 62),
(67, 62),
(69, 62),
(81, 62),
(84, 62),
(87, 62),
(13, 63),
(18, 63),
(39, 63),
(41, 63),
(50, 63),
(53, 63),
(58, 63),
(60, 63),
(62, 63),
(64, 63),
(72, 63),
(75, 63),
(78, 63),
(81, 63),
(89, 63),
(93, 63),
(95, 63),
(97, 63),
(99, 63),
(100, 63),
(1, 64),
(4, 64),
(5, 64),
(8, 64),
(10, 64),
(24, 64),
(26, 64),
(29, 64),
(31, 64),
(33, 64),
(37, 64),
(67, 64),
(38, 65),
(41, 65),
(47, 65),
(50, 65),
(53, 65),
(56, 65),
(58, 65),
(60, 65),
(62, 65),
(64, 65),
(68, 65),
(70, 65),
(74, 65),
(76, 65),
(87, 65),
(89, 65),
(96, 65),
(100, 65),
(8, 66),
(10, 66),
(12, 66),
(14, 66),
(16, 66),
(18, 66),
(20, 66),
(22, 66),
(26, 66),
(29, 66),
(31, 66),
(33, 66),
(36, 66),
(79, 66),
(81, 66),
(84, 66),
(86, 66),
(90, 66),
(93, 66),
(1, 67),
(4, 67),
(6, 67),
(8, 67),
(38, 67),
(40, 67),
(43, 67),
(44, 67),
(46, 67),
(50, 67),
(52, 67),
(56, 67),
(58, 67),
(70, 67),
(72, 67),
(10, 68),
(12, 68),
(14, 68),
(16, 68),
(18, 68),
(20, 68),
(24, 68),
(28, 68),
(34, 68),
(36, 68),
(61, 68),
(64, 68),
(66, 68),
(76, 68),
(79, 68),
(81, 68),
(82, 68),
(86, 68),
(88, 68),
(90, 68),
(93, 68),
(96, 68),
(99, 68),
(1, 69),
(6, 69),
(40, 69),
(43, 69),
(52, 69),
(53, 69),
(55, 69),
(58, 69),
(61, 69),
(71, 69),
(74, 69),
(18, 70),
(20, 70),
(25, 70),
(28, 70),
(30, 70),
(32, 70),
(36, 70),
(38, 70),
(45, 70),
(51, 70),
(66, 70),
(68, 70),
(75, 70),
(77, 70),
(81, 70),
(83, 70),
(85, 70),
(88, 70),
(91, 70),
(98, 70),
(99, 70),
(3, 71),
(7, 71),
(10, 71),
(13, 71),
(23, 71),
(41, 71),
(43, 71),
(63, 71),
(69, 71),
(71, 71),
(93, 71),
(95, 71),
(99, 71),
(20, 72),
(23, 72),
(32, 72),
(34, 72),
(55, 72),
(57, 72),
(59, 72),
(61, 72),
(77, 72),
(80, 72),
(85, 72),
(87, 72),
(89, 72),
(1, 73),
(3, 73),
(6, 73),
(7, 73),
(11, 73),
(13, 73),
(28, 73),
(30, 73),
(43, 73),
(45, 73),
(47, 73),
(51, 73),
(63, 73),
(65, 73),
(67, 73),
(69, 73),
(74, 73),
(91, 73),
(93, 73),
(95, 73),
(98, 73),
(7, 74),
(9, 74),
(16, 74),
(19, 74),
(34, 74),
(36, 74),
(57, 74),
(59, 74),
(83, 74),
(87, 74),
(98, 74),
(100, 74),
(2, 75),
(3, 75),
(11, 75),
(13, 75),
(32, 75),
(34, 75),
(41, 75),
(43, 75),
(45, 75),
(62, 75),
(65, 75),
(67, 75),
(69, 75),
(71, 75),
(74, 75),
(79, 75),
(82, 75),
(91, 75),
(93, 75),
(6, 76),
(9, 76),
(28, 76),
(30, 76),
(36, 76),
(38, 76),
(47, 76),
(51, 76),
(55, 76),
(96, 76),
(98, 76),
(10, 77),
(13, 77),
(15, 77),
(20, 77),
(25, 77),
(27, 77),
(32, 77),
(34, 77),
(36, 77),
(39, 77),
(42, 77),
(53, 77),
(55, 77),
(56, 77),
(58, 77),
(60, 77),
(69, 77),
(71, 77),
(75, 77),
(77, 77),
(79, 77),
(82, 77),
(89, 77),
(1, 78),
(3, 78),
(6, 78),
(8, 78),
(13, 78),
(45, 78),
(49, 78),
(51, 78),
(62, 78),
(65, 78),
(93, 78),
(23, 79),
(25, 79),
(30, 79),
(32, 79),
(33, 79),
(67, 79),
(69, 79),
(71, 79),
(73, 79),
(75, 79),
(77, 79),
(79, 79),
(81, 79),
(89, 79),
(91, 79),
(96, 79),
(98, 79),
(1, 80),
(4, 80),
(6, 80),
(8, 80),
(17, 80),
(27, 80),
(29, 80),
(36, 80),
(39, 80),
(42, 80),
(44, 80),
(46, 80),
(49, 80),
(53, 80),
(56, 80),
(58, 80),
(83, 80),
(86, 80),
(10, 81),
(12, 81),
(20, 81),
(21, 81),
(25, 81),
(42, 81),
(50, 81),
(53, 81),
(60, 81),
(63, 81),
(69, 81),
(71, 81),
(73, 81),
(75, 81),
(91, 81),
(93, 81),
(96, 81),
(98, 81),
(15, 82),
(17, 82),
(19, 82),
(29, 82),
(32, 82),
(34, 82),
(36, 82),
(44, 82),
(46, 82),
(65, 82),
(67, 82),
(77, 82),
(79, 82),
(81, 82),
(83, 82),
(2, 83),
(4, 83),
(8, 83),
(10, 83),
(21, 83),
(25, 83),
(38, 83),
(40, 83),
(53, 83),
(56, 83),
(58, 83),
(68, 83),
(72, 83),
(76, 83),
(86, 83),
(89, 83),
(93, 83),
(96, 83),
(28, 84),
(34, 84),
(36, 84),
(44, 84),
(47, 84),
(50, 84),
(60, 84),
(63, 84),
(68, 84),
(89, 84),
(91, 84),
(8, 85),
(10, 85),
(12, 85),
(17, 85),
(20, 85),
(22, 85),
(25, 85),
(28, 85),
(30, 85),
(32, 85),
(40, 85),
(42, 85),
(53, 85),
(56, 85),
(58, 85),
(76, 85),
(78, 85),
(86, 85),
(93, 85),
(95, 85),
(98, 85),
(100, 85),
(10, 86),
(36, 86),
(38, 86),
(47, 86),
(51, 86),
(65, 86),
(67, 86),
(70, 86),
(72, 86),
(89, 86),
(92, 86),
(2, 87),
(4, 87),
(12, 87),
(14, 87),
(18, 87),
(20, 87),
(26, 87),
(28, 87),
(30, 87),
(32, 87),
(39, 87),
(42, 87),
(57, 87),
(60, 87),
(63, 87),
(74, 87),
(76, 87),
(78, 87),
(84, 87),
(87, 87),
(95, 87),
(97, 87),
(6, 88),
(8, 88),
(36, 88),
(37, 88),
(44, 88),
(46, 88),
(51, 88),
(55, 88),
(61, 88),
(63, 88),
(64, 88),
(67, 88),
(70, 88),
(80, 88),
(82, 88),
(92, 88),
(94, 88),
(2, 89),
(4, 89),
(10, 89),
(12, 89),
(14, 89),
(16, 89),
(22, 89),
(26, 89),
(28, 89),
(30, 89),
(32, 89),
(34, 89),
(48, 89),
(74, 89),
(76, 89),
(87, 89),
(89, 89),
(39, 90),
(41, 90),
(44, 90),
(46, 90),
(55, 90),
(57, 90),
(59, 90),
(61, 90),
(72, 90),
(82, 90),
(84, 90),
(90, 90),
(92, 90),
(94, 90),
(97, 90),
(10, 91),
(12, 91),
(16, 91),
(18, 91),
(21, 91),
(32, 91),
(34, 91),
(48, 91),
(50, 91),
(53, 91),
(63, 91),
(65, 91),
(66, 91),
(70, 91),
(72, 91),
(74, 91),
(77, 91),
(4, 92),
(8, 92),
(21, 92),
(23, 92),
(24, 92),
(28, 92),
(30, 92),
(44, 92),
(46, 92),
(52, 92),
(53, 92),
(55, 92),
(57, 92),
(59, 92),
(84, 92),
(86, 92),
(88, 92),
(90, 92),
(92, 92),
(100, 92),
(9, 93),
(12, 93),
(14, 93),
(15, 93),
(17, 93),
(20, 93),
(32, 93),
(34, 93),
(37, 93),
(38, 93),
(41, 93),
(43, 93),
(67, 93),
(70, 93),
(71, 93),
(80, 93),
(82, 93),
(4, 94),
(7, 94),
(25, 94),
(28, 94),
(50, 94),
(52, 94),
(55, 94),
(57, 94),
(61, 94),
(63, 94),
(76, 94),
(77, 94),
(92, 94),
(93, 94),
(95, 94),
(97, 94),
(99, 94),
(9, 95),
(11, 95),
(13, 95),
(15, 95),
(20, 95),
(23, 95),
(34, 95),
(41, 95),
(43, 95),
(46, 95),
(48, 95),
(65, 95),
(67, 95),
(72, 95),
(74, 95),
(78, 95),
(82, 95),
(84, 95),
(86, 95),
(88, 95),
(90, 95),
(4, 96),
(7, 96),
(17, 96),
(19, 96),
(25, 96),
(31, 96),
(37, 96),
(39, 96),
(59, 96),
(61, 96),
(63, 96),
(67, 96),
(68, 96),
(93, 96),
(95, 96),
(97, 96),
(99, 96),
(21, 97),
(24, 97),
(33, 97),
(35, 97),
(44, 97),
(46, 97),
(48, 97),
(50, 97),
(52, 97),
(54, 97),
(56, 97),
(70, 97),
(72, 97),
(80, 97),
(84, 97),
(88, 97),
(90, 97),
(2, 98),
(4, 98),
(8, 98),
(9, 98),
(11, 98),
(26, 98),
(29, 98),
(39, 98),
(41, 98),
(59, 98),
(61, 98),
(64, 98),
(67, 98),
(76, 98),
(78, 98),
(91, 98),
(93, 98),
(95, 98),
(97, 98),
(16, 99),
(19, 99),
(21, 99),
(31, 99),
(33, 99),
(35, 99),
(37, 99),
(46, 99),
(50, 99),
(80, 99),
(82, 99),
(2, 100),
(9, 100),
(13, 100),
(16, 100),
(24, 100),
(26, 100),
(29, 100),
(31, 100),
(41, 100),
(44, 100),
(46, 100),
(52, 100),
(54, 100),
(56, 100),
(59, 100),
(61, 100),
(64, 100),
(67, 100),
(70, 100),
(73, 100),
(74, 100),
(76, 100),
(80, 100),
(86, 100),
(88, 100),
(91, 100),
(93, 100),
(95, 100),
(97, 100),
(99, 100)] | 0.202404 | 0.651819 |
from collections import namedtuple
from random import choice
from string import ascii_letters
import pytest
from azure_databricks_api.exceptions import ResourceAlreadyExists, IoError, ResourceDoesNotExist, InvalidParameterValue
from tests.utils import create_client
client = create_client()
DBFS_TEMP_DIR = '/tmp'
SMALL_DBFS = '{temp_dir}/small.txt'.format(temp_dir=DBFS_TEMP_DIR)
LARGE_DBFS = '{temp_dir}/large.txt'.format(temp_dir=DBFS_TEMP_DIR)
DBFS_MOVED = '{temp_dir}/small-moved.txt'.format(temp_dir=DBFS_TEMP_DIR)
@pytest.fixture(scope="module")
def temp_files(tmp_path_factory):
temp_path = tmp_path_factory.mktemp('./tmp')
large_file_path = temp_path.with_name("large.txt")
small_file_path = temp_path.with_name("small.txt")
small_file_path.write_text("This is a test file used for DBFS Testing")
large_file_path.write_text(str([choice(ascii_letters) for _ in range(1048576)]))
FileList = namedtuple("FileList", ['small', 'large', "dir"])
return FileList(small=small_file_path, large=large_file_path, dir=temp_path)
def test_mkdir():
client.dbfs.mkdirs(DBFS_TEMP_DIR)
assert DBFS_TEMP_DIR in [file.path for file in client.dbfs.list('/')]
def test_upload_file_to_dbfs(temp_files):
client.dbfs.upload_file_by_path(file_path=temp_files.small, dbfs_path=SMALL_DBFS)
assert SMALL_DBFS in [file.path for file in client.dbfs.list('/tmp')]
def test_upload_file_not_exists(temp_files):
with pytest.raises(FileNotFoundError):
client.dbfs.upload_file_by_path(file_path="THISFILESHOULDNOTEXISTSANYWHERE.txt", dbfs_path=SMALL_DBFS)
def test_upload_file_dbfs_exists(temp_files):
with pytest.raises(ResourceAlreadyExists):
client.dbfs.upload_file_by_path(file_path=temp_files.small, dbfs_path=SMALL_DBFS)
def test_upload_files_raises_must_be_absolute(temp_files):
with pytest.raises(InvalidParameterValue):
client.dbfs.upload_file_by_path(file_path=temp_files.small, dbfs_path='raiseanerror.txt', overwrite=True)
def test_download_files_raises_must_be_absolute(temp_files):
with pytest.raises(InvalidParameterValue):
client.dbfs.download_file(local_path="thisisanytestfile.txt", dbfs_path='raiseanerror.txt')
def test_download_file(temp_files):
new_small_path = temp_files.dir.with_name("small_2.txt")
client.dbfs.download_file(local_path=new_small_path, dbfs_path=SMALL_DBFS)
assert new_small_path.read_bytes() == temp_files.small.read_bytes()
def test_download_dbfs_file_not_found(temp_files):
with pytest.raises(ResourceDoesNotExist):
new_large_path = temp_files.dir.with_name("large_2.txt")
client.dbfs.download_file(dbfs_path=LARGE_DBFS, local_path=new_large_path)
def test_download_local_file_already_exists_no_overwrite(temp_files):
new_small_path = temp_files.dir.with_name("small_2.txt")
with pytest.raises(FileExistsError):
client.dbfs.download_file(local_path=new_small_path, dbfs_path=SMALL_DBFS, overwrite=False)
def test_download_overwrite_local_file(temp_files):
new_small_path = temp_files.dir.with_name("small_2.txt")
client.dbfs.download_file(local_path=new_small_path, dbfs_path=SMALL_DBFS, overwrite=True)
def test_upload_large_file(temp_files):
client.dbfs.upload_file_by_path(file_path=temp_files.large, dbfs_path=LARGE_DBFS)
def test_upload_existing_without_overwrite(temp_files):
with pytest.raises(ResourceAlreadyExists):
client.dbfs.upload_file_by_path(file_path=temp_files.small, dbfs_path=SMALL_DBFS, overwrite=False)
def test_list():
file_list = client.dbfs.list(DBFS_TEMP_DIR)
assert SMALL_DBFS in [file.path for file in file_list]
def test_list_not_exists():
with pytest.raises(ResourceDoesNotExist):
client.dbfs.list("/thisfoldershouldneverexist")
def test_get_status_is_dir():
status = client.dbfs.get_status(DBFS_TEMP_DIR)
assert status.is_dir
def test_get_status_is_file():
status = client.dbfs.get_status(SMALL_DBFS)
assert not status.is_dir
def test_get_status_resource_not_found():
with pytest.raises(ResourceDoesNotExist):
client.dbfs.get_status("/THISPATHSHOULDNOTEXISTANYWHERE")
def test_get_status_must_be_absolute():
with pytest.raises(InvalidParameterValue):
client.dbfs.get_status("THISPATHSHOULDNOTEXISTANYWHERE")
def test_move():
client.dbfs.move(SMALL_DBFS, DBFS_MOVED)
def test_move_file_not_found():
with pytest.raises(ResourceDoesNotExist):
client.dbfs.move(SMALL_DBFS, DBFS_MOVED)
def test_move_already_exists():
with pytest.raises(ResourceAlreadyExists):
client.dbfs.move(LARGE_DBFS, DBFS_MOVED)
def test_nonrecursive_delete():
client.dbfs.delete(SMALL_DBFS, recursive=False)
def test_nonrecursive_delete_fails():
with pytest.raises(IoError):
client.dbfs.delete(DBFS_TEMP_DIR, recursive=False, not_exists_ok=False)
def test_recursive_delete():
client.dbfs.delete(DBFS_TEMP_DIR, recursive=True, not_exists_ok=False)
assert DBFS_TEMP_DIR not in [file.path for file in client.dbfs.list('/')] | tests/test_dbfs.py | from collections import namedtuple
from random import choice
from string import ascii_letters
import pytest
from azure_databricks_api.exceptions import ResourceAlreadyExists, IoError, ResourceDoesNotExist, InvalidParameterValue
from tests.utils import create_client
client = create_client()
DBFS_TEMP_DIR = '/tmp'
SMALL_DBFS = '{temp_dir}/small.txt'.format(temp_dir=DBFS_TEMP_DIR)
LARGE_DBFS = '{temp_dir}/large.txt'.format(temp_dir=DBFS_TEMP_DIR)
DBFS_MOVED = '{temp_dir}/small-moved.txt'.format(temp_dir=DBFS_TEMP_DIR)
@pytest.fixture(scope="module")
def temp_files(tmp_path_factory):
temp_path = tmp_path_factory.mktemp('./tmp')
large_file_path = temp_path.with_name("large.txt")
small_file_path = temp_path.with_name("small.txt")
small_file_path.write_text("This is a test file used for DBFS Testing")
large_file_path.write_text(str([choice(ascii_letters) for _ in range(1048576)]))
FileList = namedtuple("FileList", ['small', 'large', "dir"])
return FileList(small=small_file_path, large=large_file_path, dir=temp_path)
def test_mkdir():
client.dbfs.mkdirs(DBFS_TEMP_DIR)
assert DBFS_TEMP_DIR in [file.path for file in client.dbfs.list('/')]
def test_upload_file_to_dbfs(temp_files):
client.dbfs.upload_file_by_path(file_path=temp_files.small, dbfs_path=SMALL_DBFS)
assert SMALL_DBFS in [file.path for file in client.dbfs.list('/tmp')]
def test_upload_file_not_exists(temp_files):
with pytest.raises(FileNotFoundError):
client.dbfs.upload_file_by_path(file_path="THISFILESHOULDNOTEXISTSANYWHERE.txt", dbfs_path=SMALL_DBFS)
def test_upload_file_dbfs_exists(temp_files):
with pytest.raises(ResourceAlreadyExists):
client.dbfs.upload_file_by_path(file_path=temp_files.small, dbfs_path=SMALL_DBFS)
def test_upload_files_raises_must_be_absolute(temp_files):
with pytest.raises(InvalidParameterValue):
client.dbfs.upload_file_by_path(file_path=temp_files.small, dbfs_path='raiseanerror.txt', overwrite=True)
def test_download_files_raises_must_be_absolute(temp_files):
with pytest.raises(InvalidParameterValue):
client.dbfs.download_file(local_path="thisisanytestfile.txt", dbfs_path='raiseanerror.txt')
def test_download_file(temp_files):
new_small_path = temp_files.dir.with_name("small_2.txt")
client.dbfs.download_file(local_path=new_small_path, dbfs_path=SMALL_DBFS)
assert new_small_path.read_bytes() == temp_files.small.read_bytes()
def test_download_dbfs_file_not_found(temp_files):
with pytest.raises(ResourceDoesNotExist):
new_large_path = temp_files.dir.with_name("large_2.txt")
client.dbfs.download_file(dbfs_path=LARGE_DBFS, local_path=new_large_path)
def test_download_local_file_already_exists_no_overwrite(temp_files):
new_small_path = temp_files.dir.with_name("small_2.txt")
with pytest.raises(FileExistsError):
client.dbfs.download_file(local_path=new_small_path, dbfs_path=SMALL_DBFS, overwrite=False)
def test_download_overwrite_local_file(temp_files):
new_small_path = temp_files.dir.with_name("small_2.txt")
client.dbfs.download_file(local_path=new_small_path, dbfs_path=SMALL_DBFS, overwrite=True)
def test_upload_large_file(temp_files):
client.dbfs.upload_file_by_path(file_path=temp_files.large, dbfs_path=LARGE_DBFS)
def test_upload_existing_without_overwrite(temp_files):
with pytest.raises(ResourceAlreadyExists):
client.dbfs.upload_file_by_path(file_path=temp_files.small, dbfs_path=SMALL_DBFS, overwrite=False)
def test_list():
    """The temp directory listing contains the uploaded small file."""
    listed_paths = [entry.path for entry in client.dbfs.list(DBFS_TEMP_DIR)]
    assert SMALL_DBFS in listed_paths
def test_list_not_exists():
    """Listing a nonexistent directory raises ResourceDoesNotExist."""
    missing_dir = "/thisfoldershouldneverexist"
    with pytest.raises(ResourceDoesNotExist):
        client.dbfs.list(missing_dir)
def test_get_status_is_dir():
    """get_status reports the temp directory as a directory."""
    assert client.dbfs.get_status(DBFS_TEMP_DIR).is_dir
def test_get_status_is_file():
    """get_status reports the uploaded small file as a non-directory."""
    assert not client.dbfs.get_status(SMALL_DBFS).is_dir
def test_get_status_resource_not_found():
    """get_status on a missing absolute path raises ResourceDoesNotExist."""
    missing_path = "/THISPATHSHOULDNOTEXISTANYWHERE"
    with pytest.raises(ResourceDoesNotExist):
        client.dbfs.get_status(missing_path)
def test_get_status_must_be_absolute():
    """get_status rejects a relative path with InvalidParameterValue."""
    relative_path = "THISPATHSHOULDNOTEXISTANYWHERE"
    with pytest.raises(InvalidParameterValue):
        client.dbfs.get_status(relative_path)
def test_move():
    """Moving the small file to a new DBFS location succeeds."""
    client.dbfs.move(SMALL_DBFS, DBFS_MOVED)
def test_move_file_not_found():
    """Moving a now-missing source raises ResourceDoesNotExist.

    NOTE(review): relies on test_move having already relocated SMALL_DBFS
    earlier in the module run — these tests appear order-dependent.
    """
    with pytest.raises(ResourceDoesNotExist):
        client.dbfs.move(SMALL_DBFS, DBFS_MOVED)
def test_move_already_exists():
    """Moving onto an occupied destination raises ResourceAlreadyExists."""
    with pytest.raises(ResourceAlreadyExists):
        client.dbfs.move(LARGE_DBFS, DBFS_MOVED)
def test_nonrecursive_delete():
    """A single file can be deleted without recursive=True."""
    client.dbfs.delete(SMALL_DBFS, recursive=False)
def test_nonrecursive_delete_fails():
    """Deleting a non-empty directory without recursion raises IoError."""
    with pytest.raises(IoError):
        client.dbfs.delete(DBFS_TEMP_DIR, recursive=False, not_exists_ok=False)
def test_recursive_delete():
    """Recursively deleting the temp dir removes it from the DBFS root listing.

    Fix: stripped dataset-join residue (`| 0.391871 | 0.21794 |`) that was
    fused onto the assert line and made it a syntax error.
    """
    client.dbfs.delete(DBFS_TEMP_DIR, recursive=True, not_exists_ok=False)
    assert DBFS_TEMP_DIR not in [file.path for file in client.dbfs.list('/')]
from unittest import TestCase
import numpy as np
import dnn_misc
import numpy.testing as test
class TestRelu(TestCase):
    """Numerical checks of dnn_misc.relu against precomputed ground-truth arrays.

    Fix: stripped dataset-join residue fused onto the last line (it made the
    file a syntax error) and restored conventional indentation, which had been
    flattened to column 0.
    """

    def test_forward(self):
        """Compare relu.forward and relu.backward outputs to hard-coded values."""
        # Fixed seed so the generated example data matches the ground-truth
        # arrays embedded below.
        np.random.seed(123)
        X = np.random.normal(0, 1, (5, 3))
        check_relu = dnn_misc.relu()

        # --- forward pass ---
        hat_X = check_relu.forward(X)
        ground_hat_X = np.array([[0., 0.99734545, 0.2829785],
                                 [0., 0., 1.65143654],
                                 [0., 0., 1.26593626],
                                 [0., 0., 0.],
                                 [1.49138963, 0., 0.]])
        if (hat_X.shape[0] != 5) or (hat_X.shape[1] != 3):
            print('Wrong output dimension of relu.forward')
        else:
            # NOTE(review): the denominator contains exact zeros offset only by
            # 1e-8, so a mismatch at a zero entry produces a huge ratio; kept
            # as-is to preserve the checker's original diagnostics.
            max_relative_diff = np.amax(np.abs(ground_hat_X - hat_X) / (ground_hat_X + 1e-8))
            print('max_diff_output: ' + str(max_relative_diff))
            if max_relative_diff >= 1e-7:
                print('relu.forward might be wrong')
            else:
                print('relu.forward should be correct')
        print('##########################')

        # --- backward pass (reuses X and the same relu instance) ---
        grad_hat_X = np.random.normal(0, 1, (5, 3))
        grad_X = check_relu.backward(X, grad_hat_X)
        ground_grad_X = np.array([[-0., 0.92746243, -0.17363568],
                                  [0., 0., -0.87953634],
                                  [0., -0., -1.72766949],
                                  [-0., 0., 0.],
                                  [-0.01183049, 0., 0.]])
        if (grad_X.shape[0] != 5) or (grad_X.shape[1] != 3):
            print('Wrong output dimension of relu.backward')
        else:
            max_relative_diff_X = np.amax(np.abs(ground_grad_X - grad_X) / (ground_grad_X + 1e-8))
            print('max_diff_grad_X: ' + str(max_relative_diff_X))
            if (max_relative_diff_X >= 1e-7):
                print('relu.backward might be wrong')
            else:
                print('relu.backward should be correct')
        print('##########################')

    def test_backward(self):
        # Deliberate placeholder: backward is currently only exercised inside
        # test_forward; this stub fails until a dedicated test is written.
        self.fail()
import numpy as np
import dnn_misc
import numpy.testing as test
class TestRelu(TestCase):
    """Duplicate copy of the relu checker (dataset dump repeats the file).

    Fix: stripped dataset-join residue (`| 0.345105 | 0.553143 |`) fused onto
    the final line and restored conventional indentation, which had been
    flattened to column 0.
    """

    def test_forward(self):
        """Compare relu.forward and relu.backward outputs to hard-coded values."""
        # Fixed seed: the ground-truth arrays below correspond to this stream.
        np.random.seed(123)
        X = np.random.normal(0, 1, (5, 3))
        check_relu = dnn_misc.relu()

        # --- forward pass ---
        hat_X = check_relu.forward(X)
        ground_hat_X = np.array([[0., 0.99734545, 0.2829785],
                                 [0., 0., 1.65143654],
                                 [0., 0., 1.26593626],
                                 [0., 0., 0.],
                                 [1.49138963, 0., 0.]])
        if (hat_X.shape[0] != 5) or (hat_X.shape[1] != 3):
            print('Wrong output dimension of relu.forward')
        else:
            # NOTE(review): relative diff divides by entries that are exactly
            # zero (offset 1e-8); kept to match the original checker output.
            max_relative_diff = np.amax(np.abs(ground_hat_X - hat_X) / (ground_hat_X + 1e-8))
            print('max_diff_output: ' + str(max_relative_diff))
            if max_relative_diff >= 1e-7:
                print('relu.forward might be wrong')
            else:
                print('relu.forward should be correct')
        print('##########################')

        # --- backward pass ---
        grad_hat_X = np.random.normal(0, 1, (5, 3))
        grad_X = check_relu.backward(X, grad_hat_X)
        ground_grad_X = np.array([[-0., 0.92746243, -0.17363568],
                                  [0., 0., -0.87953634],
                                  [0., -0., -1.72766949],
                                  [-0., 0., 0.],
                                  [-0.01183049, 0., 0.]])
        if (grad_X.shape[0] != 5) or (grad_X.shape[1] != 3):
            print('Wrong output dimension of relu.backward')
        else:
            max_relative_diff_X = np.amax(np.abs(ground_grad_X - grad_X) / (ground_grad_X + 1e-8))
            print('max_diff_grad_X: ' + str(max_relative_diff_X))
            if (max_relative_diff_X >= 1e-7):
                print('relu.backward might be wrong')
            else:
                print('relu.backward should be correct')
        print('##########################')

    def test_backward(self):
        # Deliberate placeholder until a standalone backward test exists.
        self.fail()