hexsha stringlengths 40 40 | size int64 1 1.03M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 239 | max_stars_repo_name stringlengths 5 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 239 | max_issues_repo_name stringlengths 5 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 239 | max_forks_repo_name stringlengths 5 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.03M | avg_line_length float64 1 958k | max_line_length int64 1 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
acf3b2879f4673dd05a5e996125ea5100ccfd3d8 | 4,806 | py | Python | blaze/compute/pytables.py | jreback/blaze | 85c39335cac4ef7f2921a7f621bc13525880fc44 | [
"BSD-3-Clause"
] | 1 | 2015-05-17T23:17:12.000Z | 2015-05-17T23:17:12.000Z | blaze/compute/pytables.py | jreback/blaze | 85c39335cac4ef7f2921a7f621bc13525880fc44 | [
"BSD-3-Clause"
] | null | null | null | blaze/compute/pytables.py | jreback/blaze | 85c39335cac4ef7f2921a7f621bc13525880fc44 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import absolute_import, division, print_function
from functools import partial
import numpy as np
import tables as tb
from datashape import Record, from_numpy, datetime_, date_
from blaze.expr import (Selection, Head, Field, Broadcast, Projection,
Symbol, Sort, Reduction, count, symbol, Slice, Expr,
nelements)
from blaze.compatibility import basestring, map
from ..dispatch import dispatch
__all__ = ['drop', 'create_index']
@dispatch(tb.Table)
def discover(t):
    """Datashape of a PyTables Table: fixed length * record of column shapes."""
    return t.shape[0] * Record([[col, discover(getattr(t.cols, col))]
                                for col in t.colnames])
@dispatch(tb.Column)
def discover(c):
    """Datashape of a single PyTables column.

    PyTables stores datetimes as ``time64`` and dates as ``time32``; map
    those storage types back to datashape's ``datetime_`` / ``date_``.
    """
    dshape = from_numpy(c.shape, c.dtype)
    return {'time64': datetime_, 'time32': date_}.get(c.type,
                                                      dshape.subshape[1])
@dispatch(tb.Table)
def drop(t):
    """Remove the table from its HDF5 file."""
    t.remove()
@dispatch(tb.Table, basestring)
def create_index(t, column, name=None, **kwargs):
    """Create an index on one named column of ``t`` (delegates to Column)."""
    create_index(getattr(t.cols, column), **kwargs)
@dispatch(tb.Table, list)
def create_index(t, columns, name=None, **kwargs):
    """Create indexes on several columns, validating that they all exist."""
    if not all(map(partial(hasattr, t.cols), columns)):
        raise ValueError('table %s does not have all passed in columns %s' %
                         (t, columns))
    for column in columns:
        create_index(t, column, **kwargs)
@dispatch(tb.Column)
def create_index(c, optlevel=9, kind='full', name=None, **kwargs):
    """Create the actual PyTables index on a Column object."""
    c.create_index(optlevel=optlevel, kind=kind, **kwargs)
@dispatch(Selection, tb.Table)
def compute_up(expr, data, **kwargs):
    """Evaluate a row selection by compiling the predicate to a numexpr
    string and letting PyTables filter on disk via ``read_where``."""
    predicate = optimize(expr.predicate, data)
    # optimize() is expected to collapse the predicate into a Broadcast node.
    assert isinstance(predicate, Broadcast)
    s = predicate._scalars[0]
    cols = [s[field] for field in s.fields]
    expr_str = print_numexpr(cols, predicate._scalar_expr)
    return data.read_where(expr_str)
@dispatch(Symbol, tb.Table)
def compute_up(ts, t, **kwargs):
    # A bare symbol evaluates to the table itself.
    return t
@dispatch(Reduction, (tb.Column, tb.Table))
def compute_up(r, c, **kwargs):
    # Reductions are computed in memory: read everything, then re-dispatch
    # on the resulting ndarray.
    return compute_up(r, c[:])
@dispatch(Projection, tb.Table)
def compute_up(proj, t, **kwargs):
    """Project a subset of columns into a new in-memory record array."""
    # only options here are
    # read the whole thing in and then select
    # or
    # create an output array that is at most the size of the on disk table and
    # fill it will the columns iteratively
    # both of these options aren't ideal but pytables has no way to select
    # multiple column subsets so pick the one where we can optimize for the best
    # case rather than prematurely pessimizing
    #
    # TODO: benchmark on big tables because i'm not sure exactly what the
    # implications here are for memory usage
    columns = proj.fields
    dtype = np.dtype([(col, t.dtype[col]) for col in columns])
    out = np.empty(t.shape, dtype=dtype)
    for c in columns:
        out[c] = t.col(c)
    return out
@dispatch(Field, tb.File)
def compute_up(expr, data, **kwargs):
    """Field access on an open HDF5 file: look the node up under the root.

    ``File.getNode`` is the deprecated PyTables 2.x camelCase spelling; the
    rest of this module already uses the 3.x snake_case API (``read_where``,
    ``create_index``, ``read_sorted``), so use ``get_node`` for consistency.
    """
    return data.get_node('/')._v_children[expr._name]
@dispatch(Field, tb.Node)
def compute_up(expr, data, **kwargs):
    # Field access on a group node: look the child node up by name.
    return data._v_children[expr._name]
@dispatch(Field, tb.Table)
def compute_up(c, t, **kwargs):
    # Field access on a table yields the named Column object (still on disk).
    return getattr(t.cols, c._name)
@dispatch(count, tb.Column)
def compute_up(r, c, **kwargs):
    # Count is just the column length; no need to read the data.
    return len(c)
@dispatch(Head, (tb.Column, tb.Table))
def compute_up(h, t, **kwargs):
    # head(n) slices the first n rows/values into memory.
    return t[:h.n]
@dispatch(Broadcast, tb.Table)
def compute_up(expr, data, **kwargs):
    """Evaluate an elementwise expression with ``tables.Expr`` (numexpr),
    streaming over the on-disk columns instead of loading the whole table."""
    if len(expr._children) != 1:
        raise ValueError("Only one child in Broadcast allowed")
    s = expr._scalars[0]
    cols = [s[field] for field in s.fields]
    expr_str = print_numexpr(cols, expr._scalar_expr)
    # Bind each referenced field name to its on-disk Column object.
    uservars = dict((c, getattr(data.cols, c)) for c in s.fields)
    e = tb.Expr(expr_str, uservars=uservars, truediv=True)
    return e.eval()
@dispatch(Sort, tb.Table)
def compute_up(s, t, **kwargs):
    """Sort rows via a completely-sorted index (CSI) on the key column."""
    # Normalize the sort key to a plain column name when it is a field of
    # the sorted child itself; otherwise pass it through unchanged.
    if isinstance(s.key, Field) and s.key._child.isidentical(s._child):
        key = s.key._name
    else:
        key = s.key
    assert hasattr(t.cols, key), 'Table has no column(s) %s' % s.key
    # read_sorted requires a CSI index on the column (checkCSI=True raises
    # if one has not been created).
    result = t.read_sorted(sortby=key, checkCSI=True)
    if s.ascending:
        return result
    return result[::-1]
@dispatch(Slice, tb.Table)
def compute_up(expr, x, **kwargs):
    # Plain indexing handles integers, slices, and tuples thereof.
    return x[expr.index]
from .numexpr import broadcast_numexpr_collect, print_numexpr
from ..expr import Arithmetic, RealMath, USub, Not
# Expression node types that numexpr can fuse into a single kernel, and the
# subset for which fusing is actually desirable.
Broadcastable = (Arithmetic, RealMath, Field, Not, USub)
WantToBroadcast = (Arithmetic, RealMath, Not, USub)
@dispatch(Expr, tb.Table)
def optimize(expr, seq):
    """Collect fusable subexpressions into Broadcast nodes before compute."""
    return broadcast_numexpr_collect(expr, Broadcastable=Broadcastable,
                                     WantToBroadcast=WantToBroadcast)
@dispatch(nelements, tb.Table)
def compute_up(expr, x, **kwargs):
    # Delegate to the ndarray implementation registered for nelements.
    return compute_up.dispatch(type(expr), np.ndarray)(expr, x, **kwargs)
| 28.778443 | 80 | 0.667915 |
acf3b2b3902b57b1c80b85e954ab86f2ed9c2378 | 1,105 | py | Python | Exercicios/Ex063.py | RenanRibeiroDaSilva/Meu-Aprendizado-Python | 280bf2ad132ae0d26255e70b894fa7dbb69a5d01 | [
"MIT"
] | 2 | 2021-05-21T23:17:44.000Z | 2021-05-22T04:34:37.000Z | Exercicios/Ex063.py | RenanRibeiroDaSilva/Meu-Aprendizado-Python | 280bf2ad132ae0d26255e70b894fa7dbb69a5d01 | [
"MIT"
] | null | null | null | Exercicios/Ex063.py | RenanRibeiroDaSilva/Meu-Aprendizado-Python | 280bf2ad132ae0d26255e70b894fa7dbb69a5d01 | [
"MIT"
] | null | null | null | """ Ex - 063 - Escreva um programa que leia um número N inteiro qualquer e mostre na tela os N primeiros
elementos de um Sequência de Fibonacci.
Exemplo:
0 - 1 - 1 - 2 - 3 - 5 - 8..."""
# My solution
# Header banner:
print(f'{"":=^50}')
print(f'{"> SEQUÊNCIA DE FIBONACCI <":=^50}')
print(f'{"":=^50}')
# Ask how many terms to display:
ele = int(input('Quanto elementos gostaria de visualizar: '))
fn = 0
fn1 = 0
fn2 = 1
cou = 0
while cou < ele:
    print('{}'.format(fn), end='')
    # Print a separator between terms, and a closing message after the last.
    print(' - ' if cou < ele - 1 else ' Acabou...\n', end='')
    fn1 = fn2
    fn2 = fn
    fn = fn1 + fn2
    cou += 1
print(f'{"":-^50}')
print('Fim')
# Instructor's (Guanabara's) solution
print('-'*30)
print('Sequência de Fibonacci')
print('-'*30)
n = int(input('Quantos termos você quer mostrar? '))
t1 = 0
t2 = 1
print('~'*30)
# The first two terms are printed up front; the loop emits the rest.
print('{} -> {}'.format(t1, t2), end='')
cont = 3
while cont <= n:
    t3 = t1 + t2
    print(' -> {}'.format(t3),end='')
    t1 = t2
    t2 = t3
    cont += 1
print(' -> Fim')
print('-'*30) | 23.510638 | 116 | 0.499548 |
acf3b32f36eed557cc6a32c9cd4de4cdc46e0f56 | 7,164 | py | Python | pytests/backup/opsduringbackup.py | mhocouchbase/testrunner | 10faf6955a905dee9a254daf90352881d4687735 | [
"Apache-2.0"
] | 14 | 2015-02-06T02:47:57.000Z | 2020-03-14T15:06:05.000Z | pytests/backup/opsduringbackup.py | mhocouchbase/testrunner | 10faf6955a905dee9a254daf90352881d4687735 | [
"Apache-2.0"
] | 3 | 2019-02-27T19:29:11.000Z | 2021-06-02T02:14:27.000Z | pytests/backup/opsduringbackup.py | mhocouchbase/testrunner | 10faf6955a905dee9a254daf90352881d4687735 | [
"Apache-2.0"
] | 108 | 2015-03-26T08:58:49.000Z | 2022-03-21T05:21:39.000Z | import time
import gc
from threading import Thread
from backup.backup_base import BackupBaseTest
from couchbase_helper.documentgenerator import BlobGenerator
from membase.api.rest_client import Bucket
class OpsDuringBackupTests(BackupBaseTest):
    """Backup/restore tests that run concurrent mutations during cbbackup."""
    def setUp(self):
        super(OpsDuringBackupTests, self).setUp()
        self.backup_items = self.input.param("backup_items", 1000)
    def tearDown(self):
        super(OpsDuringBackupTests, self).tearDown()
    def LoadDuringBackup(self):
        """Backup the items during data loading is running.
        We first load a number of items. Then we start backup while loading another amount number of items into
        cluster as "noise" during the backup. During verification, we want to make sure that every item before backup
        starts can be restored correctly."""
        gen_load_backup = BlobGenerator('couchdb', 'couchdb', self.value_size, end=self.backup_items)
        self._load_all_buckets(self.master, gen_load_backup, "create", 0, 2, self.item_flag, True, batch_size=20000, pause_secs=5, timeout_secs=180)
        #store items before backup starts to kvstores[2]
        self._wait_for_stats_all_buckets(self.servers[:self.num_servers])
        gen_load = BlobGenerator('mysql', 'mysql-', self.value_size, end=self.num_items)
        data_load_thread = Thread(target=self._load_all_buckets,
                                  name="load_data",
                                  args=(self.master, gen_load, "create", 0, 1, 0, True))
        #store noise items during backup to kvstores[1]
        backup_thread = Thread(target=self.shell.execute_cluster_backup,
                               name="backup",
                               args=(self.couchbase_login_info, self.backup_location, self.command_options))
        # Run the backup and the noise load concurrently, then wait for both.
        backup_thread.start()
        data_load_thread.start()
        data_load_thread.join()
        backup_thread.join()
        #TODO: implement a mechanism to check the backup progress to prevent backup_thread hangs up
        self._wait_for_stats_all_buckets(self.servers[:self.num_servers])
        # Keep the pre-backup kvstores so they survive bucket deletion below.
        kvs_before = {}
        for bucket in self.buckets:
            kvs_before[bucket.name] = bucket.kvs[2]
        self._all_buckets_delete(self.master)
        gc.collect()
        if self.default_bucket:
            default_params = self._create_bucket_params(server=self.master, size=self.bucket_size,
                                                        replicas=self.num_replicas)
            self.cluster.create_default_bucket(default_params)
            self.buckets.append(Bucket(name="default", num_replicas=self.num_replicas, bucket_size=self.bucket_size))
        self._create_standard_buckets(self.master, self.standard_buckets)
        for bucket in self.buckets:
            bucket.kvs[2] = kvs_before[bucket.name]
        del kvs_before
        gc.collect()
        bucket_names = [bucket.name for bucket in self.buckets]
        self.shell.restore_backupFile(self.couchbase_login_info, self.backup_location, bucket_names)
        # Drop the noise kvstore; only pre-backup items must be verified.
        for bucket in self.buckets:
            del bucket.kvs[1]
        self._wait_for_stats_all_buckets(self.servers[:self.num_servers])
        self.verify_results(self.master, 2) #do verification only with kvstores[2]
    def CreateUpdateDeleteExpireDuringBackup(self):
        """Backup the items during mutation on existing items is running.
        We first load amount of items. After that, when we start backup, we begin do mutations on these existing items."""
        gen_load = BlobGenerator('mysql', 'mysql-', self.value_size, end=self.num_items)
        gen_update = BlobGenerator('mysql', 'mysql-', self.value_size, end=(self.num_items // 2 - 1))
        gen_expire = BlobGenerator('mysql', 'mysql-', self.value_size, start=self.num_items // 2, end=(self.num_items * 3 // 4 - 1))
        gen_delete = BlobGenerator('mysql', 'mysql-', self.value_size, start=self.num_items * 3 // 4, end=self.num_items)
        self._load_all_buckets(self.master, gen_load, "create", 0, 1, 0, True, batch_size=20000, pause_secs=5, timeout_secs=180)
        self._wait_for_stats_all_buckets(self.servers[:self.num_servers])
        mutate_threads = []
        # Kick off the requested mutation workloads asynchronously; the
        # change_password/change_port cases mutate the cluster instead.
        if(self.doc_ops is not None):
            if("update" in self.doc_ops):
                mutate_threads.append(self._async_load_all_buckets(self.master, gen_update, "update", 0, 1, 0, True, batch_size=20000))
            if("delete" in self.doc_ops):
                mutate_threads.append(self._async_load_all_buckets(self.master, gen_delete, "delete", 0, 1, 0, True, batch_size=20000))
            if("expire" in self.doc_ops):
                mutate_threads.append(self._async_load_all_buckets(self.master, gen_expire, "update", self.expire_time, 1, 0, True, batch_size=20000))
            if("change_password" in self.doc_ops):
                old_pass = self.master.rest_password
                self.change_password(new_password=self.input.param("new_password", "new_pass"))
            if("change_port" in self.doc_ops):
                self.change_port(new_port=self.input.param("new_port", "9090"))
        try:
            first_backup_thread = Thread(target=self.shell.execute_cluster_backup,
                                         name="backup",
                                         args=(self.couchbase_login_info, self.backup_location, self.command_options))
            first_backup_thread.start()
            first_backup_thread.join()
            # Wait for every async mutation task to finish before verifying.
            for t in mutate_threads:
                for task in t:
                    task.result()
            kvs_before = {}
            for bucket in self.buckets:
                kvs_before[bucket.name] = bucket.kvs[1]
            self._all_buckets_delete(self.master)
            gc.collect()
            if self.default_bucket:
                default_params=self._create_bucket_params(server=self.master, size=self.bucket_size,
                                                          replicas=self.num_replicas)
                self.cluster.create_default_bucket(default_params)
                self.buckets.append(Bucket(name="default", num_replicas=self.num_replicas, bucket_size=self.bucket_size))
            self._create_standard_buckets(self.master, self.standard_buckets)
            for bucket in self.buckets:
                bucket.kvs[1] = kvs_before[bucket.name]
            del kvs_before
            gc.collect()
            bucket_names = [bucket.name for bucket in self.buckets]
            self.shell.restore_backupFile(self.couchbase_login_info, self.backup_location, bucket_names)
            time.sleep(self.expire_time) #system sleeps for expired items
            self._wait_for_stats_all_buckets(self.servers[:self.num_servers])
            #TODO implement verification for this test case
        finally:
            # Restore cluster credentials/port even if the test body failed.
            if self.doc_ops:
                if "change_password" in self.doc_ops:
                    self.change_password(new_password=old_pass)
self.change_port(new_port='8091',
current_port=self.input.param("new_port", "9090"))
| 51.913043 | 150 | 0.643076 |
acf3b4905a5ce8274a8829704e94264d16c96818 | 10,616 | py | Python | code/Resources.py | Penultimate-Panacea/MegaTraveller-Referee-Companion | b3f54288e5e105579fa763b1c96372b2fe6416a4 | [
"Unlicense"
] | null | null | null | code/Resources.py | Penultimate-Panacea/MegaTraveller-Referee-Companion | b3f54288e5e105579fa763b1c96372b2fe6416a4 | [
"Unlicense"
] | null | null | null | code/Resources.py | Penultimate-Panacea/MegaTraveller-Referee-Companion | b3f54288e5e105579fa763b1c96372b2fe6416a4 | [
"Unlicense"
] | null | null | null | import diceroller
class Resources:
    """Determine which trade resources a planet significantly produces.

    Each ``generate_*`` method builds a score per resource from the planet's
    characteristics (core type, UWP digits, native life), rolls 2d6 per
    resource, and flags the resource as significant when its score beats the
    roll.  Results are stored on the instance (``self.processed`` etc.) and
    also returned as a list of booleans.

    Fixes relative to the original:
    - ``generate_natural`` had a missing colon (syntax error).
    - ``generate_manufactured`` had two statements with a missing operand
      (``weapons_score -=`` / ``consumables_score -=``) and an always-true
      ``uwp[5] == 0 or 1`` test.
    - Always-true ``x == 0 or 1 or 2 ...`` chains replaced with membership
      tests.
    - Each method now records and returns its result instead of discarding it.
    """

    def __init__(self, planet, seed, orbit, star, orbit_zone):
        # NOTE(review): orbit, star and orbit_zone are accepted but unused by
        # the current scoring rules; kept for interface compatibility.
        self.planet = planet
        self.dice = diceroller(seed)
        self.natural = None
        self.processed = None
        self.manufactured = None
        self.information = None

    def generate_processed(self):
        """Score processed goods: [agroproducts, metals, non-metals]."""
        significant_processed = [False, False, False]
        agroproducts_score = 0
        metals_score = 0
        nonmetals_score = 0
        core = self.planet.size_details.core
        if core == "Heavy Core":
            metals_score += 2
            nonmetals_score += 1
        elif core == "Molten Core":
            agroproducts_score += 1
        elif core == "Rocky Body":
            agroproducts_score += 1
        elif core == "Icy Body":
            metals_score -= 1
            nonmetals_score -= 1
        else:
            print("INVALID CORE TYPE")
        if 4 <= self.planet.uwp[2] <= 9:
            agroproducts_score += 2
            nonmetals_score += 1
        # Fixed: the original ``== 0 or 1 or 2 or 3 or 10`` was always truthy.
        elif self.planet.uwp[2] in (0, 1, 2, 3, 10):
            metals_score += 1
            nonmetals_score += 1
        if 0 <= self.planet.uwp[4] <= 4:
            agroproducts_score += 1
            metals_score -= 1
        elif self.planet.uwp[4] > 4:
            agroproducts_score += 2
            metals_score += 1
            nonmetals_score += 1
        if self.planet.uwp[7] < 4:
            agroproducts_score += 1
            metals_score -= 1
        # Fixed: was ``== 4 or 5 or 6`` (always truthy).
        elif self.planet.uwp[7] in (4, 5, 6):
            agroproducts_score += 2
            metals_score += 2
            nonmetals_score += 2
        elif 7 <= self.planet.uwp[7] <= 11:
            agroproducts_score += 1
            metals_score += 4
            nonmetals_score += 4
        elif self.planet.uwp[7] > 11:
            agroproducts_score += 1
            metals_score += 5
            nonmetals_score += 6
        if self.planet.atmo_details.native_life:
            agroproducts_score += 5
            nonmetals_score += 3
        # Dice are rolled after scoring here; preserve the original call
        # order so seeded dice sequences stay reproducible.
        agroproducts_roll = self.dice.roll2d6()
        metals_roll = self.dice.roll2d6()
        nonmetals_roll = self.dice.roll2d6()
        if agroproducts_score > agroproducts_roll:
            significant_processed[0] = True
        if metals_score > metals_roll:
            significant_processed[1] = True
        if nonmetals_score > nonmetals_roll:
            significant_processed[2] = True
        self.processed = significant_processed
        return significant_processed

    def generate_natural(self):
        """Score natural goods: [agricultural, ores, radioactives, crystals,
        compounds]."""
        significant_natural = [False, False, False, False, False]
        agricultural_score = 0
        ores_score = 0
        radioactives_score = 0
        crystals_score = 0
        compounds_score = 0
        # Dice are rolled *before* scoring in this method (original order,
        # kept so seeded dice sequences stay reproducible).
        agricultural_roll = self.dice.roll2d6()
        ores_roll = self.dice.roll2d6()
        radioactives_roll = self.dice.roll2d6()
        crystals_roll = self.dice.roll2d6()
        compounds_roll = self.dice.roll2d6()
        core = self.planet.size_details.core
        if core == "Heavy Core":
            agricultural_score += 1
            ores_score += 8
            radioactives_score += 7
            crystals_score += 6
            compounds_score += 5
        elif core == "Molten Core":
            agricultural_score += 4
            ores_score += 7
            radioactives_score += 5
            crystals_score += 5
            compounds_score += 6
        elif core == "Rocky Body":
            agricultural_score += 4
            ores_score += 3
            radioactives_score += 3
            crystals_score += 2
            compounds_score += 1
        elif core == "Icy Body":
            agricultural_score -= 4
            compounds_score -= 4
        else:
            print("INVALID CORE TYPE")
        if 4 <= self.planet.uwp[2] <= 9:
            agricultural_score += 1
        elif 0 <= self.planet.uwp[2] <= 3 or 10 <= self.planet.uwp[2]:
            agricultural_score -= 3
            ores_score += 1
            radioactives_score += 1
            compounds_score += 1
        # Fixed: the original ``if`` was missing its colon (syntax error).
        if 0 <= self.planet.uwp[7] <= 3:
            agricultural_score += 1
            ores_score += 1
            radioactives_score += 1
            crystals_score += 1
            compounds_score += 1
        elif 7 <= self.planet.uwp[7] <= 11:
            agricultural_score -= 1
        elif 12 <= self.planet.uwp[7]:
            agricultural_score -= 2
            ores_score += 1
            radioactives_score += 1
            crystals_score += 1
            compounds_score += 1
        # native_life is boolean, so the original unreachable "Failure in
        # Atmo Life Gen" branch is dropped.
        if self.planet.atmo_details.native_life:
            agricultural_score += 5
            compounds_score += 1
        else:
            compounds_score -= 1
        if agricultural_score > agricultural_roll:
            significant_natural[0] = True
        if ores_score > ores_roll:
            significant_natural[1] = True
        if radioactives_score > radioactives_roll:
            significant_natural[2] = True
        if crystals_score > crystals_roll:
            significant_natural[3] = True
        if compounds_score > compounds_roll:
            significant_natural[4] = True
        self.natural = significant_natural
        return significant_natural

    def generate_manufactured(self):
        """Score manufactured goods: [parts, durables, consumables, weapons]."""
        significant_manufactured = [False, False, False, False]
        parts_score = 0
        durables_score = 0
        consumables_score = 0
        weapons_score = 0
        if 0 <= self.planet.uwp[2] <= 3 or 10 <= self.planet.uwp[2]:
            parts_score += 1
            durables_score += 1
            consumables_score += 1
            weapons_score += 1
        if 0 <= self.planet.uwp[4] <= 4:
            parts_score -= 1
            durables_score -= 1
            consumables_score -= 1
            # The operand was missing in the original (syntax error); -1
            # matches the other penalties in this branch — TODO confirm
            # against the source tables.
            weapons_score -= 1
        elif 5 <= self.planet.uwp[4] <= 8:
            parts_score += 1
            durables_score += 2
            consumables_score += 1
        elif 9 <= self.planet.uwp[4]:
            parts_score += 2
            durables_score += 3
            consumables_score += 4
            weapons_score += 1
        # Fixed: was ``== 0 or 1`` (always truthy).
        if self.planet.uwp[5] in (0, 1):
            parts_score -= 1
            durables_score -= 1
            # Missing operand in the original; -1 assumed — TODO confirm.
            consumables_score -= 1
        elif 2 <= self.planet.uwp[5] <= 6:
            parts_score += 1
            durables_score += 1
            consumables_score += 1
            weapons_score += 1
        elif self.planet.uwp[5] == 7:
            parts_score += 2
            durables_score += 2
            consumables_score += 2
            weapons_score += 3
        elif 8 <= self.planet.uwp[5]:
            consumables_score += 1
            weapons_score += 1
        if 4 <= self.planet.uwp[7] <= 6:
            durables_score += 1
            consumables_score += 1
            weapons_score += 1
        elif 7 <= self.planet.uwp[7] <= 11:
            parts_score += 2
            durables_score += 2
            consumables_score += 2
            weapons_score += 1
        elif 12 <= self.planet.uwp[7]:
            parts_score += 4
            durables_score += 3
            consumables_score += 4
            weapons_score += 2
        if self.planet.atmo_details.native_life:
            parts_score += 1
            durables_score += 1
            consumables_score += 1
            weapons_score += 1
        parts_roll = self.dice.roll2d6()
        durables_roll = self.dice.roll2d6()
        consumables_roll = self.dice.roll2d6()
        weapons_roll = self.dice.roll2d6()
        if parts_score > parts_roll:
            significant_manufactured[0] = True
        if durables_score > durables_roll:
            significant_manufactured[1] = True
        if consumables_score > consumables_roll:
            significant_manufactured[2] = True
        if weapons_score > weapons_roll:
            significant_manufactured[3] = True
        self.manufactured = significant_manufactured
        return significant_manufactured

    def generate_information(self):
        """Score information goods: [recordings, artforms, software,
        documents]."""
        significant_information = [False, False, False, False]
        recordings_score = 0
        artforms_score = 0
        software_score = 0
        documents_score = 0
        if 0 <= self.planet.uwp[4] <= 4:
            documents_score -= 1
        elif 5 <= self.planet.uwp[4] <= 8:
            recordings_score += 1
            artforms_score += 2
            software_score += 1
        elif 9 <= self.planet.uwp[4]:
            recordings_score += 2
            artforms_score += 3
            software_score += 4
            documents_score += 1
        if 2 <= self.planet.uwp[5] <= 6:
            recordings_score += 1
            artforms_score += 1
            software_score += 1
            documents_score += 1
        elif self.planet.uwp[5] == 7:
            recordings_score += 1
            artforms_score += 2
            software_score += 1
            documents_score += 2
        elif 8 <= self.planet.uwp[5]:
            recordings_score += 2
            software_score += 1
            documents_score += 4
        if 3 <= self.planet.uwp[6] <= 6:
            recordings_score += 1
            software_score += 1
            documents_score += 2
        elif 7 <= self.planet.uwp[6] <= 9:
            recordings_score += 2
            software_score += 2
            documents_score += 4
        elif 10 <= self.planet.uwp[6]:
            recordings_score += 3
            software_score += 3
            documents_score += 6
        if 0 <= self.planet.uwp[7] <= 3:
            recordings_score -= 3
            artforms_score += 2
            software_score -= 9
        elif 4 <= self.planet.uwp[7] <= 6:
            recordings_score += 1
            artforms_score += 1
            documents_score += 1
        elif 7 <= self.planet.uwp[7] <= 11:
            recordings_score += 2
            artforms_score += 1
            software_score += 1
            documents_score += 3
        elif 12 <= self.planet.uwp[7]:
            recordings_score += 3
            artforms_score += 1
            software_score += 4
            documents_score += 1
        recordings_roll = self.dice.roll2d6()
        artforms_roll = self.dice.roll2d6()
        software_roll = self.dice.roll2d6()
        documents_roll = self.dice.roll2d6()
        if recordings_score > recordings_roll:
            significant_information[0] = True
        if artforms_score > artforms_roll:
            significant_information[1] = True
        if software_score > software_roll:
            significant_information[2] = True
        if documents_score > documents_roll:
            significant_information[3] = True
        self.information = significant_information
        return significant_information
| 35.864865 | 70 | 0.53994 |
acf3b6228119cf4a2415f84328ec1508256b76fe | 3,891 | py | Python | bc211/import_open_referral_csv/parser.py | pg-irc/pathways-backend | 05a8c4e750523d2d081b030a248c5444d1ed7992 | [
"BSD-3-Clause"
] | 12 | 2017-08-30T18:21:00.000Z | 2021-12-09T04:04:17.000Z | bc211/import_open_referral_csv/parser.py | pg-irc/pathways-backend | 05a8c4e750523d2d081b030a248c5444d1ed7992 | [
"BSD-3-Clause"
] | 424 | 2017-08-08T18:32:14.000Z | 2022-03-30T21:42:51.000Z | bc211/import_open_referral_csv/parser.py | pg-irc/pathways-backend | 05a8c4e750523d2d081b030a248c5444d1ed7992 | [
"BSD-3-Clause"
] | 7 | 2017-09-29T21:14:37.000Z | 2019-12-30T21:07:37.000Z | import logging
from urllib import parse as urlparse
from datetime import datetime
from bc211.import_icarol_xml.parser import remove_double_escaped_html_markup, clean_one_phone_number
from bc211.import_open_referral_csv import exceptions
from django.core import validators
from django.core.exceptions import ValidationError
LOGGER = logging.getLogger(__name__)
def parse_required_field_with_double_escaped_html(field, value):
    """Parse a required CSV field, then strip double-escaped HTML markup."""
    required_value = parse_required_field(field, value)
    return remove_double_escaped_html_markup(required_value)
def parse_optional_field_with_double_escaped_html(value):
    """Parse an optional CSV field, then strip double-escaped HTML markup."""
    optional_value = parse_optional_field(value)
    return remove_double_escaped_html_markup(optional_value)
def parse_email(active_record_id, value):
    """Return a cleaned, validated email address from a CSV cell, or None."""
    email = parse_optional_field(value)
    if csv_value_is_empty(email):
        return None
    cleaned_email = remove_double_escaped_html_markup(email)
    return validated_email_or_none(active_record_id, cleaned_email)
def validated_email_or_none(active_record_id, email):
    """Validate with Django's email validator; log and return None if invalid."""
    try:
        validators.validate_email(email)
        return email
    except ValidationError:
        LOGGER.debug('The record with the id: "%s" has an invalid email.', active_record_id)
        return None
def parse_last_verified_date(value):
    """Parse a ``YYYY-MM-DD`` CSV value into a datetime, or None when empty.

    The original implementation only short-circuited on ``''`` and therefore
    crashed with a ValueError from ``strptime('')`` when the value was None;
    None and '' are now treated uniformly as "no date", matching how the rest
    of this module defines an empty CSV cell.
    """
    if value is None or value == '':
        return None
    return datetime.strptime(value, '%Y-%m-%d')
def parse_addresses(addresses_from_csv):
    """Clean each address component and join the non-empty ones with newlines."""
    addresses = [
        parse_optional_field_with_double_escaped_html(address) for address in addresses_from_csv
    ]
    non_empty_addresses = [address for address in addresses if address]
    return '\n'.join(non_empty_addresses)
def parse_country(value):
    """Parse the required country field into a two-letter country code."""
    country = parse_required_field('country', value)
    cleaned_country = remove_double_escaped_html_markup(country)
    return two_letter_country_code_or_none(cleaned_country)
def two_letter_country_code_or_none(country):
    """Map a country name to its two-letter code.

    Values that are already two characters or shorter pass through
    unchanged; any other long value is rejected as invalid.
    """
    known_names = {'Canada': 'CA', 'United States': 'US'}
    code = known_names.get(country)
    if code is not None:
        return code
    if len(country) > 2:
        raise exceptions.InvalidFieldCsvParseException(
            'Country field with value: {} is invalid'.format(country)
        )
    return country
def parse_phone_number(value):
    """Parse a required phone number cell: strip markup, then normalise it."""
    phone_number = parse_required_field('number', value)
    phone_number_without_markup = remove_double_escaped_html_markup(phone_number)
    return clean_one_phone_number(phone_number_without_markup)
def parse_required_field(field, value):
    """Return *value* unchanged, raising when the required CSV cell is empty."""
    # An empty cell is either None or the empty string.
    if value is None or value == '':
        raise exceptions.MissingRequiredFieldCsvParseException(
            'Missing required field: "{0}"'.format(field)
        )
    return value
def parse_optional_field(value):
    """Return the CSV cell value, normalising empty cells (None/'') to ''."""
    return '' if value is None or value == '' else value
def parse_website_with_prefix(active_record_id, value):
    """Return a validated URL from the CSV cell (http-prefixed), or None."""
    website = parse_optional_field(value)
    if csv_value_is_empty(website):
        return None
    website_with_http_prefix = add_http_prefix_to_website(website)
    return validated_website_or_none(active_record_id, website_with_http_prefix)
def validated_website_or_none(active_record_id, website):
    """Validate with Django's URLValidator; log and return None if invalid."""
    validate_url = validators.URLValidator()
    try:
        validate_url(website)
        return website
    except ValidationError:
        LOGGER.debug('The record with the id: "%s" has an invalid URL.', active_record_id)
        return None
def add_http_prefix_to_website(website):
    """Re-assemble the URL with 'http' applied as the default scheme."""
    parsed = urlparse.urlparse(website, 'http')
    reassembled = urlparse.urlunparse(parsed)
    # urlunparse can emit an extra slash for some scheme-less inputs;
    # collapse it back down.
    return reassembled.replace('///', '//')
def parse_coordinate_if_defined(value):
    """Convert a CSV cell to float, or None when the cell is empty."""
    # An empty cell is either None or the empty string.
    if value is None or value == '':
        return None
    return float(value)
def csv_value_is_empty(value):
    """True when the CSV cell carries no data (None or the empty string)."""
    empty_markers = (None, '')
    return value in empty_markers
| 31.634146 | 100 | 0.759702 |
acf3b689adc8540b7159a7785495d17821c1273a | 1,845 | py | Python | saleor/core/payments.py | Kibria7533/saleor | d72f5bff9afeab13d2e2e09c9a32a964bfd9476d | [
"CC-BY-4.0"
] | 2 | 2021-05-16T13:46:07.000Z | 2021-05-16T13:49:21.000Z | saleor/core/payments.py | kobbycyber/saleor | a7aa9e786d5c55243433800e414703fdcebbea23 | [
"CC-BY-4.0"
] | 1 | 2019-04-05T17:22:52.000Z | 2019-04-05T17:22:52.000Z | saleor/core/payments.py | kobbycyber/saleor | a7aa9e786d5c55243433800e414703fdcebbea23 | [
"CC-BY-4.0"
] | 2 | 2021-12-03T16:59:37.000Z | 2022-02-19T13:05:42.000Z | from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, List, Optional
if TYPE_CHECKING:
# flake8: noqa
from ..checkout.models import Checkout
from ..payment.interface import (
CustomerSource,
GatewayResponse,
PaymentData,
PaymentGateway,
TokenConfig,
)
class PaymentInterface(ABC):
    """Abstract payment-processing contract.

    Each method forwards one payment operation to a concrete gateway
    implementation identified by the ``gateway`` string.
    """
    @abstractmethod
    def list_payment_gateways(
        self,
        currency: Optional[str] = None,
        checkout: Optional["Checkout"] = None,
        active_only: bool = True,
    ) -> List["PaymentGateway"]:
        """List available gateways, optionally filtered by currency/checkout."""
        pass
    @abstractmethod
    def authorize_payment(
        self, gateway: str, payment_information: "PaymentData"
    ) -> "GatewayResponse":
        """Reserve funds on the customer's payment source."""
        pass
    @abstractmethod
    def capture_payment(
        self, gateway: str, payment_information: "PaymentData"
    ) -> "GatewayResponse":
        """Capture previously authorized funds."""
        pass
    @abstractmethod
    def refund_payment(
        self, gateway: str, payment_information: "PaymentData"
    ) -> "GatewayResponse":
        """Return captured funds to the customer."""
        pass
    @abstractmethod
    def void_payment(
        self, gateway: str, payment_information: "PaymentData"
    ) -> "GatewayResponse":
        """Cancel an authorization before capture."""
        pass
    @abstractmethod
    def confirm_payment(
        self, gateway: str, payment_information: "PaymentData"
    ) -> "GatewayResponse":
        """Confirm a payment that requires an extra confirmation step."""
        pass
    @abstractmethod
    def token_is_required_as_payment_input(self, gateway) -> bool:
        """Whether the gateway needs a client-side token to process payments."""
        pass
    @abstractmethod
    def process_payment(
        self, gateway: str, payment_information: "PaymentData"
    ) -> "GatewayResponse":
        """Run the gateway's default one-step payment flow."""
        pass
    @abstractmethod
    def get_client_token(self, gateway: str, token_config: "TokenConfig") -> str:
        """Fetch a client token used to initialise the gateway's frontend SDK."""
        pass
@abstractmethod
def list_payment_sources(
self, gateway: str, customer_id: str
) -> List["CustomerSource"]:
pass
| 24.6 | 81 | 0.639024 |
acf3b6e29b7567b0a505b87055856f322e831740 | 1,908 | py | Python | code/knn/self_knn/KNN_kd_tree.py | Nicolas-gaofeng/Salute_Machine_Learning | bdb478f44d39abf829ac056fcb0f08b03b4503d2 | [
"Apache-2.0"
] | 3 | 2020-12-27T05:17:50.000Z | 2021-02-23T08:05:55.000Z | code/knn/self_knn/KNN_kd_tree.py | Nicolas-gaofeng/Salute_Machine_Learning | bdb478f44d39abf829ac056fcb0f08b03b4503d2 | [
"Apache-2.0"
] | null | null | null | code/knn/self_knn/KNN_kd_tree.py | Nicolas-gaofeng/Salute_Machine_Learning | bdb478f44d39abf829ac056fcb0f08b03b4503d2 | [
"Apache-2.0"
] | null | null | null | #!/usz/bin/env python
# -*- coding:utf-8 -*-
from collections import namedtuple
import numpy as np
"""
@file : KNN_kd_tree.py
@author : zgf
@brief : kd_tree算法自编程实现
@attention : life is short,I need python
"""
# 建立节点类
class Node(namedtuple("Node", "location left_child right_child")):
def __repr__(self):
return str(tuple(self))
# kd tree类
class KdTree:
def __init(self, k=1, p=2):
self.k = k
self.p = p
self.kdtree = None
# 构建kd tree
def _fit(self, X, depth=0):
try:
k = X.shape[1]
except IndexError as e:
return None
# 这里可以展开,通过方差选择axis
axis = depth % k
X = X[X[:, axis].argsort()]
median = X.shape[0] // 2
try:
X[median]
except IndexError:
return None
return Node(
location=X[median],
left_child=self._fit(X[:median], depth + 1),
right_child=self._fit(X[median + 1 :], depth + 1),
)
def _search(self, point, tree=None, depth=0, best=None):
if tree is None:
return best
k = point.shape[1]
# 更新 branch
if point[0][depth % k] < tree.location[depth % k]:
next_branch = tree.left_child
else:
next_branch = tree.right_child
if not next_branch is None:
best = next_branch.location
return self._search(point, tree=next_branch, depth=depth + 1, best=best)
def fit(self, X):
self.kdtree = self._fit(X)
return self.kdtree
def predict(self, X):
res = self._search(X, self.kdtree)
return res
def main():
    # Small demo: build a kd-tree over six 2-D points and query one point.
    KNN = KdTree()
    X_train = np.array([[5, 4], [9, 6], [4, 7], [2, 3], [8, 1], [7, 2]])
    KNN.fit(X_train)
    X_new = np.array([[5, 3]])
    res = KNN.predict(X_new)
    print(res)
if __name__ == "__main__":
main()
| 23.85 | 80 | 0.537736 |
acf3b75e0cb52e3775465d31dabb5c2a1bcdbdd9 | 2,001 | py | Python | tests/python/relay/test_op_grad_level10.py | robo-corg/incubator-tvm | 4ddfdb4b15d05a5bf85a984837967d004efee5dd | [
"Apache-2.0"
] | 286 | 2020-06-23T06:40:44.000Z | 2022-03-30T01:27:49.000Z | tests/python/relay/test_op_grad_level10.py | robo-corg/incubator-tvm | 4ddfdb4b15d05a5bf85a984837967d004efee5dd | [
"Apache-2.0"
] | 10 | 2020-07-31T03:26:59.000Z | 2021-12-27T15:00:54.000Z | tests/python/relay/test_op_grad_level10.py | robo-corg/incubator-tvm | 4ddfdb4b15d05a5bf85a984837967d004efee5dd | [
"Apache-2.0"
] | 30 | 2020-07-17T01:04:14.000Z | 2021-12-27T14:05:19.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from tvm import relay
from tvm.relay.testing import check_grad
def test_cross_entropy_grad():
    """Numerically check gradients of the relay cross_entropy op."""
    prediction = relay.var("x", shape=(2, 5))
    label = relay.var("y", shape=(2, 5))
    func = relay.Function([prediction, label], relay.op.nn.cross_entropy(prediction, label))
    check_grad(func, eps=0.01, scale=0.1, mean=1)
def test_cross_entropy_with_logits_grad():
    """Numerically check gradients of the relay cross_entropy_with_logits op."""
    logits = relay.var("x", shape=(2, 5))
    label = relay.var("y", shape=(2, 5))
    func = relay.Function([logits, label], relay.op.nn.cross_entropy_with_logits(logits, label))
    check_grad(func, eps=0.01, scale=0.1, mean=1)
def test_checkpoint():
    """Check gradients flow through the checkpoint annotation.

    Covers both a single checkpointed output and a checkpointed tuple whose
    elements are mixed with non-checkpointed expressions.
    """
    inputs = [relay.var("x{}".format(i), shape=(1,)) for i in range(4)]
    left_sum = relay.add(inputs[0], inputs[1])
    right_sum = relay.add(inputs[2], inputs[3])
    product = relay.multiply(left_sum, right_sum)
    check_grad(relay.Function(inputs, relay.annotation.checkpoint(product)))
    out_tuple = relay.Tuple([relay.add(inputs[0], inputs[1]),
                             relay.multiply(inputs[2], inputs[3])])
    checkpointed = relay.annotation.checkpoint(out_tuple)
    out_single = relay.subtract(
        relay.TupleGetItem(checkpointed, 0),
        relay.TupleGetItem(out_tuple, 1),
    )
    check_grad(relay.Function(inputs, out_single))
# Allow running this test module directly as a script.
if __name__ == "__main__":
    pytest.main([__file__])
| 40.836735 | 112 | 0.687656 |
acf3b809e9761ec8bff150b0557bd834206598c3 | 7,946 | py | Python | mfDiMP/ltr/dataset/imagenetvid.py | zhanglichao/end2end_rgbt_tracking | f96bf9a810f77885b3faa219f06a82c6e22cb824 | [
"MIT"
] | 43 | 2019-08-17T15:02:20.000Z | 2022-03-30T04:07:20.000Z | mfDiMP/ltr/dataset/imagenetvid.py | zhulishun/end2end_rgbt_tracking | 6de2532feec97b388e3617f85ed779933426dfa5 | [
"MIT"
] | 10 | 2019-08-20T12:37:55.000Z | 2022-02-20T11:02:13.000Z | mfDiMP/ltr/dataset/imagenetvid.py | zhulishun/end2end_rgbt_tracking | 6de2532feec97b388e3617f85ed779933426dfa5 | [
"MIT"
] | 16 | 2019-11-23T01:00:10.000Z | 2022-02-18T19:17:46.000Z | import os
from .base_dataset import BaseDataset
from ltr.data.image_loader import jpeg4py_loader
import xml.etree.ElementTree as ET
import json
import torch
from collections import OrderedDict
# import nltk
# from nltk.corpus import wordnet
from ltr.admin.environment import env_settings
def get_target_to_image_ratio(seq):
    """Return sqrt(first-frame bbox area / image area) as a 0-dim tensor.

    ``seq['anno']`` rows are [x, y, w, h]; only the first row's w*h is used.
    """
    boxes = torch.Tensor(seq['anno'])
    image_size = torch.Tensor(seq['image_size'])
    target_area = boxes[0, 2:4].prod()
    image_area = image_size.prod()
    return (target_area / image_area).sqrt()
class ImagenetVID(BaseDataset):
    """ImageNet VID video dataset for tracking.

    Parses the per-frame XML annotations into per-object "tracklets"
    (one training sequence per object track), caching the result in
    ``<root>/cache.json`` so the expensive XML pass runs only once.

    Publication:
        ImageNet Large Scale Visual Recognition Challenge
        Olga Russakovsky, Jia Deng, Hao Su, Jonathan Krause, Sanjeev Satheesh, Sean Ma,
        Zhiheng Huang, Andrej Karpathy, Aditya Khosla, Michael Bernstein,
        Alexander C. Berg and Li Fei-Fei
        IJCV, 2015 -- https://arxiv.org/pdf/1409.0575.pdf

    Download the dataset from http://image-net.org/
    """
    def __init__(self, root=None, image_loader=jpeg4py_loader, min_length=0, max_target_area=1, use_target_visible=False):
        # root: dataset root directory; defaults to the configured imagenet_dir.
        # min_length: drop tracklets with fewer annotated frames than this.
        # max_target_area: drop tracklets whose first-frame bbox-to-image ratio
        #   (sqrt of area ratio) is not below this threshold.
        root = env_settings().imagenet_dir if root is None else root
        super().__init__(root, image_loader)
        cache_file = os.path.join(root, 'cache.json')
        if os.path.isfile(cache_file):
            # If available, load the pre-processed cache file containing meta-info for each sequence
            with open(cache_file, 'r') as f:
                sequence_list_dict = json.load(f)
            self.sequence_list = sequence_list_dict
        else:
            # Else process the imagenet annotations and generate the cache file
            self.sequence_list = self._process_anno(root)
            with open(cache_file, 'w') as f:
                json.dump(self.sequence_list, f)
        # Filter the sequences based on min_length and max_target_area in the first frame
        self.sequence_list = [x for x in self.sequence_list if len(x['anno']) >= min_length and
                              get_target_to_image_ratio(x) < max_target_area]
        if not use_target_visible:
            # NOTE(review): the default use_target_visible=False always raises
            # here, so callers must pass use_target_visible=True -- confirm intended.
            raise NotImplementedError
        # TODO add the class info
        # we do not have the class_lists for the tracking net
        self.class_list = self._get_class_list()
        # self.class_list_non_unique = self._get_class_list_non_unique()
    def _get_class_list(self):
        # Unique, sorted class names gathered from all sequences.
        class_list = []
        for x in self.sequence_list:
            class_list.append(x['class_name'])
        class_list = list(set(class_list))
        class_list.sort()
        return class_list
    def _get_class_list_non_unique(self):
        # Same as _get_class_list but keeps duplicates (one entry per sequence).
        class_list = []
        for x in self.sequence_list:
            class_list.append(x['class_name'])
        # class_list = list(set(class_list))
        class_list.sort()
        return class_list
    def get_name(self):
        # Dataset identifier used by the training framework.
        return 'imagenetvid'
    def has_class_info(self):
        return False
    def get_num_sequences(self):
        return len(self.sequence_list)
    def get_num_classes(self):
        # Not implemented; -1 signals "unknown".
        return -1
    # TODO implement
    def get_sequences_in_class(self, class_id):
        return None
    def get_sequence_info(self, seq_id):
        """Return per-frame 'bbox', 'valid' and 'visible' tensors for a sequence."""
        bb_anno = torch.Tensor(self.sequence_list[seq_id]['anno'])
        # A box is valid only if both width and height are positive.
        valid = (bb_anno[:, 2] > 0) & (bb_anno[:, 3] > 0)
        visible = torch.Tensor(self.sequence_list[seq_id]['target_visible']) & valid
        return {'bbox': bb_anno, 'valid': valid, 'visible': visible}
    def _get_frame(self, sequence, frame_id):
        # Map (set_id, vid_id, frame index) to the JPEG path on disk and load it.
        set_name = 'ILSVRC2015_VID_train_{:04d}'.format(sequence['set_id'])
        vid_name = 'ILSVRC2015_train_{:08d}'.format(sequence['vid_id'])
        frame_number = frame_id + sequence['start_frame']
        frame_path = os.path.join(self.root, 'Data', 'VID', 'train', set_name, vid_name,
                                  '{:06d}.JPEG'.format(frame_number))
        return self.image_loader(frame_path)
    def get_frames(self, seq_id, frame_ids, anno=None):
        """Load the requested frames plus per-frame annotation slices and meta info."""
        sequence = self.sequence_list[seq_id]
        frame_list = [self._get_frame(sequence, f) for f in frame_ids]
        if anno is None:
            anno = self.get_sequence_info(seq_id)
        anno_frames = {}
        for key, value in anno.items():
            anno_frames[key] = [value[f_id, ...].clone() for f_id in frame_ids]
        # added the class info to the meta info
        object_meta = OrderedDict({'object_class': sequence['class_name'],
                                   'motion_class': None,
                                   'major_class': None,
                                   'root_class': None,
                                   'motion_adverb': None})
        return frame_list, anno_frames, object_meta
    # def _get_class_name_from_id(self, class_name_id):
    #     pos = class_name_id[0]
    #     offset = int(class_name_id[2:len(class_name_id)])
    #     syns = wordnet.synset_from_pos_and_offset(pos, offset)
    #     return syns.name()[:-5]
    def _process_anno(self, root):
        """Parse the VID XML annotations into a list of tracklet dicts.

        Each tracklet dict has keys: set_id, vid_id, class_name, start_frame,
        anno ([x, y, w, h] per frame), target_visible, image_size.
        """
        # Builds individual tracklets
        # nltk.download('wordnet')
        base_vid_anno_path = os.path.join(root, 'Annotations', 'VID', 'train')
        all_sequences = []
        for set in sorted(os.listdir(base_vid_anno_path)):
            set_id = int(set.split('_')[-1])
            for vid in sorted(os.listdir(os.path.join(base_vid_anno_path, set))):
                vid_id = int(vid.split('_')[-1])
                anno_files = sorted(os.listdir(os.path.join(base_vid_anno_path, set, vid)))
                frame1_anno = ET.parse(os.path.join(base_vid_anno_path, set, vid, anno_files[0]))
                image_size = [int(frame1_anno.find('size/width').text), int(frame1_anno.find('size/height').text)]
                objects = [ET.ElementTree(file=os.path.join(base_vid_anno_path, set, vid, f)).findall('object')
                           for f in anno_files]
                tracklets = {}
                # Find all tracklets along with start frame
                for f_id, all_targets in enumerate(objects):
                    for target in all_targets:
                        tracklet_id = target.find('trackid').text
                        if tracklet_id not in tracklets:
                            tracklets[tracklet_id] = f_id
                for tracklet_id, tracklet_start in tracklets.items():
                    tracklet_anno = []
                    target_visible = []
                    class_name = None
                    for f_id in range(tracklet_start, len(objects)):
                        found = False
                        for target in objects[f_id]:
                            if target.find('trackid').text == tracklet_id:
                                # if not class_name:
                                #     class_name_id = target.find('name').text
                                #     class_name = self._get_class_name_from_id(class_name_id)
                                x1 = int(target.find('bndbox/xmin').text)
                                y1 = int(target.find('bndbox/ymin').text)
                                x2 = int(target.find('bndbox/xmax').text)
                                y2 = int(target.find('bndbox/ymax').text)
                                tracklet_anno.append([x1, y1, x2 - x1, y2 - y1])
                                target_visible.append(target.find('occluded').text == '0')
                                found = True
                                break
                        if not found:
                            # Tracklet ends at the first frame where the id disappears.
                            break
                    new_sequence = {'set_id': set_id, 'vid_id': vid_id, 'class_name': class_name,
                                    'start_frame': tracklet_start, 'anno': tracklet_anno,
                                    'target_visible': target_visible, 'image_size': image_size}
                    all_sequences.append(new_sequence)
        return all_sequences
| 40.540816 | 122 | 0.57853 |
acf3b8c21ce642c02972cdbd9d5d01817e88aeec | 1,902 | py | Python | tests/unit/master/test_20_recursiverender.py | BBVA/buildbot-washer | 34beadc146ba9cd224f211aea875320836d9cdab | [
"Apache-2.0"
] | 1 | 2018-09-06T01:53:52.000Z | 2018-09-06T01:53:52.000Z | tests/unit/master/test_20_recursiverender.py | BBVA/buildbot-washer | 34beadc146ba9cd224f211aea875320836d9cdab | [
"Apache-2.0"
] | 3 | 2018-09-24T07:28:50.000Z | 2019-10-18T08:29:48.000Z | tests/unit/master/test_20_recursiverender.py | BBVA/buildbot-washer | 34beadc146ba9cd224f211aea875320836d9cdab | [
"Apache-2.0"
] | 1 | 2019-06-17T11:48:52.000Z | 2019-06-17T11:48:52.000Z | from washer.master.recursiverender import recursiverender
from buildbot.process.properties import Property, Secret
import pytest
def test_nonrecursive_rendering():
    """A lone renderable passed through an identity callback comes back unchanged."""
    renderable = Property('foo')
    def identity(irenderable):
        return irenderable
    assert recursiverender(renderable, identity) is renderable
def test_recursiverendering_calls_callback_if_renderable():
    """The callback must fire when the object is a renderable."""
    calls = []
    def callback(irenderable):
        calls.append(irenderable)
    recursiverender(Property('foo'), callback)
    assert calls
def test_recursiverendering_dont_call_callback_if_not_renderable():
    """The callback must not fire for a plain, non-renderable object."""
    calls = []
    def callback(irenderable):
        calls.append(irenderable)
    recursiverender(object(), callback)
    assert not calls
@pytest.mark.parametrize("obj", [
    (Property('foo'), "NotAProperty"),
    [Property('foo'), "NotAProperty"],
    {'foo': Property('foo'), 'bar': "NotAProperty"}])
def test_simplestruct_nested_rendering(obj):
    """Renderables nested one level inside tuple/list/dict are rendered in place."""
    seen = []
    def callback(irenderable):
        assert irenderable == Property("foo")
        seen.append(irenderable)
        return irenderable
    assert recursiverender(obj, callback) == obj
    assert seen
def test_complexstruct_nested_rendering():
    """Deeply nested renderables (tuples in dicts in lists) are all replaced."""
    nested = [{'foo1': (Property('foo'), Secret('bar')),
               'bar1': {'foo': 'foo', 'bar': Property('bar')}},
              {'foo2': (Property('foo'), Secret('bar')),
               'bar2': {'foo': 'foo', 'bar': Property('bar')}}]
    expected = [{'foo1': (None, None),
                 'bar1': {'foo': 'foo', 'bar': None}},
                {'foo2': (None, None),
                 'bar2': {'foo': 'foo', 'bar': None}}]
    def nullify(irenderable):
        return None
    assert recursiverender(nested, nullify) == expected
acf3b8eaeeb3792a24a700b585eace244a9a3da3 | 6,611 | py | Python | server/api/ws.py | the-artists-formerly-known-as-spam/hacktm2018 | 3f3523b0b0bf9ed0acf7585015c2d58277c12e42 | [
"Apache-2.0"
] | 1 | 2018-10-11T07:48:03.000Z | 2018-10-11T07:48:03.000Z | server/api/ws.py | the-artists-formerly-known-as-spam/hacktm2018 | 3f3523b0b0bf9ed0acf7585015c2d58277c12e42 | [
"Apache-2.0"
] | 6 | 2021-03-08T19:18:49.000Z | 2022-02-26T03:43:13.000Z | server/api/ws.py | the-artists-formerly-known-as-spam/hacktm2018 | 3f3523b0b0bf9ed0acf7585015c2d58277c12e42 | [
"Apache-2.0"
] | null | null | null | import uuid
from gevent.lock import RLock
import json
from geventwebsocket import WebSocketServer, WebSocketApplication, Resource
from collections import OrderedDict
from game import game as awesome_sauce
from game.names import get_name
from .pubsub import Publisher, Subscriber
# Number of connected players required before a match is started.
PLAYER_COUNT = 3
# Process-wide pub/sub hub used to push messages to individual players by id.
bus = Publisher()
def uid():
    """Generate a fresh random identifier (UUID4) as a string."""
    new_id = uuid.uuid4()
    return str(new_id)
class Player:
    """A connected player: its identity, lobby/game state and message inbox."""
    def __init__(self, p_id, name, game_player=None, state=None):
        self.p_id = p_id                # unique player id string
        self.index = None               # seat index within a game, set by Game.add_player
        self.name = name
        self.game_player = game_player  # engine-side player object, set when a game starts
        self.state = state              # e.g. 'lobby'
        self.inbox = Subscriber(p_id)   # pub/sub channel keyed by player id
        self.game_id = None
    def start_game(self, game_id):
        """Record the game this player has been placed into."""
        self.game_id = game_id
    @classmethod
    def new(cls, name, **kwa):
        """Alternate constructor: fresh uuid, starting in the lobby state."""
        p_id = uid()
        return cls(p_id, name, state='lobby', **kwa)
    @property
    def x(self):
        # Delegates to the engine player's position; valid only once in a game.
        return self.game_player.position[0]
    @property
    def y(self):
        return self.game_player.position[1]
    @property
    def health(self):
        return self.game_player.health
class Game:
    """Server-side state for one running match: wraps the engine Game and
    advances a turn once every alive player has submitted a move."""
    def __init__(self, game_id):
        self.game_id = game_id
        self.players = {}            # p_id -> Player
        self.lock = RLock()          # guards move bookkeeping across greenlets
        self.game = awesome_sauce.Game(game_id)
        self.game.start()
        self.moved = set([])         # p_ids that moved this turn
    def add_player(self, player, index):
        """Register a player with the engine at a seat-dependent start position."""
        player.index = index
        game_player = self.game.add_player(player.p_id, name=player.name, position=[3, 2+index * 2])
        player.game_player = game_player
        self.players[player.p_id] = player
    def move_player(self, player_id, new_x, new_y):
        """Apply one player's move; when all alive players moved, run the turn."""
        # Lookup doubles as an existence check (raises KeyError for unknown ids).
        player = self.players[player_id]
        with self.lock:
            self.game.move_player(player_id, new_x, new_y)
            self.moved.add(player_id)
            if len(self.moved) >= len(self.game.get_alive_players()):
                self.game.make_a_turn()
                self.moved = set([])
                self.update_players()
            print(self.game_id, player_id, new_x, new_y)
            # self.update_players()
    def update_players(self):
        """Broadcast the serialized game state to every player in this game."""
        # Debug dump of the grid to stdout.
        for row in self.game.grid.squares:
            print(" ".join([str(elem)[0:2] for elem in row]))
        state = self.game.serialize()
        for p_id in self.players:
            bus.send(p_id, {'action': 'UPDATE', **state})
class GameServer:
    """Registry of active games; creates matches and routes in-game messages."""
    def __init__(self):
        # game_id -> Game
        self.games = {}
    def new_game(self, players):
        """Create a Game for the given players, seat them, and notify each one.

        Returns the created Game. Each player receives a START_GAME message
        with its identity, seat index and the serialized grid, followed by an
        initial state broadcast.
        """
        g_id = uid()
        game = Game(g_id)
        self.games[g_id] = game
        for index, player in enumerate(players):
            game.add_player(player, index)
            player.start_game(g_id)
        # (Removed: a players_4_response list was built here but never used.)
        for player in players:
            print("bus.send", player.p_id, {'action': 'START_GAME', 'g_id': game.game_id})
            bus.send(player.p_id, {
                'action': 'START_GAME',
                'g_id': game.game_id,
                'p_id': player.p_id,
                'p_name': player.name,
                'p_index': player.index,
                'grid': game.game.serialize_grid(),
            })
        game.update_players()
        return game
    def handle_message(self, message):
        """Dispatch a client message to its game; return an error dict on failure."""
        g_id = message.get('g_id')
        game = self.games.get(g_id)
        if g_id is None or game is None:
            return {'status': 'error', 'reason': 'No such game.'}
        action = message.get('action')
        if action == 'MOVE':
            p_id = message['p_id']
            x, y = message['x'], message['y']
            return game.move_player(p_id, x, y)
        else:
            return {'status': 'error', 'reason': 'bad action'}
class Lobby:
    """Waiting room: collects players and starts a game at PLAYER_COUNT."""
    def __init__(self):
        self.players = []
        self.lock = RLock()  # serializes concurrent joins
    def add_player(self, player):
        """Queue a player; start and return a new game once enough have joined.

        Returns the new Game when the threshold is reached, otherwise None
        (implicitly) after notifying everyone of their queue position.
        """
        with self.lock:
            self.players.append(player)
            print("lobby", len(self.players))
            if len(self.players) >= PLAYER_COUNT:
                g_players = self.players[0:PLAYER_COUNT]
                self.players = self.players[PLAYER_COUNT:]
                return gs.new_game(g_players)
            else:
                for player in self.players:
                    bus.send(player.p_id, {'action': 'WAITING', 'q_id': (len(self.players))})
gs = GameServer()
lobby = Lobby()
class Application(WebSocketApplication):
    """Per-connection websocket handler; dispatches JSON messages by action
    name to on_<ACTION> methods and pumps the player's pub/sub inbox."""
    def __init__(self, *a, **kwa):
        super().__init__(*a, **kwa)
        self.player = None  # set once the client creates a player id
    def on_KEEP_ALIVE(self, message):
        # Heartbeat only; nothing to do.
        pass
    def on_MOVE(self, message):
        return gs.handle_message(message)
    def on_CREATE_PLAYER_ID(self, message):
        """Create a Player for this connection and put it in the lobby."""
        name = message.get('name', get_name())
        self.player = Player.new(name=name)
        self.p_id = self.player.p_id
        # NOTE(review): return value of add_player (a Game when the lobby
        # fills) is ignored here; the START_GAME notification goes via bus.
        game = lobby.add_player(self.player)
        msg = {
            "action": "WAITING",
            "p_id": self.player.p_id,
            "q_id": len(lobby.players),
        }
        return msg
    def on_open(self):
        print("connected")
        # self.player = Player.new()
        # lobby.add_player(self.player)
    def on_message(self, message):
        """Parse a JSON frame and dispatch to the matching on_<action> method."""
        if message is None:
            print("got None!")
            return
        message = json.loads(message);
        action = message['action']
        self.p_id = message.get('p_id')
        callback = getattr(self, f"on_{action}")
        if action == 'KEEP_ALIVE':
            return
        print(message)
        resp = callback(message)
        if resp is not None:
            print("send:", resp)
            self.ws.send(json.dumps(resp))
    def on_close(self, reason):
        print(reason)
    def handle_zmq(self, message):
        # Forward a pub/sub message from the game to this websocket client.
        self.ws.send(json.dumps(message))
    def handle(self):
        # override geventwebsocket's handle so we plug in zmq as well
        self.protocol.on_open()
        while True:
            # Drain any pending pub/sub messages before blocking on the socket.
            if self.player:
                zmq_message = self.player.inbox.recv()
                while zmq_message:
                    self.handle_zmq(zmq_message)
                    zmq_message = self.player.inbox.recv()
            try:
                message = self.ws.receive()
            except Exception as excp:
                print(excp)
                self.protocol.on_close()
                break
            self.protocol.on_message(message)
def main():
    """Serve the websocket application on all interfaces, port 8100."""
    WebSocketServer(
        ('', 8100),
        Resource(OrderedDict([
            ('/.*', Application)
        ])),
        debug=True,
    ).serve_forever()
if __name__ == "__main__":
    main()
| 27.777311 | 100 | 0.559219 |
acf3b9a72b69538eadb705fe42e6f55a43632ae0 | 913 | py | Python | setup.py | pirroman/centermask2 | 5721c23ef1fa228d644bee61cabdc9bb47e0ad66 | [
"Apache-2.0"
] | null | null | null | setup.py | pirroman/centermask2 | 5721c23ef1fa228d644bee61cabdc9bb47e0ad66 | [
"Apache-2.0"
] | null | null | null | setup.py | pirroman/centermask2 | 5721c23ef1fa228d644bee61cabdc9bb47e0ad66 | [
"Apache-2.0"
] | null | null | null | from os import path
from setuptools import find_packages, setup
def get_version():
    """Read the package version from ``centermask/__init__.py``.

    Parses the ``__version__ = "..."`` line textually rather than importing
    the package, so setup.py works before dependencies are installed.

    Returns:
        str: the version string, stripped of quotes.

    Raises:
        OSError: if centermask/__init__.py cannot be read.
        IndexError: if no __version__ line is present (unchanged behavior).
    """
    init_py_path = path.join(
        path.abspath(path.dirname(__file__)), "centermask", "__init__.py"
    )
    # Use a context manager so the file handle is closed deterministically
    # (the original open(...).readlines() leaked the handle).
    with open(init_py_path, "r") as init_file:
        init_py = init_file.readlines()
    version_line = [line.strip() for line in init_py if line.startswith("__version__")][0]
    version = version_line.split("=")[-1].strip().strip("'\"")
    return version
# Package metadata; version is single-sourced from centermask/__init__.py.
setup(
    name="centermask2",
    version=get_version(),
    author="",
    url="https://github.com/pirroman/centermask2",
    description="CenterMask2 is an upgraded implementation on top of detectron2 beyond original CenterMask based on "
    "maskrcnn-benchmark. "
    "platform for object detection and segmentation.",
    packages=find_packages(exclude=("demo", "datasets")),
    python_requires=">=3.6",
    install_requires=["detectron2"],
    include_package_data=True,
    zip_safe=False,
)
| 30.433333 | 117 | 0.688938 |
acf3baeb4019f01754bac203bfe87bbcb21f6b7f | 4,011 | py | Python | mezzanine_file_collections/migrations/0003_auto_20151130_1513.py | roberzguerra/rover | 14b6a7a47e75d6b6f8ca44fc0eb1cca500e0eecb | [
"BSD-3-Clause"
] | 2 | 2015-12-02T17:26:12.000Z | 2015-12-03T00:43:14.000Z | mezzanine_file_collections/migrations/0003_auto_20151130_1513.py | roberzguerra/rover | 14b6a7a47e75d6b6f8ca44fc0eb1cca500e0eecb | [
"BSD-3-Clause"
] | 1 | 2015-12-02T17:26:43.000Z | 2016-03-15T00:01:20.000Z | mezzanine_file_collections/migrations/0003_auto_20151130_1513.py | roberzguerra/rover | 14b6a7a47e75d6b6f8ca44fc0eb1cca500e0eecb | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add Mezzanine CMS metadata fields to MediaFile.

    Auto-generated by Django makemigrations: adds SEO/publishing fields
    (_meta_title, slug, status, publish/expiry dates, keywords, sitemap flag,
    short_url, site FK, timestamps) and tightens title/description.
    Do not edit operations by hand; change the model and re-generate.
    """
    dependencies = [
        ('sites', '0001_initial'),
        ('mezzanine_file_collections', '0002_auto_20150928_1038'),
    ]
    operations = [
        migrations.AddField(
            model_name='mediafile',
            name='_meta_title',
            field=models.CharField(help_text='Optional title to be used in the HTML title tag. If left blank, the main title field will be used.', max_length=500, null=True, verbose_name='Title', blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='mediafile',
            name='created',
            field=models.DateTimeField(null=True, editable=False),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='mediafile',
            name='expiry_date',
            field=models.DateTimeField(help_text="With Published chosen, won't be shown after this time", null=True, verbose_name='Expires on', blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='mediafile',
            name='gen_description',
            field=models.BooleanField(default=True, help_text='If checked, the description will be automatically generated from content. Uncheck if you want to manually set a custom description.', verbose_name='Generate description'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='mediafile',
            name='in_sitemap',
            field=models.BooleanField(default=True, verbose_name='Show in sitemap'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='mediafile',
            name='keywords_string',
            field=models.CharField(max_length=500, editable=False, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='mediafile',
            name='publish_date',
            field=models.DateTimeField(help_text="With Published chosen, won't be shown until this time", null=True, verbose_name='Published from', db_index=True, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='mediafile',
            name='short_url',
            field=models.URLField(null=True, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='mediafile',
            name='site',
            field=models.ForeignKey(default=1, editable=False, to='sites.Site'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='mediafile',
            name='slug',
            field=models.CharField(help_text='Leave blank to have the URL auto-generated from the title.', max_length=2000, null=True, verbose_name='URL', blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='mediafile',
            name='status',
            field=models.IntegerField(default=2, help_text='With Draft chosen, will only be shown for admin users on the site.', verbose_name='Status', choices=[(1, 'Draft'), (2, 'Published')]),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='mediafile',
            name='updated',
            field=models.DateTimeField(null=True, editable=False),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='mediafile',
            name='description',
            field=models.TextField(verbose_name='Description', blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='mediafile',
            name='title',
            field=models.CharField(max_length=500, verbose_name='Title'),
            preserve_default=True,
        ),
    ]
| 40.11 | 234 | 0.602344 |
acf3bb14a90d358bd06a4aeaf84395cbe98bf174 | 47 | py | Python | Parte1/Cap4/numbers.py | fabianoflorentino/python-CursoIntensivoDePython | 822288cc4b382936dde1bc647e3f8c2b925ced70 | [
"Apache-2.0"
] | null | null | null | Parte1/Cap4/numbers.py | fabianoflorentino/python-CursoIntensivoDePython | 822288cc4b382936dde1bc647e3f8c2b925ced70 | [
"Apache-2.0"
] | null | null | null | Parte1/Cap4/numbers.py | fabianoflorentino/python-CursoIntensivoDePython | 822288cc4b382936dde1bc647e3f8c2b925ced70 | [
"Apache-2.0"
] | 1 | 2020-02-05T13:07:08.000Z | 2020-02-05T13:07:08.000Z | for value in range(1,11):
print(str(value)) | 23.5 | 25 | 0.659574 |
acf3bb53bfaa76604779a4db04d4c39d058417e6 | 291 | py | Python | costar_bullet/setup.py | cpaxton/costar_plan | be5c12f9d0e9d7078e6a5c283d3be059e7f3d040 | [
"Apache-2.0"
] | 66 | 2018-10-31T04:58:53.000Z | 2022-03-17T02:32:25.000Z | costar_bullet/setup.py | cpaxton/costar_plan | be5c12f9d0e9d7078e6a5c283d3be059e7f3d040 | [
"Apache-2.0"
] | 8 | 2018-10-23T21:19:25.000Z | 2018-12-03T02:08:41.000Z | costar_bullet/setup.py | cpaxton/costar_plan | be5c12f9d0e9d7078e6a5c283d3be059e7f3d040 | [
"Apache-2.0"
] | 25 | 2018-10-19T00:54:17.000Z | 2021-10-10T08:28:15.000Z | #!/usr/bin/env python
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
d = generate_distutils_setup(
## don't do this unless you want a globally visible script
packages=['costar_bullet'],
package_dir={'': 'python'},
)
setup(**d)
| 20.785714 | 63 | 0.731959 |
acf3bbe6fd2649784960fdb4b4ec29a06b9acc9d | 2,222 | py | Python | src/primaires/supenr/convertisseurs/base_type_objet.py | vlegoff/tsunami | 36b3b974f6eefbf15cd5d5f099fc14630e66570b | [
"BSD-3-Clause"
] | 14 | 2015-08-21T19:15:21.000Z | 2017-11-26T13:59:17.000Z | src/primaires/supenr/convertisseurs/base_type_objet.py | vincent-lg/tsunami | 36b3b974f6eefbf15cd5d5f099fc14630e66570b | [
"BSD-3-Clause"
] | 20 | 2015-09-29T20:50:45.000Z | 2018-06-21T12:58:30.000Z | src/primaires/supenr/convertisseurs/base_type_objet.py | vlegoff/tsunami | 36b3b974f6eefbf15cd5d5f099fc14630e66570b | [
"BSD-3-Clause"
] | 3 | 2015-05-02T19:42:03.000Z | 2018-09-06T10:55:00.000Z | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant les convertisseurs de la classe BaseType."""
class Convertisseur:
    """Wrapper class holding the per-version converters for BaseType objects.

    Each ``depuis_version_N`` migrates a stored object from schema version N
    to version N + 1; the persistence layer calls them with the stored object
    and its class.
    """
    def depuis_version_0(objet, classe):
        """Version 0 -> 1: drop the obsolete 'nom' attribute."""
        objet.set_version(classe, 1)
        del objet.nom
    def depuis_version_1(objet, classe):
        """Version 1 -> 2: rename 'identifiant' to 'cle'."""
        objet.set_version(classe, 2)
        objet.cle = objet.identifiant
        del objet.identifiant
    def depuis_version_2(objet, classe):
        """Version 2 -> 3: remove 'empilable_sur'/'empilable_sous' if present."""
        objet.set_version(classe, 3)
        objet.__dict__.pop("empilable_sur", None)
        objet.__dict__.pop("empilable_sous", None)
acf3be3eb86fb072c535366ae4b8e590326c2835 | 21 | py | Python | research_jupyter_templates/_version.py | martinlarsalbert/research_jupyter_templates | 834c61753923239480cd2a18b01ba46aaa3a37ad | [
"MIT"
] | 1 | 2020-12-02T14:48:21.000Z | 2020-12-02T14:48:21.000Z | research_jupyter_templates/_version.py | martinlarsalbert/research_jupyter_templates | 834c61753923239480cd2a18b01ba46aaa3a37ad | [
"MIT"
] | 1 | 2022-03-17T03:02:44.000Z | 2022-03-17T03:02:44.000Z | research_jupyter_templates/_version.py | martinlarsalbert/research_jupyter_templates | 834c61753923239480cd2a18b01ba46aaa3a37ad | [
"MIT"
] | 2 | 2022-03-03T16:35:22.000Z | 2022-03-17T02:54:25.000Z | __version__ = "0.1.3" | 21 | 21 | 0.666667 |
acf3beb81486e286cea677a1444a4437d0ef5cb7 | 16,289 | py | Python | python/filemetadata.py | ek0/binaryninja-api | d9661f34eec6855d495a10eaafc2a8e2679756a7 | [
"MIT"
] | null | null | null | python/filemetadata.py | ek0/binaryninja-api | d9661f34eec6855d495a10eaafc2a8e2679756a7 | [
"MIT"
] | null | null | null | python/filemetadata.py | ek0/binaryninja-api | d9661f34eec6855d495a10eaafc2a8e2679756a7 | [
"MIT"
] | null | null | null | # Copyright (c) 2015-2022 Vector 35 Inc
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import traceback
import ctypes
from typing import Any, Callable, Optional, List
# Binary Ninja Components
import binaryninja
from . import _binaryninjacore as core
from .enums import SaveOption
from . import associateddatastore #required for _FileMetadataAssociatedDataStore
from .log import log_error
from . import binaryview
from . import database
ProgressFuncType = Callable[[int, int], bool]
ViewName = str
class NavigationHandler:
    """Base class that lets Python code drive navigation for a FileMetadata.

    Subclasses override ``get_current_view``, ``get_current_offset`` and
    ``navigate``; the underscore-prefixed methods adapt them to the core's
    C callback structure and must not raise across the FFI boundary.
    """
    def _register(self, handle) -> None:
        # Keep the callback struct referenced on self so it is not
        # garbage-collected while the core still points at it.
        self._cb = core.BNNavigationHandler()
        self._cb.context = 0
        self._cb.getCurrentView = self._cb.getCurrentView.__class__(self._get_current_view)
        self._cb.getCurrentOffset = self._cb.getCurrentOffset.__class__(self._get_current_offset)
        self._cb.navigate = self._cb.navigate.__class__(self._navigate)
        core.BNSetFileMetadataNavigationHandler(handle, self._cb)
    def _get_current_view(self, ctxt:Any):
        # C callback shim: never propagate exceptions; fall back to "".
        try:
            view = self.get_current_view()
        except:
            log_error(traceback.format_exc())
            view = ""
        return core.BNAllocString(view)
    def _get_current_offset(self, ctxt:Any) -> int:
        # C callback shim: never propagate exceptions; fall back to 0.
        try:
            return self.get_current_offset()
        except:
            log_error(traceback.format_exc())
            return 0
    def _navigate(self, ctxt:Any, view:ViewName, offset:int) -> bool:
        # C callback shim: never propagate exceptions; report failure.
        try:
            return self.navigate(view, offset)
        except:
            log_error(traceback.format_exc())
            return False
    def get_current_view(self) -> str:
        """Override: return the name of the currently displayed view."""
        return NotImplemented
    def get_current_offset(self) -> int:
        """Override: return the current cursor offset."""
        return NotImplemented
    def navigate(self, view:ViewName, offset:int) -> bool:
        """Override: move to ``offset`` in ``view``; return success."""
        return NotImplemented
class SaveSettings:
    """
    ``class SaveSettings`` is used to specify actions and options that apply to saving a database (.bndb).
    """
    def __init__(self, handle=None):
        # Wrap an existing core handle, or create a fresh settings object.
        if handle is None:
            self.handle = core.BNCreateSaveSettings()
        else:
            self.handle = handle
    def __del__(self):
        # core may already be unloaded during interpreter shutdown.
        if core is not None:
            core.BNFreeSaveSettings(self.handle)
    def is_option_set(self, option:SaveOption) -> bool:
        """Return whether the given SaveOption (or its name) is set."""
        if isinstance(option, str):
            option = SaveOption[option]
        return core.BNIsSaveSettingsOptionSet(self.handle, option)
    def set_option(self, option:SaveOption, state:bool=True):
        """
        Set a SaveOption in this instance.

        :param SaveOption option: Option to set (an enum member or its name).
        :param bool state: State to assign. Defaults to True.
        :Example:
            >>> settings = SaveSettings()
            >>> settings.set_option(SaveOption.TrimSnapshots)
        """
        if isinstance(option, str):
            option = SaveOption[option]
        core.BNSetSaveSettingsOption(self.handle, option, state)
# Per-FileMetadata session-data store; defaults set via
# FileMetadata.set_default_session_data are shared at the class level.
class _FileMetadataAssociatedDataStore(associateddatastore._AssociatedDataStore):
    _defaults = {}
class FileMetadata:
"""
``class FileMetadata`` represents the file being analyzed by Binary Ninja. It is responsible for opening,
closing, creating the database (.bndb) files, and is used to keep track of undoable actions.
"""
_associated_data = {}
def __init__(self, filename:Optional[str]=None, handle:Optional[core.BNFileMetadataHandle]=None):
"""
Instantiates a new FileMetadata class.
:param str filename: The string path to the file to be opened. Defaults to None.
:param handle: A handle to the underlying C FileMetadata object. Defaults to None.
"""
if handle is not None:
_type = core.BNFileMetadataHandle
_handle = ctypes.cast(handle, _type)
else:
binaryninja._init_plugins()
_handle = core.BNCreateFileMetadata()
if filename is not None:
core.BNSetFilename(_handle, str(filename))
self._nav:Optional[NavigationHandler] = None
assert _handle is not None
self.handle = _handle
def __repr__(self):
return f"<FileMetadata: {self.filename}>"
def __del__(self):
if core is not None:
if self.navigation is not None:
core.BNSetFileMetadataNavigationHandler(self.handle, None)
core.BNFreeFileMetadata(self.handle)
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return ctypes.addressof(self.handle.contents) == ctypes.addressof(other.handle.contents)
def __ne__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return not (self == other)
def __hash__(self):
return hash(ctypes.addressof(self.handle.contents))
@property
def nav(self) -> Optional[NavigationHandler]:
return self._nav
@nav.setter
def nav(self, value:NavigationHandler) -> None:
self._nav = value
@classmethod
def _unregister(cls, f):
handle = ctypes.cast(f, ctypes.c_void_p)
if handle.value in cls._associated_data:
del cls._associated_data[handle.value]
@staticmethod
def set_default_session_data(name:str, value:Any) -> None:
_FileMetadataAssociatedDataStore.set_default(name, value)
@property
def original_filename(self) -> str:
"""The original name of the binary opened if a bndb, otherwise reads or sets the current filename (read/write)"""
return core.BNGetOriginalFilename(self.handle)
@original_filename.setter
def original_filename(self, value:str) -> None:
core.BNSetOriginalFilename(self.handle, str(value))
@property
def filename(self) -> str:
"""The name of the open bndb or binary filename (read/write)"""
return core.BNGetFilename(self.handle)
@filename.setter
def filename(self, value:str) -> None:
    # str() coercion lets path-like objects be assigned.
    core.BNSetFilename(self.handle, str(value))
@property
def modified(self) -> bool:
    """Boolean result of whether the file is modified (Inverse of 'saved' property) (read/write)"""
    return core.BNIsFileModified(self.handle)
@modified.setter
def modified(self, value:bool) -> None:
    # True marks the file dirty; False marks it as saved.
    if value:
        core.BNMarkFileModified(self.handle)
    else:
        core.BNMarkFileSaved(self.handle)
@property
def analysis_changed(self) -> bool:
    """Boolean result of whether the auto-analysis results have changed (read-only)"""
    return core.BNIsAnalysisChanged(self.handle)
@property
def has_database(self, binary_view_type:ViewName="") -> bool:
    """Whether the FileMetadata is backed by a database, or if specified, a specific BinaryViewType (read-only)"""
    # NOTE(review): because this is a property, callers can never supply
    # `binary_view_type`; the core call always receives the default "".
    return core.BNIsBackedByDatabase(self.handle, binary_view_type)
@property
def view(self) -> ViewName:
    """Name of the current UI view (read/write); see :meth:`navigate` for examples."""
    return core.BNGetCurrentView(self.handle)
@view.setter
def view(self, value:ViewName) -> None:
    # Switch views while keeping the current offset.
    core.BNNavigate(self.handle, str(value), core.BNGetCurrentOffset(self.handle))
@property
def offset(self) -> int:
    """The current offset into the file (read/write)"""
    return core.BNGetCurrentOffset(self.handle)
@offset.setter
def offset(self, value:int) -> None:
    # Navigate within the current view to the requested offset.
    core.BNNavigate(self.handle, core.BNGetCurrentView(self.handle), value)
@property
def raw(self) -> Optional['binaryview.BinaryView']:
    """Gets the "Raw" BinaryView of the file"""
    view = core.BNGetFileViewOfType(self.handle, "Raw")
    if view is None:
        return None
    # Wrap the C handle in a Python BinaryView tied to this FileMetadata.
    return binaryview.BinaryView(file_metadata = self, handle = view)
@property
def database(self) -> Optional['database.Database']:
    """Gets the backing Database of the file, or None if there is none."""
    handle = core.BNGetFileMetadataDatabase(self.handle)
    if handle is None:
        return None
    return database.Database(handle=handle)
@property
def saved(self) -> bool:
    """Boolean result of whether the file has been saved (Inverse of 'modified' property) (read/write)"""
    return not core.BNIsFileModified(self.handle)
@saved.setter
def saved(self, value:bool) -> None:
    # Mirror image of the `modified` setter.
    if value:
        core.BNMarkFileSaved(self.handle)
    else:
        core.BNMarkFileModified(self.handle)
@property
def navigation(self) -> Optional[NavigationHandler]:
    """Alias for nav"""
    # Same backing attribute as the `nav` property.
    return self._nav
@navigation.setter
def navigation(self, value:NavigationHandler) -> None:
    # Register the handler with the core before remembering it locally.
    value._register(self.handle)
    self._nav = value
@property
def session_data(self) -> Any:
    """Dictionary object where plugins can store arbitrary data associated with the file"""
    # Key the per-file store on the raw address of the C handle.
    key = ctypes.cast(self.handle, ctypes.c_void_p).value  # type: ignore
    try:
        return FileMetadata._associated_data[key]
    except KeyError:
        store = _FileMetadataAssociatedDataStore()
        FileMetadata._associated_data[key] = store
        return store
@property
def snapshot_data_applied_without_error(self) -> bool:
    """Whether the last snapshot's data was applied without error (read-only)."""
    return core.BNIsSnapshotDataAppliedWithoutError(self.handle)
def close(self) -> None:
    """
    Closes the underlying file handle. It is recommended that this is done in a
    `finally` clause to avoid handle leaks.
    """
    # Closes the C-side file; this Python wrapper itself remains valid.
    core.BNCloseFile(self.handle)
def begin_undo_actions(self) -> None:
    """
    ``begin_undo_actions`` start recording actions taken so they can be undone at some point.

    :rtype: None
    :Example:

        >>> bv.get_disassembly(0x100012f1)
        'xor eax, eax'
        >>> bv.begin_undo_actions()
        >>> bv.convert_to_nop(0x100012f1)
        True
        >>> bv.commit_undo_actions()
        >>> bv.get_disassembly(0x100012f1)
        'nop'
        >>> bv.undo()
        >>> bv.get_disassembly(0x100012f1)
        'xor eax, eax'
        >>>
    """
    # Pair with commit_undo_actions() to delimit an undoable unit of work.
    core.BNBeginUndoActions(self.handle)
def commit_undo_actions(self) -> None:
    """
    ``commit_undo_actions`` commit the actions taken since the last commit to the undo database.

    :rtype: None
    :Example:

        >>> bv.get_disassembly(0x100012f1)
        'xor eax, eax'
        >>> bv.begin_undo_actions()
        >>> bv.convert_to_nop(0x100012f1)
        True
        >>> bv.commit_undo_actions()
        >>> bv.get_disassembly(0x100012f1)
        'nop'
        >>> bv.undo()
        >>> bv.get_disassembly(0x100012f1)
        'xor eax, eax'
        >>>
    """
    # Closes the unit of work opened by begin_undo_actions().
    core.BNCommitUndoActions(self.handle)
def undo(self) -> None:
    """
    ``undo`` undo the last committed action in the undo database.

    :rtype: None
    :Example:

        >>> bv.get_disassembly(0x100012f1)
        'xor eax, eax'
        >>> bv.begin_undo_actions()
        >>> bv.convert_to_nop(0x100012f1)
        True
        >>> bv.commit_undo_actions()
        >>> bv.get_disassembly(0x100012f1)
        'nop'
        >>> bv.undo()
        >>> bv.get_disassembly(0x100012f1)
        'xor eax, eax'
        >>> bv.redo()
        >>> bv.get_disassembly(0x100012f1)
        'nop'
        >>>
    """
    # Delegates to the core undo database.
    core.BNUndo(self.handle)
def redo(self) -> None:
    """
    ``redo`` redo the last committed action in the undo database.

    :rtype: None
    :Example:

        >>> bv.get_disassembly(0x100012f1)
        'xor eax, eax'
        >>> bv.begin_undo_actions()
        >>> bv.convert_to_nop(0x100012f1)
        True
        >>> bv.commit_undo_actions()
        >>> bv.get_disassembly(0x100012f1)
        'nop'
        >>> bv.undo()
        >>> bv.get_disassembly(0x100012f1)
        'xor eax, eax'
        >>> bv.redo()
        >>> bv.get_disassembly(0x100012f1)
        'nop'
        >>>
    """
    # Delegates to the core undo database.
    core.BNRedo(self.handle)
def navigate(self, view:ViewName, offset:int) -> bool:
    """
    ``navigate`` navigates the UI to the specified virtual address

    .. note:: Despite the confusing name, ``view`` in this context is not a BinaryView but rather a string describing the different UI Views. Check :py:attr:`view` while in different views to see examples such as ``Linear:ELF``, ``Graph:PE``.

    :param str view: name of the UI view to navigate within (e.g. ``Linear:ELF``)
    :param int offset: address to navigate to
    :return: whether or not navigation succeeded
    :rtype: bool
    :Example:

        >>> import random
        >>> bv.navigate(bv.view, random.choice(list(bv.functions)).start)
        True
    """
    return core.BNNavigate(self.handle, str(view), offset)
def create_database(self, filename:str, progress_func:Optional[ProgressFuncType]=None, settings:Optional[SaveSettings]=None) -> bool:
    """
    ``create_database`` writes the current database (.bndb) out to the specified file.

    :param str filename: path and filename to write the bndb to, this string `should` have ".bndb" appended to it.
    :param callback progress_func: optional function to be called with the current progress and total count.
    :param SaveSettings settings: optional argument for special save options.
    :return: true on success, false on failure
    :rtype: bool
    :Example:
        >>> settings = SaveSettings()
        >>> bv.file.create_database(f"{bv.file.filename}.bndb", None, settings)
        True
    """
    # Unwrap the SaveSettings wrapper to its C handle (None means defaults).
    _settings = None
    if settings is not None:
        _settings = settings.handle
    assert self.raw is not None, "BinaryView.create_database called when raw view is None"
    if progress_func is None:
        return core.BNCreateDatabase(self.raw.handle, str(filename), _settings)
    # Wrap the Python callback in a C function pointer; the context arg is unused.
    _progress_func = progress_func
    return core.BNCreateDatabaseWithProgress(
        self.raw.handle, str(filename), None,
        ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_ulonglong, ctypes.c_ulonglong)(
            lambda ctxt, cur, total: _progress_func(cur, total)),
        # BUGFIX: previously passed `settings` (the Python wrapper) here instead of
        # `_settings` (its C handle), unlike the non-progress path and save_auto_snapshot.
        _settings)
def open_existing_database(self, filename:str, progress_func:Optional[Callable[[int, int], bool]]=None) -> Optional['binaryview.BinaryView']:
    """Open an existing database file; returns its BinaryView or None on failure."""
    if progress_func is None:
        view = core.BNOpenExistingDatabase(self.handle, str(filename))
    else:
        # Wrap the Python callback in a C function pointer; the context arg is unused.
        view = core.BNOpenExistingDatabaseWithProgress(self.handle, str(filename), None,
            ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_ulonglong, ctypes.c_ulonglong)(
                lambda ctxt, cur, total: progress_func(cur, total)))
    if view is None:
        return None
    return binaryview.BinaryView(file_metadata = self, handle = view)
def open_database_for_configuration(self, filename:str) -> Optional['binaryview.BinaryView']:
    """Open a database for configuration (see core ``BNOpenDatabaseForConfiguration``);
    returns its BinaryView or None on failure."""
    view = core.BNOpenDatabaseForConfiguration(self.handle, str(filename))
    if view is None:
        return None
    return binaryview.BinaryView(file_metadata = self, handle = view)
def save_auto_snapshot(self, progress_func:Optional[ProgressFuncType]=None, settings:Optional[SaveSettings]=None):
    """Save an auto snapshot of the raw view, optionally reporting progress."""
    # Unwrap the SaveSettings wrapper to its C handle (None means defaults).
    _settings = None
    if settings is not None:
        _settings = settings.handle
    assert self.raw is not None, "BinaryView.save_auto_snapshot called when raw view is None"
    if progress_func is None:
        return core.BNSaveAutoSnapshot(self.raw.handle, _settings)
    else:
        # Wrap the Python callback in a C function pointer; the context arg is unused.
        _progress_func = progress_func
        return core.BNSaveAutoSnapshotWithProgress(self.raw.handle, None,
            ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_ulonglong, ctypes.c_ulonglong)(
                lambda ctxt, cur, total: _progress_func(cur, total)), _settings)
def merge_user_analysis(self, path:str, progress_func:ProgressFuncType):
    """Merge user analysis from `path` into this file (core ``BNMergeUserAnalysis``);
    `progress_func` is called with (current, total)."""
    return core.BNMergeUserAnalysis(self.handle, str(path), None,
        ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_ulonglong, ctypes.c_ulonglong)(
            lambda ctxt, cur, total: progress_func(cur, total)))
def get_view_of_type(self, name:str) -> Optional['binaryview.BinaryView']:
    """Return the BinaryView of the given type name, creating it from the raw
    view if it does not exist yet.

    :param name: BinaryViewType name (e.g. "Raw")
    :return: the view, or None if the type is unknown or creation failed
    """
    view = core.BNGetFileViewOfType(self.handle, str(name))
    if view is None:
        # No existing view of this type; try to create one from the raw view.
        view_type = core.BNGetBinaryViewTypeByName(str(name))
        if view_type is None:
            return None
        # BUGFIX: assertion message previously said "save_auto_snapshot" (copy/paste).
        assert self.raw is not None, "BinaryView.get_view_of_type called when raw view is None"
        view = core.BNCreateBinaryViewOfType(view_type, self.raw.handle)
        if view is None:
            return None
    return binaryview.BinaryView(file_metadata = self, handle = view)
def open_project(self) -> bool:
    """Open the project associated with this file; returns success."""
    return core.BNOpenProject(self.handle)
def close_project(self) -> None:
    """Close the project associated with this file."""
    core.BNCloseProject(self.handle)
def is_project_open(self) -> bool:
    """Whether a project is currently open for this file."""
    return core.BNIsProjectOpen(self.handle)
@property
def existing_views(self) -> List[ViewName]:
    """Names of the view types that already exist in this file."""
    count = ctypes.c_ulonglong()
    raw_list = core.BNGetExistingViews(self.handle, ctypes.byref(count))
    assert raw_list is not None, "core.BNGetExistingViews returned None"
    # Decode the C string array, then release it back to the core.
    names = [raw_list[idx].decode("utf-8") for idx in range(count.value)]
    core.BNFreeStringList(raw_list, count)
    return names
acf3bf305a6a51584405af670a800e5f2dae8c78 | 2,562 | py | Python | pyxlpr/ppocr/metrics/rec_metric.py | XLPRUtils/pyUtils | 3a62c14b0658ad3c24d83f953ee0d88530b02b23 | [
"Apache-2.0"
] | null | null | null | pyxlpr/ppocr/metrics/rec_metric.py | XLPRUtils/pyUtils | 3a62c14b0658ad3c24d83f953ee0d88530b02b23 | [
"Apache-2.0"
] | null | null | null | pyxlpr/ppocr/metrics/rec_metric.py | XLPRUtils/pyUtils | 3a62c14b0658ad3c24d83f953ee0d88530b02b23 | [
"Apache-2.0"
] | null | null | null | # copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import Levenshtein
import string
class RecMetric(object):
    """Text-recognition metric: exact-match accuracy and normalized edit distance.

    Statistics are accumulated across batches via ``__call__`` and reported
    (then reset) by :meth:`get_metric`.

    :param main_indicator: name of the headline metric (default ``'acc'``).
    :param is_filter: if True, strip non-ASCII-alphanumeric characters and
        lowercase both prediction and target before comparing.
    """

    def __init__(self, main_indicator='acc', is_filter=False, **kwargs):
        self.main_indicator = main_indicator
        self.is_filter = is_filter
        self.reset()

    def _normalize_text(self, text):
        # Keep only ASCII letters/digits and lowercase, so punctuation and
        # case differences do not count as recognition errors.
        text = ''.join(
            filter(lambda x: x in (string.digits + string.ascii_letters), text))
        return text.lower()

    def __call__(self, pred_label, *args, **kwargs):
        """Accumulate one batch of (prediction, confidence)/(target, _) pairs
        and return the batch-local metrics."""
        preds, labels = pred_label
        correct_num = 0
        all_num = 0
        norm_edit_dis = 0.0
        for (pred, _), (target, _) in zip(preds, labels):
            # Spaces never count towards correctness or edit distance.
            pred = pred.replace(" ", "")
            target = target.replace(" ", "")
            if self.is_filter:
                pred = self._normalize_text(pred)
                target = self._normalize_text(target)
            # Edit distance normalized by the longer string (min 1 avoids 0/0).
            norm_edit_dis += Levenshtein.distance(pred, target) / max(
                len(pred), len(target), 1)
            if pred == target:
                correct_num += 1
            all_num += 1
        self.correct_num += correct_num
        self.all_num += all_num
        self.norm_edit_dis += norm_edit_dis
        return {
            # BUGFIX: guard the division — an empty batch previously raised
            # ZeroDivisionError (get_metric already protects with an epsilon).
            'acc': correct_num / all_num if all_num else 0.0,
            'norm_edit_dis': 1 - norm_edit_dis / (all_num + 1e-3)
        }

    def get_metric(self):
        """Return the accumulated metrics and reset the accumulators.

        return metrics {
            'acc': 0,
            'norm_edit_dis': 0,
        }
        """
        acc = 1.0 * self.correct_num / (self.all_num + 1e-3)
        norm_edit_dis = 1 - self.norm_edit_dis / (self.all_num + 1e-3)
        self.reset()
        return {'acc': acc, 'norm_edit_dis': norm_edit_dis}

    def reset(self):
        # Zero the running totals.
        self.correct_num = 0
        self.all_num = 0
        self.norm_edit_dis = 0

    @classmethod
    def eval(cls, preds, labels):
        """Convenience one-shot evaluation of two parallel string lists."""
        preds = [(x, 1) for x in preds]
        labels = [(x, 1) for x in labels]
        return cls()([preds, labels])
| 33.710526 | 80 | 0.593286 |
acf3c0d22228dbb7a8d85a092abbde00f7b75231 | 3,240 | py | Python | pytorch_tdnn/tdnn.py | ishine/pytorch-tdnn | 97672f0ea3419ae22fbf56baef5e60fc7e352130 | [
"Apache-2.0"
] | 22 | 2020-12-16T01:49:20.000Z | 2021-12-10T05:24:11.000Z | pytorch_tdnn/tdnn.py | ishine/pytorch-tdnn | 97672f0ea3419ae22fbf56baef5e60fc7e352130 | [
"Apache-2.0"
] | null | null | null | pytorch_tdnn/tdnn.py | ishine/pytorch-tdnn | 97672f0ea3419ae22fbf56baef5e60fc7e352130 | [
"Apache-2.0"
] | 4 | 2020-12-16T02:47:04.000Z | 2021-11-29T08:07:15.000Z | # This implementation is based on: https://github.com/jonasvdd/TDNN
import torch
from torch.nn.utils import weight_norm
class TDNN(torch.nn.Module):
    """A TDNN layer realised as a dilated, weight-normalised ``Conv1d``.

    The temporal context must either be a symmetric pair such as ``{-3, 3}``
    (as used for TDNN-F) or an odd-length, evenly spaced context centred on 0,
    e.g. ``{-6, -3, 0, 3, 6}``.  Those constraints are what allow the context
    to be expressed via ``Conv1d``'s ``dilation`` argument.
    """

    def __init__(self,
                 input_dim: int,
                 output_dim: int,
                 context: list,
                 bias: bool = True):
        """
        :param input_dim: number of input channels
        :param output_dim: number of channels produced by the convolution
        :param context: temporal context (see class docstring for constraints)
        :param bias: learnable bias (set False for semi-orthogonal TDNN-F convolutions)
        """
        super(TDNN, self).__init__()
        self.input_dim = input_dim
        self.output_dim = output_dim

        ctx = sorted(context)
        self.check_valid_context(ctx)

        if len(ctx) == 1:
            gap, pad = 1, 0
        else:
            # Even spacing is guaranteed by check_valid_context, so the first
            # gap is the dilation; padding by max(context) keeps the output
            # sequence length equal to the input's.
            gap = ctx[1] - ctx[0]
            pad = ctx[-1]

        self.temporal_conv = weight_norm(
            torch.nn.Conv1d(
                input_dim,
                output_dim,
                kernel_size=len(ctx),
                dilation=gap,
                padding=pad,
                bias=bias))

    def forward(self, x):
        """
        :param x: one batch, shaped ``[batch_size, input_dim, in_seq_length]``
        :return: tensor shaped ``[batch_size, output_dim, out_seq_length]``
        """
        return self.temporal_conv(x)

    @staticmethod
    def check_valid_context(context: list) -> None:
        """Assert that *context* has length 2 or odd length, is centred/symmetric,
        and is evenly spaced."""
        size = len(context)
        assert size == 2 or size % 2 != 0, "Context length must be 2 or odd"
        if size == 2:
            assert context[0] + context[1] == 0, "Context must be of type {-1, 1}"
        else:
            assert context[size // 2] == 0, "The context contain 0 in the center"
        if size > 1:
            gaps = [context[i] - context[i - 1] for i in range(1, size)]
            assert all(g == gaps[0] for g in gaps), \
                "Intra context spacing must be equal!"
| 41.538462 | 92 | 0.582099 |
acf3c0e67300dd8567d8e27273a33d0d9f6479e4 | 12,421 | py | Python | test/test_it.py | huihui7987/pattern | d25511f9ca7ed9356b801d8663b8b5168464e68f | [
"BSD-3-Clause"
] | 6,201 | 2015-01-01T17:40:43.000Z | 2022-03-30T21:28:15.000Z | test/test_it.py | WZBSocialScienceCenter/patternlite | 99271c8f20afdc3ae3f05246c43100dc00604e3f | [
"BSD-3-Clause"
] | 199 | 2015-01-03T10:24:13.000Z | 2022-03-14T12:53:34.000Z | test/test_it.py | WZBSocialScienceCenter/patternlite | 99271c8f20afdc3ae3f05246c43100dc00604e3f | [
"BSD-3-Clause"
] | 1,537 | 2015-01-07T06:45:24.000Z | 2022-03-31T07:30:03.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from builtins import str, bytes, dict, int
from builtins import map, zip, filter
from builtins import object, range
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
import unittest
import subprocess
from pattern import it
from io import open
# Directory containing this test file; used to locate the corpora/ fixtures.
try:
    PATH = os.path.dirname(os.path.realpath(__file__))
except Exception:
    # BUGFIX: was a bare `except:` (also swallowed SystemExit/KeyboardInterrupt).
    # `__file__` is undefined in some embedded/interactive contexts; fall back
    # to the current working directory.
    PATH = ""
#---------------------------------------------------------------------------------------------------
class TestInflection(unittest.TestCase):
def setUp(self):
pass
def test_article(self):
# Assert definite and indefinite article inflection.
for a, n, g in (
("il" , "giorno" , it.M),
("l'" , "altro giorno", it.M),
("lo" , "zio" , it.M),
("l'" , "amica" , it.F),
("la" , "nouva amica" , it.F),
("i" , "giapponesi" , it.M + it.PL),
("gli", "italiani" , it.M + it.PL),
("gli", "zii" , it.M + it.PL),
("le" , "zie" , it.F + it.PL)):
v = it.article(n, "definite", gender=g)
self.assertEqual(a, v)
for a, n, g in (
("uno", "zio" , it.M),
("una", "zia" , it.F),
("un" , "amico", it.M),
("un'", "amica", it.F)):
v = it.article(n, "indefinite", gender=g)
self.assertEqual(a, v)
v = it.referenced("amica", gender="f")
self.assertEqual(v, "un'amica")
print("pattern.it.article()")
print("pattern.it.referenced()")
def test_gender(self):
# Assert the accuracy of the gender disambiguation algorithm.
from pattern.db import Datasheet
i, n = 0, 0
for pos, sg, pl, mf in Datasheet.load(os.path.join(PATH, "corpora", "wordforms-it-wiktionary.csv")):
g = it.gender(sg)
if mf in g and it.PLURAL not in g:
i += 1
g = it.gender(pl)
if mf in g and it.PLURAL in g:
i += 1
n += 2
self.assertTrue(float(i) / n > 0.92)
print("pattern.it.gender()")
def test_pluralize(self):
# Assert the accuracy of the pluralization algorithm.
from pattern.db import Datasheet
i, n = 0, 0
for pos, sg, pl, mf in Datasheet.load(os.path.join(PATH, "corpora", "wordforms-it-wiktionary.csv")):
if it.pluralize(sg) == pl:
i += 1
n += 1
self.assertTrue(float(i) / n > 0.93)
print("pattern.it.pluralize()")
def test_singularize(self):
# Assert the accuracy of the singularization algorithm.
from pattern.db import Datasheet
i, n = 0, 0
for pos, sg, pl, mf in Datasheet.load(os.path.join(PATH, "corpora", "wordforms-it-wiktionary.csv")):
if it.singularize(pl) == sg:
i += 1
n += 1
self.assertTrue(float(i) / n > 0.84)
print("pattern.it.singularize()")
def test_predicative(self):
# Assert the accuracy of the predicative algorithm ("cruciali" => "cruciale").
from pattern.db import Datasheet
i, n = 0, 0
for pos, sg, pl, mf in Datasheet.load(os.path.join(PATH, "corpora", "wordforms-it-wiktionary.csv")):
if pos != "j":
continue
if it.predicative(pl) == sg:
i += 1
n += 1
self.assertTrue(float(i) / n > 0.87)
print("pattern.it.predicative()")
def test_find_lemma(self):
# Assert the accuracy of the verb lemmatization algorithm.
i, n = 0, 0
r = 0
for v1, v2 in it.inflect.verbs.inflections.items():
if it.inflect.verbs.find_lemma(v1) == v2:
i += 1
n += 1
self.assertTrue(float(i) / n > 0.81)
print("pattern.it.inflect.verbs.find_lemma()")
def test_find_lexeme(self):
# Assert the accuracy of the verb conjugation algorithm.
i, n = 0, 0
for v, lexeme1 in it.inflect.verbs.infinitives.items():
lexeme2 = it.inflect.verbs.find_lexeme(v)
for j in range(len(lexeme2)):
if lexeme1[j] == lexeme2[j]:
i += 1
n += 1
self.assertTrue(float(i) / n > 0.89)
print("pattern.it.inflect.verbs.find_lexeme()")
def test_conjugate(self):
# Assert different tenses with different conjugations.
for (v1, v2, tense) in (
("essere", "essere", it.INFINITIVE),
("essere", "sono", (it.PRESENT, 1, it.SINGULAR)),
("essere", "sei", (it.PRESENT, 2, it.SINGULAR)),
("essere", "è", (it.PRESENT, 3, it.SINGULAR)),
("essere", "siamo", (it.PRESENT, 1, it.PLURAL)),
("essere", "siete", (it.PRESENT, 2, it.PLURAL)),
("essere", "sono", (it.PRESENT, 3, it.PLURAL)),
("essere", "essendo", (it.PRESENT + it.PARTICIPLE)),
("essere", "stato", (it.PAST + it.PARTICIPLE)),
("essere", "ero", (it.IMPERFECT, 1, it.SINGULAR)),
("essere", "eri", (it.IMPERFECT, 2, it.SINGULAR)),
("essere", "era", (it.IMPERFECT, 3, it.SINGULAR)),
("essere", "eravamo", (it.IMPERFECT, 1, it.PLURAL)),
("essere", "eravate", (it.IMPERFECT, 2, it.PLURAL)),
("essere", "erano", (it.IMPERFECT, 3, it.PLURAL)),
("essere", "fui", (it.PRETERITE, 1, it.SINGULAR)),
("essere", "fosti", (it.PRETERITE, 2, it.SINGULAR)),
("essere", "fu", (it.PRETERITE, 3, it.SINGULAR)),
("essere", "fummo", (it.PRETERITE, 1, it.PLURAL)),
("essere", "foste", (it.PRETERITE, 2, it.PLURAL)),
("essere", "furono", (it.PRETERITE, 3, it.PLURAL)),
("essere", "sarei", (it.CONDITIONAL, 1, it.SINGULAR)),
("essere", "saresti", (it.CONDITIONAL, 2, it.SINGULAR)),
("essere", "sarebbe", (it.CONDITIONAL, 3, it.SINGULAR)),
("essere", "saremmo", (it.CONDITIONAL, 1, it.PLURAL)),
("essere", "sareste", (it.CONDITIONAL, 2, it.PLURAL)),
("essere", "sarebbero", (it.CONDITIONAL, 3, it.PLURAL)),
("essere", "sarò", (it.FUTURE, 1, it.SINGULAR)),
("essere", "sarai", (it.FUTURE, 2, it.SINGULAR)),
("essere", "sarà", (it.FUTURE, 3, it.SINGULAR)),
("essere", "saremo", (it.FUTURE, 1, it.PLURAL)),
("essere", "sarete", (it.FUTURE, 2, it.PLURAL)),
("essere", "saranno", (it.FUTURE, 3, it.PLURAL)),
("essere", "sii", (it.PRESENT, 2, it.SINGULAR, it.IMPERATIVE)),
("essere", "sia", (it.PRESENT, 3, it.SINGULAR, it.IMPERATIVE)),
("essere", "siamo", (it.PRESENT, 1, it.PLURAL, it.IMPERATIVE)),
("essere", "siate", (it.PRESENT, 2, it.PLURAL, it.IMPERATIVE)),
("essere", "siano", (it.PRESENT, 3, it.PLURAL, it.IMPERATIVE)),
("essere", "sia", (it.PRESENT, 1, it.SINGULAR, it.SUBJUNCTIVE)),
("essere", "sia", (it.PRESENT, 2, it.SINGULAR, it.SUBJUNCTIVE)),
("essere", "sia", (it.PRESENT, 3, it.SINGULAR, it.SUBJUNCTIVE)),
("essere", "siamo", (it.PRESENT, 1, it.PLURAL, it.SUBJUNCTIVE)),
("essere", "siate", (it.PRESENT, 2, it.PLURAL, it.SUBJUNCTIVE)),
("essere", "siano", (it.PRESENT, 3, it.PLURAL, it.SUBJUNCTIVE)),
("essere", "fossi", (it.PAST, 1, it.SINGULAR, it.SUBJUNCTIVE)),
("essere", "fossi", (it.PAST, 2, it.SINGULAR, it.SUBJUNCTIVE)),
("essere", "fosse", (it.PAST, 3, it.SINGULAR, it.SUBJUNCTIVE)),
("essere", "fossimo", (it.PAST, 1, it.PLURAL, it.SUBJUNCTIVE)),
("essere", "foste", (it.PAST, 2, it.PLURAL, it.SUBJUNCTIVE)),
("essere", "fossero", (it.PAST, 3, it.PLURAL, it.SUBJUNCTIVE))):
self.assertEqual(it.conjugate(v1, tense), v2)
print("pattern.it.conjugate()")
def test_lexeme(self):
# Assert all inflections of "essere".
v = it.lexeme("essere")
self.assertEqual(v, [
'essere', 'sono', 'sei', 'è', 'siamo', 'siete', 'essendo',
'fui', 'fosti', 'fu', 'fummo', 'foste', 'furono', 'stato',
'ero', 'eri', 'era', 'eravamo', 'eravate', 'erano',
'sarò', 'sarai', 'sarà', 'saremo', 'sarete', 'saranno',
'sarei', 'saresti', 'sarebbe', 'saremmo', 'sareste', 'sarebbero',
'sii', 'sia', 'siate', 'siano',
'fossi', 'fosse', 'fossimo', 'fossero'
])
print("pattern.it.inflect.lexeme()")
def test_tenses(self):
# Assert tense recognition.
self.assertTrue((it.PRESENT, 3, it.SG) in it.tenses("è"))
self.assertTrue("2sg" in it.tenses("sei"))
print("pattern.it.tenses()")
#---------------------------------------------------------------------------------------------------
class TestParser(unittest.TestCase):
    """Tests for the pattern.it tagger/parser, including a tagging-accuracy
    check against the WaCKy corpus and a command-line smoke test."""

    def setUp(self):
        pass

    def test_find_lemmata(self):
        # Assert lemmata for nouns, adjectives, verbs and determiners.
        v = it.parser.find_lemmata([
            ["I", "DT"], ["gatti", "NNS"], ["neri", "JJ"],
            ["seduti", "VB"], ["sul", "IN"], ["tatami", "NN"]])
        self.assertEqual(v, [
            ["I", "DT", "il"],
            ["gatti", "NNS", "gatto"],
            ["neri", "JJ", "nero"],
            ["seduti", "VB", "sedutare"],
            ["sul", "IN", "sul"],
            ["tatami", "NN", "tatami"]])
        print("pattern.it.parser.find_lemmata()")

    def test_parse(self):
        # Assert parsed output with Penn Treebank II tags (slash-formatted).
        # "il gatto nero" is a noun phrase, "sulla stuoia" is a prepositional noun phrase.
        v = it.parser.parse("Il gatto nero seduto sulla stuoia.")
        self.assertEqual(v,
            "Il/DT/B-NP/O gatto/NN/I-NP/O nero/JJ/I-NP/O " +
            "seduto/VB/B-VP/O " +
            "sulla/IN/B-PP/B-PNP stuoia/NN/B-NP/I-PNP ././O/O"
        )
        # Assert the accuracy of the Italian tagger.
        i, n = 0, 0
        for sentence in open(os.path.join(PATH, "corpora", "tagged-it-wacky.txt")).readlines():
            sentence = sentence.strip()
            # s1 = gold word/tag pairs; s2 = pattern.it's tags for the same tokens.
            s1 = [w.split("/") for w in sentence.split(" ")]
            s2 = [[w for w, pos in s1]]
            s2 = it.parse(s2, tokenize=False)
            s2 = [w.split("/") for w in s2.split(" ")]
            for j in range(len(s1)):
                t1 = s1[j][1]
                t2 = s2[j][1]
                # WaCKy test set tags plural nouns as "NN", pattern.it as "NNS".
                # Some punctuation marks are also tagged differently,
                # but these are not necessarily errors.
                if t1 == t2 or (t1 == "NN" and t2.startswith("NN")) or s1[j][0] in "\":;)-":
                    i += 1
                n += 1
        #print(float(i) / n)
        self.assertTrue(float(i) / n > 0.92)
        print("pattern.it.parser.parse()")

    def test_tag(self):
        # Assert [("il", "DT"), ("gatto", "NN"), ("nero", "JJ")].
        v = it.tag("il gatto nero")
        self.assertEqual(v, [("il", "DT"), ("gatto", "NN"), ("nero", "JJ")])
        print("pattern.it.tag()")

    def test_command_line(self):
        # Assert parsed output from the command-line (example from the documentation).
        p = ["python", "-m", "pattern.it", "-s", "Il gatto nero.", "-OTCRL"]
        p = subprocess.Popen(p, stdout=subprocess.PIPE)
        p.wait()
        v = p.stdout.read().decode('utf-8')
        v = v.strip()
        self.assertEqual(v, "Il/DT/B-NP/O/O/il gatto/NN/I-NP/O/O/gatto nero/JJ/I-NP/O/O/nero ././O/O/O/.")
        print("python -m pattern.it")
#---------------------------------------------------------------------------------------------------
def suite():
    """Build the test suite covering the inflection and parser test cases."""
    loader = unittest.TestLoader()
    tests = unittest.TestSuite()
    for case in (TestInflection, TestParser):
        tests.addTest(loader.loadTestsFromTestCase(case))
    return tests
if __name__ == "__main__":
    # Exit status 0 when all tests pass, 1 otherwise.
    was_ok = unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful()
    sys.exit(0 if was_ok else 1)
| 42.683849 | 108 | 0.508252 |
acf3c114548c08e174edbc006c32371d3478010b | 25,854 | py | Python | python/pyspark/sql/context.py | MeethuM/spark | 8136810dfad12008ac300116df7bc8448740f1ae | [
"Apache-2.0"
] | null | null | null | python/pyspark/sql/context.py | MeethuM/spark | 8136810dfad12008ac300116df7bc8448740f1ae | [
"Apache-2.0"
] | null | null | null | python/pyspark/sql/context.py | MeethuM/spark | 8136810dfad12008ac300116df7bc8448740f1ae | [
"Apache-2.0"
] | null | null | null | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import warnings
import json
# Python 2/3 compatibility aliases: on Python 3, str covers both text types
# and map is already lazy; on Python 2, alias itertools.imap to map.
# BUGFIX: was `sys.version >= '3'`, a lexical comparison of the full version
# *string*, which misorders hypothetical major versions >= 10; compare the
# version_info tuple instead.
if sys.version_info[0] >= 3:
    basestring = unicode = str
else:
    from itertools import imap as map
from py4j.protocol import Py4JError
from py4j.java_collections import MapConverter
from pyspark.rdd import RDD, _prepare_for_python_RDD, ignore_unicode_prefix
from pyspark.serializers import AutoBatchedSerializer, PickleSerializer
from pyspark.sql.types import Row, StringType, StructType, _verify_type, \
_infer_schema, _has_nulltype, _merge_type, _create_converter, _python_to_sql_converter
from pyspark.sql.dataframe import DataFrame
# Optional pandas support: `has_pandas` records whether pandas is importable
# so callers can accept pandas.DataFrame input only when it is available.
try:
    import pandas
    has_pandas = True
except ImportError:
    has_pandas = False

# Public API of this module.
__all__ = ["SQLContext", "HiveContext", "UDFRegistration"]
def _monkey_patch_RDD(sqlContext):
    # Attach a `toDF` method to the RDD class, bound to this SQLContext, so
    # any RDD can be converted with rdd.toDF(...) once a SQLContext exists.
    def toDF(self, schema=None, sampleRatio=None):
        """
        Converts current :class:`RDD` into a :class:`DataFrame`

        This is a shorthand for ``sqlContext.createDataFrame(rdd, schema, sampleRatio)``

        :param schema: a StructType or list of names of columns
        :param samplingRatio: the sample ratio of rows used for inferring
        :return: a DataFrame

        >>> rdd.toDF().collect()
        [Row(name=u'Alice', age=1)]
        """
        return sqlContext.createDataFrame(self, schema, sampleRatio)

    RDD.toDF = toDF
class SQLContext(object):
"""Main entry point for Spark SQL functionality.
A SQLContext can be used create :class:`DataFrame`, register :class:`DataFrame` as
tables, execute SQL over tables, cache tables, and read parquet files.
:param sparkContext: The :class:`SparkContext` backing this SQLContext.
:param sqlContext: An optional JVM Scala SQLContext. If set, we do not instantiate a new
SQLContext in the JVM, instead we make all calls to this object.
"""
@ignore_unicode_prefix
def __init__(self, sparkContext, sqlContext=None):
    """Creates a new SQLContext.

    >>> from datetime import datetime
    >>> sqlContext = SQLContext(sc)
    >>> allTypes = sc.parallelize([Row(i=1, s="string", d=1.0, l=1,
    ...     b=True, list=[1, 2, 3], dict={"s": 0}, row=Row(a=1),
    ...     time=datetime(2014, 8, 1, 14, 1, 5))])
    >>> df = allTypes.toDF()
    >>> df.registerTempTable("allTypes")
    >>> sqlContext.sql('select i+1, d+1, not b, list[1], dict["s"], time, row.a '
    ...     'from allTypes where b and i > 0').collect()
    [Row(c0=2, c1=2.0, c2=False, c3=2, c4=0, time=datetime.datetime(2014, 8, 1, 14, 1, 5), a=1)]
    >>> df.map(lambda x: (x.i, x.s, x.d, x.l, x.b, x.time, x.row.a, x.list)).collect()
    [(1, u'string', 1.0, 1, True, datetime.datetime(2014, 8, 1, 14, 1, 5), 1, [1, 2, 3])]
    """
    # Keep handles to the SparkContext and its JVM gateway for later calls.
    self._sc = sparkContext
    self._jsc = self._sc._jsc
    self._jvm = self._sc._jvm
    # If a JVM-side SQLContext was supplied, reuse it; otherwise one is
    # created lazily in `_ssql_ctx` on first access.
    self._scala_SQLContext = sqlContext
    # Attach RDD.toDF() so RDDs can be converted through this context.
    _monkey_patch_RDD(self)
@property
def _ssql_ctx(self):
    """Accessor for the JVM Spark SQL context.

    Subclasses can override this property to provide their own
    JVM Contexts.
    """
    # Lazily instantiate the Scala SQLContext on first access.
    if self._scala_SQLContext is None:
        self._scala_SQLContext = self._jvm.SQLContext(self._jsc.sc())
    return self._scala_SQLContext
def setConf(self, key, value):
    """Sets the given Spark SQL configuration property.
    """
    # Delegates to the JVM-side SQLContext.
    self._ssql_ctx.setConf(key, value)
def getConf(self, key, defaultValue):
    """Returns the value of Spark SQL configuration property for the given key.

    If the key is not set, returns defaultValue.
    """
    # Delegates to the JVM-side SQLContext.
    return self._ssql_ctx.getConf(key, defaultValue)
@property
def udf(self):
    """Returns a :class:`UDFRegistration` for UDF registration."""
    # A fresh wrapper is created on each access; registration state lives in the JVM.
    return UDFRegistration(self)
@ignore_unicode_prefix
def registerFunction(self, name, f, returnType=StringType()):
    """Registers a lambda function as a UDF so it can be used in SQL statements.

    In addition to a name and the function itself, the return type can be optionally specified.
    When the return type is not given it default to a string and conversion will automatically
    be done.  For any other return type, the produced object must match the specified type.

    :param name: name of the UDF
    :param samplingRatio: lambda function
    :param returnType: a :class:`DataType` object

    >>> sqlContext.registerFunction("stringLengthString", lambda x: len(x))
    >>> sqlContext.sql("SELECT stringLengthString('test')").collect()
    [Row(c0=u'4')]

    >>> from pyspark.sql.types import IntegerType
    >>> sqlContext.registerFunction("stringLengthInt", lambda x: len(x), IntegerType())
    >>> sqlContext.sql("SELECT stringLengthInt('test')").collect()
    [Row(c0=4)]

    >>> from pyspark.sql.types import IntegerType
    >>> sqlContext.udf.register("stringLengthInt", lambda x: len(x), IntegerType())
    >>> sqlContext.sql("SELECT stringLengthInt('test')").collect()
    [Row(c0=4)]
    """
    # Wrap f so it maps over an iterator of argument tuples (one per row);
    # the first argument (ignored) is the partition/split index.
    func = lambda _, it: map(lambda x: f(*x), it)
    ser = AutoBatchedSerializer(PickleSerializer())
    # Command tuple pickled for the Python workers; see _prepare_for_python_RDD.
    command = (func, None, ser, ser)
    pickled_cmd, bvars, env, includes = _prepare_for_python_RDD(self._sc, command, self)
    # Register with the JVM, passing the pickled command plus the runtime
    # environment (python executable, broadcast vars, accumulator) and the
    # declared return type as JSON.
    self._ssql_ctx.udf().registerPython(name,
                                        bytearray(pickled_cmd),
                                        env,
                                        includes,
                                        self._sc.pythonExec,
                                        bvars,
                                        self._sc._javaAccumulator,
                                        returnType.json())
def _inferSchema(self, rdd, samplingRatio=None):
    """Infer a schema for ``rdd``, optionally sampling rows to do so."""
    first = rdd.first()
    if not first:
        raise ValueError("The first row in RDD is empty, "
                         "can not infer schema")
    if type(first) is dict:
        warnings.warn("Using RDD of dict to inferSchema is deprecated,"
                      "please use pyspark.sql.Row instead")

    if samplingRatio is None:
        # Start from the first row; patch any NullType fields using up to
        # 99 additional rows before giving up.
        schema = _infer_schema(first)
        if _has_nulltype(schema):
            for row in rdd.take(100)[1:]:
                schema = _merge_type(schema, _infer_schema(row))
                if not _has_nulltype(schema):
                    break
            else:
                raise ValueError("Some of types cannot be determined by the "
                                 "first 100 rows, please try again with sampling")
    else:
        # Sample the RDD (ratios >= 0.99 just scan everything) and merge
        # the per-row schemas.
        if samplingRatio < 0.99:
            rdd = rdd.sample(False, float(samplingRatio))
        schema = rdd.map(_infer_schema).reduce(_merge_type)
    return schema
@ignore_unicode_prefix
def inferSchema(self, rdd, samplingRatio=None):
    """::note: Deprecated in 1.3, use :func:`createDataFrame` instead.
    """
    # Warn first, then validate: DataFrames already carry a schema.
    warnings.warn("inferSchema is deprecated, please use createDataFrame instead")
    if isinstance(rdd, DataFrame):
        raise TypeError("Cannot apply schema to DataFrame")
    return self.createDataFrame(rdd, None, samplingRatio)
@ignore_unicode_prefix
def applySchema(self, rdd, schema):
    """::note: Deprecated in 1.3, use :func:`createDataFrame` instead.
    """
    warnings.warn("applySchema is deprecated, please use createDataFrame instead")
    # Reject inputs that already have, or cannot describe, a schema.
    if isinstance(rdd, DataFrame):
        raise TypeError("Cannot apply schema to DataFrame")
    if not isinstance(schema, StructType):
        raise TypeError("schema should be StructType, but got %s" % type(schema))
    return self.createDataFrame(rdd, schema)
@ignore_unicode_prefix
def createDataFrame(self, data, schema=None, samplingRatio=None):
    """Create a :class:`DataFrame` from an RDD, a list, or a pandas DataFrame.

    When ``schema`` is a list of column names, the type of each column is
    inferred from ``data``. When ``schema`` is ``None``, both column names
    and types are inferred from ``data``, which should hold :class:`Row`,
    :class:`namedtuple`, or :class:`dict` records. ``samplingRatio``
    controls how many rows feed schema inference; if ``None`` only the
    first row is used.

    :param data: an RDD of :class:`Row`/:class:`tuple`/:class:`list`/:class:`dict`,
        :class:`list`, or :class:`pandas.DataFrame`.
    :param schema: a :class:`StructType` or list of column names. default None.
    :param samplingRatio: the sample ratio of rows used for inferring

    >>> l = [('Alice', 1)]
    >>> sqlContext.createDataFrame(l).collect()
    [Row(_1=u'Alice', _2=1)]
    >>> sqlContext.createDataFrame(l, ['name', 'age']).collect()
    [Row(name=u'Alice', age=1)]

    >>> d = [{'name': 'Alice', 'age': 1}]
    >>> sqlContext.createDataFrame(d).collect()
    [Row(age=1, name=u'Alice')]

    >>> rdd = sc.parallelize(l)
    >>> sqlContext.createDataFrame(rdd).collect()
    [Row(_1=u'Alice', _2=1)]
    >>> df = sqlContext.createDataFrame(rdd, ['name', 'age'])
    >>> df.collect()
    [Row(name=u'Alice', age=1)]

    >>> from pyspark.sql import Row
    >>> Person = Row('name', 'age')
    >>> person = rdd.map(lambda r: Person(*r))
    >>> df2 = sqlContext.createDataFrame(person)
    >>> df2.collect()
    [Row(name=u'Alice', age=1)]

    >>> from pyspark.sql.types import *
    >>> schema = StructType([
    ...    StructField("name", StringType(), True),
    ...    StructField("age", IntegerType(), True)])
    >>> df3 = sqlContext.createDataFrame(rdd, schema)
    >>> df3.collect()
    [Row(name=u'Alice', age=1)]

    >>> sqlContext.createDataFrame(df.toPandas()).collect()  # doctest: +SKIP
    [Row(name=u'Alice', age=1)]
    """
    if isinstance(data, DataFrame):
        raise TypeError("data is already a DataFrame")

    # pandas input: derive column names from the frame and flatten rows.
    if has_pandas and isinstance(data, pandas.DataFrame):
        if schema is None:
            schema = list(data.columns)
        data = [r.tolist() for r in data.to_records(index=False)]

    # Anything that is not already an RDD gets parallelized.
    if not isinstance(data, RDD):
        try:
            # data could be list, tuple, generator ...
            rdd = self._sc.parallelize(data)
        except Exception:
            raise TypeError("cannot create an RDD from type: %s" % type(data))
    else:
        rdd = data

    if schema is None:
        schema = self._inferSchema(rdd, samplingRatio)
        converter = _create_converter(schema)
        rdd = rdd.map(converter)

    if isinstance(schema, (list, tuple)):
        first = rdd.first()
        if not isinstance(first, (list, tuple)):
            raise TypeError("each row in `rdd` should be list or tuple, "
                            "but got %r" % type(first))
        row_cls = Row(*schema)
        schema = self._inferSchema(rdd.map(lambda r: row_cls(*r)), samplingRatio)

    # take the first few rows to verify schema
    rows = rdd.take(10)
    # Row() cannot been deserialized by Pyrolite
    if rows and isinstance(rows[0], tuple) and rows[0].__class__.__name__ == 'Row':
        rdd = rdd.map(tuple)
        rows = rdd.take(10)
    for row in rows:
        _verify_type(row, schema)

    # convert python objects to sql data
    converter = _python_to_sql_converter(schema)
    rdd = rdd.map(converter)

    jrdd = self._jvm.SerDeUtil.toJavaArray(rdd._to_java_object_rdd())
    df = self._ssql_ctx.applySchemaToPythonRDD(jrdd.rdd(), schema.json())
    return DataFrame(df, self)
def registerDataFrameAsTable(self, df, tableName):
    """Registers the given :class:`DataFrame` as a temporary table in the catalog.

    Temporary tables exist only during the lifetime of this instance of
    :class:`SQLContext`.

    >>> sqlContext.registerDataFrameAsTable(df, "table1")
    """
    # Guard clause: only exact DataFrame instances may be registered.
    if df.__class__ is not DataFrame:
        raise ValueError("Can only register DataFrame as table")
    self._ssql_ctx.registerDataFrameAsTable(df._jdf, tableName)
def parquetFile(self, *paths):
    """Loads a Parquet file, returning the result as a :class:`DataFrame`.

    >>> import tempfile, shutil
    >>> parquetFile = tempfile.mkdtemp()
    >>> shutil.rmtree(parquetFile)
    >>> df.saveAsParquetFile(parquetFile)
    >>> df2 = sqlContext.parquetFile(parquetFile)
    >>> sorted(df.collect()) == sorted(df2.collect())
    True
    """
    # py4j needs a Java String[] rather than a Python sequence.
    gateway = self._sc._gateway
    jpaths = gateway.new_array(gateway.jvm.java.lang.String, len(paths))
    for i, path in enumerate(paths):
        jpaths[i] = path
    jdf = self._ssql_ctx.parquetFile(jpaths)
    return DataFrame(jdf, self)
def jsonFile(self, path, schema=None, samplingRatio=1.0):
    """Loads a text file storing one JSON object per line as a :class:`DataFrame`.

    If ``schema`` is provided it is applied to the dataset; otherwise the
    dataset is sampled with ratio ``samplingRatio`` to determine the schema.

    >>> import tempfile, shutil
    >>> jsonFile = tempfile.mkdtemp()
    >>> shutil.rmtree(jsonFile)
    >>> with open(jsonFile, 'w') as f:
    ...     f.writelines(jsonStrings)
    >>> df1 = sqlContext.jsonFile(jsonFile)
    >>> df1.printSchema()
    root
     |-- field1: long (nullable = true)
     |-- field2: string (nullable = true)
     |-- field3: struct (nullable = true)
     |    |-- field4: long (nullable = true)

    >>> from pyspark.sql.types import *
    >>> schema = StructType([
    ...     StructField("field2", StringType()),
    ...     StructField("field3",
    ...         StructType([StructField("field5", ArrayType(IntegerType()))]))])
    >>> df2 = sqlContext.jsonFile(jsonFile, schema)
    >>> df2.printSchema()
    root
     |-- field2: string (nullable = true)
     |-- field3: struct (nullable = true)
     |    |-- field5: array (nullable = true)
     |    |    |-- element: integer (containsNull = true)
    """
    # With no schema the JVM side infers one from a sample; otherwise the
    # Python StructType is translated to its Scala counterpart first.
    if schema is None:
        df = self._ssql_ctx.jsonFile(path, samplingRatio)
    else:
        scala_datatype = self._ssql_ctx.parseDataType(schema.json())
        df = self._ssql_ctx.jsonFile(path, scala_datatype)
    return DataFrame(df, self)
@ignore_unicode_prefix
def jsonRDD(self, rdd, schema=None, samplingRatio=1.0):
    """Loads an RDD storing one JSON object per string as a :class:`DataFrame`.

    If ``schema`` is provided it is applied to the dataset; otherwise the
    dataset is sampled with ratio ``samplingRatio`` to determine the schema.

    >>> df1 = sqlContext.jsonRDD(json)
    >>> df1.first()
    Row(field1=1, field2=u'row1', field3=Row(field4=11, field5=None), field6=None)

    >>> df2 = sqlContext.jsonRDD(json, df1.schema)
    >>> df2.first()
    Row(field1=1, field2=u'row1', field3=Row(field4=11, field5=None), field6=None)

    >>> from pyspark.sql.types import *
    >>> schema = StructType([
    ...     StructField("field2", StringType()),
    ...     StructField("field3",
    ...         StructType([StructField("field5", ArrayType(IntegerType()))]))
    ... ])
    >>> df3 = sqlContext.jsonRDD(json, schema)
    >>> df3.first()
    Row(field2=u'row1', field3=Row(field5=None))
    """
    def func(iterator):
        # Normalize every element to a UTF-8 byte string before it
        # crosses over to the JVM.
        for x in iterator:
            if not isinstance(x, basestring):
                x = unicode(x)
            if isinstance(x, unicode):
                x = x.encode("utf-8")
            yield x

    keyed = rdd.mapPartitions(func)
    keyed._bypass_serializer = True
    jrdd = keyed._jrdd.map(self._jvm.BytesToString())
    if schema is None:
        df = self._ssql_ctx.jsonRDD(jrdd.rdd(), samplingRatio)
    else:
        scala_datatype = self._ssql_ctx.parseDataType(schema.json())
        df = self._ssql_ctx.jsonRDD(jrdd.rdd(), scala_datatype)
    return DataFrame(df, self)
def load(self, path=None, source=None, schema=None, **options):
    """Returns the dataset in a data source as a :class:`DataFrame`.

    The data source is specified by ``source`` and a set of ``options``.
    If ``source`` is not specified, the default data source configured by
    ``spark.sql.sources.default`` will be used. Optionally, a schema can be
    provided as the schema of the returned DataFrame.
    """
    if path is not None:
        options["path"] = path
    if source is None:
        # Fall back to the configured default source (parquet by default).
        source = self.getConf("spark.sql.sources.default",
                              "org.apache.spark.sql.parquet")
    joptions = MapConverter().convert(options,
                                      self._sc._gateway._gateway_client)
    if schema is None:
        df = self._ssql_ctx.load(source, joptions)
    else:
        if not isinstance(schema, StructType):
            raise TypeError("schema should be StructType")
        scala_datatype = self._ssql_ctx.parseDataType(schema.json())
        df = self._ssql_ctx.load(source, scala_datatype, joptions)
    return DataFrame(df, self)
def createExternalTable(self, tableName, path=None, source=None,
                        schema=None, **options):
    """Creates an external table based on the dataset in a data source.

    It returns the DataFrame associated with the external table. The data
    source is specified by ``source`` and a set of ``options``. If
    ``source`` is not specified, the default data source configured by
    ``spark.sql.sources.default`` will be used. Optionally, a schema can be
    provided as the schema of the returned :class:`DataFrame` and created
    external table.
    """
    if path is not None:
        options["path"] = path
    if source is None:
        # Fall back to the configured default source (parquet by default).
        source = self.getConf("spark.sql.sources.default",
                              "org.apache.spark.sql.parquet")
    joptions = MapConverter().convert(options,
                                      self._sc._gateway._gateway_client)
    if schema is None:
        df = self._ssql_ctx.createExternalTable(tableName, source, joptions)
    else:
        if not isinstance(schema, StructType):
            raise TypeError("schema should be StructType")
        scala_datatype = self._ssql_ctx.parseDataType(schema.json())
        df = self._ssql_ctx.createExternalTable(tableName, source, scala_datatype,
                                                joptions)
    return DataFrame(df, self)
@ignore_unicode_prefix
def sql(self, sqlQuery):
    """Returns a :class:`DataFrame` representing the result of the given query.

    >>> sqlContext.registerDataFrameAsTable(df, "table1")
    >>> df2 = sqlContext.sql("SELECT field1 AS f1, field2 as f2 from table1")
    >>> df2.collect()
    [Row(f1=1, f2=u'row1'), Row(f1=2, f2=u'row2'), Row(f1=3, f2=u'row3')]
    """
    jdf = self._ssql_ctx.sql(sqlQuery)
    return DataFrame(jdf, self)
def table(self, tableName):
    """Returns the specified table as a :class:`DataFrame`.

    >>> sqlContext.registerDataFrameAsTable(df, "table1")
    >>> df2 = sqlContext.table("table1")
    >>> sorted(df.collect()) == sorted(df2.collect())
    True
    """
    jdf = self._ssql_ctx.table(tableName)
    return DataFrame(jdf, self)
@ignore_unicode_prefix
def tables(self, dbName=None):
    """Returns a :class:`DataFrame` containing names of tables in the given database.

    If ``dbName`` is not specified, the current database will be used. The
    returned DataFrame has two columns: ``tableName`` and ``isTemporary``
    (a column with :class:`BooleanType` indicating if a table is a
    temporary one or not).

    >>> sqlContext.registerDataFrameAsTable(df, "table1")
    >>> df2 = sqlContext.tables()
    >>> df2.filter("tableName = 'table1'").first()
    Row(tableName=u'table1', isTemporary=True)
    """
    # The JVM call is overloaded: no-arg form uses the current database.
    jdf = self._ssql_ctx.tables() if dbName is None else self._ssql_ctx.tables(dbName)
    return DataFrame(jdf, self)
def tableNames(self, dbName=None):
    """Returns a list of names of tables in the database ``dbName``.

    If ``dbName`` is not specified, the current database will be used.

    >>> sqlContext.registerDataFrameAsTable(df, "table1")
    >>> "table1" in sqlContext.tableNames()
    True
    >>> "table1" in sqlContext.tableNames("db")
    True
    """
    # The identity comprehensions in the original were redundant; list()
    # materializes the py4j iterable directly.
    if dbName is None:
        return list(self._ssql_ctx.tableNames())
    return list(self._ssql_ctx.tableNames(dbName))
def cacheTable(self, tableName):
    """Caches the specified table in-memory."""
    self._ssql_ctx.cacheTable(tableName)
def uncacheTable(self, tableName):
    """Removes the specified table from the in-memory cache."""
    self._ssql_ctx.uncacheTable(tableName)
def clearCache(self):
    """Removes all cached tables from the in-memory cache."""
    self._ssql_ctx.clearCache()
class HiveContext(SQLContext):
    """A variant of Spark SQL that integrates with data stored in Hive.

    Configuration for Hive is read from ``hive-site.xml`` on the classpath.
    It supports running both SQL and HiveQL commands.

    :param sparkContext: The SparkContext to wrap.
    :param hiveContext: An optional JVM Scala HiveContext. If set, we do not
        instantiate a new :class:`HiveContext` in the JVM, instead we make
        all calls to this object.
    """

    def __init__(self, sparkContext, hiveContext=None):
        SQLContext.__init__(self, sparkContext)
        # Reuse an existing JVM HiveContext if the caller supplied one.
        if hiveContext:
            self._scala_HiveContext = hiveContext

    @property
    def _ssql_ctx(self):
        """Lazily create (and cache) the JVM-side HiveContext."""
        try:
            if not hasattr(self, '_scala_HiveContext'):
                self._scala_HiveContext = self._get_hive_ctx()
            return self._scala_HiveContext
        except Py4JError as e:
            # Most common cause: Spark was built without Hive support.
            raise Exception("You must build Spark with Hive. "
                            "Export 'SPARK_HIVE=true' and run "
                            "build/sbt assembly", e)

    def _get_hive_ctx(self):
        """Construct a fresh JVM HiveContext over the wrapped SparkContext."""
        return self._jvm.HiveContext(self._jsc.sc())

    def refreshTable(self, tableName):
        """Invalidate and refresh all the cached metadata of the given table.

        For performance reasons, Spark SQL or the external data source
        library it uses might cache certain metadata about a table, such as
        the location of blocks. When those change outside of Spark SQL,
        users should call this function to invalidate the cache.
        """
        self._ssql_ctx.refreshTable(tableName)
class UDFRegistration(object):
    """Wrapper for user-defined function registration."""

    def __init__(self, sqlContext):
        # Keep a handle to the owning SQLContext; all work is delegated.
        self.sqlContext = sqlContext

    def register(self, name, f, returnType=StringType()):
        return self.sqlContext.registerFunction(name, f, returnType)

    # Share the full docstring of the method this delegates to.
    register.__doc__ = SQLContext.registerFunction.__doc__
def _test():
    """Run this module's doctests against a local 4-core Spark context."""
    import doctest
    from pyspark.context import SparkContext
    from pyspark.sql import Row, SQLContext
    import pyspark.sql.context

    # Seed the doctest namespace with the fixtures the examples expect.
    globs = pyspark.sql.context.__dict__.copy()
    sc = SparkContext('local[4]', 'PythonTest')
    globs['sc'] = sc
    globs['sqlContext'] = SQLContext(sc)
    globs['rdd'] = rdd = sc.parallelize(
        [Row(field1=1, field2="row1"),
         Row(field1=2, field2="row2"),
         Row(field1=3, field2="row3")]
    )
    globs['df'] = rdd.toDF()
    jsonStrings = [
        '{"field1": 1, "field2": "row1", "field3":{"field4":11}}',
        '{"field1" : 2, "field3":{"field4":22, "field5": [10, 11]},'
        '"field6":[{"field7": "row2"}]}',
        '{"field1" : null, "field2": "row3", '
        '"field3":{"field4":33, "field5": []}}'
    ]
    globs['jsonStrings'] = jsonStrings
    globs['json'] = sc.parallelize(jsonStrings)

    (failure_count, test_count) = doctest.testmod(
        pyspark.sql.context, globs=globs,
        optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
    # Always stop the context before reporting the result.
    globs['sc'].stop()
    if failure_count:
        exit(-1)
# Running the module directly executes its doctest suite.
if __name__ == "__main__":
    _test()
| 40.333853 | 100 | 0.605206 |
acf3c1c4e602d9b1203994330ba5bdcab59b8c2d | 2,085 | py | Python | tests/gui/settings_test.py | sumau/PredictCode | e2a2d5a8fa5d83f011c33e18d4ce6ac7e1429aa8 | [
"Artistic-2.0"
] | 18 | 2017-04-19T09:17:19.000Z | 2021-05-24T08:53:28.000Z | tests/gui/settings_test.py | sumau/PredictCode | e2a2d5a8fa5d83f011c33e18d4ce6ac7e1429aa8 | [
"Artistic-2.0"
] | 8 | 2017-06-11T17:46:35.000Z | 2021-06-07T10:49:10.000Z | tests/gui/settings_test.py | sumau/PredictCode | e2a2d5a8fa5d83f011c33e18d4ce6ac7e1429aa8 | [
"Artistic-2.0"
] | 10 | 2017-07-19T18:29:37.000Z | 2020-11-12T22:06:45.000Z | import pytest
import open_cp.gui.settings as settings
import tests.helpers as helpers
from unittest.mock import patch
import json, logging, io
@pytest.fixture
def setts():
    """Provide a Settings object backed by 'temp.json' for each test."""
    store = settings.Settings("temp.json")
    return store
def test_add_get(setts):
    """Stored values round-trip via item access, membership and values()."""
    setts["key"] = "value1"
    setts["key5"] = "value2"
    assert setts["key"] == "value1"
    assert setts["key5"] == "value2"
    assert "key" in setts
    assert set(setts.values()) == {"value1", "value2"}
def test_logging():
    """Constructing Settings logs the chosen filename at DEBUG level."""
    logger = logging.getLogger("open_cp.gui.settings")
    logger.setLevel(logging.DEBUG)
    stream = io.StringIO()
    handler = logging.StreamHandler(stream)
    logger.addHandler(handler)

    settings.Settings()

    log_lines = stream.getvalue().split("\n")
    assert log_lines[0].startswith("Using filename '")
    assert log_lines[0].endswith("open_cp_ui_settings.json'")
def test_filename():
    """The filename passed to the constructor is exposed as a property."""
    sett = settings.Settings("bob.json")
    assert sett.filename == "bob.json"
def test_save_settings():
    """save() serialises the current settings to JSON via builtins.open."""
    capture = helpers.StrIOWrapper()
    with patch("builtins.open", helpers.MockOpen(capture)) as open_mock:
        open_mock.filter = helpers.ExactlyTheseFilter([2])
        sett = settings.Settings("temp.json")
        sett["name"] = "matt"
        sett.save()
    assert json.loads(capture.data) == {"name": "matt"}
def test_load_settings():
    """The constructor reads existing settings from the JSON file."""
    with patch("builtins.open", helpers.MockOpen('{"stuff":"value00"}')):
        sett = settings.Settings()
        assert sett["stuff"] == "value00"
        assert set(sett.keys()) == {"stuff"}
def test_context():
    """Using Settings as a context manager persists mutations on exit."""
    capture = helpers.StrIOWrapper('{"stuff":"value00"}')
    with patch("builtins.open", helpers.MockOpen(capture)) as open_mock:
        open_mock.filter = helpers.FilenameFilter("test.json")
        with settings.Settings("test.json") as sett:
            assert set(sett.keys()) == {"stuff"}
            assert sett["stuff"] == "value00"
            sett["stuff"] = "value"
            sett["name"] = "matt"
    assert json.loads(capture.data) == {"name": "matt", "stuff": "value"}
acf3c1ed65a37decf3e80c27651c809f04a7917b | 418 | py | Python | hudapobuda/home/migrations/0014_metasettings_share_twitter_text.py | danesjenovdan/hudapobuda | 7c63d95ed21df2d08909a02a25ef61b8ffc5cd01 | [
"Unlicense"
] | null | null | null | hudapobuda/home/migrations/0014_metasettings_share_twitter_text.py | danesjenovdan/hudapobuda | 7c63d95ed21df2d08909a02a25ef61b8ffc5cd01 | [
"Unlicense"
] | null | null | null | hudapobuda/home/migrations/0014_metasettings_share_twitter_text.py | danesjenovdan/hudapobuda | 7c63d95ed21df2d08909a02a25ef61b8ffc5cd01 | [
"Unlicense"
] | null | null | null | # Generated by Django 3.2.3 on 2021-05-17 11:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the nullable ``share_twitter_text`` text field to MetaSettings."""

    dependencies = [
        ('home', '0013_metasettings_share_email_text'),
    ]

    operations = [
        migrations.AddField(
            model_name='metasettings',
            name='share_twitter_text',
            field=models.TextField(blank=True, null=True),
        ),
    ]
| 22 | 58 | 0.62201 |
acf3c25eefc6acb4ab99e3c28a104dccbca101dd | 1,070 | py | Python | HW6/covid19_tweets.py | punithpatil/buff-overflow | e6f6652b6c7c8ac8682c25f735dc117eafe5adde | [
"MIT"
] | 1 | 2020-05-13T20:22:02.000Z | 2020-05-13T20:22:02.000Z | HW6/covid19_tweets.py | priya8/buff-overflow | e6f6652b6c7c8ac8682c25f735dc117eafe5adde | [
"MIT"
] | 1 | 2020-04-14T01:06:02.000Z | 2020-04-14T01:06:02.000Z | HW6/covid19_tweets.py | priya8/buff-overflow | e6f6652b6c7c8ac8682c25f735dc117eafe5adde | [
"MIT"
] | 2 | 2020-04-08T16:03:25.000Z | 2020-05-13T20:21:43.000Z | import cred
import json
import requests
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import datetime
# Tracking terms handed to the Twitter streaming filter below.
keywords = ['covid19','ncov','corona','coronavirus','covid2019']
# Restrict the stream to English-language tweets only.
language = ['en']
def print_coordinates(coordinates):
    """Write a tweet's bounding-box coordinates (or a blank marker) to stdout."""
    print(coordinates)
class StdOutListener(StreamListener):
    """Stream listener that prints the bounding box of each geotagged tweet."""

    def on_data(self, data):
        """Handle one raw tweet payload; always return True to keep streaming."""
        tweet_data = json.loads(data)
        try:
            place = tweet_data['place']
            # PEP 8 idiom fix: identity comparison with None, not `!=`.
            if place is not None:
                coordinates = place['bounding_box']['coordinates']
                print_coordinates(coordinates)
        except KeyError:
            # Tweet without the expected geo structure: emit a blank marker.
            print_coordinates("\n")
        return True

    def on_error(self, status):
        """Print the error status reported by the streaming API to the console."""
        print(status)
if __name__ == '__main__':
    # Authenticate with the credentials module and open the filtered stream.
    listener = StdOutListener()
    auth = OAuthHandler(cred.consumer_key, cred.consumer_secret)
    auth.set_access_token(cred.access_token, cred.access_secret)
    stream = Stream(auth, listener)
    stream.filter(track=keywords, languages=language)
| 27.435897 | 64 | 0.68785 |
acf3c37e5b3346ffa5f8f945d871a5b9d2ee01ce | 1,601 | py | Python | BB/bbObjects/bounties/bountyBoards/bountyBoard.py | Morgenkroete/GOF2BountyBot | b4fe3d765b764ab169284ce0869a810825013389 | [
"MIT"
] | 6 | 2020-06-09T16:36:52.000Z | 2021-02-02T17:53:44.000Z | BB/bbObjects/bounties/bountyBoards/bountyBoard.py | Morgenkroete/GOF2BountyBot | b4fe3d765b764ab169284ce0869a810825013389 | [
"MIT"
] | 138 | 2020-08-02T11:20:34.000Z | 2020-12-15T15:55:11.000Z | BB/bbObjects/bounties/bountyBoards/bountyBoard.py | Morgenkroete/GOF2BountyBot | b4fe3d765b764ab169284ce0869a810825013389 | [
"MIT"
] | 6 | 2020-07-05T05:32:16.000Z | 2020-11-01T21:58:31.000Z | # CURRENTLY UNUSED FILE
from __future__ import annotations
from ....bbDatabases import bbBountyDB
from ....baseClasses import bbSerializable
class BountyBoard(bbSerializable.bbSerializable):
    """A single message that acts as a duplicate of the output of $bounties,
    except it is continuously updated with new and completed bounties.

    :var msgID: The id of the message to continuously update
    :vartype msgID: int
    :var bountiesDB: The database to pull active bounties from
    :vartype bountiesDB: bbBountyDB
    """

    def __init__(self, msgID : int, bountiesDB : bbBountyDB.bbBounty):
        """
        :param int msgID: The id of the message to continuously update
        :param bbBountyDB bountiesDB: The database to pull active bounties from
        """
        self.msgID = msgID
        self.bountiesDB = bountiesDB

    def toDict(self, **kwargs) -> dict:
        """Serialise this BountyBoard into dictionary format for saving to file.

        Note: only msgID is serialised; the bounty database is runtime state.

        :return: A dictionary containing all data needed to reload this BountyBoard
        :rtype: dict
        """
        return {"msgID": self.msgID}

    @classmethod
    def fromDict(cls, bountyBoardDict : dict, **kwargs) -> BountyBoard:
        """Factory function constructing a BountyBoard from the data contained
        in the given dictionary. The opposite of BountyBoard.toDict.

        Bug fixes over the original: the classmethod was missing its ``cls``
        parameter (so the dict landed in the class slot), and the constructor
        was called without the required ``bountiesDB`` argument. The database
        is not serialised, so pass it via the ``bountiesDB`` keyword argument
        or reattach it after construction.

        :param dict bountyBoardDict: A dict containing all information needed
            to reconstruct the desired BountyBoard
        :return: The new BountyBoard object
        :rtype: BountyBoard
        """
        return cls(bountyBoardDict["msgID"], kwargs.get("bountiesDB"))
acf3c4813ff8446698fa89b9a2bc47aeb2984e01 | 1,494 | py | Python | 01-DesenvolvimentoDeSistemas/02-LinguagensDeProgramacao/01-Python/01-ListaDeExercicios/02-Aluno/Roberto/exc0094.py | moacirsouza/nadas | ad98d73b4281d1581fd2b2a9d29001acb426ee56 | [
"MIT"
] | 1 | 2020-07-03T13:54:18.000Z | 2020-07-03T13:54:18.000Z | 01-DesenvolvimentoDeSistemas/02-LinguagensDeProgramacao/01-Python/01-ListaDeExercicios/02-Aluno/Roberto/exc0094.py | moacirsouza/nadas | ad98d73b4281d1581fd2b2a9d29001acb426ee56 | [
"MIT"
] | null | null | null | 01-DesenvolvimentoDeSistemas/02-LinguagensDeProgramacao/01-Python/01-ListaDeExercicios/02-Aluno/Roberto/exc0094.py | moacirsouza/nadas | ad98d73b4281d1581fd2b2a9d29001acb426ee56 | [
"MIT"
] | null | null | null | print("""
094) Crie um programa que leia nome, sexo e idade de várias pessoas, guardando os dados de cada pessoa em um dicionário e todos os dicionários em uma lista. No final, mostre:
A) Quantas pessoas cadastradas
B) A média de idade
C) Uma lista com mulheres
D) Uma lista com idade acima da média
""")
galera = list()
pessoa = dict()
soma = média = 0
while True:
pessoa.clear()
pessoa['nome'] = str(input('Nome: '))
while True:
pessoa['sexo'] = str(input('Sexo: [M/F] ')).upper()[0]
if pessoa['sexo'] in 'MF':
break
print('ERRO! Por favor, digite apenas M ou F. ')
pessoa['idade'] = int(input('Idade: '))
soma += pessoa['idade']
galera.append(pessoa.copy())
while True:
resp = str(input('Quer continuar? [S/N] ')).upper()[0]
if resp in 'SN':
break
print('ERRO! Responda apenas S ou N. ')
if resp == 'N':
break
print('-= * 30')
print(galera)
print(f'A) Ao todo temos {len(galera)} pessoas cadastradas.')
média = soma / len(galera)
print(f'B) A média de idade é de {média:5.2f} anos. ')
print(' C) As mulheres cadastradas foram ', end='')
for p in galera:
if p['sexo'] == 'F':
print(f'{p["nome"]} ', end='')
print()
print('D) Lista das pessoas que estão acima da média: ', end='')
for p in galera:
if p['idade'] >= média:
print(' ')
for k, v in p.items():
print(f'{k} = {v}; ', end='')
print()
print('<< ENCERRADO >>')
| 30.489796 | 174 | 0.575636 |
acf3c4d8b90ebe3e2a322b4687de9fb83fe1abdf | 9,556 | py | Python | davos/implementations/ipython_common.py | jeremymanning/davos | 4d6465ab9f6cf98dab927c25e426cf9998da7126 | [
"MIT"
] | 18 | 2021-03-03T01:02:25.000Z | 2022-03-21T16:27:14.000Z | davos/implementations/ipython_common.py | jeremymanning/davos | 4d6465ab9f6cf98dab927c25e426cf9998da7126 | [
"MIT"
] | 51 | 2021-03-03T01:03:17.000Z | 2022-03-28T23:28:38.000Z | davos/implementations/ipython_common.py | jeremymanning/davos | 4d6465ab9f6cf98dab927c25e426cf9998da7126 | [
"MIT"
] | 3 | 2021-03-04T07:09:26.000Z | 2022-03-02T04:32:32.000Z | """
This modules contains implementations of helper functions common across
all IPython versions and front-end interfaces.
"""
import sys
import textwrap
from contextlib import redirect_stdout
from io import StringIO
from pathlib import Path
from subprocess import CalledProcessError
from IPython.utils.process import system as _run_shell_cmd
from davos import config
from davos.core.exceptions import DavosParserError
def _check_conda_avail_helper():
    """
    Check whether the `conda` executable is available.

    IPython implementation of the helper for `davos.core.core.check_conda`:
    runs ``conda list IPython`` (whose stdout contains the path to the
    active conda environment) and returns the captured output, or `None`
    when no conda environment can be detected.

    Returns
    -------
    str or None
        The captured stdout on success, otherwise `None`.
    """
    line_magics = set(config._ipython_shell.magics_manager.magics['line'])
    if 'conda' in line_magics:
        # %conda line magic available (IPython>=7.3): use it directly.
        try:
            with redirect_stdout(StringIO()) as conda_list_output:
                config._ipython_shell.run_line_magic('conda', 'list IPython')
        except ValueError:
            # kernel is not running within a conda environment
            return None
    else:
        conda_history_path = Path(sys.prefix, "conda-meta", "history")
        if not conda_history_path.is_file():
            # no conda history file here -> not a conda environment
            return None
        cmd = "conda list IPython"
        # Where the conda exe would live if this were the 'base' env
        # (in which case no --prefix is needed).
        base_exe_loc = Path(sys.executable).parent.joinpath('conda')
        if not base_exe_loc.is_file():
            cmd += f" --prefix {sys.prefix}"
        with redirect_stdout(StringIO()) as conda_list_output:
            _run_shell_cmd(cmd)
    return conda_list_output.getvalue()
def _run_shell_command_helper(command):
    """
    Run a shell command in a subprocess, piping stdout & stderr.

    IPython implementation of the helper for
    `davos.core.core.run_shell_command`. stdout & stderr streams are
    captured or suppressed by the outer function.

    Parameters
    ----------
    command : str
        The command to execute.

    Returns
    -------
    int
        The exit code of the command. This will always be ``0`` if the
        function returns; otherwise an error is raised.

    Raises
    ------
    subprocess.CalledProcessError
        If the command returned a non-zero exit status.
    """
    retcode = _run_shell_cmd(command)
    if retcode != 0:
        raise CalledProcessError(returncode=retcode, cmd=command)
    # Fix: the docstring promised the exit status, but the original
    # returned None implicitly. Returning 0 honors the documented contract
    # without affecting callers that ignore the result.
    return retcode
def _set_custom_showsyntaxerror():
"""
Overload the `IPython` shell's `.showsyntaxerror()` method.
Replaces the global `IPython` interactive shell object's
`.showsyntaxerror()` method with a custom function that allows
`davos`-native exceptions raised during the pre-execution cell
parsing phase to display a full traceback. Also:
- updates the custom function's docstring to include the
original `.showsyntaxerror()` method's docstring and
explicitly note that the method was updated by `davos`
- stores a reference to the original `.showsyntaxerror()` method
in the `davos.config` object so it can be called from the
custom version
- binds the custom function to the interactive shell object
*instance* so it implicitly receives the instance as its first
argument when called (like a normal instance method)
See Also
-------
davos.implementations.ipython_common._showsyntaxerror_davos :
The custom `.showsyntaxerror()` method set by `davos`.
IPython.core.interactiveshell.InteractiveShell.showsyntaxerror :
The original, overloaded `.showsyntaxerror()` method.
Notes
-----
Runs exactly once when `davos` is imported and initialized in an
`IPython` environment, and takes no action if run again. This
prevents overwriting the reference to the original
`.showsyntaxerror()` method stored in the `davos.config` object.
"""
# Idempotence guard: a non-None stored original means this already ran.
if config._ipy_showsyntaxerror_orig is not None:
# function has already been called
return
ipy_shell = config.ipython_shell
# Build the combined docstring: davos banner + replacement docs,
# followed by the original method's docstring for reference.
new_doc = textwrap.dedent(f"""\
{' METHOD UPDATED BY DAVOS PACKAGE '.center(72, '=')}
{textwrap.indent(_showsyntaxerror_davos.__doc__, ' ')}
{' ORIGINAL DOCSTRING: '.center(72, '=')}
{ipy_shell.showsyntaxerror.__doc__}""")
_showsyntaxerror_davos.__doc__ = new_doc
# Save the original so the custom handler can delegate to it.
config._ipy_showsyntaxerror_orig = ipy_shell.showsyntaxerror
# bind function as method
# pylint: disable=no-value-for-parameter
# (pylint bug: expects __get__ method to take same args as function)
ipy_shell.showsyntaxerror = _showsyntaxerror_davos.__get__(ipy_shell,
type(ipy_shell))
# noinspection PyUnusedLocal
def _showsyntaxerror_davos(
        ipy_shell,
        filename=None,
        running_compiled_code=False  # pylint: disable=unused-argument
):
    """
    Show `davos` library `SyntaxError` subclasses with full tracebacks.

    Installed in place of the global IPython shell's
    `.showsyntaxerror()` method so `davos` can hook into IPython's
    exception handling for errors raised while cells are being parsed.

    During parsing, cell content is only inspected as text, so IPython
    expects input transformers (such as the `davos` parser) to raise
    nothing but `SyntaxError`s -- hence all parser-raised `davos`
    exceptions inherit from `SyntaxError`. IPython also assumes such
    errors come from the cell text itself and renders them without a
    full traceback. This function exempts `davos` library errors from
    that treatment and renders them in the standard, more informative
    format; all other errors are forwarded to the original method.

    Parameters
    ----------
    ipy_shell : IPython.core.interactiveshell.InteractiveShell
        The global IPython shell instance, received implicitly because
        the function is bound as a method (i.e., equivalent to `self`).
    filename : str, optional
        Name of the file the `SyntaxError` occurred in. If `None`
        (default), the cell's `linecache.cache` entry name is used.
    running_compiled_code : bool, optional
        Whether the `SyntaxError` occurred while running compiled code
        (see Notes).

    See Also
    --------
    davos.implementations.ipython_common._set_custom_showsyntaxerror :
        Replaces the `.showsyntaxerror()` method with this function.
    IPython.core.interactiveshell.InteractiveShell.showsyntaxerror :
        The original `.showsyntaxerror()` method this function replaces.

    Notes
    -----
    `running_compiled_code` was added in IPython 6.1.0 and passing
    `True` achieves something close to this workaround, but `davos`
    supports IPython back to v5.5.0 (used by Colab), so it cannot be
    relied upon here.
    """
    exc_type, exc_value, exc_tb = ipy_shell._get_exc_info()
    if not issubclass(exc_type, DavosParserError):
        # Ordinary SyntaxErrors get IPython's stock handling. The stock
        # method is stored on the Davos config, still bound to the
        # IPython.core.interactiveshell.InteractiveShell instance.
        return config._ipy_showsyntaxerror_orig(filename=filename)
    try:
        # noinspection PyBroadException
        try:
            # Prefer the exception's custom traceback rendering, if any.
            formatted_tb = exc_value._render_traceback_()
        except Exception:  # pylint: disable=broad-except
            interactive_tb = ipy_shell.InteractiveTB
            formatted_tb = interactive_tb.structured_traceback(
                exc_type, exc_value, exc_tb,
                tb_offset=interactive_tb.tb_offset
            )
        ipy_shell._showtraceback(exc_type, exc_value, formatted_tb)
        if ipy_shell.call_pdb:
            ipy_shell.debugger(force=True)
    except KeyboardInterrupt:
        # User aborted while the (possibly interactive) display ran.
        print('\n' + ipy_shell.get_exception_only(), file=sys.stderr)
    return None
| 38.688259 | 81 | 0.681457 |
acf3c5851104bbeaba913a2dcad085c83e3c7ce9 | 832 | py | Python | tests/test_tyco130.py | atollk/flake8-typing-collections | 5046007ac4efa357f888f7fcc00876c02d627605 | [
"MIT"
] | 2 | 2021-02-19T00:33:33.000Z | 2021-02-19T10:57:01.000Z | tests/test_tyco130.py | atollk/flake8-typing-collections | 5046007ac4efa357f888f7fcc00876c02d627605 | [
"MIT"
] | null | null | null | tests/test_tyco130.py | atollk/flake8-typing-collections | 5046007ac4efa357f888f7fcc00876c02d627605 | [
"MIT"
] | null | null | null | from tests.util import BaseTest
class Test_TYCO130(BaseTest):
    """Checks for rule TYCO130: prefer typing.OrderedDict over collections'."""

    @classmethod
    def flags(cls):
        # Enable the generic-alternatives option for every test below.
        return ["--tyco_generic_alt"]

    def test_pass_1(self):
        snippet = """
        import typing
        def foo(x: typing.OrderedDict):
            ...
        """
        report = self.run_flake8(snippet)
        assert report == []

    def test_fail_1(self):
        snippet = """
        import collections
        def foo(x: collections.OrderedDict):
            ...
        """
        report = self.run_flake8(snippet)
        self.assert_error_at(report, "TYCO130", 2, 12)

    def test_fail_2(self):
        snippet = """
        from collections import OrderedDict
        def foo(x) -> OrderedDict:
            ...
        """
        report = self.run_flake8(snippet)
        self.assert_error_at(report, "TYCO130", 2, 15)
| 23.771429 | 54 | 0.538462 |
acf3c68600ce6c1e1331f9a35eaeb6c845c929c5 | 8,371 | py | Python | python/paddle/fluid/contrib/slim/tests/test_quantization_scale_pass.py | SmirnovKol/Paddle | a3730dc87bc61593514b830727e36e5d19e753cd | [
"Apache-2.0"
] | null | null | null | python/paddle/fluid/contrib/slim/tests/test_quantization_scale_pass.py | SmirnovKol/Paddle | a3730dc87bc61593514b830727e36e5d19e753cd | [
"Apache-2.0"
] | null | null | null | python/paddle/fluid/contrib/slim/tests/test_quantization_scale_pass.py | SmirnovKol/Paddle | a3730dc87bc61593514b830727e36e5d19e753cd | [
"Apache-2.0"
] | null | null | null | # copyright (c) 2018 paddlepaddle authors. all rights reserved.
#
# licensed under the apache license, version 2.0 (the "license");
# you may not use this file except in compliance with the license.
# you may obtain a copy of the license at
#
# http://www.apache.org/licenses/license-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the license is distributed on an "as is" basis,
# without warranties or conditions of any kind, either express or implied.
# see the license for the specific language governing permissions and
# limitations under the license.
import os
import unittest
import random
import numpy as np
import six
import paddle.fluid as fluid
import paddle
from paddle.fluid.framework import IrGraph
from paddle.fluid.contrib.slim.quantization import QuantizationTransformPass
from paddle.fluid.contrib.slim.quantization import QuantizationFreezePass
from paddle.fluid.contrib.slim.quantization import OutScaleForTrainingPass
from paddle.fluid.contrib.slim.quantization import OutScaleForInferencePass
from paddle.fluid.contrib.slim.quantization import AddQuantDequantPass
from paddle.fluid import core
paddle.enable_static()
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
os.environ["CPU_NUM"] = "1"
def conv_net(img, label):
    """Build a small LeNet-style conv net on `img` and return the mean
    cross-entropy loss against `label`.

    Architecture: conv+max-pool -> batch-norm -> conv+avg-pool -> fc(100,
    relu) -> fc(10, softmax).
    """
    pool1 = fluid.nets.simple_img_conv_pool(input=img,
                                            filter_size=5,
                                            num_filters=20,
                                            pool_size=2,
                                            pool_stride=2,
                                            pool_type='max',
                                            act="relu")
    pool1 = fluid.layers.batch_norm(pool1)
    pool2 = fluid.nets.simple_img_conv_pool(input=pool1,
                                            filter_size=5,
                                            num_filters=50,
                                            pool_size=2,
                                            pool_stride=2,
                                            pool_type='avg',
                                            act="relu")
    hidden = fluid.layers.fc(input=pool2, size=100, act='relu')
    prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
    per_sample_loss = fluid.layers.cross_entropy(input=prediction, label=label)
    return fluid.layers.mean(per_sample_loss)
class TestQuantizationScalePass(unittest.TestCase):
    """End-to-end test of the quantization + out-scale pass pipeline.

    Builds a small MNIST conv net, applies the transform, quant/dequant
    and out-scale passes, trains a few mini-batches, then freezes the
    graph and saves an inference model.
    """

    def quantization_scale(self,
                           use_cuda,
                           seed,
                           activation_quant_type,
                           weight_quant_type='abs_max',
                           for_ci=False):
        """Run the full quantize -> train -> freeze -> save cycle.

        Args:
            use_cuda: run on GPU (CUDAPlace(0)) instead of CPU.
            seed: random seed applied to both main and startup programs.
            activation_quant_type: activation quantization scheme name.
            weight_quant_type: weight quantization scheme name.
            for_ci: when True, skip graph drawing / model-text dumps and
                per-iteration loss printing (CI-friendly mode).
        """

        def build_program(main, startup, is_test):
            # Build the net inside the given programs; the optimizer is
            # only attached for the training (non-test) program.
            main.random_seed = seed
            startup.random_seed = seed
            with fluid.unique_name.guard():
                with fluid.program_guard(main, startup):
                    img = fluid.layers.data(name='image',
                                            shape=[1, 28, 28],
                                            dtype='float32')
                    label = fluid.layers.data(name='label',
                                              shape=[1],
                                              dtype='int64')
                    loss = conv_net(img, label)
                    if not is_test:
                        opt = fluid.optimizer.Adam(learning_rate=0.0001)
                        opt.minimize(loss)
            return [img, label], loss

        random.seed(0)
        np.random.seed(0)

        main = fluid.Program()
        startup = fluid.Program()
        test_program = fluid.Program()
        feeds, loss = build_program(main, startup, False)
        build_program(test_program, startup, True)
        test_program = test_program.clone(for_test=True)
        main_graph = IrGraph(core.Graph(main.desc), for_test=False)
        test_graph = IrGraph(core.Graph(test_program.desc), for_test=True)

        place = fluid.CUDAPlace(0) if use_cuda else fluid.CPUPlace()
        exe = fluid.Executor(place)
        scope = fluid.Scope()
        with fluid.scope_guard(scope):
            exe.run(startup)

        # Insert fake quant/dequant ops for weights and activations.
        transform_pass = QuantizationTransformPass(
            scope=scope,
            place=place,
            activation_quantize_type=activation_quant_type,
            weight_quantize_type=weight_quant_type)
        transform_pass.apply(main_graph)
        transform_pass.apply(test_graph)
        add_quant_dequant_pass = AddQuantDequantPass(scope=scope, place=place)
        add_quant_dequant_pass.apply(main_graph)
        add_quant_dequant_pass.apply(test_graph)
        # Out-scale collection only runs on the training graph.
        scale_training_pass = OutScaleForTrainingPass(scope=scope, place=place)
        scale_training_pass.apply(main_graph)

        dev_name = '_gpu' if use_cuda else '_cpu'
        if not for_ci:
            # Draw the graphs with quantize-related ops highlighted.
            marked_nodes = set()
            for op in main_graph.all_op_nodes():
                if op.name().find('quantize') > -1:
                    marked_nodes.add(op)
            main_graph.draw('.', 'main_scale' + dev_name, marked_nodes)
            marked_nodes = set()
            for op in test_graph.all_op_nodes():
                if op.name().find('quantize') > -1:
                    marked_nodes.add(op)
            test_graph.draw('.', 'test_scale' + dev_name, marked_nodes)

        build_strategy = fluid.BuildStrategy()
        build_strategy.memory_optimize = False
        build_strategy.enable_inplace = False
        build_strategy.fuse_all_reduce_ops = False
        binary = fluid.CompiledProgram(main_graph.graph).with_data_parallel(
            loss_name=loss.name, build_strategy=build_strategy)

        # Brief training run so the quantization scales get populated.
        iters = 5
        batch_size = 8
        train_reader = paddle.batch(paddle.reader.shuffle(
            paddle.dataset.mnist.train(), buf_size=500),
                                    batch_size=batch_size)
        feeder = fluid.DataFeeder(feed_list=feeds, place=place)
        with fluid.scope_guard(scope):
            for _ in range(iters):
                data = next(train_reader())
                loss_v = exe.run(binary,
                                 feed=feeder.feed(data),
                                 fetch_list=[loss])
                if not for_ci:
                    print('{}: {}'.format('loss' + dev_name, loss_v))

        # Write collected scales into the inference graph's op attrs.
        scale_inference_pass = OutScaleForInferencePass(scope=scope)
        scale_inference_pass.apply(test_graph)

        # Freeze graph for inference, but the weight of fc/conv is still float type.
        freeze_pass = QuantizationFreezePass(
            scope=scope, place=place, weight_quantize_type=weight_quant_type)
        freeze_pass.apply(test_graph)
        server_program = test_graph.to_program()

        if not for_ci:
            marked_nodes = set()
            for op in test_graph.all_op_nodes():
                if op.name().find('quantize') > -1:
                    marked_nodes.add(op)
            test_graph.draw('.', 'quant_scale' + dev_name, marked_nodes)
            with open('quant_scale_model' + dev_name + '.txt', 'w') as f:
                f.write(str(server_program))

        with fluid.scope_guard(scope):
            fluid.io.save_inference_model('quant_scale_model' + dev_name,
                                          ['image', 'label'], [loss],
                                          exe,
                                          server_program,
                                          clip_extra=True)

    def test_quant_scale_cuda(self):
        # GPU variant; silently skipped when CUDA support is absent.
        if fluid.core.is_compiled_with_cuda():
            with fluid.unique_name.guard():
                self.quantization_scale(
                    True,
                    seed=1,
                    activation_quant_type='moving_average_abs_max',
                    weight_quant_type='channel_wise_abs_max',
                    for_ci=True)

    def test_quant_scale_cpu(self):
        with fluid.unique_name.guard():
            self.quantization_scale(
                False,
                seed=2,
                activation_quant_type='moving_average_abs_max',
                weight_quant_type='channel_wise_abs_max',
                for_ci=True)
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main()
| 41.646766 | 84 | 0.562179 |
acf3c6d564cc91f7be0b9f642a05a30d4d8d0d01 | 674 | py | Python | WebMirror/management/rss_parser_funcs/feed_parse_extractUminovelBlogspotCom.py | fake-name/ReadableWebProxy | ed5c7abe38706acc2684a1e6cd80242a03c5f010 | [
"BSD-3-Clause"
] | 193 | 2016-08-02T22:04:35.000Z | 2022-03-09T20:45:41.000Z | WebMirror/management/rss_parser_funcs/feed_parse_extractUminovelBlogspotCom.py | fake-name/ReadableWebProxy | ed5c7abe38706acc2684a1e6cd80242a03c5f010 | [
"BSD-3-Clause"
] | 533 | 2016-08-23T20:48:23.000Z | 2022-03-28T15:55:13.000Z | WebMirror/management/rss_parser_funcs/feed_parse_extractUminovelBlogspotCom.py | rrosajp/ReadableWebProxy | ed5c7abe38706acc2684a1e6cd80242a03c5f010 | [
"BSD-3-Clause"
def extractUminovelBlogspotCom(item):
    """Parser for 'uminovel.blogspot.com'."""
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    # Skip items with no usable chapter/volume info, and previews.
    if not (chp or vol) or "preview" in title.lower():
        return None
    series_tags = [
        ('With contract Skill many Brides!', 'With contract Skill many Brides!', 'translated'),
        ('WCSB', 'With contract Skill many Brides!', 'translated'),
    ]
    for tag, series_name, release_type in series_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(
                item, series_name, vol, chp,
                frag=frag, postfix=postfix, tl_type=release_type)
    return False
acf3c71de8920bde6cd12875236861dd5452c2da | 377 | py | Python | molsysmt/tools/file_top/to_openmm_Topology.py | dprada/molsysmt | 83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d | [
"MIT"
] | null | null | null | molsysmt/tools/file_top/to_openmm_Topology.py | dprada/molsysmt | 83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d | [
"MIT"
] | null | null | null | molsysmt/tools/file_top/to_openmm_Topology.py | dprada/molsysmt | 83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d | [
"MIT"
def to_openmm_Topology(item, selection='all', frame_indices='all', syntaxis='MolSysMT'):
    """Convert a Gromacs .top item into an ``openmm.Topology``.

    Raises ValueError when `item` is not recognized as a .top file.
    """
    from molsysmt.basic import convert
    from molsysmt.tools.file_top import is_file_top

    if not is_file_top(item):
        raise ValueError
    return convert(item, 'openmm.Topology', selection=selection,
                   frame_indices=frame_indices, syntaxis=syntaxis)
| 29 | 116 | 0.750663 |
acf3c7bd0e1af3d11aca04eca351f7df465a2560 | 12,168 | py | Python | tsi-remimport.py | bpschmitt/TSI-RemExcel | 872831a814e1c040a4843bffeab87cb4499e5f9a | [
"MIT"
] | null | null | null | tsi-remimport.py | bpschmitt/TSI-RemExcel | 872831a814e1c040a4843bffeab87cb4499e5f9a | [
"MIT"
] | null | null | null | tsi-remimport.py | bpschmitt/TSI-RemExcel | 872831a814e1c040a4843bffeab87cb4499e5f9a | [
"MIT"
] | null | null | null | import csv
import json
import logging
import sys
import time
import datetime
import concurrent.futures as cf
from requests_futures.sessions import FuturesSession
# Load runtime configuration once at import time. `parms` holds the input
# CSV path, TSI credentials/URL, field-code lookup tables and throttling
# settings, and is read as a module-level global by every function below.
with open('param_secret.json') as json_data:
    parms = json.load(json_data)
def getCSVHeader():
    """Read the header row of the CSV named by ``parms["file"]``.

    Returns:
        dict: mapping of column name -> zero-based column index, used by
        the row-parsing code to address fields by name.
    """
    # `with` guarantees the handle is closed (the original leaked it).
    with open(parms["file"]) as f:
        header = next(csv.reader(f))
    return {name: index for index, name in enumerate(header)}
def getItem(mappings, value):
    """Reverse lookup: return the key in `mappings` whose value equals
    `value` (both sides coerced with int()).

    If several keys match, the last one in iteration order wins; if none
    match, the empty string is returned.
    """
    found = ""
    for key, mapped_value in mappings.items():
        if int(value) == int(mapped_value):
            found = key
    return found
def convertTS(ts):
if ts != "":
ts = int(ts) + parms['timeshift']
return datetime.datetime.fromtimestamp(ts).strftime('%m-%d-%Y %H:%M:%S')
else:
return ts
def createEventJSON():
    """Build one TSI event JSON string per data row of the input CSV.

    The export type is chosen by ``parms['file_type']``: "I" (Incident),
    "P" (Problem) or "C" (Change); any other value aborts the script.
    Coded fields (status, impact, urgency, ...) are translated to text
    via the lookup tables in ``parms`` using ``getItem``, and epoch
    columns are formatted with ``convertTS``. Rows that raise are
    reported to stdout and skipped.

    Returns:
        list[str]: JSON-serialized events, ready to POST.
    """
    eventList = []
    header = getCSVHeader()
    # Remedy exports are typically cp1252-encoded, not UTF-8.
    f = open(parms["file"], encoding='Windows-1252')
    reader = csv.reader(f)
    reader.__next__()  # skip the header row
    for events in reader:  # `events` is one CSV row (list of strings)
        try:
            if parms['file_type'] == "I":
                event = {
                    "source": parms["sourcesender"],
                    "sender": parms["sourcesender"],
                    "fingerprintFields": parms["fingerprintfields"],
                    "title": events[header["DESCRIPTION"]],
                    "status": getItem(parms["status"], events[header["STATUS"]]),
                    # createdAt is kept as a raw (shifted) epoch integer.
                    "createdAt": int(events[header["SUBMIT_DATE"]])+parms['timeshift'],
                    "eventClass": "Incident",
                    "properties": {
                        "app_id": parms["app_id"],
                        "assigned_group": events[header["ASSIGNED_GROUP"]],
                        "assigned_support_company": events[header["ASSIGNED_SUPPORT_COMPANY"]],
                        "assigned_support_org": events[header["ASSIGNED_SUPPORT_ORGANIZATION"]],
                        "assignee": events[header["ASSIGNEE"]],
                        "categorization_tier1": events[header["CATEGORIZATION_TIER_1"]],
                        "categorization_tier2": events[header["CATEGORIZATION_TIER_2"]],
                        "categorization_tier3": events[header["CATEGORIZATION_TIER_3"]],
                        "city": events[header["CITY"]],
                        "closed_date": convertTS(events[header["CLOSED_DATE"]]),
                        "closure_manufacturer": events[header["CLOSURE_MANUFACTURER"]],
                        "closure_prod_cat_tier1": events[header["CLOSURE_PRODUCT_CATEGORY_TIER1"]],
                        "closure_prod_cat_tier2": events[header["CLOSURE_PRODUCT_CATEGORY_TIER2"]],
                        "closure_prod_cat_tier3": events[header["CLOSURE_PRODUCT_CATEGORY_TIER3"]],
                        "company": events[header["COMPANY"]],
                        "country": events[header["COUNTRY"]],
                        "department": events[header["DEPARTMENT"]],
                        "first_name": events[header["FIRST_NAME"]],
                        "last_name": events[header["LAST_NAME"]],
                        "impact": getItem(parms["impact"], events[header["IMPACT"]]),
                        "last_modified_date": convertTS(events[header["LAST_MODIFIED_DATE"]]),
                        "last_resolved_date": convertTS(events[header["LAST_RESOLVED_DATE"]]),
                        "incident_id": events[header["INCIDENT_NUMBER"]],
                        "organization": events[header["ORGANIZATION"]],
                        "owner_group": events[header["OWNER_GROUP"]],
                        "prod_cat_tier1": events[header["PRODUCT_CATEGORIZATION_TIER_1"]],
                        "prod_cat_tier2": events[header["PRODUCT_CATEGORIZATION_TIER_2"]],
                        "prod_cat_tier3": events[header["PRODUCT_CATEGORIZATION_TIER_3"]],
                        "reported_date": convertTS(events[header["REPORTED_DATE"]]),
                        "resolution_category": events[header["RESOLUTION_CATEGORY"]],
                        #"resolution_cat_tier1": events[header["RESOLUTION_CATEGORY_TIER_1"]],
                        "resolution_cat_tier2": events[header["RESOLUTION_CATEGORY_TIER_2"]],
                        "resolution_cat_tier3": events[header["RESOLUTION_CATEGORY_TIER_3"]],
                        "site": events[header["SITE"]],
                        "site_group": events[header["SITE_GROUP"]],
                        "state_province": events[header["STATE_PROVINCE"]],
                        "submit_date": convertTS(events[header["SUBMIT_DATE"]]),
                        "urgency": getItem(parms["urgency"], events[header["URGENCY"]]),
                    },
                    "tags": [parms["app_id"]]
                }
            elif parms['file_type'] == "P":
                event = {
                    "source": parms["sourcesender"],
                    "sender": parms["sourcesender"],
                    "fingerprintFields": parms["fingerprintfields"],
                    "title": events[header["DESCRIPTION"]],
                    # Problem status is the raw stage/condition string.
                    "status": events[header["STAGECONDITION"]],
                    "createdAt": int(events[header["SUBMIT_DATE"]]) + parms['timeshift'],
                    "eventClass": "Problem",
                    "properties": {
                        "app_id": parms["app_id"],
                        "assigned_group": events[header["ASSIGNED_GROUP"]],
                        "assigned_group_pblm_mgr": events[header["ASSIGNED_GROUP_PBLM_MGR"]],
                        "assigned_support_company": events[header["ASSIGNED_SUPPORT_COMPANY"]],
                        "assigned_support_org": events[header["ASSIGNED_SUPPORT_ORGANIZATION"]],
                        "assignee": events[header["ASSIGNEE"]],
                        "assignee_pblm_mgr": events[header["ASSIGNEE_PBLM_MGR"]],
                        "categorization_tier1": events[header["CATEGORIZATION_TIER_1"]],
                        "categorization_tier2": events[header["CATEGORIZATION_TIER_2"]],
                        "categorization_tier3": events[header["CATEGORIZATION_TIER_3"]],
                        "closed_date": convertTS(events[header["CLOSED_DATE"]]),
                        "company": events[header["COMPANY"]],
                        "contact_company": events[header["CONTACT_COMPANY"]],
                        "department": events[header["DEPARTMENT"]],
                        "first_name": events[header["FIRST_NAME"]],
                        "first_reported_on": convertTS(events[header["FIRST_REPORTED_ON"]]),
                        "last_name": events[header["LAST_NAME"]],
                        "impact": getItem(parms["impact"], events[header["IMPACT"]]),
                        "last_modified_date": convertTS(events[header["LAST_MODIFIED_DATE"]]),
                        "priority": events[header["PRIORITY"]],
                        "prod_cat_tier1": events[header["PRODUCT_CATEGORIZATION_TIER_1"]],
                        "prod_cat_tier2": events[header["PRODUCT_CATEGORIZATION_TIER_2"]],
                        "prod_cat_tier3": events[header["PRODUCT_CATEGORIZATION_TIER_3"]],
                        "region": events[header["REGION"]],
                        "serviceci": events[header["SERVICECI"]],
                        "serviceci_class": events[header["SERVICECI_CLASS"]],
                        "site": events[header["SITE"]],
                        "site_group": events[header["SITE_GROUP"]],
                        "stage_condition": events[header["STAGECONDITION"]],
                        "submit_date": convertTS(events[header["SUBMIT_DATE"]]),
                        "support_company_pblm_mgr": events[header["SUPPORT_COMPANY_PBLM_MGR"]],
                        "support_group_name_requestor": events[header["SUPPORT_GROUP_NAME_REQUESTER"]],
                        "support_organization_requestor": events[header["SUPPORT_ORGANIZATION_REQUESTOR"]],
                        "urgency": getItem(parms["urgency"], events[header["URGENCY"]]),
                    },
                    "tags": [parms["app_id"]]
                }
            elif parms['file_type'] == "C":
                event = {
                    "source": parms["sourcesender"],
                    "sender": parms["sourcesender"],
                    "fingerprintFields": parms["fingerprintfields"],
                    "title": events[header["DESCRIPTION2"]],
                    "status": getItem(parms["change_request_status"], events[header["CHANGE_REQUEST_STATUS"]]),
                    # NOTE(review): unlike the "I"/"P" branches, no
                    # parms['timeshift'] is applied here -- confirm intentional.
                    "createdAt": int(events[header["SUBMIT_DATE"]]),
                    "eventClass": "Change",
                    "properties": {
                        "app_id": parms["app_id"],
                        "company": events[header["COMPANY"]],
                        "customer_company": events[header["CUSTOMER_COMPANY"]],
                        "customer_department": events[header["CUSTOMER_DEPARTMENT"]],
                        "customer_first_name": events[header["CUSTOMER_FIRST_NAME"]],
                        "customer_last_name": events[header["CUSTOMER_LAST_NAME"]],
                        "customer_organization": events[header["CUSTOMER_ORGANIZATION"]],
                        "department": events[header["DEPARTMENT"]],
                        "first_name": events[header["FIRST_NAME"]],
                        "last_name": events[header["LAST_NAME"]],
                        "impact": getItem(parms["impact"], events[header["IMPACT"]]),
                        "last_modified_date": convertTS(events[header["LAST_MODIFIED_DATE"]]),
                        "organization": events[header["ORGANIZATION"]],
                        "requested_start_date": convertTS(events[header["REQUESTED_START_DATE"]]),
                        "scheduled_start_date": convertTS(events[header["SCHEDULED_START_DATE"]]),
                        "site_group": events[header["SITE_GROUP"]],
                        "submitter": events[header["SUBMITTER"]],
                        "submit_date": convertTS(events[header["SUBMIT_DATE"]]),
                        "support_group_name": events[header["SUPPORT_GROUP_NAME"]],
                        "support_group_name2": events[header["SUPPORT_GROUP_NAME2"]],
                        "support_organization": events[header["SUPPORT_ORGANIZATION"]],
                        "urgency": getItem(parms["urgency"], events[header["URGENCY"]]),
                    },
                    "tags": [parms["app_id"]]
                }
            else:
                print("Please specify the proper file type in param.json")
                exit(1)
            event = json.dumps(event)
            eventList.append(event)
        except Exception as ex:
            # Best-effort: report the bad row and keep processing the rest.
            template = "An exception of type {0} occurred. Arguments: {1!r}"
            message = template.format(type(ex).__name__, ex.args)
            print(message)
            pass
    return eventList
def sendAsyncEvents(events):
    """POST each pre-serialized event to ``parms['url']`` asynchronously.

    Requests are fired in bursts of ``parms['chunksize']``, sleeping
    ``parms['sleeptime']`` seconds between bursts, then every response is
    awaited and logged.
    """
    logging.basicConfig(
        stream=sys.stderr, level=logging.INFO,
        format='%(relativeCreated)s %(message)s',
    )
    session = FuturesSession()
    pending = {}  # future -> payload, so responses can be matched to events
    logging.info('start!')

    sent = 0   # index of the next event to send
    burst = 0  # requests fired since the last sleep
    while sent < len(events):
        if burst == parms['chunksize']:
            # Throttle: pause between bursts to respect API rate limits.
            print('sleeping...')
            time.sleep(parms['sleeptime'])
            burst = 0
            continue
        payload = events[sent]
        future = session.post(parms['url'], data=payload,
                              headers=parms['headers'],
                              auth=(parms['email'], parms['apikey']))
        pending[future] = payload
        print(str(sent) + ": " + payload)
        sent += 1
        burst += 1

    # Drain all in-flight requests and log each response.
    for future in cf.as_completed(pending):
        res = future.result()
        logging.info(
            "event=%s, %s, %s",
            pending[future],
            res,
            len(res.text)
        )
    logging.info('done!')
# Script entry: build the JSON event payloads from the configured CSV
# export, then POST them (rate-limited) to the TSI events API.
events = createEventJSON()
sendAsyncEvents(events)
| 50.280992 | 140 | 0.53164 |
acf3c84dd07617f2a4cedb5dcc59a0c77089359e | 3,826 | py | Python | src/pytest_mock_resources/fixture/database/relational/postgresql.py | ocaballeror/pytest-mock-resources | 6f388237d7ca51b5d5ce5739fb717bf242cfc86c | [
"MIT"
] | 49 | 2020-01-24T21:08:43.000Z | 2022-03-31T23:55:21.000Z | src/pytest_mock_resources/fixture/database/relational/postgresql.py | michaelbukachi/pytest-mock-resources | f0c5d56af7aeca3cd1a64cd84237d8d4b5b993a4 | [
"MIT"
] | 29 | 2020-03-11T19:07:50.000Z | 2022-03-30T16:49:06.000Z | src/pytest_mock_resources/fixture/database/relational/postgresql.py | michaelbukachi/pytest-mock-resources | f0c5d56af7aeca3cd1a64cd84237d8d4b5b993a4 | [
"MIT"
] | 10 | 2020-01-23T19:04:09.000Z | 2022-02-22T19:57:54.000Z | import pytest
import sqlalchemy
from pytest_mock_resources.container.postgres import get_sqlalchemy_engine, PostgresConfig
from pytest_mock_resources.fixture.database.generic import assign_fixture_credentials
from pytest_mock_resources.fixture.database.relational.generic import EngineManager
@pytest.fixture(scope="session")
def pmr_postgres_config():
"""Override this fixture with a :class:`PostgresConfig` instance to specify different defaults.
Examples:
>>> @pytest.fixture(scope='session')
... def pmr_postgres_config():
... return PostgresConfig(image="postgres:9.6.10", root_database="foo")
"""
return PostgresConfig()
def create_engine_manager(pmr_postgres_config, ordered_actions, tables):
    """Provision a fresh database and return an EngineManager bound to it.

    A brand-new database is created on the shared Postgres server, an
    engine is opened against it, and the connection credentials are
    attached to the engine for test introspection.
    """
    db_name = _create_clean_database(pmr_postgres_config)
    engine = get_sqlalchemy_engine(pmr_postgres_config, db_name)
    assign_fixture_credentials(
        engine,
        drivername="postgresql+psycopg2",
        host=pmr_postgres_config.host,
        port=pmr_postgres_config.port,
        database=db_name,
        username=pmr_postgres_config.username,
        password=pmr_postgres_config.password,
    )
    return EngineManager(
        engine, ordered_actions, tables=tables, default_schema="public"
    )
def create_postgres_fixture(
    *ordered_actions, scope="function", tables=None, session=None, async_=False
):
    """Produce a Postgres fixture.

    Any number of fixture functions can be created. Under the hood they will all share the same
    database server.

    Arguments:
        ordered_actions: Any number of ordered actions to be run on test setup.
        scope: Passthrough pytest's fixture scope.
        tables: Subsets the tables created by `ordered_actions`. This is generally
            most useful when a model-base was specified in `ordered_actions`.
        session: Whether to return a session instead of an engine directly. This can
            either be a bool or a callable capable of producing a session.
        async_: Whether to return an async fixture/client.
    """

    # Synchronous flavor: the engine/session lifecycle is delegated to
    # the EngineManager generator.
    @pytest.fixture(scope=scope)
    def _sync(_postgres_container, pmr_postgres_config):
        engine_manager = create_engine_manager(pmr_postgres_config, ordered_actions, tables)
        yield from engine_manager.manage_sync(session=session)

    # Async flavor: same lifecycle, driven through an async generator.
    @pytest.fixture(scope=scope)
    async def _async(_postgres_container, pmr_postgres_config):
        engine_manager = create_engine_manager(pmr_postgres_config, ordered_actions, tables)
        async for engine in engine_manager.manage_async(session=session):
            yield engine

    # Exactly one of the two closures is handed back to pytest.
    if async_:
        return _async
    else:
        return _sync
def _create_clean_database(config):
    """Create (and name) a brand-new database on the root connection.

    A bookkeeping table with a serial primary key hands out unique ids;
    each id yields a unique `pytest_mock_resource_db_<id>` database so
    concurrent test processes never collide.
    """
    root_engine = get_sqlalchemy_engine(config, config.root_database, isolation_level="AUTOCOMMIT")
    try:
        root_engine.execute(
            """
            CREATE TABLE IF NOT EXISTS pytest_mock_resource_db(
                id serial
            );
            """
        )
    except (sqlalchemy.exc.IntegrityError, sqlalchemy.exc.ProgrammingError):
        # A race condition may occur during table creation if:
        #  - another process has already created the table
        #  - the current process begins creating the table
        #  - the other process commits the table creation
        #  - the current process tries to commit the table creation
        pass

    # Reserve a unique id; RETURNING avoids a second round-trip.
    result = root_engine.execute(
        "INSERT INTO pytest_mock_resource_db VALUES (DEFAULT) RETURNING id"
    )
    id_ = tuple(result)[0][0]
    database_name = "pytest_mock_resource_db_{}".format(id_)

    root_engine.execute('CREATE DATABASE "{}"'.format(database_name))
    root_engine.execute(
        'GRANT ALL PRIVILEGES ON DATABASE "{}" TO CURRENT_USER'.format(database_name)
    )
    return database_name
| 37.509804 | 99 | 0.714062 |
acf3c8630a1aa537b7cd2683f14a0866fd6d593a | 16,116 | py | Python | smtbx/refinement/constraints/geometrical/hydrogens.py | rimmartin/cctbx_project | 644090f9432d9afc22cfb542fc3ab78ca8e15e5d | [
"BSD-3-Clause-LBNL"
] | null | null | null | smtbx/refinement/constraints/geometrical/hydrogens.py | rimmartin/cctbx_project | 644090f9432d9afc22cfb542fc3ab78ca8e15e5d | [
"BSD-3-Clause-LBNL"
] | null | null | null | smtbx/refinement/constraints/geometrical/hydrogens.py | rimmartin/cctbx_project | 644090f9432d9afc22cfb542fc3ab78ca8e15e5d | [
"BSD-3-Clause-LBNL"
] | null | null | null | """ All X-H bond lengths are in Angstrom and their values are taken from
ShelXL documentation (p. 4-3) """
from __future__ import division
from __future__ import absolute_import, print_function
import smtbx.refinement.constraints as _
from smtbx.refinement.constraints import InvalidConstraint, geometrical
from scitbx.matrix import col
import math
# Ideal tetrahedral bond angle, arccos(-1/3) ~ 109.47, in degrees.
tetrahedral_angle = math.degrees(math.acos(-1./3))
class hydrogens(geometrical.any):
    """Base class for geometrical riding-hydrogen constraints.

    Subclasses declare `n_constrained_sites` and a
    `room_temperature_bond_length` table, and implement
    `add_hydrogen_to` to build the concrete constraint parameter.
    """

    # Whether add_to must also collect the non-H substituents of the
    # pivot's neighbours (needed by some subclasses for orientation).
    need_pivot_neighbour_substituents = False

    def add_to(self, reparametrisation):
        """Register this constraint's parameters with `reparametrisation`.

        Collects the pivot site, its conformer-compatible neighbours and
        (optionally) their substituents, decides the X-H bond length,
        then delegates to the subclass's `add_hydrogen_to` and records
        the resulting site parameter for every constrained hydrogen.
        """
        i_pivot = self.pivot
        scatterers = reparametrisation.structure.scatterers()
        conformer_indices = reparametrisation.connectivity_table.conformer_indices
        if conformer_indices is not None:
            # All constrained H sites must belong to the same conformer.
            constrained_site_conformer = conformer_indices[
                self.constrained_site_indices[0]]
            for i in self.constrained_site_indices:
                assert conformer_indices[i] == constrained_site_conformer
        else:
            constrained_site_conformer = 0
        pivot_site = scatterers[i_pivot].site
        pivot_site_param = reparametrisation.add_new_site_parameter(i_pivot)
        pivot_neighbour_sites = ()
        pivot_neighbour_site_params = ()
        pivot_neighbour_substituent_site_params = ()
        for j, ops in reparametrisation.pair_sym_table[i_pivot].items():
            if j in self.constrained_site_indices:
                continue  # the hydrogens themselves are not neighbours
            for op in ops:
                # Keep neighbours from conformer 0 (shared) or from the
                # same conformer as the constrained hydrogens.
                if (conformer_indices is None or
                        conformer_indices[j] == 0 or
                        constrained_site_conformer == 0 or
                        (conformer_indices[j] == constrained_site_conformer)):
                    s = reparametrisation.add_new_site_parameter(j, op)
                    pivot_neighbour_site_params += (s,)
                    pivot_neighbour_sites += (op*scatterers[j].site,)
                    if (self.need_pivot_neighbour_substituents):
                        # Second shell: non-H atoms bonded to this neighbour.
                        for k, ops_k in reparametrisation.pair_sym_table[j].items():
                            if k != i_pivot and scatterers[k].scattering_type != 'H':
                                s = reparametrisation.add_new_site_parameter(k, ops_k[0])
                                pivot_neighbour_substituent_site_params += (s,)
        length_value = self.bond_length
        if length_value is None:
            # No explicit length given: use the tabulated,
            # temperature-corrected value for the pivot element.
            length_value = self.ideal_bond_length(scatterers[i_pivot],
                                                  reparametrisation.temperature)
        if self.stretching:
            # Refineable bond: seed it with the current X-H distance.
            uc = reparametrisation.structure.unit_cell()
            _length_value = uc.distance(
                col(scatterers[i_pivot].site),
                col(scatterers[self.constrained_site_indices[0]].site))
            if _length_value > 0.5:  # check for dummy H positions
                length_value = _length_value
        bond_length = reparametrisation.add(
            _.independent_scalar_parameter,
            value=length_value,
            variable=self.stretching)
        if not self.stretching:
            # Record the fixed X-H distance for every constrained site.
            for i in self.constrained_site_indices:
                reparametrisation.fixed_distances.setdefault(
                    (self.pivot, i), bond_length.value)
        hydrogens = tuple(
            [scatterers[i_sc] for i_sc in self.constrained_site_indices])
        param = self.add_hydrogen_to(
            reparametrisation=reparametrisation,
            bond_length=bond_length,
            pivot_site=pivot_site,
            pivot_neighbour_sites=pivot_neighbour_sites,
            pivot_site_param=pivot_site_param,
            pivot_neighbour_site_params=pivot_neighbour_site_params,
            pivot_neighbour_substituent_site_params=
                pivot_neighbour_substituent_site_params,
            hydrogens=hydrogens)
        for i_sc in self.constrained_site_indices:
            reparametrisation.asu_scatterer_parameters[i_sc].site = param

    def ideal_bond_length(self, pivot, temperature):
        """Return the tabulated X-H distance (Angstrom) for `pivot`.

        Looks up the pivot element in `room_temperature_bond_length` and
        applies low-temperature corrections (values from the ShelXL
        documentation, per the module docstring). Raises
        InvalidConstraint for elements with no tabulated value.
        """
        pivot_element = pivot.scattering_type
        d = self.room_temperature_bond_length.get(pivot_element)
        if d is None:
            raise InvalidConstraint(
                "Invalid %s constraint involving %s:"
                " ideal bond length not defined to atom type %s" %(
                    self.__class__.__name__, pivot.label, pivot_element))
        if temperature is not None:
            # Bonds are tabulated slightly longer at low temperature.
            if temperature < -70: d += 0.02
            elif temperature < -20: d += 0.01
        return d
class terminal_tetrahedral_xhn_site(hydrogens):
    """Terminal X-Hn group on a tetrahedral pivot with one real neighbour.

    The hydrogens ride on the pivot and may rotate about the
    pivot--neighbour axis via a torsional `azimuth` parameter.
    """

    def add_hydrogen_to(self, reparametrisation, bond_length,
                        pivot_site, pivot_neighbour_sites,
                        pivot_site_param, pivot_neighbour_site_params,
                        hydrogens, **kwds):
        """Build the constraint parameter; requires exactly one neighbour."""
        if len(pivot_neighbour_site_params) != 1:
            raise InvalidConstraint(_.bad_connectivity_msg %(
                self.__class__.__name__, pivot_site_param.scatterers[0].label))
        # Torsion about the pivot--neighbour axis; refined only when
        # this constraint is `rotating`.
        azimuth = reparametrisation.add(_.independent_scalar_parameter,
                                        value=0, variable=self.rotating)
        uc = reparametrisation.structure.unit_cell()
        # Record the ideal tetrahedral angle for every ordered pair of
        # atoms bonded to the pivot.
        for j, ops in reparametrisation.pair_sym_table[self.pivot].items():
            for k, ops in reparametrisation.pair_sym_table[self.pivot].items():
                if j == k:
                    continue
                reparametrisation.fixed_angles.setdefault(
                    (j, self.pivot, k), tetrahedral_angle)
        # The extension-module parameter class shares this class's name
        # (looked up dynamically so subclasses dispatch correctly).
        return reparametrisation.add(
            getattr(_, self.__class__.__name__),
            pivot=pivot_site_param,
            pivot_neighbour=pivot_neighbour_site_params[0],
            length=bond_length,
            azimuth=azimuth,
            # The current X-H direction defines the azimuth's zero.
            e_zero_azimuth=uc.orthogonalize(
                col(hydrogens[0].site) - col(pivot_site)),
            hydrogen=hydrogens)
class terminal_tetrahedral_xh_site(terminal_tetrahedral_xhn_site):
    """Single hydrogen on a terminal tetrahedral pivot (X-H)."""
    n_constrained_sites = 1
    # Room-temperature X-H distances (Angstrom) per pivot element,
    # from the ShelXL documentation (see module docstring).
    room_temperature_bond_length = { 'O' : 0.82,
                                     'S' : 1.20,
                                     }
class terminal_tetrahedral_xh3_sites(terminal_tetrahedral_xhn_site):
    """Three hydrogens on a terminal tetrahedral pivot (X-H3)."""
    n_constrained_sites = 3
    # Room-temperature X-H distances (Angstrom) per pivot element,
    # from the ShelXL documentation (see module docstring).
    room_temperature_bond_length = { 'C' : 0.96,
                                     'N' : 0.89,
                                     }
class tertiary_xh_site(hydrogens):
    """Single hydrogen on a pivot bonded to three other atoms."""
    n_constrained_sites = 1
    # Room-temperature X-H distances (Angstrom) per pivot element,
    # from the ShelXL documentation (see module docstring).
    room_temperature_bond_length = { 'C' : 0.98,
                                     'N' : 0.91,
                                     'B' : 0.98,
                                     }

    def add_hydrogen_to(self, reparametrisation, bond_length,
                        pivot_site, pivot_neighbour_sites,
                        pivot_site_param, pivot_neighbour_site_params,
                        hydrogens, **kwds):
        """Build the constraint; requires exactly three neighbours."""
        if len(pivot_neighbour_site_params) != 3:
            raise InvalidConstraint(_.bad_connectivity_msg %(
                self.__class__.__name__, pivot_site_param.scatterers[0].label))
        return reparametrisation.add(
            _.tertiary_xh_site,
            pivot=pivot_site_param,
            pivot_neighbour_0=pivot_neighbour_site_params[0],
            pivot_neighbour_1=pivot_neighbour_site_params[1],
            pivot_neighbour_2=pivot_neighbour_site_params[2],
            length=bond_length,
            hydrogen=hydrogens[0])
class secondary_xh2_sites(hydrogens):
    """Two hydrogens on a secondary pivot (e.g. methylene C-H2), placed
    symmetrically about the plane of the pivot's two heavy neighbours.
    The H-X-H angle may be refined ("flapping"), fixed to a user value,
    or tied to the Y-X-Y' angle of the heavy neighbours.
    """
    n_constrained_sites = 2
    # Ideal X-H bond lengths (Angstrom) at room temperature, keyed by element.
    room_temperature_bond_length = { 'C' : 0.97,
                                     'N' : 0.90,
                                     }

    def add_hydrogen_to(self, reparametrisation, bond_length,
                        pivot_site, pivot_neighbour_sites,
                        pivot_site_param, pivot_neighbour_site_params,
                        hydrogens, **kwds):
        if len(pivot_neighbour_site_params) != 2:
            raise InvalidConstraint(_.bad_connectivity_msg %(
                self.__class__.__name__, pivot_site_param.scatterers[0].label))
        # Current H-X-H angle, measured in Cartesian space from the
        # orthogonalised fractional differences to the pivot.
        x_h = [ col(h.site) for h in hydrogens ]
        x_p = col(pivot_site)
        uc = reparametrisation.structure.unit_cell()
        theta = col(uc.orthogonalize(x_h[0] - x_p)).angle(
            col(uc.orthogonalize(x_h[1] - x_p)))
        angle_param = None
        if self.flapping:
            # Refinable H-X-H angle, seeded with the current value.
            angle_param = reparametrisation.add(_.independent_scalar_parameter,
                                                value=theta,
                                                variable=True)
        else:
            if self.angle is not None:
                # Fixed, user-supplied H-X-H angle.
                angle_param = reparametrisation.add(_.independent_scalar_parameter,
                                                    value=self.angle,
                                                    variable=False)
            else:
                # H-X-H angle coupled to the heavy-neighbour Y-X-Y' angle.
                angle_param = reparametrisation.add(_.angle_parameter,
                    left=pivot_neighbour_site_params[0],
                    center=pivot_site_param,
                    right=pivot_neighbour_site_params[1],
                    value=theta)
        return reparametrisation.add(
            _.secondary_xh2_sites,
            pivot=pivot_site_param,
            pivot_neighbour_0=pivot_neighbour_site_params[0],
            pivot_neighbour_1=pivot_neighbour_site_params[1],
            length=bond_length,
            h_c_h_angle=angle_param,
            hydrogen_0=hydrogens[0],
            hydrogen_1=hydrogens[1])
class secondary_planar_xh_site(hydrogens):
    """Planar (sp2) X-H site, e.g. aromatic C-H: the hydrogen bisects the
    angle formed by the pivot's two nearest heavy neighbours.
    """
    n_constrained_sites = 1
    # Ideal X-H bond lengths (Angstrom) at room temperature, keyed by element.
    room_temperature_bond_length = { 'C' : 0.93,
                                     'N' : 0.86,
                                     }

    def add_hydrogen_to(self, reparametrisation, bond_length,
                        pivot_site, pivot_neighbour_sites,
                        pivot_site_param, pivot_neighbour_site_params,
                        hydrogens, **kwds):
        # e.g. Carbon atoms in Cyclopentadienyl complexes will have
        # 3 pivot neighbours
        if len(pivot_neighbour_site_params) not in (2, 3):
            raise InvalidConstraint(_.bad_connectivity_msg %(
                self.__class__.__name__, pivot_site_param.scatterers[0].label))
        uc = reparametrisation.structure.unit_cell()
        x_s = col(pivot_site)
        # Rank neighbours by distance and keep the two nearest
        # (only relevant when there are three neighbours).
        d_s = sorted(
            (uc.distance(s, x_s), i)
            for i, s in enumerate(pivot_neighbour_sites)
        )
        return reparametrisation.add(
            _.secondary_planar_xh_site,
            pivot=pivot_site_param,
            pivot_neighbour_0=pivot_neighbour_site_params[d_s[0][1]],
            pivot_neighbour_1=pivot_neighbour_site_params[d_s[1][1]],
            length=bond_length,
            hydrogen=hydrogens[0])
class terminal_planar_xh2_sites(hydrogens):
    """Terminal planar X-H2 group (e.g. amide N-H2, terminal =C-H2): both
    hydrogens lie in the plane defined by the pivot, its single neighbour
    and one substituent of that neighbour.
    """
    n_constrained_sites = 2
    need_pivot_neighbour_substituents = True
    # Reuse the sp2 (planar) ideal bond lengths.
    room_temperature_bond_length = \
        secondary_planar_xh_site.room_temperature_bond_length

    def add_hydrogen_to(self, reparametrisation, bond_length,
                        pivot_site, pivot_neighbour_sites,
                        pivot_site_param, pivot_neighbour_site_params,
                        pivot_neighbour_substituent_site_params,
                        hydrogens, **kwds):
        if len(pivot_neighbour_site_params) != 1:
            raise InvalidConstraint(_.bad_connectivity_msg %(
                self.__class__.__name__, pivot_site_param.scatterers[0].label))
        # Fix every Y-X-Y' angle around the pivot to 120 degrees (sp2 geometry).
        for j, ops in reparametrisation.pair_sym_table[self.pivot].items():
            for k, ops in reparametrisation.pair_sym_table[self.pivot].items():
                if j == k: continue
                reparametrisation.fixed_angles.setdefault(
                    (j, self.pivot, k), 120.0)
        return reparametrisation.add(
            _.terminal_planar_xh2_sites,
            pivot=pivot_site_param,
            pivot_neighbour=pivot_neighbour_site_params[0],
            pivot_neighbour_substituent=pivot_neighbour_substituent_site_params[0],
            length=bond_length,
            hydrogen_0=hydrogens[0],
            hydrogen_1=hydrogens[1])
class terminal_linear_ch_site(hydrogens):
    """Terminal linear C-H site (e.g. alkyne/acetylenic C-H): the hydrogen
    continues the pivot_neighbour--pivot axis.
    """
    n_constrained_sites = 1
    # Ideal C-H bond length (Angstrom) at room temperature.
    room_temperature_bond_length = { 'C' : 0.93,
                                     }

    def add_hydrogen_to(self, reparametrisation, bond_length,
                        pivot_site, pivot_neighbour_sites,
                        pivot_site_param, pivot_neighbour_site_params,
                        hydrogens, **kwds):
        if len(pivot_neighbour_site_params) != 1:
            raise InvalidConstraint(_.bad_connectivity_msg %(
                self.__class__.__name__, pivot_site_param.scatterers[0].label))
        # Fix every Y-X-Y' angle around the pivot to 180 degrees (linear geometry).
        for j, ops in reparametrisation.pair_sym_table[self.pivot].items():
            for k, ops in reparametrisation.pair_sym_table[self.pivot].items():
                if j == k: continue
                reparametrisation.fixed_angles.setdefault(
                    (j, self.pivot, k), 180.0)
        return reparametrisation.add(
            _.terminal_linear_ch_site,
            pivot=pivot_site_param,
            pivot_neighbour=pivot_neighbour_site_params[0],
            length=bond_length,
            hydrogen=hydrogens[0])
# Error message raised when a staggered terminal group's pivot neighbour has
# no non-H substituent against which the staggered conformation can be defined.
need_at_least_one_substituent_msg = (
    "Invalid %s constraint involving %s: "
    "pivot neighbour must have at least one non-H substituent")
class staggered_terminal_tetrahedral_xhn_sites(hydrogens):
    """Terminal tetrahedral X-Hn group held staggered with respect to a
    substituent of the pivot's single non-H neighbour, instead of being
    freely rotatable.
    """
    staggered = True
    need_pivot_neighbour_substituents = True
    # Optional scatterer index of the substituent to stagger against;
    # when None, a suitable substituent is chosen automatically below.
    stagger_on = None

    def add_hydrogen_to(self, reparametrisation, bond_length,
                        pivot_site, pivot_neighbour_sites,
                        pivot_site_param, pivot_neighbour_site_params,
                        pivot_neighbour_substituent_site_params,
                        hydrogens, **kwds):
        if len(pivot_neighbour_site_params) != 1:
            raise InvalidConstraint(_.bad_connectivity_msg %(
                self.__class__.__name__, pivot_site_param.scatterers[0].label))
        if not len(pivot_neighbour_substituent_site_params):
            raise InvalidConstraint(need_at_least_one_substituent_msg %(
                self.__class__.__name__, pivot_site_param.scatterers[0].label))
        if self.stagger_on is None:
            if len(pivot_neighbour_substituent_site_params) == 1:
                stagger_on = pivot_neighbour_substituent_site_params[0]
            else:
                # staggered with respect to the shortest
                # pivot_neighbour - pivot_neighbour_substituent bond
                #
                # If the two bond lengths are similar, then the hydrogen could
                # have a tendency to flip between the positions. If this is
                # the case, a staggered hydrogen constraint is probably
                # unsuitable, and a freely rotatable constraint could be used.
                uc = reparametrisation.structure.unit_cell()
                x_s = col(pivot_neighbour_sites[0])
                d_s = sorted((uc.distance(s.value, x_s), i)
                    for i, s in enumerate(pivot_neighbour_substituent_site_params))
                stagger_on = pivot_neighbour_substituent_site_params[d_s[0][1]]
        else:
            for p in pivot_neighbour_substituent_site_params:
                if p.index == self.stagger_on:
                    stagger_on = p
                    break
            else:
                # BUGFIX: the raise now lives in the for-loop's `else` clause,
                # so it only fires when self.stagger_on matched NONE of the
                # pivot_neighbour_substituents. Previously it executed
                # unconditionally, even after a successful match + break.
                raise InvalidConstraint(_.bad_connectivity_msg %(
                    self.__class__.__name__, pivot_site_param.scatterers[0].label))
        # Fix every Y-X-Y' angle around the pivot to the ideal tetrahedral angle.
        for j, ops in reparametrisation.pair_sym_table[self.pivot].items():
            for k, ops in reparametrisation.pair_sym_table[self.pivot].items():
                if j == k: continue
                reparametrisation.fixed_angles.setdefault(
                    (j, self.pivot, k), tetrahedral_angle)
        return reparametrisation.add(
            getattr(_, self.__class__.__name__),
            pivot=pivot_site_param,
            pivot_neighbour=pivot_neighbour_site_params[0],
            length=bond_length,
            stagger_on=stagger_on,
            hydrogen=hydrogens)
class staggered_terminal_tetrahedral_xh3_sites(
        staggered_terminal_tetrahedral_xhn_sites):
    """Staggered methyl-type X-H3 group (three constrained hydrogens)."""
    n_constrained_sites = 3
    # Reuse the ideal bond lengths of the freely rotating variant.
    room_temperature_bond_length = \
        terminal_tetrahedral_xh3_sites.room_temperature_bond_length


class staggered_terminal_tetrahedral_xh_site(
        staggered_terminal_tetrahedral_xhn_sites):
    """Staggered single terminal X-H site (one constrained hydrogen)."""
    n_constrained_sites = 1
    # Reuse the ideal bond lengths of the freely rotating variant.
    room_temperature_bond_length = \
        terminal_tetrahedral_xh_site.room_temperature_bond_length
class polyhedral_bh_site(hydrogens):
    """B-H (or C-H) hydrogen on a polyhedral cage vertex (e.g. carboranes):
    the pivot has 4 or 5 cage neighbours and the hydrogen sits on the
    exterior of the cage.
    """
    n_constrained_sites = 5
    # Ideal bond lengths (Angstrom) at room temperature, keyed by element.
    room_temperature_bond_length = { 'B': 1.10,
                                     'C': 1.10, }

    def add_hydrogen_to(self, reparametrisation, bond_length,
                        pivot_site, pivot_neighbour_sites,
                        pivot_site_param, pivot_neighbour_site_params,
                        hydrogens, **kwds):
        # A cage vertex must have exactly 4 or 5 non-H neighbours.
        # (Idiom: replaces the original pair of `!=` tests joined by `and`.)
        if len(pivot_neighbour_site_params) not in (4, 5):
            raise InvalidConstraint(_.bad_connectivity_msg %(
                self.__class__.__name__, pivot_site_param.scatterers[0].label))
        return reparametrisation.add(
            _.polyhedral_bh_site,
            pivot=pivot_site_param,
            pivot_neighbours=pivot_neighbour_site_params,
            length=bond_length,
            hydrogen=hydrogens[0])
| 40.903553 | 81 | 0.669211 |
acf3c891885c1e6185f1888853a4b3ad5665f37d | 633 | py | Python | mmedit/models/backbones/encoder_decoders/encoders/__init__.py | Jian137/mmediting-1 | e1ac6c93441ec96696d0b530f040b91b809015b6 | [
"Apache-2.0"
] | 1,884 | 2020-07-09T18:53:43.000Z | 2022-03-31T12:06:18.000Z | mmedit/models/backbones/encoder_decoders/encoders/__init__.py | Jian137/mmediting-1 | e1ac6c93441ec96696d0b530f040b91b809015b6 | [
"Apache-2.0"
] | 622 | 2020-07-09T18:52:27.000Z | 2022-03-31T14:41:09.000Z | mmedit/models/backbones/encoder_decoders/encoders/__init__.py | Jian137/mmediting-1 | e1ac6c93441ec96696d0b530f040b91b809015b6 | [
"Apache-2.0"
] | 361 | 2020-07-09T19:21:47.000Z | 2022-03-31T09:58:27.000Z | # Copyright (c) OpenMMLab. All rights reserved.
from .deepfill_encoder import DeepFillEncoder
from .fba_encoder import FBAResnetDilated
from .gl_encoder import GLEncoder
from .indexnet_encoder import (DepthwiseIndexBlock, HolisticIndexBlock,
                               IndexNetEncoder)
from .pconv_encoder import PConvEncoder
from .resnet_enc import ResGCAEncoder, ResNetEnc, ResShortcutEnc
from .vgg import VGG16

# Names re-exported as the public API of this encoders subpackage.
__all__ = [
    'GLEncoder', 'VGG16', 'ResNetEnc', 'HolisticIndexBlock',
    'DepthwiseIndexBlock', 'ResShortcutEnc', 'PConvEncoder', 'DeepFillEncoder',
    'IndexNetEncoder', 'ResGCAEncoder', 'FBAResnetDilated'
]
| 39.5625 | 79 | 0.761453 |
acf3cb571c5010c476d78dcfe55814e27dcf61a6 | 436 | py | Python | bracket/model.py | henry-li-06/march-madness-bracket | 17b4eb83128153a8406893547c70198114f82636 | [
"MIT"
] | null | null | null | bracket/model.py | henry-li-06/march-madness-bracket | 17b4eb83128153a8406893547c70198114f82636 | [
"MIT"
] | null | null | null | bracket/model.py | henry-li-06/march-madness-bracket | 17b4eb83128153a8406893547c70198114f82636 | [
"MIT"
] | null | null | null | import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

# NOTE: `pd` (pandas) is imported at the top of the original file.
# Training data: one row per game, with feature columns named "0".."7".
all_games_19 = pd.read_csv("bracket/data/all_games_19_training_data.csv")
columns = []
for i in range(8):
    columns.append(str(i))
features = all_games_19[columns]
# Binary label: whether the away team won the game.
target = all_games_19["Away Win"]
# k-NN classifier fitted on the whole data set (train_test_split and numpy
# are imported but unused here; no hold-out evaluation happens in this module).
model = KNeighborsClassifier(n_neighbors=50)
model.fit(features, target)
| 25.647059 | 74 | 0.768349 |
acf3cb664da67644fbead7b0f4443005ce5045d4 | 9,804 | py | Python | src/data/transformers/extract_features.py | timweiland/bundestag_party_prediction | 603438e718ba714282628c5e5fc1ed861842fa3e | [
"MIT"
] | null | null | null | src/data/transformers/extract_features.py | timweiland/bundestag_party_prediction | 603438e718ba714282628c5e5fc1ed861842fa3e | [
"MIT"
] | null | null | null | src/data/transformers/extract_features.py | timweiland/bundestag_party_prediction | 603438e718ba714282628c5e5fc1ed861842fa3e | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import logging
import string
import re
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.feature_extraction.text import TfidfVectorizer
from nltk.corpus import stopwords
import nltk
import textstat
from pathlib import Path
def remove_punctuation(s):
    """Return *s* with every ASCII punctuation character removed."""
    table = str.maketrans("", "", string.punctuation)
    return s.translate(table)


def count_char(text, char):
    """Return how many times *char* occurs in *text*."""
    occurrences = text.count(char)
    return occurrences
# German NLTK stop words, extended with salutation words that open virtually
# every parliamentary speech and therefore carry no signal.
custom_stop_words = ["herr", "herren", "dame", "damen", "kollege", "kollegen"]
stop_words = stopwords.words("german") + custom_stop_words


def is_stop_word(word):
    """Return True if *word* is in the combined German stop-word list."""
    return word in stop_words
def is_word(s):
    """Return True when *s* contains at least one alphanumeric character."""
    for ch in s:
        if ch.isalnum():
            return True
    return False
class NumExclamationQuestionExtractor(BaseEstimator, TransformerMixin):
    """Adds `relative_num_exclamations` and `relative_num_questions` columns:
    counts of '!' and '?' normalised by the text length."""

    def __init__(self):
        pass

    def fit(self, X, y=None):
        return self

    def count_exclamations(self, text):
        return count_char(text, "!") / len(text)

    def count_questions(self, text):
        return count_char(text, "?") / len(text)

    def transform(self, X):
        extractors = (
            ("relative_num_exclamations", self.count_exclamations),
            ("relative_num_questions", self.count_questions),
        )
        for column, counter in extractors:
            X[column] = X["text"].apply(counter)
        return X
class Tokenizer(BaseEstimator, TransformerMixin):
    """Tokenises the speeches: lower-cased word tokens joined by spaces.

    IMPORTANT: overwrites the `text` column in place to save memory —
    the original text is no longer available after this step.
    """

    def __init__(self):
        pass

    def fit(self, X, y=None):
        return self

    def tokenize(self, text):
        tokens = [
            tok for tok in nltk.word_tokenize(text, language="german")
            if is_word(tok)
        ]
        return " ".join(tokens).lower()

    def transform(self, X):
        X["text"] = X["text"].apply(self.tokenize)
        return X
class AvgWordLengthExtractor(BaseEstimator, TransformerMixin):
    """Adds an `avg_word_length` column: mean word length of each text."""

    def __init__(self):
        pass

    def fit(self, X, y=None):
        return self

    def get_avg_word_length(self, text):
        words = text.split()
        total_chars = sum(len(word) for word in words)
        return total_chars / len(words)

    def transform(self, X):
        X["avg_word_length"] = X["text"].apply(self.get_avg_word_length)
        return X
class StopWordFractionExtractor(BaseEstimator, TransformerMixin):
    """Adds a `stop_word_fraction` column: share of stop words per text."""

    def __init__(self):
        pass

    def fit(self, X, y=None):
        return self

    def get_stop_word_fraction(self, text):
        words = text.split()
        stop_count = sum(1 for word in words if is_stop_word(word))
        return stop_count / len(words)

    def transform(self, X):
        X["stop_word_fraction"] = X["text"].apply(self.get_stop_word_fraction)
        return X
class StopWordRemover(BaseEstimator, TransformerMixin):
    """Removes stop words from the `text` column in place."""

    def __init__(self):
        pass

    def fit(self, X, y=None):
        return self

    def remove_stop_words(self, text):
        kept = [word for word in text.split() if not is_stop_word(word)]
        return " ".join(kept)

    def transform(self, X):
        X["text"] = X["text"].apply(self.remove_stop_words)
        return X
class TfidfScoreExtractor(BaseEstimator, TransformerMixin):
    """
    Transformer that adds an `avg_tfidf` feature: the average tf-idf score
    of each speech. Should be used with tokenized text.
    """
    def __init__(self):
        pass

    def fit(self, X, y=None):
        return self

    def transform(self, X):
        # NOTE(review): the vectoriser is fitted inside transform, so scores
        # are relative to whatever batch X is — potential train/test leakage.
        vectorizer = TfidfVectorizer()
        score_matrix = vectorizer.fit_transform(X["text"])
        # score_matrix is sparse; .mean(axis=1) yields a numpy matrix whose
        # .A1 attribute flattens it to a plain 1-D ndarray.
        X["avg_tfidf"] = score_matrix.mean(axis=1).ravel().A1
        return X
class RemoveUnwantedFeaturesTransformer(BaseEstimator, TransformerMixin):
    """Drops metadata columns so that only engineered features remain."""

    _UNWANTED_COLUMNS = [
        "index",
        "date",
        "agenda",
        "speechnumber",
        "speaker",
        "party.facts.id",
        "chair",
        "terms",
        "text",
        "parliament",
        "iso3country",
    ]

    def __init__(self):
        pass

    def fit(self, X, y=None):
        return self

    def transform(self, X):
        # errors="ignore": some columns may already be absent.
        return X.drop(columns=self._UNWANTED_COLUMNS, errors="ignore")
class TextLengthExtractor(BaseEstimator, TransformerMixin):
    """Adds a `text_length` column: the character count of each text."""

    def __init__(self):
        pass

    def fit(self, X, y=None):
        return self

    def text_length(self, text):
        return len(text)

    def transform(self, X):
        lengths = X["text"].apply(self.text_length)
        X["text_length"] = lengths
        return X
class AvgSentenceLengthExtractor(BaseEstimator, TransformerMixin):
    """Adds an `avg_sentence_length` column: characters per sentence, with
    sentence boundaries found by NLTK's German sentence tokenizer."""

    def __init__(self):
        pass

    def fit(self, X, y=None):
        return self

    def avg_sentence_length(self, text):
        sentences = nltk.sent_tokenize(text, language="german")
        return len(text) / len(sentences)

    def transform(self, X):
        X["avg_sentence_length"] = X["text"].apply(self.avg_sentence_length)
        return X
class NumOfProfanitiesExtractor(BaseEstimator, TransformerMixin):
    """Adds a `num_profanities` column: occurrences of the words from the
    bundled profanity list (which includes declinations) in each text."""

    def __init__(self):
        self.profanities = []
        vocab_file = Path(__file__).parent / "profanities.txt"
        with open(vocab_file, "r") as handle:
            self.profanities = handle.read().split()

    def fit(self, X, y=None):
        return self

    def count_profanities(self, text):
        tokens = text.split()
        return sum(tokens.count(word.lower()) for word in self.profanities)

    def transform(self, X):
        X["num_profanities"] = X["text"].apply(self.count_profanities)
        return X
class TTRExtractor(BaseEstimator, TransformerMixin):
    """Adds a `TTR` column: type-to-token ratio (#unique words / #words)."""

    def __init__(self):
        pass

    def fit(self, X, y=None):
        return self

    def TTR(self, text):
        tokens = text.split()
        return len(set(tokens)) / len(tokens)

    def transform(self, X):
        X["TTR"] = X["text"].apply(self.TTR)
        return X
class ReadabilityExtractor(BaseEstimator, TransformerMixin):
    """Adds a `readability` column: Flesch-Reading-Ease for German text.

    Interpretation: 0-30 very difficult, 30-50 difficult, 50-60
    medium/difficult, 60-70 medium, 70-80 medium/easy, 80-90 easy,
    90-100 very easy
    (https://de.wikipedia.org/wiki/Lesbarkeitsindex#Flesch-Reading-Ease).
    """

    def __init__(self):
        pass

    def fit(self, X, y=None):
        return self

    def readability(self, text):
        # NOTE: set_lang mutates global textstat state on every call.
        textstat.set_lang("de")
        return textstat.flesch_reading_ease(text)

    def transform(self, X):
        scores = X["text"].apply(self.readability)
        X["readability"] = scores
        return X
class SentimentExtractor(BaseEstimator, TransformerMixin):
    """
    Transformer that adds a `sentiment` feature (range: -1 to +1) per text,
    calculated as the average of the SentiWS lexicon scores of all words in
    the text that have an entry in the lexicon.
    """
    def __init__(self):
        # word -> polarity score, filled from both SentiWS polarity files.
        self.sentiment_dict = {}
        negative_file_path = (
            Path(__file__).parent
        ) / "./SentiWS_v2.0/SentiWS_v2.0_Negative.txt"
        positive_file_path = (
            Path(__file__).parent
        ) / "./SentiWS_v2.0/SentiWS_v2.0_Positive.txt"
        self.read_sentiments(negative_file_path)
        self.read_sentiments(positive_file_path)

    def fit(self, X, y=None):
        return self

    def read_sentiments(self, file_path):
        # SentiWS line format: "Wort|POS<TAB>score<TAB>infl1,infl2,..."
        # NOTE(review): the split pattern should be a raw string (r"\||\s|,")
        # to avoid invalid-escape warnings; the match behaviour is unchanged.
        with open(file_path) as f:
            for line in f:
                split = re.split("\||\s|,", line)
                # Map the base form and every inflected form to the same score.
                keys = [split[0]] + split[3:-1]
                value = float(split[2])
                for key in keys:
                    self.sentiment_dict[key] = value

    def sentiment(self, text):
        # Average the lexicon scores of all matching tokens; 0 if none match.
        tokens = text.split()
        sentiment_sum = 0
        sentiment_n = 0
        for token in tokens:
            sentiment_score = self.sentiment_dict.get(token)
            if sentiment_score != None:
                sentiment_sum += sentiment_score
                sentiment_n += 1
        if sentiment_n > 0:
            return sentiment_sum / sentiment_n
        else:
            return 0

    def transform(self, X):
        X["sentiment"] = X["text"].apply(self.sentiment)
        return X
| 27.539326 | 118 | 0.622705 |
acf3cdae2c308c78b8ea38231dacb60e5e746da4 | 5,671 | py | Python | hong/nb4_test_custom.py | SherylHYX/Ro-SOS-Metric-Expression-Network-MEnet-for-Robust-Salient-Object-Segmentation | d774372e474161aae4dd26a033ae3858ed7255cf | [
"MIT"
] | 2 | 2021-12-21T03:20:28.000Z | 2022-02-08T12:35:34.000Z | hong/nb4_test_custom.py | SherylHYX/Ro-SOS-Metric-Expression-Network-MEnet-for-Robust-Salient-Object-Segmentation | d774372e474161aae4dd26a033ae3858ed7255cf | [
"MIT"
] | null | null | null | hong/nb4_test_custom.py | SherylHYX/Ro-SOS-Metric-Expression-Network-MEnet-for-Robust-Salient-Object-Segmentation | d774372e474161aae4dd26a033ae3858ed7255cf | [
"MIT"
] | null | null | null | import torch
torch.manual_seed(0)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
from nb2_launch import *
from nb3_load import *
import time
# read data:
# ----- CELL 1 -----
from skimage.transform import resize as imresize
from pdb import set_trace
from datasets import d_dict
class TestDataset(torch.utils.data.Dataset):
    """Loads the images of a saliency benchmark (looked up by name in d_dict),
    resizes them to `resize_size` and returns channels-first uint8 arrays."""

    def __init__(self, name, dtype, device, resize_size=(352,352)):
        self.filenames = d_dict[name]
        self.resize_size = resize_size # 352-224 224-176
        self.dtype = dtype
        self.device = device

    def __len__(self):
        return len(self.filenames)

    def __getitem__(self, idx):
        data_dir = self.filenames[idx]
        suffix = data_dir.split('.')[-1]
        # matplotlib reads PNGs as floats in [0, 1]; rescale to [0, 255].
        if suffix == 'png':
            image_data = mping.imread(data_dir) * 255
        else:
            image_data = mping.imread(data_dir)
        # Normalise every input to an HxWx3 array at resize_size.
        if len(image_data.shape) == 3:
            if image_data.shape[2] != 3:
                # Non-RGB channel count: take channel 0 and replicate it 3x.
                image_data = imresize(np.array(image_data[:,:,0] * 255, dtype = np.uint8), [self.resize_size[0], self.resize_size[1]])
                image_data = np.tile(image_data[:,:,np.newaxis], [1,1,3])
            else:
                image_data = imresize(np.array(image_data, dtype = np.uint8), [self.resize_size[0], self.resize_size[1], 3])
        elif len(image_data.shape) == 2:
            # Greyscale image: resize, then replicate to 3 channels.
            image_data = imresize(np.array(image_data, dtype = np.uint8), [self.resize_size[0], self.resize_size[1]])
            image_data = np.tile(image_data[:,:,np.newaxis], [1,1,3])
        # skimage's resize returns floats in [0, 1]; convert back to uint8.
        image_data = np.uint8(np.round(image_data*255))
        # Channels-first (C, H, W) layout, as expected by the network.
        return image_data.transpose(2, 0, 1)
# MAKE NET
# Build the test dataset/loader and restore the trained network weights.
device = torch.device('cuda:1')
dtype = torch.float32
ds = TestDataset('HKU_IS', dtype=dtype, device=device)
# batch_size=1: input gradients are computed per image in the loop below.
dl = torch.utils.data.DataLoader(ds, batch_size=1, shuffle=False, drop_last=False)
state = load_state(device)
t2net = SimpleNN(device, dtype=dtype)
t2net = t2net.to(device)
t2net.load_state_dict(state_dict=state)
# ------CELL 2-------
softmax = nn.Softmax(dim=1,)


def soft(test):
    """Identity pass-through; the clamp below is intentionally disabled."""
    # test[test<1e-10]=1e-10 # clamp
    return test
# ------CELL 3-------
# x(0-255) --model--> tsal --Softmax--> sm --C=1--> sm --min_max--> final saliency map
# sm=soft(tsal) # Softmax: B=1,C,H,W
class TestNet(torch.nn.Module):
    """Wraps the saliency network so that forward() returns the SUM of the
    softmax saliency map — a scalar suitable as a backprop target for
    input-gradient (sensitivity) experiments."""

    def __init__(self):
        super(TestNet, self).__init__()
        self.t2net = t2net
        self.softmax = softmax

    def forward(self, x):
        _, tsal = self.t2net(x)
        # Upsample the saliency logits to the evaluation resolution.
        tsal = torch.nn.functional.interpolate(tsal, size=(176, 176), scale_factor=None, mode='nearest',
                                               align_corners=None)
        test = tsal
        # Subtract the per-pixel channel max for numerical stability
        # before the softmax.
        test = test - torch.max(test, dim=1, keepdim=True)[0]
        test = softmax(test)
        # Reduce to a scalar so .backward() needs no explicit gradient arg.
        sm = test.sum()
        return sm
testnet = TestNet()
# For every image: forward pass, backprop the scalar saliency sum, then
# report timing and statistics of the gradient w.r.t. the input image.
for i, d in enumerate(dl):
    # Detach the loader output and re-enable gradients on the input tensor.
    x = d.type(dtype).to(device).clone().detach().requires_grad_(True)
    testnet.zero_grad()
    start = time.time()
    backend = testnet(x)
    forw = time.time()
    print('!')
    backend.backward()
    backw = time.time()
    g = x.grad.data.clone()
    # Free cached intermediate activations held by the network.
    testnet.t2net.clear()
    print('calculated, forw:{}, backw:{}'.format(forw-start, backw-forw))
    print(g.shape)
    print(g.abs().max().item(), g.abs().min().item(), g.abs().median().item(), g.abs().mean().item(), g.abs().var().item())
| 33.755952 | 143 | 0.618409 |
acf3cdc89f5f73fe37218c0b35ac58f90aff85f5 | 126 | py | Python | flows/detectors/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py | tycoer/rfvision-1 | db6e28746d8251d1f394544c32b9e0af388d9964 | [
"Apache-2.0"
] | 6 | 2021-09-25T03:53:06.000Z | 2022-02-19T03:25:11.000Z | flows/detectors/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py | tycoer/rfvision-1 | db6e28746d8251d1f394544c32b9e0af388d9964 | [
"Apache-2.0"
] | 1 | 2021-07-21T13:14:54.000Z | 2021-07-21T13:14:54.000Z | flows/detectors/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py | tycoer/rfvision-1 | db6e28746d8251d1f394544c32b9e0af388d9964 | [
"Apache-2.0"
] | 2 | 2021-07-16T03:25:04.000Z | 2021-11-22T06:04:01.000Z | _base_ = './cascade_mask_rcnn_r50_fpn_20e_coco.py'
model = dict(init_cfg='torchvision://resnet101', backbone=dict(depth=101))
| 42 | 74 | 0.785714 |
acf3cf190a3c81334d8cb1706005d39148cce428 | 9,409 | py | Python | Functions.py | rtb1c13/calc_hdx | 10e89e1bf6a129016e7928ff771cab7b0747c4b5 | [
"BSD-3-Clause"
] | 4 | 2020-06-17T21:32:52.000Z | 2021-01-27T14:34:33.000Z | Functions.py | rtb1c13/calc_hdx | 10e89e1bf6a129016e7928ff771cab7b0747c4b5 | [
"BSD-3-Clause"
] | null | null | null | Functions.py | rtb1c13/calc_hdx | 10e89e1bf6a129016e7928ff771cab7b0747c4b5 | [
"BSD-3-Clause"
] | 2 | 2020-06-19T08:35:48.000Z | 2020-12-20T17:59:45.000Z | #!/usr/bin/env python
# Transferable functions for HDX analysis
#
import mdtraj as md
import numpy as np
import pickle
# Exception for HDX
class HDX_Error(Exception):
    """Exception raised for errors specific to the HDX analysis module."""
# Functions
def load_fulltraj(traj, parm, start=1, stop=None, stride=1, standard_names=True, **kwargs):
    """Loads an MDtraj trajectory object with the desired topology
    and coordinates.

    Usage: setup_universe(parm,traj,[start=1,stop=None,stride=1,**kwargs])
    Standard kwargs include atom_indices (an array of 0-indexed
    atoms to keep) and stride (integer of every nth frame to keep).

    'standard_names=False' (not the default here, or in MDTraj)
    may also be useful for PDB topologies, otherwise amide H might
    be renamed from the atom names provided to the standard PDB identifiers
    (e.g. 'H', 'H2', 'H3' for the terminal NH3 group).

    Returns a complete trajectory, which may be memory intensive.

    See also load_trajchunks for an iterative load of large trajectories"""
    try:
        parmobj = md.load_topology(parm, standard_names=standard_names)
    except TypeError:
        # MDTraj only has the standard_names kwarg for certain filetypes
        parmobj = md.load_topology(parm)
    t = md.load(traj, top=parmobj, **kwargs)
    if stop is None:
        stop = t.n_frames
    # `start` is 1-indexed in the API, hence start-1 for the 0-indexed slice.
    return t[start-1:stop:stride] # Start is zero indexed
def load_trajchunks(traj, parm, start=1, stride=1, standard_names=True, **kwargs):
    """Loads a file into a generator of MDtraj trajectory chunks.
    Useful for large/memory intensive trajectory files.

    Usage: load_trajchunks(traj, parm, [start=1, stride=1, **kwargs])
    Standard kwargs include chunk (size of the trajectory chunks
    to load per iteration), and atom_indices (an array of 0-indexed
    atoms to keep).

    'standard_names=False' (not the default here, or in MDTraj)
    may also be useful for PDB topologies, otherwise amide H might
    be renamed from the atom names provided to the standard PDB identifiers
    (e.g. 'H', 'H2', 'H3' for the terminal NH3 group).

    Returns a generator object with trajectory iterations."""
    try:
        parmobj = md.load_topology(parm, standard_names=standard_names)
    except TypeError:
        # MDTraj only has the standard_names kwarg for certain filetypes
        parmobj = md.load_topology(parm)
    # `start` is 1-indexed in the API, hence skip=start-1 for 0-indexed frames.
    return md.iterload(traj, top=parmobj, skip=start-1, stride=stride, **kwargs) # Start is zero indexed
def itertraj_slice(gen, chunk, end, stride=1):
    """Slices a generator (returned by load_trajchunks) of size chunk
    to stop after a given number of frames. Ending frame should be
    given with reference to the ORIGINAL trajectory, not the trajectory
    resampled at interval traj[::stride]. This is consistent with the
    'skip' kwarg for mdtraj.iterload.

    Usage: itertraj_slice(gen, chunk, end, [stride=1])
    Yields: Trajectory chunks of size `chunk` until the original trajectory
            file is exhausted or frame `end` is reached."""
    yielded_frames = 0
    # Convert `end` to the strided frame count; int() floors for positive values.
    end = int(end / stride)
    while yielded_frames + chunk < end:
        yielded_frames += chunk
        try:
            x = next(gen)
        except StopIteration:
            # BUGFIX: if the underlying trajectory is shorter than requested,
            # stop cleanly instead of letting StopIteration escape from this
            # generator (a RuntimeError under PEP 479 / Python >= 3.7).
            return
        yield x
    # Final (possibly partial) chunk up to frame `end`.
    try:
        x = next(gen)
    except StopIteration:
        return
    yield x[:(end - yielded_frames)]
def select(traj, selection):
    """Strips a trajectory based on the MDTraj-format text selection
    provided. By default this is "all".

    Usage: select(traj, selection)
    Returns: Trajectory with selected atoms"""
    if selection == "all":
        return traj
    atms = traj.topology.select(selection)
    new_t = traj.atom_slice(atms)
    # Workaround as atom indices are not renumbered in mdtraj.Topology.subset
    # But are in mdtraj.Topology.copy
    _tmptop = traj.topology.subset(atms)
    new_t.topology = _tmptop.copy()
    return new_t
    # NOTE(review): selection syntax/index errors from MDTraj propagate raw;
    # earlier revisions wrapped them in HDX_Error (see version history).
def list_prolines(traj, log="HDX_analysis.log"):
    """Identify proline residues and their residue indices.

    Proline resids are appended to the `log` file (HDX_analysis.log by
    default).

    Usage: list_prolines(traj, [log])
    Returns: Numpy array of [[Proline_ID, Proline_index]] pairs, or None
             when the topology contains no prolines."""
    pairs = [(res.resSeq, res.index)
             for res in traj.topology.residues if res.name == 'PRO']
    if not pairs:
        with open(log, 'a') as logfile:
            logfile.write("No prolines found in topology.\n")
        return None
    resids = [resid for resid, _ in pairs]
    with open(log, 'a') as logfile:
        logfile.write("Prolines identified at resid:\n"
                      "%s\n" % ' '.join(str(i) for i in resids))
    return np.asarray(pairs)
def select_residxs(traj, reslist, protonly=True, invert=False):
    """Returns atom indices of atoms belonging to residues (0-indexed)
    in the supplied list.

    protonly : restrict the selection to protein atoms (default True)
    invert   : select atoms NOT in the listed residues (default False)

    Usage: select_residxs(traj, reslist, [protonly, invert])
    Returns: Numpy array of selected atom indices"""
    chosen = []
    for atom in traj.topology.atoms:
        residue = atom.residue
        if protonly and not residue.is_protein:
            continue
        membership = residue.index in reslist
        # XOR with `invert`: keep members normally, non-members when inverted.
        if membership != invert:
            chosen.append(atom.index)
    return np.asarray(chosen)
def extract_HN(traj, prolines=None, atomselect="(name H or name HN)", log="HDX_analysis.log"):
    """Returns a list of backbone amide H atom indices, suitable
    for use with 'calc_contacts'. Optionally takes an array of
    resids/indices to skip (normally prolines) and by default returns
    atom indices matching '(name H or name HN)'.

    Usage: extract_HN(traj, [prolines, atomselect, log])"""
    # Map an atom index to a concatenated residue identifier, e.g. 'ALA42'.
    atm2res = lambda _: traj.topology.atom(_).residue.name + str(traj.topology.atom(_).residue.resSeq)
    if prolines is not None:
        # Syntax = "... and not (residue 1 or residue 2 or residue 3 ... )"
        atomselect += " and not (residue %s" % ' or residue '.join(str(_) for _ in prolines[:,0]) + ")"
    # Evaluate the selection ONCE. The original duplicated identical
    # log-and-return code in both branches and ran the selection twice
    # (once for logging, once for the return value).
    hn_idxs = traj.topology.select(atomselect)
    with open(log, 'a') as f:
        f.write("Extracted HN from resids:\n"+ \
                "%s\n" % '\n'.join(atm2res(i) for i in hn_idxs))
    return hn_idxs
### Switching functions for contacts etc. calculation
### 'Sigmoid': y = 1 / [ 1 + exp( k * (x - d0) ) ]   (decreasing switch: y = 0.5 at x = d0)
def sigmoid(x, k=1., d0=0):
denom = 1 + np.exp( k * (x - d0) )
return 1./denom # Height 1 = d0 @ midpoint 0.5 contacts, Height 2 = d0 @ midpoint 1.0 contacts
### 'Rational_6_12': y = [ 1 - ( (x - d0) / x0 ) ** n ] / [ 1 - ( (x - d0) / x0 ) ** m ]
def rational_6_12(x, k, d0=0, n=6, m=12):
num = 1 - ( (x-d0) / k ) ** n
denom = 1 - ( (x-d0) / k ) ** m
return num/denom
### 'Exponential': y = exp( -( x - d0 ) / x0 )
def exponential(x, k, d0=0):
return np.exp( -(x-d0) / k )
### 'Gaussian': y = exp( -( x - d0 )**2 / 2*x0**2 )
def gaussian(x, k, d0=0):
num = -1 * (x - d0)**2
denom = 2 * k**2
return np.exp( num / denom )
### Pickling
def cacheobj(cachefn=None):
def pickle_decorator(func):
def pickle_wrapped_func(*args,**kwargs):
try:
fn = args[0].params['outprefix'] + kwargs.pop('cachefn')
cached_obj = pickle.load(open(fn,'rb'))
try:
# args[0] is 'self' for class methods
with open(args[0].params['logfile'],'a') as f:
f.write("Read cache from file %s\n" % fn)
except KeyError:
print("Read cache from file %s\n" % fn)
return cached_obj
except (KeyError, FileNotFoundError, EOFError, TypeError):
new_obj = func(*args, **kwargs)
pickle.dump(args[0], open(fn,'wb'), protocol=-1) # Highest protocol for size purposes
try:
with open(args[0].params['logfile'],'a') as f:
f.write("Saved cache to file %s\n" % fn)
except KeyError:
print("Saved cache to file %s\n" % fn)
return new_obj
return pickle_wrapped_func
return pickle_decorator
| 42.96347 | 144 | 0.631842 |
acf3cf5c2e73ac247dd944c92196b65cdc52e44d | 15,711 | py | Python | bin/search.py | zwei2008/cve | 75a3d21ca850b9d3d823e51ccf0878f8f3248bdb | [
"BSD-3-Clause"
] | null | null | null | bin/search.py | zwei2008/cve | 75a3d21ca850b9d3d823e51ccf0878f8f3248bdb | [
"BSD-3-Clause"
] | null | null | null | bin/search.py | zwei2008/cve | 75a3d21ca850b9d3d823e51ccf0878f8f3248bdb | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# search is the search component of cve-search querying the MongoDB database.
#
# Software is free software released under the "Modified BSD license"
#
# Copyright (c) 2012 Wim Remes
# Copyright (c) 2012-2015 Alexandre Dulaunoy - a@foo.be
# Copyright (c) 2015 Pieter-Jan Moreels - pieterjan.moreels@gmail.com
# Imports
import os
import sys
runPath = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(runPath, ".."))
import re
import argparse
import csv
from urllib.parse import urlparse
import json
from bson import json_util
from lib import CVEs
import lib.DatabaseLayer as db
from datetime import datetime, timedelta
# init control variables
csvOutput = 0
htmlOutput = 0
jsonOutput = 0
xmlOutput = 0
last_ndays = 0
nlimit = 0
# init various variables :-)
vSearch = ""
vOutput = ""
vFreeSearch = ""
summary_text = ""
# parse command-line arguments
argParser = argparse.ArgumentParser(description='Search for vulnerabilities in the National Vulnerability DB. Data from http://nvd.nist.org.')
argParser.add_argument('-p', type=str, help='S = search product, e.g. o:microsoft:windows_7 or o:cisco:ios:12.1')
argParser.add_argument('-cp', type=str, help='S = search product from cnnvd, e.g. o:microsoft:windows_7 or o:cisco:ios:12.1')
argParser.add_argument('-f', type=str, help='F = free text search in vulnerability summary')
argParser.add_argument('-c', action='append', help='search one or more CVE-ID')
argParser.add_argument('-cc', action='append', help='search one or more CVE-ID from cnnvd')
argParser.add_argument('-o', type=str, help='O = output format [csv|html|json|xml|cveid]')
argParser.add_argument('-l', action='store_true', help='sort in descending mode')
argParser.add_argument('-n', action='store_true', help='lookup complete cpe (Common Platform Enumeration) name for vulnerable configuration')
argParser.add_argument('-r', action='store_true', help='lookup ranking of vulnerable configuration')
argParser.add_argument('-a', default=False, action='store_true', help='Lookup CAPEC for related CWE weaknesses')
argParser.add_argument('-v', type=str, help='vendor name to lookup in reference URLs')
argParser.add_argument('-s', type=str, help='search in summary text')
argParser.add_argument('-t', type=int, help='search in last n day')
argParser.add_argument('-i', default=False, type=int, help='Limit output to n elements (default: unlimited)')
args = argParser.parse_args()
vSearch = args.p
vnSearch = args.cp
cveSearch = [x.upper() for x in args.c] if args.c else None
cnnvdSearch = [x.upper() for x in args.cc] if args.cc else None
vOutput = args.o
vFreeSearch = args.f
sLatest = args.l
namelookup = args.n
rankinglookup = args.r
capeclookup = args.a
last_ndays = args.t
summary_text= args.s
nlimit =args.i
cves = CVEs.last(rankinglookup=rankinglookup, namelookup=namelookup, capeclookup=capeclookup)
# replace special characters in vSearch with encoded version.
# Basically cuz I'm to lazy to handle conversion on DB creation ...
if vSearch:
vSearch = re.sub(r'\(', '%28', vSearch)
vSearch = re.sub(r'\)', '%29', vSearch)
# define which output to generate.
if vOutput == "csv":
csvOutput = 1
elif vOutput == "html":
htmlOutput = 1
elif vOutput == "xml":
xmlOutput = 1
from xml.etree.ElementTree import Element, SubElement, tostring
from xml.sax.saxutils import escape as SaxEscape
r = Element('cve-search')
elif vOutput == "json":
jsonOutput = 1
elif vOutput == "cveid":
cveidOutput = 1
else:
cveidOutput = False
# Print first line of html output
if htmlOutput and args.p is not None:
print("<html><body><h1>CVE search " + args.p + " </h1>")
elif htmlOutput and args.c is not None:
print("<html><body><h1>CVE-ID " + str(args.c) + " </h1>")
# search default is ascending mode
sorttype = 1
if sLatest:
sorttype = -1
def printCVE_json(item, indent=None):
date_fields = ['cvss-time', 'Modified', 'Published']
for field in date_fields:
if field in item:
item[field] = str(item[field])
if not namelookup and not rankinglookup and not capeclookup:
print(json.dumps(item, sort_keys=True, default=json_util.default, indent=indent))
else:
if "vulnerable_configuration" in item:
vulconf = []
ranking = []
for conf in item['vulnerable_configuration']:
if namelookup:
vulconf.append(cves.getcpe(cpeid=conf))
if rankinglookup:
rank = cves.getranking(cpeid=conf)
if rank and rank not in ranking:
ranking.append(rank)
if namelookup:
item['vulnerable_configuration'] = vulconf
if rankinglookup:
item['ranking'] = ranking
if "cwe" in item and capeclookup:
if item['cwe'].lower() != 'unknown':
item['capec'] = cves.getcapec(cweid=(item['cwe'].split('-')[1]))
print(json.dumps(item, sort_keys=True, default=json_util.default, indent=indent))
def printCVE_html(item):
print("<h2>" + item['id'] + "<br></h2>CVSS score: " + str(item['cvss']) + "<br>" + "<b>" + str(item['Published']) + "<b><br>" + item['summary'] + "<br>")
print("References:<br>")
for entry in item['references']:
print(entry + "<br>")
ranking =[]
for entry in item['vulnerable_configuration']:
if rankinglookup:
rank = cves.getranking(cpeid=entry)
if rank and rank not in ranking:
ranking.append(rank)
if rankinglookup:
print("Ranking:<br>")
for ra in ranking:
for e in ra:
for i in e:
print( i + ": " + str(e[i])+"<br>")
print("<hr><hr>")
def printCVE_csv(item):
# We assume that the vendor name is usually in the hostame of the
# URL to avoid any match on the resource part
refs = []
for entry in item['references']:
if args.v is not None:
url = urlparse(entry)
hostname = url.netloc
if re.search(args.v, hostname):
refs.append(entry)
if not refs:
refs = "[no vendor link found]"
if namelookup:
nl = " ".join(item['vulnerable_configuration'])
ranking =[]
ranking_=[]
for entry in item['vulnerable_configuration']:
if rankinglookup:
rank = cves.getranking(cpeid=entry)
if rank and rank not in ranking:
ranking.append(rank)
if rankinglookup:
for r in ranking:
for e in r:
for i in e:
ranking_.append(i+":"+str(e[i]))
if not ranking_:
ranking_="[No Ranking Found]"
else:
ranking_ = " ".join(ranking_)
csvoutput = csv.writer(sys.stdout, delimiter='|', quotechar='|', quoting=csv.QUOTE_MINIMAL)
if not rankinglookup:
if not namelookup:
csvoutput.writerow([item['id'], str(item['Published']), item['cvss'], item['summary'], refs])
else:
csvoutput.writerow([item['id'], str(item['Published']), item['cvss'], item['summary'], refs, nl])
else:
if not namelookup:
csvoutput.writerow([item['id'], str(item['Published']), item['cvss'], item['summary'], refs,ranking_])
else:
csvoutput.writerow([item['id'], str(item['Published']), item['cvss'], item['summary'], refs, nl,ranking_ ])
def printCVE_xml(item):
c = SubElement(r, 'id')
c.text = item['id']
c = SubElement(r, 'Published')
c.text = str(item['Published'])
c = SubElement(r, 'cvss')
c.text = str(item['cvss'])
c = SubElement(r, 'summary')
c.text = SaxEscape(item['summary'])
for e in item['references']:
c = SubElement(r, 'references')
c.text = SaxEscape(e)
ranking=[]
for e in item['vulnerable_configuration']:
c = SubElement(r, 'vulnerable_configuration')
c.text = SaxEscape(e)
if rankinglookup:
rank = cves.getranking(cpeid=e)
if rank and rank not in ranking:
ranking.append(rank)
if rankinglookup:
for ra in ranking:
for e in ra:
for i in e:
c = SubElement(r, i)
c.text =str(e[i])
def printCVE_id(item):
print(item['id'])
def printCVE_human(item):
print("CVE\t: " + item['id'])
print("DATE\t: " + str(item['Published']))
print("CVSS\t: " + str(item['cvss']))
print(item['summary'])
print("\nReferences:")
print("-----------")
for entry in item['references']:
print(entry)
print("\nVulnerable Configs:")
print("-------------------")
ranking=[]
for entry in item['vulnerable_configuration']:
if not namelookup:
print(entry)
else:
print(cves.getcpe(cpeid=entry))
if rankinglookup:
rank = cves.getranking(cpeid=entry)
if rank and rank not in ranking:
ranking.append(rank)
if rankinglookup:
print("\nRanking: ")
print("--------")
for ra in ranking:
for e in ra:
for i in e:
print( i + ": " + str(e[i]))
print("\n\n")
def printCNNVD_human(item):
print("name\t: " + item['name'])
print("CVE\t: " + item['cve_id'])
print("CNNVD\t: " + item['vuln_id'])
print("DATE\t: " + str(item['published']))
print("severity\t: " + str(item['severity']))
print("-------------------")
print(item['vuln_solution'])
print("\nVulnerable Configs:")
print("-------------------")
ranking=[]
for entry in item['vulnerable_configuration']:
if not namelookup:
print(entry)
else:
print(cves.getcpe(cpeid=entry))
if rankinglookup:
rank = cves.getranking(cpeid=entry)
if rank and rank not in ranking:
ranking.append(rank)
if rankinglookup:
print("\nRanking: ")
print("--------")
for ra in ranking:
for e in ra:
for i in e:
print( i + ": " + str(e[i]))
print("\n\n")
# Search in summary text
def search_in_summary(item):
print(item['summary'])
#if args.a in str(item['summary']):
# printCVE_json(item)
if cveSearch:
for item in db.getCVEs(cves=cveSearch):
if csvOutput:
printCVE_csv(item)
elif htmlOutput:
printCVE_html(item)
# bson straight from the MongoDB db - converted to JSON default
# representation
elif jsonOutput:
printCVE_json(item)
elif xmlOutput:
printCVE_xml(item)
elif cveidOutput:
printCVE_id(item)
else:
printCVE_human(item)
if htmlOutput:
print("</body></html>")
sys.exit(0)
if cnnvdSearch:
for item in db.getCNNVDs(cnnvd=cnnvdSearch):
printCNNVD_human(item)
if htmlOutput:
print("</body></html>")
sys.exit(0)
# Basic freetext search (in vulnerability summary).
# Full-text indexing is more efficient to search across all CVEs.
if vFreeSearch:
try:
for item in db.getFreeText(vFreeSearch):
printCVE_json(item, indent=2)
except:
sys.exit("Free text search not enabled on the database!")
sys.exit(0)
# Search Product (best to use CPE notation, e.g. cisco:ios:12.2
if vSearch:
for item in db.cvesForCPE(vSearch):
if not last_ndays:
if csvOutput:
printCVE_csv(item)
elif htmlOutput:
printCVE_html(item)
# bson straight from the MongoDB db - converted to JSON default
# representation
elif jsonOutput:
printCVE_json(item)
elif xmlOutput:
printCVE_xml(item)
elif cveidOutput:
printCVE_id(item)
else:
printCVE_human(item)
else:
date_n_days_ago = datetime.now() - timedelta(days=last_ndays)
if item['Published'] > date_n_days_ago:
if csvOutput:
printCVE_csv(item)
elif htmlOutput:
printCVE_html(item)
# bson straight from the MongoDB db - converted to JSON default
# representation
elif jsonOutput:
printCVE_json(item)
elif xmlOutput:
printCVE_xml(item)
elif cveidOutput:
printCVE_id(item)
else:
printCVE_human(item)
if htmlOutput:
print("</body></html>")
sys.exit(0)
# Search Product (best to use CPE notation, e.g. cisco:ios:12.2
if vnSearch:
for item in db.cnnvdForCPE(vnSearch):
if not last_ndays:
printCNNVD_human(item)
else:
date_n_days_ago = datetime.now() - timedelta(days=last_ndays)
if item['Published'] > date_n_days_ago:
if csvOutput:
printCVE_csv(item)
elif htmlOutput:
printCVE_html(item)
# bson straight from the MongoDB db - converted to JSON default
# representation
elif jsonOutput:
printCVE_json(item)
elif xmlOutput:
printCVE_xml(item)
elif cveidOutput:
printCVE_id(item)
else:
printCVE_human(item)
if htmlOutput:
print("</body></html>")
sys.exit(0)
# Search text in summary
if summary_text:
import lib.CVEs as cves
l = cves.last(rankinglookup=rankinglookup, namelookup=namelookup, capeclookup=capeclookup)
for cveid in db.getCVEIDs(limit=nlimit):
item = l.getcve(cveid=cveid)
if 'cvss' in item:
if type(item['cvss']) == str:
item['cvss'] = float(item['cvss'])
date_fields = ['cvss-time', 'Modified', 'Published']
for field in date_fields:
if field in item:
item[field] = str(item[field])
if summary_text.upper() in item['summary'].upper():
if not last_ndays:
if vOutput:
printCVE_id(item)
else:
print(json.dumps(item, sort_keys=True, default=json_util.default))
else:
date_n_days_ago = datetime.now() - timedelta(days=last_ndays)
# print(item['Published'])
# print(type (item['Published']))
# print("Last n day " +str(last_ndays))
try:
if datetime.strptime(item['Published'], '%Y-%m-%d %H:%M:%S.%f') > date_n_days_ago:
if vOutput:
printCVE_id(item)
else:
print(json.dumps(item, sort_keys=True, default=json_util.default))
except:
pass
if htmlOutput:
print("</body></html>")
sys.exit(0)
if xmlOutput:
# default encoding is UTF-8. Should this be detected on the terminal?
s = tostring(r).decode("utf-8")
print(s)
sys.exit(0)
else:
argParser.print_help()
argParser.exit() | 34.08026 | 157 | 0.571765 |
acf3cfaf9c1e8a0c9925e2fac1ee830ba2b36be0 | 696 | py | Python | stgallenDeduct/stgallenDeduct/apps/accounts/migrations/0001_initial.py | Tobzz123/StartHackRepository | da71a9d9edddfcd9faa81b463deb8f0c0834cf3d | [
"Apache-2.0"
] | null | null | null | stgallenDeduct/stgallenDeduct/apps/accounts/migrations/0001_initial.py | Tobzz123/StartHackRepository | da71a9d9edddfcd9faa81b463deb8f0c0834cf3d | [
"Apache-2.0"
] | null | null | null | stgallenDeduct/stgallenDeduct/apps/accounts/migrations/0001_initial.py | Tobzz123/StartHackRepository | da71a9d9edddfcd9faa81b463deb8f0c0834cf3d | [
"Apache-2.0"
] | 2 | 2021-03-19T16:40:52.000Z | 2021-03-20T13:01:41.000Z | # Generated by Django 3.1.7 on 2021-03-20 12:52
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='UserRegistrationModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| 27.84 | 121 | 0.656609 |
acf3d0576ea3e1f1649e61e6e2ad04cb2888d595 | 3,108 | py | Python | python/compute_cycles.py | DaVisLab/TopoEmbedding | 7b9a64ba66aabafa2a443a915f2760bf5c5e0fe5 | [
"MIT"
] | null | null | null | python/compute_cycles.py | DaVisLab/TopoEmbedding | 7b9a64ba66aabafa2a443a915f2760bf5c5e0fe5 | [
"MIT"
] | null | null | null | python/compute_cycles.py | DaVisLab/TopoEmbedding | 7b9a64ba66aabafa2a443a915f2760bf5c5e0fe5 | [
"MIT"
] | null | null | null | import os
import math
import meshio
import shutil
import json
import struct, csv
import numpy as np
def searchNext(all_lines, visited, point, prev):
for i in range(len(all_lines)):
if(not visited[i]):
line = all_lines[i]
if(point == line[0] and prev != line[1]):
visited[i] = True
return line[1]
elif(point == line[1] and prev != line[0]):
visited[i] = True
return line[0]
DATA_DIR = os.path.join(os.getcwd(), "data")
CYCL_DIR = os.path.join(DATA_DIR, "mnist_cycles")
if(not os.path.exists(CYCL_DIR)):
os.mkdir(CYCL_DIR)
for i in range(10):
os.mkdir(os.path.join(CYCL_DIR, str(i)))
# for each subfolder
for fold_name in os.listdir("./data/mnist/"):
print(fold_name)
if(fold_name != ".DS_Store"):
# for each file in the subfolder
for filename in os.listdir("./data/mnist/"+fold_name+"/"):
if("_cycles" in filename):
# print(filename)
# open the file produced by paraview and ttk
ordered_cycles = {}
try:
im = meshio.read("./data/mnist/"+fold_name+"/"+filename)
# for each cycle id, save the list of edges
array_of_cycles = {}
for i in range(len(im.cell_data['CycleId'][0])):
index = str(im.cell_data['CycleId'][0][i]);
if(index not in array_of_cycles):
array_of_cycles[index] = []
line = im.cells_dict['line'][i]
array_of_cycles[index].append([line[0].tolist(), line[1].tolist()])
# for each cycle reorder the vertices so to have them ready to be plotted with THREE.js
for k in array_of_cycles:
val = array_of_cycles[k]
visited = np.zeros(len(val))
new_array = []
first_point= val[0][0]
prevP = val[0][0]
nextP = val[0][1]
visited[0] = True
new_array.append(im.points[first_point].tolist())
while(nextP != first_point):
new_array.append(im.points[nextP].tolist())
newP = searchNext(val, visited, nextP, prevP)
prevP = nextP
nextP = newP
new_array.append(im.points[first_point].tolist())
ordered_cycles[k] = new_array
except:
print("No cycles for "+filename)
# create the output json file
id = filename.split("_")[0]
with open("./data/mnist_cycles/"+fold_name+"/"+id+"_cycle.json", 'w') as fp:
json.dump(ordered_cycles, fp) | 34.921348 | 107 | 0.466538 |
acf3d13c67eccc94a462fa396783caba84fbee63 | 2,693 | py | Python | c2cgeoportal/lib/functionality.py | pgiraud/c2cgeoportal | 3ec955c5c67d16256af726a62d586b3f4ec3b500 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | c2cgeoportal/lib/functionality.py | pgiraud/c2cgeoportal | 3ec955c5c67d16256af726a62d586b3f4ec3b500 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | c2cgeoportal/lib/functionality.py | pgiraud/c2cgeoportal | 3ec955c5c67d16256af726a62d586b3f4ec3b500 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (c) 2013, Camptocamp SA
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of the FreeBSD Project.
from c2cgeoportal.lib import get_setting
def _get_config_functionality(name, registered, config):
result = None
if registered:
result = get_setting(
config, ('functionalities', 'registered', name))
if result is None:
result = get_setting(
config, ('functionalities', 'anonymous', name))
if result is None:
result = []
elif not isinstance(result, list):
result = [result]
return result
def _get_db_functionality(name, user):
result = [
functionality.value for
functionality in user.functionalities
if functionality.name == name]
if len(result) == 0:
result = [
functionality.value for
functionality in user.role.functionalities
if functionality.name == name]
return result
def get_functionality(name, config, request):
result = []
if request.user:
result = _get_db_functionality(name, request.user)
if len(result) == 0:
result = _get_config_functionality(name, request.user is not None, config)
return result
| 37.402778 | 82 | 0.722986 |
acf3d1dd2ef8e2cf756d6d73dd7226d286a3e414 | 8,313 | py | Python | simpletag.py | yangacer/simpletag | 6214c964915071c43ad8a32d4058c18cce2d05eb | [
"MIT"
] | null | null | null | simpletag.py | yangacer/simpletag | 6214c964915071c43ad8a32d4058c18cce2d05eb | [
"MIT"
] | null | null | null | simpletag.py | yangacer/simpletag | 6214c964915071c43ad8a32d4058c18cce2d05eb | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import sqlite3
__author__ = 'Acer.Yang <yangacer@gmail.com>'
__version__ = '0.1.6'
def get_token(text):
out = ''
for c in text:
if c.isalnum() or ord(c) >= 128:
out += c
elif len(out):
yield out
out = ''
if len(out):
yield out
pass
class __CONSTS__(object):
SQL_COL_INFO = 'PRAGMA table_info({});'
SQL_PURGE_TBL = 'DELETE FROM {};'
SQL_STATS = '''
SELECT term, documents, occurrences FROM {}_terms WHERE col=0;
'''
pass
class ns(object):
dbfile = 'simpletag.db'
table = None
conn = None
using_parenthesis_query = False
def resolve_supported_level(self):
sql = 'PRAGMA compile_options;'
csr = self.conn.cursor()
opts = [row[0] for row in csr.execute(sql)]
if 'ENABLE_FTS3' not in opts:
raise RuntimeError('SQLite''s FTS is not enabled')
if 'ENABLE_FTS3_PARENTHESIS' in opts:
self.using_parenthesis_query = True
pass
def get_existing_tbl_type(self, name):
sql = 'select name from sqlite_master where type = "table";'
csr = self.conn.cursor()
return [row[0] for row in csr.execute(sql)]
def __init__(self):
raise NotImplementedError()
def __priv_init__(self):
conn = sqlite3.connect(self.dbfile)
conn.row_factory = sqlite3.Row
self.conn = conn
self.resolve_supported_level()
pass
def open_table_(self, name):
csr = self.conn.cursor()
sql = self.__sql__['SQL_CREATE_TBL'].format(name)
csr.executescript(sql)
self.conn.commit()
for k, v in self.__sql__.iteritems():
self.__sql__[k] = v.format(name)
self.table = name
pass
def purge(self):
csr = self.conn.cursor()
csr.execute(__CONSTS__.SQL_PURGE_TBL.format(self.table))
self.conn.commit()
pass
def query_ids(self, query_str):
sql = self.__sql__['SQL_QUERY_IDS']
csr = self.conn.cursor()
for row in csr.execute(sql, (query_str,)):
yield row[0]
def query_by_tags(self, query_str, tokenize=True):
sql = self.__sql__['SQL_QUERY_BY_TAGS']
csr = self.conn.cursor()
for row in csr.execute(sql, (query_str, )):
if tokenize is True:
yield row[0], [tok for tok in get_token(row[1])]
else:
yield row[0], row[1]
def query_tags(self, docid):
sql = self.__sql__['SQL_QUERY_TAGS']
csr = self.conn.cursor()
for row in csr.execute(sql, (docid,)):
for tok in get_token(row[0]):
yield tok
def stats(self):
csr = self.conn.cursor()
for row in csr.execute(__CONSTS__.SQL_STATS.format(self.table)):
yield dict(((key, row[key]) for key in row.keys()))
pass
class TextNS(ns):
"""
>>> import simpletag
>>> ns = simpletag.TextNS('myTextTagSpace')
>>> doc_1 = '/a/b'
>>> tags_1 = ['tag']
>>> doc_2 = '/b/a'
>>> tags_2 = 'tag simple!'
>>> ns.update(doc_1, tags_1)
>>> ns.update(doc_2, tags_2)
>>> print [ doc for doc in ns.query_ids('tag') ]
[u'/a/b', u'/b/a']
>>> print [ tag for tag in ns.query_tags(doc_1) ]
[u'tag']
>>> print [ st for st in ns.stats() ] # doctest: +NORMALIZE_WHITESPACE
[{'term': u'simple', 'documents': 1, 'occurrences': 1},
{'term': u'tag', 'documents': 2, 'occurrences': 2}]
>>> ns.purge()
"""
def __init__(self, name):
super(TextNS, self).__priv_init__()
tbls = self.get_existing_tbl_type(name)
if name in tbls and (name + '_text_id') not in tbls:
raise TypeError(name)
self.__sql__ = dict(
SQL_CREATE_TBL='''
PRAGMA recursive_triggers='ON';
CREATE VIRTUAL TABLE IF NOT EXISTS {0} USING FTS4(tags);
CREATE TABLE IF NOT EXISTS {0}_text_id (
textid TEXT UNIQUE PRIMARY KEY NOT NULL);
CREATE VIRTUAL TABLE IF NOT EXISTS {0}_terms USING fts4aux({0});
CREATE TRIGGER IF NOT EXISTS {0}_del_text_id
AFTER DELETE ON {0}_text_id
BEGIN
DELETE FROM {0} WHERE docid=OLD.rowid;
END;
''',
SQL_INSERT='INSERT OR REPLACE INTO {}_text_id VALUES(?);',
SQL_UPDATE_1='DELETE FROM {} WHERE docid=?;',
SQL_UPDATE_2='INSERT INTO {} (docid, tags) VALUES (?, ?);',
SQL_PURGE_TBL='DELETE FROM {0}; DELETE FROM {0}_text_id;',
SQL_DEL='''
DELETE FROM {0} WHERE docid=(
SELECT rowid FROM {0}_text_id WHERE textid=?);
''',
SQL_QUERY_IDS='''
SELECT * FROM {0}_text_id AS lhs
JOIN (SELECT docid FROM {0} WHERE tags MATCH ?) AS rhs
ON (lhs.rowid=rhs.docid);
''',
SQL_QUERY_BY_TAGS='''
SELECT * FROM {0}_text_id, {0} WHERE {0}.tags MATCH ? AND
{0}_text_id.rowid = {0}.docid;
''',
SQL_QUERY_TAGS='''
SELECT tags FROM {0} WHERE docid=(
SELECT rowid FROM {0}_text_id WHERE textid=?);
''',
SQL_STATS='''
SELECT term, documents, occurrences FROM {}_terms WHERE col=0;
'''
)
self.open_table_(name)
pass
def update(self, ident, tags):
if not isinstance(ident, str) and not isinstance(ident, unicode):
raise TypeError('Invalid ident type')
if not isinstance(tags, str) and not isinstance(tags, unicode):
tags = ' '.join(tags)
csr = self.conn.cursor()
sql = self.__sql__['SQL_INSERT']
csr.execute(sql, (ident, ))
rowid = csr.lastrowid
sql = self.__sql__['SQL_UPDATE_1']
csr.execute(sql, (rowid, ))
sql = self.__sql__['SQL_UPDATE_2']
csr.execute(sql, (rowid, tags))
self.conn.commit()
pass
pass
class IntNS(ns):
"""
>>> import simpletag
>>> ns = simpletag.IntNS('myIntTagSpace')
>>> doc_1 = 1
>>> tags_1 = ['tag']
>>> doc_2 = 2
>>> tags_2 = 'tag simple!'
>>> ns.update(doc_1, tags_1)
>>> ns.update(doc_2, tags_2)
>>> print [ doc for doc in ns.query_ids('tag') ]
[1, 2]
>>> print [ tag for tag in ns.query_tags(doc_1) ]
[u'tag']
>>> print [ st for st in ns.stats() ] # doctest: +NORMALIZE_WHITESPACE
[{'term': u'simple', 'documents': 1, 'occurrences': 1},
{'term': u'tag', 'documents': 2, 'occurrences': 2}]
>>> ns.purge()
"""
def __init__(self, name, conn=None):
super(IntNS, self).__priv_init__()
tbls = self.get_existing_tbl_type(name)
if name in tbls and (name + '_text_id') in tbls:
raise TypeError(name)
self.__sql__ = dict(
SQL_CREATE_TBL='''
CREATE VIRTUAL TABLE IF NOT EXISTS {0} USING FTS4(tags);
CREATE VIRTUAL TABLE IF NOT EXISTS {0}_terms USING fts4aux({0});
''',
SQL_UPDATE_1='DELETE FROM {} WHERE docid=?;',
SQL_UPDATE_2='INSERT INTO {} (docid, tags) VALUES (?, ?);',
SQL_PURGE_TBL='DELETE FROM {0};',
SQL_DEL='DELETE FROM {} WHERE docid=?;',
SQL_QUERY_IDS='SELECT docid FROM {0} WHERE tags MATCH ?;',
SQL_QUERY_BY_TAGS='''
SELECT docid, tags FROM {0} WHERE tags MATCH ?;
''',
SQL_QUERY_TAGS='SELECT tags FROM {} WHERE docid=?;',
SQL_STATS='''
SELECT term, documents, occurrences FROM {}_terms WHERE col=0;
''',
)
self.open_table_(name)
pass
def update(self, ident, tags):
if not isinstance(ident, int):
raise TypeError('Invalid ident type')
if not isinstance(tags, str) and not isinstance(tags, unicode):
tags = ' '.join(tags)
csr = self.conn.cursor()
sql = self.__sql__['SQL_UPDATE_1']
csr.execute(sql, (ident, ))
sql = self.__sql__['SQL_UPDATE_2']
csr.execute(sql, (ident, tags))
self.conn.commit()
pass
pass
| 29.271127 | 76 | 0.544088 |
acf3d26133422d0840aee4a89f6e9d6973db8be5 | 209 | py | Python | erpnext_ebay/erpnext_ebay/doctype/ebay_order/ebay_order.py | bglazier/erpext_ebay | a94744a9f8bd2a86aab2b4d47b7c14f0065cccaf | [
"MIT"
] | 19 | 2018-09-12T08:58:15.000Z | 2022-02-22T18:51:19.000Z | erpnext_ebay/erpnext_ebay/doctype/ebay_order/ebay_order.py | bglazier/erpext_ebay | a94744a9f8bd2a86aab2b4d47b7c14f0065cccaf | [
"MIT"
] | 29 | 2018-06-14T09:47:07.000Z | 2021-12-20T07:18:57.000Z | erpnext_ebay/erpnext_ebay/doctype/ebay_order/ebay_order.py | bglazier/erpext_ebay | a94744a9f8bd2a86aab2b4d47b7c14f0065cccaf | [
"MIT"
] | 26 | 2016-09-08T05:07:31.000Z | 2022-02-22T17:10:33.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Ben Glazier and contributors
# For license information, please see license.txt
from frappe.model.document import Document
class eBayorder(Document):
pass
| 17.416667 | 50 | 0.727273 |
acf3d2802d87398b2455ec153c869ec0210dc255 | 567 | py | Python | tests/test_utils.py | mostau1/netmiko | 5b5463fb01e39e771be553281748477a48c7391c | [
"MIT"
] | 1 | 2015-07-23T15:01:11.000Z | 2015-07-23T15:01:11.000Z | tests/test_utils.py | mostau1/netmiko | 5b5463fb01e39e771be553281748477a48c7391c | [
"MIT"
] | 1 | 2021-03-24T22:06:02.000Z | 2021-03-24T22:06:02.000Z | tests/test_utils.py | mostau1/netmiko | 5b5463fb01e39e771be553281748477a48c7391c | [
"MIT"
] | 3 | 2017-09-08T21:47:07.000Z | 2018-07-05T02:07:28.000Z | #!/usr/bin/env python
"""
Implement common functions for tests
"""
from __future__ import print_function
from __future__ import unicode_literals
import io
import sys
def parse_yaml(yaml_file):
"""
Parses a yaml file, returning its contents as a dict.
"""
try:
import yaml
except ImportError:
sys.exit("Unable to import yaml module.")
try:
with io.open(yaml_file, encoding='utf-8') as fname:
return yaml.load(fname)
except IOError:
sys.exit("Unable to open YAML file: {0}".format(yaml_file))
| 21.807692 | 67 | 0.661376 |
acf3d3b51e7c0b916f4bf3f2d528453821b34e5d | 45,453 | py | Python | Cython/Build/Dependencies.py | facaiy/cython | 4a0750d02ebb29efd13b0337ad9fb7e193b7a798 | [
"Apache-2.0"
] | 1 | 2020-10-15T22:44:11.000Z | 2020-10-15T22:44:11.000Z | Cython/Build/Dependencies.py | facaiy/cython | 4a0750d02ebb29efd13b0337ad9fb7e193b7a798 | [
"Apache-2.0"
] | null | null | null | Cython/Build/Dependencies.py | facaiy/cython | 4a0750d02ebb29efd13b0337ad9fb7e193b7a798 | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import, print_function
import cython
from .. import __version__
import collections
import contextlib
import hashlib
import os
import shutil
import subprocess
import re, sys, time
from glob import iglob
from io import open as io_open
from os.path import relpath as _relpath
from distutils.extension import Extension
from distutils.util import strtobool
import zipfile
try:
import gzip
gzip_open = gzip.open
gzip_ext = '.gz'
except ImportError:
gzip_open = open
gzip_ext = ''
try:
import zlib
zipfile_compression_mode = zipfile.ZIP_DEFLATED
except ImportError:
zipfile_compression_mode = zipfile.ZIP_STORED
try:
import pythran
import pythran.config
PythranAvailable = True
except:
PythranAvailable = False
from .. import Utils
from ..Utils import (cached_function, cached_method, path_exists,
safe_makedirs, copy_file_to_dir_if_newer, is_package_dir, replace_suffix)
from ..Compiler.Main import Context, CompilationOptions, default_options
join_path = cached_function(os.path.join)
copy_once_if_newer = cached_function(copy_file_to_dir_if_newer)
safe_makedirs_once = cached_function(safe_makedirs)
if sys.version_info[0] < 3:
# stupid Py2 distutils enforces str type in list of sources
_fs_encoding = sys.getfilesystemencoding()
if _fs_encoding is None:
_fs_encoding = sys.getdefaultencoding()
def encode_filename_in_py2(filename):
if not isinstance(filename, bytes):
return filename.encode(_fs_encoding)
return filename
else:
def encode_filename_in_py2(filename):
return filename
basestring = str
def _make_relative(file_paths, base=None):
if not base:
base = os.getcwd()
if base[-1] != os.path.sep:
base += os.path.sep
return [_relpath(path, base) if path.startswith(base) else path
for path in file_paths]
def extended_iglob(pattern):
    """Generator: glob() with two extensions over the stdlib version --
    ``{a,b}`` alternation and recursive ``**/`` directory matching."""
    # Expand one {...,...} alternation and recurse for each variant.
    if '{' in pattern:
        match = re.match('(.*){([^}]+)}(.*)', pattern)
        if match:
            head, alternatives, tail = match.groups()
            for alternative in alternatives.split(','):
                for result in extended_iglob(head + alternative + tail):
                    yield result
            return
    if '**/' in pattern:
        # Match both at the current level and in every subdirectory,
        # suppressing duplicates that the two branches can both produce.
        emitted = set()
        prefix, remainder = pattern.split('**/', 1)
        roots = iglob(prefix + '/') if prefix else ['']
        for root in roots:
            for result in extended_iglob(join_path(root, remainder)):
                if result not in emitted:
                    emitted.add(result)
                    yield result
            for result in extended_iglob(join_path(root, '*', '**/' + remainder)):
                if result not in emitted:
                    emitted.add(result)
                    yield result
    else:
        for result in iglob(pattern):
            yield result
def nonempty(it, error_msg="expected non-empty iterator"):
    """Yield everything from *it*; once it is exhausted, raise ValueError
    with *error_msg* if it produced no items at all."""
    produced_anything = False
    for item in it:
        produced_anything = True
        yield item
    if not produced_anything:
        raise ValueError(error_msg)
@cached_function
def file_hash(filename):
    """Return an MD5 hex digest over the normalized path and the file's
    contents; used as a content fingerprint for the compilation cache."""
    path = os.path.normpath(filename.encode("UTF-8"))
    # Prefix with the path length so distinct (path, contents) pairs
    # cannot collide by concatenation.
    prefix = (str(len(path)) + ":").encode("UTF-8")
    m = hashlib.md5(prefix)
    m.update(path)
    # Idiom fix: use a context manager instead of try/finally close().
    # Read in fixed-size chunks so huge files need not fit in memory.
    with open(filename, 'rb') as f:
        data = f.read(65000)
        while data:
            m.update(data)
            data = f.read(65000)
    return m.hexdigest()
def parse_list(s):
    """
    Parse a distutils directive value into a list of strings.  Values are
    either whitespace separated, or written as a bracketed comma separated
    list; individual items may be quoted.

    >>> parse_list("")
    []
    >>> parse_list("a")
    ['a']
    >>> parse_list("a b c")
    ['a', 'b', 'c']
    >>> parse_list("[a, b, c]")
    ['a', 'b', 'c']
    >>> parse_list('a " " b')
    ['a', ' ', 'b']
    >>> parse_list('[a, ",a", "a,", ",", ]')
    ['a', ',a', 'a,', ',']
    """
    # Bracketed form is comma separated, bare form whitespace separated.
    if len(s) >= 2 and s[0] == '[' and s[-1] == ']':
        s = s[1:-1]
        delimiter = ','
    else:
        delimiter = ' '
    # Replace string literals by labels first so that delimiters inside
    # quotes cannot split an item.
    s, literals = strip_string_literals(s)
    def unquote(literal):
        literal = literal.strip()
        if literal[0] in "'\"":
            # Quoted item: map the label back to the original text.
            return literals[literal[1:-1]]
        else:
            return literal
    return [unquote(item) for item in s.split(delimiter) if item.strip()]
# Sentinel values describing how DistutilsInfo.merge() combines a setting
# seen in multiple sources:
transitive_str = object()    # string value, inherited if not set locally
transitive_list = object()   # ordered union of list values
bool_or = object()           # logical OR of boolean values
# Known "# distutils:" header keys and the type/merge policy of each value.
distutils_settings = {
    'name': str,
    'sources': list,
    'define_macros': list,
    'undef_macros': list,
    'libraries': transitive_list,
    'library_dirs': transitive_list,
    'runtime_library_dirs': transitive_list,
    'include_dirs': transitive_list,
    'extra_objects': list,
    'extra_compile_args': transitive_list,
    'extra_link_args': transitive_list,
    'export_symbols': list,
    'depends': transitive_list,
    'language': transitive_str,
    'np_pythran': bool_or
}
def update_pythran_extension(ext):
    """Augment a distutils Extension in place with Pythran's build flags
    and switch it to C++ compilation."""
    if not PythranAvailable:
        raise RuntimeError("You first need to install Pythran to use the np_pythran directive.")
    pythran_ext = pythran.config.make_extension()
    for setting in ('include_dirs', 'extra_compile_args', 'extra_link_args',
                    'define_macros', 'undef_macros', 'library_dirs',
                    'libraries'):
        getattr(ext, setting).extend(pythran_ext[setting])
    ext.language = 'c++'
    # These options are not compatible with the way normal Cython extensions work
    for bad_option in ["-fwhole-program", "-fvisibility=hidden"]:
        try:
            ext.extra_compile_args.remove(bad_option)
        except ValueError:
            pass
@cython.locals(start=cython.Py_ssize_t, end=cython.Py_ssize_t)
def line_iter(source):
    """Iterate over the lines of *source*: a string is split on newlines
    (terminators stripped), anything else is iterated as-is."""
    if not isinstance(source, basestring):
        for line in source:
            yield line
        return
    start = 0
    end = source.find('\n', start)
    while end != -1:
        yield source[start:end]
        start = end + 1
        end = source.find('\n', start)
    # Trailing chunk after the last newline (possibly empty).
    yield source[start:]
class DistutilsInfo(object):
    """Distutils/Cython build settings harvested from the leading
    '# distutils:' / '# cython:' comment header of a source file, or from
    an existing Extension object.  Stored as a plain dict in ``values``."""
    def __init__(self, source=None, exn=None):
        self.values = {}
        if source is not None:
            # Only the leading comment block is scanned; the first
            # non-comment line terminates the search.
            for line in line_iter(source):
                line = line.lstrip()
                if not line:
                    continue
                if line[0] != '#':
                    break
                line = line[1:].lstrip()
                kind = next((k for k in ("distutils:","cython:") if line.startswith(k)), None)
                if not kind is None:
                    key, _, value = [s.strip() for s in line[len(kind):].partition('=')]
                    type = distutils_settings.get(key, None)
                    # Unknown "cython:" keys are compiler directives, not
                    # build settings -- ignore them here.
                    if line.startswith("cython:") and type is None: continue
                    if type in (list, transitive_list):
                        value = parse_list(value)
                        if key == 'define_macros':
                            # distutils expects (name, value-or-None) pairs.
                            value = [tuple(macro.split('=', 1))
                                     if '=' in macro else (macro, None)
                                     for macro in value]
                    if type is bool_or:
                        value = strtobool(value)
                    self.values[key] = value
        elif exn is not None:
            # Harvest non-empty settings from an Extension instance instead.
            for key in distutils_settings:
                if key in ('name', 'sources','np_pythran'):
                    continue
                value = getattr(exn, key, None)
                if value:
                    self.values[key] = value
    def merge(self, other):
        # Merge *other* into self per each setting's policy (transitive_str:
        # keep existing; transitive_list: ordered union; bool_or: OR).
        # Returns self.
        if other is None:
            return self
        for key, value in other.values.items():
            type = distutils_settings[key]
            if type is transitive_str and key not in self.values:
                self.values[key] = value
            elif type is transitive_list:
                if key in self.values:
                    # Change a *copy* of the list (Trac #845)
                    all = self.values[key][:]
                    for v in value:
                        if v not in all:
                            all.append(v)
                    value = all
                self.values[key] = value
            elif type is bool_or:
                self.values[key] = self.values.get(key, False) | value
        return self
    def subs(self, aliases):
        # Return a new DistutilsInfo with alias names substituted; an alias
        # may expand to a list inside list-valued settings.
        if aliases is None:
            return self
        resolved = DistutilsInfo()
        for key, value in self.values.items():
            type = distutils_settings[key]
            if type in [list, transitive_list]:
                new_value_list = []
                for v in value:
                    if v in aliases:
                        v = aliases[v]
                    if isinstance(v, list):
                        new_value_list += v
                    else:
                        new_value_list.append(v)
                value = new_value_list
            else:
                if value in aliases:
                    value = aliases[value]
            resolved.values[key] = value
        return resolved
    def apply(self, extension):
        # Append our list-valued settings onto the given Extension.
        # NOTE(review): non-list settings are not applied here -- presumably
        # handled elsewhere; confirm before relying on apply() for those.
        for key, value in self.values.items():
            type = distutils_settings[key]
            if type in [list, transitive_list]:
                value = getattr(extension, key) + list(value)
                setattr(extension, key, value)
@cython.locals(start=cython.Py_ssize_t, q=cython.Py_ssize_t,
               single_q=cython.Py_ssize_t, double_q=cython.Py_ssize_t,
               hash_mark=cython.Py_ssize_t, end=cython.Py_ssize_t,
               k=cython.Py_ssize_t, counter=cython.Py_ssize_t, quote_len=cython.Py_ssize_t)
def strip_string_literals(code, prefix='__Pyx_L'):
    """
    Normalizes every string literal to be of the form '__Pyx_Lxxx',
    returning the normalized code and a mapping of labels to
    string literals.  Comments are replaced by labels the same way.
    """
    new_code = []
    literals = {}
    counter = 0
    start = q = 0
    in_quote = False
    hash_mark = single_q = double_q = -1
    code_len = len(code)
    quote_type = quote_len = None
    while True:
        # Refresh the cached positions of the next '#', "'" and '"'
        # at or after the scan position q.
        if hash_mark < q:
            hash_mark = code.find('#', q)
        if single_q < q:
            single_q = code.find("'", q)
        if double_q < q:
            double_q = code.find('"', q)
        q = min(single_q, double_q)
        if q == -1:
            q = max(single_q, double_q)
        # We're done.
        if q == -1 and hash_mark == -1:
            new_code.append(code[start:])
            break
        # Try to close the quote.
        elif in_quote:
            if code[q-1] == u'\\':
                # Count preceding backslashes; an odd number escapes
                # this quote character.
                k = 2
                while q >= k and code[q-k] == u'\\':
                    k += 1
                if k % 2 == 0:
                    q += 1
                    continue
            if code[q] == quote_type and (
                    quote_len == 1 or (code_len > q + 2 and quote_type == code[q+1] == code[q+2])):
                # Closing quote found: emit quote + label + quote.
                counter += 1
                label = "%s%s_" % (prefix, counter)
                literals[label] = code[start+quote_len:q]
                full_quote = code[q:q+quote_len]
                new_code.append(full_quote)
                new_code.append(label)
                new_code.append(full_quote)
                q += quote_len
                in_quote = False
                start = q
            else:
                q += 1
        # Process comment.
        elif -1 != hash_mark and (hash_mark < q or q == -1):
            new_code.append(code[start:hash_mark+1])
            end = code.find('\n', hash_mark)
            counter += 1
            label = "%s%s_" % (prefix, counter)
            if end == -1:
                end_or_none = None
            else:
                end_or_none = end
            literals[label] = code[hash_mark+1:end_or_none]
            new_code.append(label)
            if end == -1:
                break
            start = q = end
        # Open the quote.
        else:
            # Triple quote if the same quote char appears three times.
            if code_len >= q+3 and (code[q] == code[q+1] == code[q+2]):
                quote_len = 3
            else:
                quote_len = 1
            in_quote = True
            quote_type = code[q]
            new_code.append(code[start:q])
            start = q
            q += quote_len
    return "".join(new_code), literals
# We need to allow spaces to allow for conditional compilation like
# IF ...:
#     cimport ...
# Per line (re.M), group 1-4 match, in order: "from X cimport",
# "cimport X[, Y...]", 'cdef extern from "file"', 'include "file"'.
dependency_regex = re.compile(r"(?:^\s*from +([0-9a-zA-Z_.]+) +cimport)|"
                              r"(?:^\s*cimport +([0-9a-zA-Z_.]+(?: *, *[0-9a-zA-Z_.]+)*))|"
                              r"(?:^\s*cdef +extern +from +['\"]([^'\"]+)['\"])|"
                              r"(?:^\s*include +['\"]([^'\"]+)['\"])", re.M)
def normalize_existing(base_path, rel_paths):
    """Normalize *rel_paths* against the directory of *base_path*
    (see normalize_existing0); duplicates are dropped."""
    unique_paths = tuple(set(rel_paths))
    return normalize_existing0(os.path.dirname(base_path), unique_paths)
@cached_function
def normalize_existing0(base_dir, rel_paths):
    """
    Given some base directory ``base_dir`` and a list of path names
    ``rel_paths``, normalize each relative path name ``rel`` by
    replacing it by ``os.path.join(base, rel)`` if that file exists.

    Return a couple ``(normalized, needed_base)`` where ``normalized``
    is the list of normalized file names and ``needed_base`` is
    ``base_dir`` if we actually needed ``base_dir``. If no paths were
    changed (for example, if all paths were already absolute), then
    ``needed_base`` is ``None``.
    """
    normalized = []
    needed_base = None
    for rel in rel_paths:
        if os.path.isabs(rel):
            normalized.append(rel)
        else:
            candidate = join_path(base_dir, rel)
            if path_exists(candidate):
                normalized.append(os.path.normpath(candidate))
                needed_base = base_dir
            else:
                normalized.append(rel)
    return (normalized, needed_base)
def resolve_depends(depends, include_dirs):
    """Resolve each entry of *depends* against *include_dirs*, silently
    dropping entries that cannot be found (and '<system>' style entries)."""
    include_dirs = tuple(include_dirs)
    candidates = (resolve_depend(depend, include_dirs) for depend in depends)
    return [path for path in candidates if path is not None]
@cached_function
def resolve_depend(depend, include_dirs):
    """Return the normalized path of *depend* found in *include_dirs*, or
    None if it cannot be located or is a '<system header>' style entry."""
    if depend[0] == '<' and depend[-1] == '>':
        return None
    for include_dir in include_dirs:
        candidate = join_path(include_dir, depend)
        if path_exists(candidate):
            return os.path.normpath(candidate)
    return None
@cached_function
def package(filename):
    # Return the package path of *filename* as a tuple of package names,
    # walking upwards while each parent directory is a package directory.
    dir = os.path.dirname(os.path.abspath(str(filename)))
    # "dir != filename" stops the recursion at the filesystem root,
    # where dirname() becomes a fixed point.
    if dir != filename and is_package_dir(dir):
        return package(dir) + (os.path.basename(dir),)
    else:
        return ()
@cached_function
def fully_qualified_name(filename):
    """Return the dotted module name of *filename*, including its package
    path, e.g. ``pkg.sub.mod``."""
    stem = os.path.splitext(os.path.basename(filename))[0]
    return '.'.join(package(filename) + (stem,))
@cached_function
def parse_dependencies(source_filename):
    # Actual parsing is way too slow, so we use regular expressions.
    # The only catch is that we must strip comments and string
    # literals ahead of time.
    # Returns (cimports, includes, externs, DistutilsInfo).
    fh = Utils.open_source_file(source_filename, error_handling='ignore')
    try:
        source = fh.read()
    finally:
        fh.close()
    distutils_info = DistutilsInfo(source)
    source, literals = strip_string_literals(source)
    # Join backslash continuations and normalize tabs so the line-based
    # regex sees complete statements.
    source = source.replace('\\\n', ' ').replace('\t', ' ')
    # TODO: pure mode
    cimports = []
    includes = []
    externs = []
    for m in dependency_regex.finditer(source):
        cimport_from, cimport_list, extern, include = m.groups()
        if cimport_from:
            cimports.append(cimport_from)
        elif cimport_list:
            cimports.extend(x.strip() for x in cimport_list.split(","))
        elif extern:
            # File names were replaced by labels above; map them back.
            externs.append(literals[extern])
        else:
            includes.append(literals[include])
    return cimports, includes, externs, distutils_info
class DependencyTree(object):
    """Caching resolver for the cimport/include/extern dependency graph of
    Cython sources; used to decide what needs recompiling and to compute
    per-module distutils settings and cache fingerprints."""
    def __init__(self, context, quiet=False):
        self.context = context
        self.quiet = quiet
        self._transitive_cache = {}
    def parse_dependencies(self, source_filename):
        if path_exists(source_filename):
            source_filename = os.path.normpath(source_filename)
        return parse_dependencies(source_filename)
    @cached_method
    def included_files(self, filename):
        # This is messy because included files are textually included, resolving
        # cimports (but not includes) relative to the including file.
        all = set()
        for include in self.parse_dependencies(filename)[1]:
            include_path = join_path(os.path.dirname(filename), include)
            if not path_exists(include_path):
                include_path = self.context.find_include_file(include, None)
            if include_path:
                if '.' + os.path.sep in include_path:
                    include_path = os.path.normpath(include_path)
                all.add(include_path)
                all.update(self.included_files(include_path))
            elif not self.quiet:
                print("Unable to locate '%s' referenced from '%s'" % (filename, include))
        return all
    @cached_method
    def cimports_externs_incdirs(self, filename):
        # This is really ugly. Nested cimports are resolved with respect to the
        # includer, but includes are resolved with respect to the includee.
        cimports, includes, externs = self.parse_dependencies(filename)[:3]
        cimports = set(cimports)
        externs = set(externs)
        incdirs = set()
        for include in self.included_files(filename):
            included_cimports, included_externs, included_incdirs = self.cimports_externs_incdirs(include)
            cimports.update(included_cimports)
            externs.update(included_externs)
            incdirs.update(included_incdirs)
        externs, incdir = normalize_existing(filename, externs)
        if incdir:
            incdirs.add(incdir)
        return tuple(cimports), externs, incdirs
    def cimports(self, filename):
        # Just the cimported module names of the transitive include closure.
        return self.cimports_externs_incdirs(filename)[0]
    def package(self, filename):
        return package(filename)
    def fully_qualified_name(self, filename):
        return fully_qualified_name(filename)
    @cached_method
    def find_pxd(self, module, filename=None):
        # Locate the .pxd file for *module*, resolving explicit relative
        # imports against the package of *filename*.
        is_relative = module[0] == '.'
        if is_relative and not filename:
            raise NotImplementedError("New relative imports.")
        if filename is not None:
            module_path = module.split('.')
            if is_relative:
                module_path.pop(0)  # just explicitly relative
            package_path = list(self.package(filename))
            # Each empty leading component ("..", "...") climbs one package.
            while module_path and not module_path[0]:
                try:
                    package_path.pop()
                except IndexError:
                    return None  # FIXME: error?
                module_path.pop(0)
            relative = '.'.join(package_path + module_path)
            pxd = self.context.find_pxd_file(relative, None)
            if pxd:
                return pxd
        if is_relative:
            return None  # FIXME: error?
        return self.context.find_pxd_file(module, None)
    @cached_method
    def cimported_files(self, filename):
        # All .pxd files this source directly depends on: its own .pxd
        # (for .pyx files) plus those of all cimported modules.
        if filename[-4:] == '.pyx' and path_exists(filename[:-4] + '.pxd'):
            pxd_list = [filename[:-4] + '.pxd']
        else:
            pxd_list = []
        for module in self.cimports(filename):
            # The built-in "cython" pseudo-package has no .pxd on disk.
            if module[:7] == 'cython.' or module == 'cython':
                continue
            pxd_file = self.find_pxd(module, filename)
            if pxd_file is not None:
                pxd_list.append(pxd_file)
            elif not self.quiet:
                print("%s: cannot find cimported module '%s'" % (filename, module))
        return tuple(pxd_list)
    @cached_method
    def immediate_dependencies(self, filename):
        all = set([filename])
        all.update(self.cimported_files(filename))
        all.update(self.included_files(filename))
        return all
    def all_dependencies(self, filename):
        # Transitive closure of immediate_dependencies().
        return self.transitive_merge(filename, self.immediate_dependencies, set.union)
    @cached_method
    def timestamp(self, filename):
        return os.path.getmtime(filename)
    def extract_timestamp(self, filename):
        return self.timestamp(filename), filename
    def newest_dependency(self, filename):
        # (mtime, path) of the most recently modified transitive dependency.
        return max([self.extract_timestamp(f) for f in self.all_dependencies(filename)])
    def transitive_fingerprint(self, filename, module, compilation_options):
        r"""
        Return a fingerprint of a cython file that is about to be cythonized.
        Fingerprints are looked up in future compilations. If the fingerprint
        is found, the cythonization can be skipped. The fingerprint must
        incorporate everything that has an influence on the generated code.
        """
        try:
            m = hashlib.md5(__version__.encode('UTF-8'))
            m.update(file_hash(filename).encode('UTF-8'))
            for x in sorted(self.all_dependencies(filename)):
                if os.path.splitext(x)[1] not in ('.c', '.cpp', '.h'):
                    m.update(file_hash(x).encode('UTF-8'))
            # Include the module attributes that change the compilation result
            # in the fingerprint. We do not iterate over module.__dict__ and
            # include almost everything here as users might extend Extension
            # with arbitrary (random) attributes that would lead to cache
            # misses.
            m.update(str((module.language, module.py_limited_api, module.np_pythran)).encode('UTF-8'))
            m.update(compilation_options.get_fingerprint().encode('UTF-8'))
            return m.hexdigest()
        except IOError:
            return None
    def distutils_info0(self, filename):
        # DistutilsInfo for a single file, extended with the dependencies
        # and include dirs implied by its "cdef extern from" declarations.
        info = self.parse_dependencies(filename)[3]
        kwds = info.values
        cimports, externs, incdirs = self.cimports_externs_incdirs(filename)
        basedir = os.getcwd()
        # Add dependencies on "cdef extern from ..." files
        if externs:
            externs = _make_relative(externs, basedir)
            if 'depends' in kwds:
                kwds['depends'] = list(set(kwds['depends']).union(externs))
            else:
                kwds['depends'] = list(externs)
        # Add include_dirs to ensure that the C compiler will find the
        # "cdef extern from ..." files
        if incdirs:
            include_dirs = list(kwds.get('include_dirs', []))
            for inc in _make_relative(incdirs, basedir):
                if inc not in include_dirs:
                    include_dirs.append(inc)
            kwds['include_dirs'] = include_dirs
        return info
    def distutils_info(self, filename, aliases=None, base=None):
        # Merged DistutilsInfo over the transitive dependency graph, with
        # aliases substituted and *base* merged in last.
        return (self.transitive_merge(filename, self.distutils_info0, DistutilsInfo.merge)
                .subs(aliases)
                .merge(base))
    def transitive_merge(self, node, extract, merge):
        # Memoized per (extract, merge) pair across calls.
        try:
            seen = self._transitive_cache[extract, merge]
        except KeyError:
            seen = self._transitive_cache[extract, merge] = {}
        return self.transitive_merge_helper(
            node, extract, merge, seen, {}, self.cimported_files)[0]
    def transitive_merge_helper(self, node, extract, merge, seen, stack, outgoing):
        # DFS that merges extract() values over the graph while tolerating
        # cycles: a node inside a cycle is only memoized once the cycle's
        # head (the shallowest node on the stack) has been fully processed.
        if node in seen:
            return seen[node], None
        deps = extract(node)
        if node in stack:
            return deps, node
        try:
            stack[node] = len(stack)
            loop = None
            for next in outgoing(node):
                sub_deps, sub_loop = self.transitive_merge_helper(next, extract, merge, seen, stack, outgoing)
                if sub_loop is not None:
                    # Keep the shallowest (earliest) cycle head found.
                    if loop is not None and stack[loop] < stack[sub_loop]:
                        pass
                    else:
                        loop = sub_loop
                deps = merge(deps, sub_deps)
            if loop == node:
                loop = None
            if loop is None:
                seen[node] = deps
            return deps, loop
        finally:
            del stack[node]
# Process-wide DependencyTree singleton, created lazily below.
_dep_tree = None
def create_dependency_tree(ctx=None, quiet=False):
    # Return the shared DependencyTree, creating it (with a default
    # compilation Context if none is given) on first use.
    global _dep_tree
    if _dep_tree is None:
        if ctx is None:
            ctx = Context(["."], CompilationOptions(default_options))
        _dep_tree = DependencyTree(ctx, quiet=quiet)
    return _dep_tree
# If this changes, change also docs/src/reference/compilation.rst
# which mentions this function
def default_create_extension(template, kwds):
    # Default extension factory used by cythonize(): build an Extension of
    # the same class as *template* from the collected keyword arguments and
    # return (extension, metadata); the metadata is later embedded as JSON
    # in the generated C file.
    if 'depends' in kwds:
        include_dirs = kwds.get('include_dirs', []) + ["."]
        depends = resolve_depends(kwds['depends'], include_dirs)
        kwds['depends'] = sorted(set(depends + template.depends))
    t = template.__class__
    ext = t(**kwds)
    metadata = dict(distutils=kwds, module_name=kwds['name'])
    return (ext, metadata)
# This may be useful for advanced users?
def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=False, language=None,
                          exclude_failures=False):
    """
    Expand *patterns* (glob strings and/or Extension objects) into a list
    of distutils Extension instances ready for cythonization.

    Returns (module_list, module_metadata) where *module_metadata* maps
    module names to the metadata dict that is later embedded as JSON in
    the generated C file.
    """
    # BUG FIX: the "collections.Iterable" ABC alias was removed in
    # Python 3.10; the ABCs live in collections.abc since Python 3.3.
    try:
        from collections.abc import Iterable
    except ImportError:  # Python < 3.3
        from collections import Iterable
    if language is not None:
        print('Please put "# distutils: language=%s" in your .pyx or .pxd file(s)' % language)
    if exclude is None:
        exclude = []
    if patterns is None:
        return [], {}
    elif isinstance(patterns, basestring) or not isinstance(patterns, Iterable):
        patterns = [patterns]
    # Explicitly named Extensions win over glob matches for the same module.
    explicit_modules = set([m.name for m in patterns if isinstance(m, Extension)])
    seen = set()
    deps = create_dependency_tree(ctx, quiet=quiet)
    to_exclude = set()
    if not isinstance(exclude, list):
        exclude = [exclude]
    for pattern in exclude:
        to_exclude.update(map(os.path.abspath, extended_iglob(pattern)))
    module_list = []
    module_metadata = {}
    # workaround for setuptools
    if 'setuptools' in sys.modules:
        Extension_distutils = sys.modules['setuptools.extension']._Extension
        Extension_setuptools = sys.modules['setuptools'].Extension
    else:
        # dummy class, in case we do not have setuptools
        Extension_distutils = Extension
        class Extension_setuptools(Extension): pass
    # if no create_extension() function is defined, use a simple
    # default function.
    create_extension = ctx.options.create_extension or default_create_extension
    for pattern in patterns:
        if isinstance(pattern, str):
            filepattern = pattern
            template = Extension(pattern, [])  # Fake Extension without sources
            name = '*'
            base = None
            ext_language = language
        elif isinstance(pattern, (Extension_distutils, Extension_setuptools)):
            cython_sources = [s for s in pattern.sources
                              if os.path.splitext(s)[1] in ('.py', '.pyx')]
            if cython_sources:
                filepattern = cython_sources[0]
                if len(cython_sources) > 1:
                    print("Warning: Multiple cython sources found for extension '%s': %s\n"
                          "See http://cython.readthedocs.io/en/latest/src/userguide/sharing_declarations.html "
                          "for sharing declarations among Cython files." % (pattern.name, cython_sources))
            else:
                # ignore non-cython modules
                module_list.append(pattern)
                continue
            template = pattern
            name = template.name
            base = DistutilsInfo(exn=template)
            ext_language = None  # do not override whatever the Extension says
        else:
            msg = str("pattern is not of type str nor subclass of Extension (%s)"
                      " but of type %s and class %s" % (repr(Extension),
                                                        type(pattern),
                                                        pattern.__class__))
            raise TypeError(msg)
        for file in nonempty(sorted(extended_iglob(filepattern)), "'%s' doesn't match any files" % filepattern):
            if os.path.abspath(file) in to_exclude:
                continue
            module_name = deps.fully_qualified_name(file)
            if '*' in name:
                # Glob pattern: skip modules named explicitly elsewhere.
                if module_name in explicit_modules:
                    continue
            elif name:
                module_name = name
            if module_name == 'cython':
                raise ValueError('cython is a special module, cannot be used as a module name')
            if module_name not in seen:
                try:
                    kwds = deps.distutils_info(file, aliases, base).values
                except Exception:
                    if exclude_failures:
                        continue
                    raise
                if base is not None:
                    for key, value in base.values.items():
                        if key not in kwds:
                            kwds[key] = value
                kwds['name'] = module_name
                sources = [file] + [m for m in template.sources if m != filepattern]
                if 'sources' in kwds:
                    # allow users to add .c files etc.
                    for source in kwds['sources']:
                        source = encode_filename_in_py2(source)
                        if source not in sources:
                            sources.append(source)
                kwds['sources'] = sources
                if ext_language and 'language' not in kwds:
                    kwds['language'] = ext_language
                np_pythran = kwds.pop('np_pythran', False)
                # Create the new extension
                m, metadata = create_extension(template, kwds)
                m.np_pythran = np_pythran or getattr(m, 'np_pythran', False)
                if m.np_pythran:
                    update_pythran_extension(m)
                module_list.append(m)
                # Store metadata (this will be written as JSON in the
                # generated C file but otherwise has no purpose)
                module_metadata[module_name] = metadata
                if file not in m.sources:
                    # Old setuptools unconditionally replaces .pyx with .c/.cpp
                    target_file = os.path.splitext(file)[0] + ('.cpp' if m.language == 'c++' else '.c')
                    try:
                        m.sources.remove(target_file)
                    except ValueError:
                        # never seen this in the wild, but probably better to warn about this unexpected case
                        print("Warning: Cython source file not found in sources list, adding %s" % file)
                    m.sources.insert(0, file)
                seen.add(name)
    return module_list, module_metadata
# This is the user-exposed entry point.
def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, force=False, language=None,
              exclude_failures=False, **options):
    """
    Compile a set of source modules into C/C++ files and return a list of distutils
    Extension objects for them.

    As module list, pass either a glob pattern, a list of glob patterns or a list of
    Extension objects. The latter allows you to configure the extensions separately
    through the normal distutils options.

    When using glob patterns, you can exclude certain module names explicitly
    by passing them into the 'exclude' option.

    To globally enable C++ mode, you can pass language='c++'. Otherwise, this
    will be determined at a per-file level based on compiler directives. This
    affects only modules found based on file names. Extension instances passed
    into cythonize() will not be changed.

    For parallel compilation, set the 'nthreads' option to the number of
    concurrent builds.

    For a broad 'try to compile' mode that ignores compilation failures and
    simply excludes the failed extensions, pass 'exclude_failures=True'. Note
    that this only really makes sense for compiling .py files which can also
    be used without compilation.

    Additional compilation options can be passed as keyword arguments.
    """
    if exclude is None:
        exclude = []
    if 'include_path' not in options:
        options['include_path'] = ['.']
    if 'common_utility_include_dir' in options:
        safe_makedirs(options['common_utility_include_dir'])
    # One CompilationOptions instance per target language mode.
    pythran_options = None
    if PythranAvailable:
        pythran_options = CompilationOptions(**options)
        pythran_options.cplus = True
        pythran_options.np_pythran = True
    c_options = CompilationOptions(**options)
    cpp_options = CompilationOptions(**options); cpp_options.cplus = True
    ctx = c_options.create_context()
    options = c_options
    module_list, module_metadata = create_extension_list(
        module_list,
        exclude=exclude,
        ctx=ctx,
        quiet=quiet,
        exclude_failures=exclude_failures,
        language=language,
        aliases=aliases)
    deps = create_dependency_tree(ctx, quiet=quiet)
    build_dir = getattr(options, 'build_dir', None)
    def copy_to_build_dir(filepath, root=os.getcwd()):
        # Mirror a dependency file into build_dir, keeping its
        # cwd-relative layout.
        filepath_abs = os.path.abspath(filepath)
        if os.path.isabs(filepath):
            filepath = filepath_abs
        if filepath_abs.startswith(root):
            # distutil extension depends are relative to cwd
            mod_dir = join_path(build_dir,
                                os.path.dirname(_relpath(filepath, root)))
            copy_once_if_newer(filepath_abs, mod_dir)
    modules_by_cfile = collections.defaultdict(list)
    to_compile = []
    for m in module_list:
        if build_dir:
            for dep in m.depends:
                copy_to_build_dir(dep)
        cy_sources = [
            source for source in m.sources
            if os.path.splitext(source)[1] in ('.pyx', '.py')]
        if len(cy_sources) == 1:
            # normal "special" case: believe the Extension module name to allow user overrides
            full_module_name = m.name
        else:
            # infer FQMN from source files
            full_module_name = None
        new_sources = []
        for source in m.sources:
            base, ext = os.path.splitext(source)
            if ext in ('.pyx', '.py'):
                # Pick the output file name and options for this source.
                if m.np_pythran:
                    c_file = base + '.cpp'
                    options = pythran_options
                elif m.language == 'c++':
                    c_file = base + '.cpp'
                    options = cpp_options
                else:
                    c_file = base + '.c'
                    options = c_options
                # setup for out of place build directory if enabled
                if build_dir:
                    c_file = os.path.join(build_dir, c_file)
                    dir = os.path.dirname(c_file)
                    safe_makedirs_once(dir)
                if os.path.exists(c_file):
                    c_timestamp = os.path.getmtime(c_file)
                else:
                    c_timestamp = -1
                # Priority goes first to modified files, second to direct
                # dependents, and finally to indirect dependents.
                if c_timestamp < deps.timestamp(source):
                    dep_timestamp, dep = deps.timestamp(source), source
                    priority = 0
                else:
                    dep_timestamp, dep = deps.newest_dependency(source)
                    priority = 2 - (dep in deps.immediate_dependencies(source))
                if force or c_timestamp < dep_timestamp:
                    if not quiet and not force:
                        if source == dep:
                            print("Compiling %s because it changed." % source)
                        else:
                            print("Compiling %s because it depends on %s." % (source, dep))
                    if not force and options.cache:
                        fingerprint = deps.transitive_fingerprint(source, m, options)
                    else:
                        fingerprint = None
                    to_compile.append((
                        priority, source, c_file, fingerprint, quiet,
                        options, not exclude_failures, module_metadata.get(m.name),
                        full_module_name))
                new_sources.append(c_file)
                modules_by_cfile[c_file].append(m)
            else:
                new_sources.append(source)
                if build_dir:
                    copy_to_build_dir(source)
        m.sources = new_sources
    if options.cache:
        if not os.path.exists(options.cache):
            os.makedirs(options.cache)
    to_compile.sort()
    # Drop "priority" component of "to_compile" entries and add a
    # simple progress indicator.
    N = len(to_compile)
    progress_fmt = "[{0:%d}/{1}] " % len(str(N))
    for i in range(N):
        progress = progress_fmt.format(i+1, N)
        to_compile[i] = to_compile[i][1:] + (progress,)
    if N <= 1:
        nthreads = 0
    if nthreads:
        # Requires multiprocessing (or Python >= 2.6)
        try:
            import multiprocessing
            pool = multiprocessing.Pool(
                nthreads, initializer=_init_multiprocessing_helper)
        except (ImportError, OSError):
            print("multiprocessing required for parallel cythonization")
            nthreads = 0
        else:
            # This is a bit more involved than it should be, because KeyboardInterrupts
            # break the multiprocessing workers when using a normal pool.map().
            # See, for example:
            # http://noswap.com/blog/python-multiprocessing-keyboardinterrupt
            try:
                result = pool.map_async(cythonize_one_helper, to_compile, chunksize=1)
                pool.close()
                while not result.ready():
                    try:
                        result.get(99999)  # seconds
                    except multiprocessing.TimeoutError:
                        pass
            except KeyboardInterrupt:
                pool.terminate()
                raise
            pool.join()
    if not nthreads:
        for args in to_compile:
            cythonize_one(*args)
    if exclude_failures:
        # Drop modules whose C file is missing or is only an "#error" stub.
        failed_modules = set()
        for c_file, modules in modules_by_cfile.items():
            if not os.path.exists(c_file):
                failed_modules.update(modules)
            elif os.path.getsize(c_file) < 200:
                f = io_open(c_file, 'r', encoding='iso8859-1')
                try:
                    if f.read(len('#error ')) == '#error ':
                        # dead compilation result
                        failed_modules.update(modules)
                finally:
                    f.close()
        if failed_modules:
            for module in failed_modules:
                module_list.remove(module)
            print("Failed compilations: %s" % ', '.join(sorted([
                module.name for module in failed_modules])))
    if options.cache:
        cleanup_cache(options.cache, getattr(options, 'cache_size', 1024 * 1024 * 100))
    # cythonize() is often followed by the (non-Python-buffered)
    # compiler output, flush now to avoid interleaving output.
    sys.stdout.flush()
    return module_list
if os.environ.get('XML_RESULTS'):
    compile_result_dir = os.environ['XML_RESULTS']
    def record_results(func):
        # Decorator: record each cythonize_one() run as a JUnit-style XML
        # result file in $XML_RESULTS (consumed by CI test harnesses).
        def with_record(*args):
            t = time.time()
            success = True
            try:
                try:
                    func(*args)
                except:
                    success = False
            finally:
                t = time.time() - t
                module = fully_qualified_name(args[0])
                name = "cythonize." + module
                failures = 1 - success
                if success:
                    failure_item = ""
                else:
                    failure_item = "failure"
                output = open(os.path.join(compile_result_dir, name + ".xml"), "w")
                output.write("""
                    <?xml version="1.0" ?>
                    <testsuite name="%(name)s" errors="0" failures="%(failures)s" tests="1" time="%(t)s">
                    <testcase classname="%(name)s" name="cythonize">
                    %(failure_item)s
                    </testcase>
                    </testsuite>
                """.strip() % locals())
                output.close()
        return with_record
else:
    def record_results(func):
        # No XML_RESULTS requested: the decorator is a no-op.
        return func
# TODO: Share context? Issue: pyx processing leaks into pxd module
@record_results
def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None,
                  raise_on_failure=True, embedded_metadata=None, full_module_name=None,
                  progress=""):
    # Compile a single .pyx/.py file to C/C++, consulting and updating the
    # fingerprint cache when one is configured.
    from ..Compiler.Main import compile_single, default_options
    from ..Compiler.Errors import CompileError, PyrexError
    if fingerprint:
        if not os.path.exists(options.cache):
            safe_makedirs(options.cache)
        # Cython-generated c files are highly compressible.
        # (E.g. a compression ratio of about 10 for Sage).
        fingerprint_file_base = join_path(
            options.cache, "%s-%s" % (os.path.basename(c_file), fingerprint))
        gz_fingerprint_file = fingerprint_file_base + gzip_ext
        zip_fingerprint_file = fingerprint_file_base + '.zip'
        if os.path.exists(gz_fingerprint_file) or os.path.exists(zip_fingerprint_file):
            if not quiet:
                print("%sFound compiled %s in cache" % (progress, pyx_file))
            # Touch the cache entry (utime) so cleanup_cache() treats it
            # as recently used, then unpack it in place of compiling.
            if os.path.exists(gz_fingerprint_file):
                os.utime(gz_fingerprint_file, None)
                with contextlib.closing(gzip_open(gz_fingerprint_file, 'rb')) as g:
                    with contextlib.closing(open(c_file, 'wb')) as f:
                        shutil.copyfileobj(g, f)
            else:
                os.utime(zip_fingerprint_file, None)
                dirname = os.path.dirname(c_file)
                with contextlib.closing(zipfile.ZipFile(zip_fingerprint_file)) as z:
                    for artifact in z.namelist():
                        z.extract(artifact, os.path.join(dirname, artifact))
            return
    if not quiet:
        print("%sCythonizing %s" % (progress, pyx_file))
    if options is None:
        options = CompilationOptions(default_options)
    options.output_file = c_file
    options.embedded_metadata = embedded_metadata
    any_failures = 0
    try:
        result = compile_single(pyx_file, options, full_module_name=full_module_name)
        if result.num_errors > 0:
            any_failures = 1
    except (EnvironmentError, PyrexError) as e:
        sys.stderr.write('%s\n' % e)
        any_failures = 1
        # XXX
        import traceback
        traceback.print_exc()
    except Exception:
        if raise_on_failure:
            raise
        import traceback
        traceback.print_exc()
        any_failures = 1
    if any_failures:
        if raise_on_failure:
            raise CompileError(None, pyx_file)
        elif os.path.exists(c_file):
            os.remove(c_file)
    elif fingerprint:
        # Store the successful result in the cache: gzip for a single
        # artifact, a zip archive when header/api/impl files exist too.
        artifacts = list(filter(None, [
            getattr(result, attr, None)
            for attr in ('c_file', 'h_file', 'api_file', 'i_file')]))
        if len(artifacts) == 1:
            fingerprint_file = gz_fingerprint_file
            with contextlib.closing(open(c_file, 'rb')) as f:
                with contextlib.closing(gzip_open(fingerprint_file + '.tmp', 'wb')) as g:
                    shutil.copyfileobj(f, g)
        else:
            fingerprint_file = zip_fingerprint_file
            with contextlib.closing(zipfile.ZipFile(
                    fingerprint_file + '.tmp', 'w', zipfile_compression_mode)) as zip:
                for artifact in artifacts:
                    zip.write(artifact, os.path.basename(artifact))
        # Publish atomically via rename so readers never see partial files.
        os.rename(fingerprint_file + '.tmp', fingerprint_file)
def cythonize_one_helper(m):
    # Pool worker wrapper: unpack one to_compile work item.  The traceback
    # is printed here because multiprocessing may otherwise lose it.
    import traceback
    try:
        return cythonize_one(*m)
    except Exception:
        traceback.print_exc()
        raise
def _init_multiprocessing_helper():
    # KeyboardInterrupt kills workers, so don't let them get it
    # (the parent handles Ctrl-C and terminates the pool explicitly).
    import signal
    signal.signal(signal.SIGINT, signal.SIG_IGN)
def cleanup_cache(cache, target_size, ratio=.85):
    """Shrink the *cache* directory to below ``target_size`` bytes.

    First tries a cheap ``du`` call to decide whether any cleanup is
    needed at all; if the directory exceeds ``target_size``, files are
    deleted (ordered by access time) until the total drops below
    ``target_size * ratio``.
    """
    try:
        p = subprocess.Popen(['du', '-s', '-k', os.path.abspath(cache)],
                             stdout=subprocess.PIPE)
        res = p.wait()
        if res == 0:
            # 'du -s -k' reports kilobytes; convert to bytes.
            total_size = 1024 * int(p.stdout.read().strip().split()[0])
            if total_size < target_size:
                return
    except (OSError, ValueError):
        # 'du' unavailable (e.g. Windows) or produced unexpected output;
        # fall back to stat'ing every file below.
        pass
    total_size = 0
    entries = []  # (atime, size, path); renamed from 'all' (shadowed builtin)
    for filename in os.listdir(cache):
        path = os.path.join(cache, filename)
        s = os.stat(path)
        total_size += s.st_size
        entries.append((s.st_atime, s.st_size, path))
    if total_size > target_size:
        # NOTE(review): reversed(sorted(...)) evicts the *most* recently
        # accessed files first, which looks inverted for an LRU policy;
        # preserved as-is -- confirm intent before changing.
        for _atime, size, path in reversed(sorted(entries)):
            os.unlink(path)
            total_size -= size
            if total_size < target_size * ratio:
                break
| 37.074225 | 112 | 0.570391 |
acf3d4e45a3248a3d32b3446ed45687231ed275f | 4,344 | py | Python | epo_ops/middlewares/throttle/storages/sqlite.py | MMustafa53/python-epo-ops-client | 3d3afb966bc3d438b2b120ed8484a812e161eba4 | [
"Apache-2.0"
] | null | null | null | epo_ops/middlewares/throttle/storages/sqlite.py | MMustafa53/python-epo-ops-client | 3d3afb966bc3d438b2b120ed8484a812e161eba4 | [
"Apache-2.0"
] | null | null | null | epo_ops/middlewares/throttle/storages/sqlite.py | MMustafa53/python-epo-ops-client | 3d3afb966bc3d438b2b120ed8484a812e161eba4 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import division
import logging
import os
import re
import sqlite3
from datetime import timedelta
from itertools import cycle
from dateutil.parser import parse
from ....utils import makedirs, now
from .storage import Storage
log = logging.getLogger(__name__)
def convert_timestamp(ts):
    # sqlite3 column converter: parse the stored 'timestamp' value back
    # into a datetime using dateutil's flexible parser.
    return parse(ts)
# Register the converter so SELECTed columns declared as 'timestamp' come
# back as datetime objects (requires detect_types=sqlite3.PARSE_DECLTYPES).
sqlite3.register_converter("timestamp", convert_timestamp)

# Default on-disk location for the throttle history database.
DEFAULT_DB_PATH = "/var/tmp/python-epo-ops-client/throttle_history.db"
class SQLite(Storage):
    """Throttle-history storage backed by an SQLite database file.

    Records, per OPS service, the most recent throttling status reported
    by the EPO OPS API so that clients can compute how long to wait
    before issuing the next request.
    """

    # Service names used in EPO OPS throttling headers; each gets a
    # *_status, *_limit and *_retry_after column in throttle_history.
    SERVICES = ("images", "inpadoc", "other", "retrieval", "search")

    def __init__(self, db_path=DEFAULT_DB_PATH):
        """Open (creating directories/tables as needed) the DB at *db_path*."""
        self.db_path = db_path
        makedirs(os.path.dirname(db_path))
        # PARSE_DECLTYPES activates the registered 'timestamp' converter
        # for SELECTed columns declared with that type.
        self.db = sqlite3.connect(db_path, detect_types=sqlite3.PARSE_DECLTYPES)
        self.db.row_factory = sqlite3.Row
        self.prepare()

    def service_columns(self, include_type=False):
        """Return the per-service column names.

        When *include_type* is true, each name is suffixed with its SQL
        type, suitable for use in a CREATE TABLE statement.
        """
        columns = []
        for service in self.SERVICES:
            columns.extend(
                [
                    "{0}_status".format(service),
                    "{0}_limit".format(service),
                    "{0}_retry_after".format(service),
                ]
            )
        if include_type:
            # Types repeat in (text, integer, integer) order, matching
            # the (status, limit, retry_after) column triple.
            for i, pair in enumerate(
                zip(columns, cycle(["text", "integer", "integer"]))
            ):
                columns[i] = "{0} {1}".format(*pair)
        return columns

    def prepare(self):
        """Create the throttle_history table if it does not exist yet."""
        sql = """\
        CREATE TABLE throttle_history(
            timestamp timestamp primary key,
            system_status text, {0}
        )
        """
        try:
            with self.db:
                self.db.execute(sql.format(", ".join(self.service_columns(True))))
        except sqlite3.OperationalError:
            # Table already exists.
            pass

    def prune(self):
        """Delete history rows older than one minute."""
        sql = """\
        DELETE FROM throttle_history
        WHERE timestamp < datetime('now', '-1 minute')
        """
        with self.db:
            self.db.execute(sql)

    def parse_throttle(self, throttle):
        """Parse an X-Throttling-Control header value.

        Returns ``{'system_status': <word>, 'services': {name:
        {'status': <word>, 'limit': <int>}}}``.
        """
        re_str = r"{0}=(\w+):(\d+)"
        status = {"services": {}}
        status["system_status"] = re.search("^(\\w+) \\(", throttle).group(1)
        for service in self.SERVICES:
            match = re.search(re_str.format(service), throttle)
            status["services"][service] = {
                "status": match.group(1),
                "limit": int(match.group(2)),
            }
        return status

    def convert(self, status, retry):
        """Build the INSERT statement and bound values for one history row."""
        sql = (
            "INSERT INTO throttle_history(timestamp, system_status, {0}) "
            "VALUES ({1})"
        ).format(", ".join(self.service_columns()), ", ".join(["?"] * 17))
        values = [now(), status["system_status"]]
        for service in self.SERVICES:
            service_status = status["services"][service]["status"]
            service_limit = status["services"][service]["limit"]
            service_retry = 0
            # Only a 'black' (blocked) service carries a retry delay.
            if service_status.lower() == "black":
                service_retry = retry
            values.extend([service_status, service_limit, service_retry])
        return sql, values

    def delay_for(self, service):
        """Return seconds to wait before the next *service* request.

        This method is a public interface for a throttle storage class.
        """
        _now = now()
        limit = "{0}_limit".format(service)
        self.prune()
        # Row with the smallest per-minute quota drives the delay.
        sql = ("SELECT * FROM throttle_history ORDER BY {0} limit 1").format(limit)
        with self.db:
            r = self.db.execute(sql).fetchone()
        if not r:  # If there are no rows
            next_run = _now
        elif r[limit] == 0:
            # Service is blocked: wait out the server-provided retry delay.
            next_run = r["timestamp"] + timedelta(
                milliseconds=r["{0}_retry_after".format(service)]
            )
        else:
            # Spread requests evenly across the per-minute quota.
            next_run = _now + timedelta(seconds=60.0 / r[limit])
        td = next_run - _now
        # Manual timedelta -> microseconds conversion, returned as seconds.
        ts = td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6
        return ts / 10 ** 6

    def update(self, headers):
        """Record throttling info from response *headers*, if present.

        This method is a public interface for a throttle storage class.
        """
        self.prune()
        if "x-throttling-control" not in headers:
            return
        status = self.parse_throttle(headers["x-throttling-control"])
        retry_after = int(headers.get("retry-after", 0))
        sql, values = self.convert(status, retry_after)
        with self.db:
            self.db.execute(sql, values)
| 31.478261 | 83 | 0.564457 |
acf3d708d63e659047b070c9fd247f6bd8d3bae5 | 2,664 | py | Python | tests/parsers/eft.py | evepraisal/evepaste | 17df80ef22ea5f7122a1995338abe05d4992b905 | [
"MIT"
] | 10 | 2015-04-29T18:14:25.000Z | 2017-09-25T13:01:48.000Z | tests/parsers/eft.py | evepraisal/evepaste | 17df80ef22ea5f7122a1995338abe05d4992b905 | [
"MIT"
] | 2 | 2017-03-02T09:03:16.000Z | 2017-03-02T09:04:09.000Z | tests/parsers/eft.py | evepraisal/evepaste | 17df80ef22ea5f7122a1995338abe05d4992b905 | [
"MIT"
] | 10 | 2015-04-03T03:06:45.000Z | 2022-02-25T22:20:16.000Z | """
tests.parsers.eft
~~~~~~~~~~~~~~~~~
EFT table tests
"""
from evepaste import parse_eft
from evepaste.exceptions import Unparsable
from tests import TableTestGroup
# Table-driven tests for the EFT (EVE Fitting Tool) paste parser.
EFT_TABLE = TableTestGroup(parse_eft)

# Basic fit: repeated modules collapse into one entry with a summed
# quantity, '[empty high slot]' placeholders are dropped, and a trailing
# 'xN' count (drones) becomes the quantity.
EFT_TABLE.add_test('''[Rifter, Fleet Tackle]
Nanofiber Internal Structure I
Nanofiber Internal Structure I
Overdrive Injector System I
Stasis Webifier I
Warp Disruptor I
1MN Microwarpdrive I
200mm AutoCannon I, EMP S
200mm AutoCannon I, EMP S
200mm AutoCannon I, EMP S
[empty high slot]
[empty high slot]
Garde I x5
''', ({'modules': [{'ammo': 'EMP S', 'name': '200mm AutoCannon I',
                    'quantity': 3},
                   {'name': '1MN Microwarpdrive I', 'quantity': 1},
                   {'name': 'Garde I', 'quantity': 5},
                   {'name': 'Nanofiber Internal Structure I', 'quantity': 2},
                   {'name': 'Overdrive Injector System I', 'quantity': 1},
                   {'name': 'Stasis Webifier I', 'quantity': 1},
                   {'name': 'Warp Disruptor I', 'quantity': 1}],
      'name': 'Fleet Tackle',
      'ship': 'Rifter'}, []))

# Fit with a leading blank line before the header and ammo-loaded
# modules ("Module, Ammo") that are grouped by (name, ammo) pair.
EFT_TABLE.add_test('''
[Apocalypse, Pimpin' Sniper Fit]
Heat Sink II
Heat Sink II
Heat Sink II
Tracking Enhancer II
Tracking Enhancer II
Reactor Control Unit II
Beta Reactor Control: Reaction Control I
100MN Microwarpdrive I
Sensor Booster II, Targeting Range Script
Sensor Booster II, Targeting Range Script
F-90 Positional Sensor Subroutines
Tachyon Beam Laser II, Aurora L
Tachyon Beam Laser II, Aurora L
Tachyon Beam Laser II, Aurora L
Tachyon Beam Laser II, Aurora L
Tachyon Beam Laser II, Aurora L
Tachyon Beam Laser II, Aurora L
Tachyon Beam Laser II, Aurora L
Tachyon Beam Laser II, Aurora L''', (
    {'modules': [{'ammo': 'Targeting Range Script',
                  'name': 'Sensor Booster II',
                  'quantity': 2},
                 {'ammo': 'Aurora L',
                  'name': 'Tachyon Beam Laser II',
                  'quantity': 8},
                 {'name': '100MN Microwarpdrive I', 'quantity': 1},
                 {'name': 'Beta Reactor Control: Reaction Control I',
                  'quantity': 1},
                 {'name': 'F-90 Positional Sensor Subroutines', 'quantity': 1},
                 {'name': 'Heat Sink II', 'quantity': 3},
                 {'name': 'Reactor Control Unit II', 'quantity': 1},
                 {'name': 'Tracking Enhancer II', 'quantity': 2}],
     'name': "Pimpin' Sniper Fit",
     'ship': 'Apocalypse'}, []))

# Header-only fit (no modules) and malformed inputs that must raise.
EFT_TABLE.add_test('[Rifter,test]',
                   ({'modules': [], 'name': 'test', 'ship': 'Rifter'}, []))
EFT_TABLE.add_test('', Unparsable)
EFT_TABLE.add_test('[test]', Unparsable)
acf3d9e7ac27a417671907c16c8d1684fac2f385 | 2,906 | py | Python | src/pydocstyle/cli.py | jobevers/pydocstyle | 8bc051181171903d91a898415faa3272c46f461e | [
"MIT"
] | null | null | null | src/pydocstyle/cli.py | jobevers/pydocstyle | 8bc051181171903d91a898415faa3272c46f461e | [
"MIT"
] | null | null | null | src/pydocstyle/cli.py | jobevers/pydocstyle | 8bc051181171903d91a898415faa3272c46f461e | [
"MIT"
] | null | null | null | """Command line interface for pydocstyle."""
import itertools
import logging
import multiprocessing
import sys
from .utils import log
from .violations import Error
from .config import ConfigurationParser, IllegalConfiguration
from .checker import check
__all__ = ('main', )
class ReturnCode(object):
    """Process exit codes produced by ``run_pydocstyle``."""

    no_violations_found = 0  # success: all checked files are clean
    violations_found = 1     # at least one docstring violation reported
    invalid_options = 2      # bad command line or configuration file
# Multiprocessing map target for `check`: unpacks the argument tuple and
# eagerly consumes the result iterator so it can cross process boundaries.
def _check(args):
    filename, checked_codes, ignore_decorators = args
    return list(
        check(
            (filename,),
            select=checked_codes,
            ignore_decorators=ignore_decorators,
        )
    )
def run_pydocstyle():
    """Check the configured files and report docstring violations.

    Returns a ``ReturnCode`` value suitable for passing to ``sys.exit``.
    """
    log.setLevel(logging.DEBUG)
    conf = ConfigurationParser()
    setup_stream_handlers(conf.get_default_run_configuration())

    try:
        conf.parse()
    except IllegalConfiguration:
        return ReturnCode.invalid_options

    run_conf = conf.get_user_run_configuration()

    # Reset the logger according to the command line arguments
    setup_stream_handlers(run_conf)

    log.debug("starting in debug mode.")

    Error.explain = run_conf.explain
    Error.source = run_conf.source

    # Check files in parallel worker processes.  close()/join() the pool
    # so the workers are reaped instead of being leaked (the pool was
    # previously never released).
    pool = multiprocessing.Pool()
    try:
        args = conf.get_files_to_check()
        try:
            errors = itertools.chain.from_iterable(pool.map(_check, args))
        except IllegalConfiguration as error:
            # An illegal configuration file was found during file generation.
            log.error(error.args[0])
            return ReturnCode.invalid_options
    finally:
        pool.close()
        pool.join()

    count = 0
    for error in errors:
        if hasattr(error, 'code'):
            sys.stdout.write('%s\n' % error)
            count += 1
    if count == 0:
        exit_code = ReturnCode.no_violations_found
    else:
        exit_code = ReturnCode.violations_found
    if run_conf.count:
        print(count)
    return exit_code
def main():
    """Run pydocstyle as a script."""
    try:
        exit_code = run_pydocstyle()
    except KeyboardInterrupt:
        # Exit quietly on Ctrl-C.
        pass
    else:
        sys.exit(exit_code)
def setup_stream_handlers(conf):
    """Setup logging stream handlers according to the options.

    DEBUG/INFO records go to stdout (verbosity controlled by
    ``conf.debug`` / ``conf.verbose``); WARNING and above go to stderr.
    """
    class StdoutFilter(logging.Filter):
        # Only let DEBUG/INFO records through to stdout; warnings and
        # errors are routed to the stderr handler below.
        def filter(self, record):
            return record.levelno in (logging.DEBUG, logging.INFO)

    log.handlers = []

    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.addFilter(StdoutFilter())
    # Exactly one branch sets the level (a redundant pre-set of WARNING
    # before this chain was removed).
    if conf.debug:
        stdout_handler.setLevel(logging.DEBUG)
    elif conf.verbose:
        stdout_handler.setLevel(logging.INFO)
    else:
        stdout_handler.setLevel(logging.WARNING)
    log.addHandler(stdout_handler)

    stderr_handler = logging.StreamHandler(sys.stderr)
    msg_format = "%(levelname)s: %(message)s"
    stderr_handler.setFormatter(logging.Formatter(fmt=msg_format))
    stderr_handler.setLevel(logging.WARNING)
    log.addHandler(stderr_handler)
acf3da5c12aff29213525043ddc9a8947f6d15ca | 433 | py | Python | tests/test_comment.py | MutumaMutuma/Personal-Blog | e3392056004460b7882814d325a029bb2119929d | [
"MIT"
] | 1 | 2018-10-22T10:42:08.000Z | 2018-10-22T10:42:08.000Z | tests/test_comment.py | MutumaMutuma/Personal-Blog | e3392056004460b7882814d325a029bb2119929d | [
"MIT"
] | null | null | null | tests/test_comment.py | MutumaMutuma/Personal-Blog | e3392056004460b7882814d325a029bb2119929d | [
"MIT"
] | null | null | null | import unittest
from app.models import Comment
class CommentTest(unittest.TestCase):
    '''
    Test class to verify the behaviour of the Comment model.
    '''

    def setUp(self):
        '''
        Set up method that will run before every test: create a sample
        comment instance to exercise.
        '''
        self.new_comment = Comment(id =1, comment_content = 'I love this post')

    def test_instance(self):
        '''
        Verify the object created in setUp is a Comment instance.
        '''
        self.assertTrue(isinstance(self.new_comment,Comment))
acf3dab4f55297a13ff1eb7857b1977a389c3a26 | 22,121 | py | Python | openstack_dashboard/dashboards/identity/domains/workflows.py | NunoEdgarGFlowHub/horizon | 73a0bbd43ea78ac5337f7d00977ec5f32452067e | [
"Apache-2.0"
] | 1 | 2018-04-17T02:32:05.000Z | 2018-04-17T02:32:05.000Z | openstack_dashboard/dashboards/identity/domains/workflows.py | NunoEdgarGFlowHub/horizon | 73a0bbd43ea78ac5337f7d00977ec5f32452067e | [
"Apache-2.0"
] | 3 | 2021-01-21T14:27:55.000Z | 2021-06-10T23:08:49.000Z | openstack_dashboard/dashboards/identity/domains/workflows.py | Surfndez/horizon | a56765b6b3dbc09fd467b83a57bea2433ae3909e | [
"Apache-2.0"
] | null | null | null | # Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.conf import settings
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from openstack_auth import utils
from horizon import exceptions
from horizon import forms
from horizon import messages
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard.dashboards.identity.domains import constants
LOG = logging.getLogger(__name__)
class CreateDomainInfoAction(workflows.Action):
    """Form action collecting the basic attributes of a new domain."""

    name = forms.CharField(label=_("Name"))
    description = forms.CharField(widget=forms.widgets.Textarea(
                                  attrs={'rows': 4}),
                                  label=_("Description"),
                                  required=False)
    enabled = forms.BooleanField(label=_("Enabled"),
                                 required=False,
                                 initial=True)

    class Meta(object):
        name = _("Domain Information")
        slug = "create_domain"
        help_text = _("Domains provide separation between users and "
                      "infrastructure used by different organizations.")
class CreateDomainInfo(workflows.Step):
    """Workflow step wrapping CreateDomainInfoAction.

    The contributed keys populate the workflow context consumed by
    CreateDomain.handle().
    """

    action_class = CreateDomainInfoAction
    contributes = ("domain_id",
                   "name",
                   "description",
                   "enabled")
class UpdateDomainUsersAction(workflows.MembershipAction):
    """Membership form assigning roles to users within a domain.

    Builds one MultipleChoiceField per Keystone role, listing the
    domain's users, pre-selected according to current role assignments.
    """

    def __init__(self, request, *args, **kwargs):
        super(UpdateDomainUsersAction, self).__init__(request,
                                                      *args,
                                                      **kwargs)
        domain_id = self.initial.get("domain_id", '')

        # Get the default role
        try:
            default_role = api.keystone.get_default_role(self.request)
            # Default role is necessary to add members to a domain
            if default_role is None:
                default = getattr(settings,
                                  "OPENSTACK_KEYSTONE_DEFAULT_ROLE", None)
                msg = (_('Could not find default role "%s" in Keystone') %
                       default)
                raise exceptions.NotFound(msg)
        except Exception:
            exceptions.handle(self.request,
                              _('Unable to find default role.'),
                              redirect=reverse(constants.DOMAINS_INDEX_URL))
        default_role_name = self.get_default_role_field_name()
        self.fields[default_role_name] = forms.CharField(required=False)
        self.fields[default_role_name].initial = default_role.id

        # Get list of available users
        all_users = []
        try:
            all_users = api.keystone.user_list(request,
                                               domain=domain_id)
        except Exception:
            exceptions.handle(request, _('Unable to retrieve user list.'))
        users_list = [(user.id, user.name) for user in all_users]

        # Get list of roles; one selection field is registered per role.
        role_list = []
        try:
            role_list = api.keystone.role_list(request)
        except Exception:
            exceptions.handle(request,
                              _('Unable to retrieve role list.'),
                              redirect=reverse(constants.DOMAINS_INDEX_URL))
        for role in role_list:
            field_name = self.get_member_field_name(role.id)
            label = role.name
            self.fields[field_name] = forms.MultipleChoiceField(required=False,
                                                                label=label)
            self.fields[field_name].choices = users_list
            self.fields[field_name].initial = []

        # Figure out users & roles: pre-select each user in the field of
        # every role they currently hold on the domain.
        if domain_id:
            try:
                users_roles = api.keystone.get_domain_users_roles(request,
                                                                  domain_id)
            except Exception:
                exceptions.handle(request,
                                  _('Unable to retrieve user domain role '
                                    'assignments.'),
                                  redirect=reverse(
                                      constants.DOMAINS_INDEX_URL))
            for user_id in users_roles:
                roles_ids = users_roles[user_id]
                for role_id in roles_ids:
                    field_name = self.get_member_field_name(role_id)
                    self.fields[field_name].initial.append(user_id)

    class Meta(object):
        name = _("Domain Members")
        slug = constants.DOMAIN_USER_MEMBER_SLUG
class UpdateDomainUsers(workflows.UpdateMembersStep):
    """Workflow step managing user-role membership of a domain."""

    action_class = UpdateDomainUsersAction
    available_list_title = _("All Users")
    members_list_title = _("Domain Members")
    no_available_text = _("No users found.")
    no_members_text = _("No users.")

    def contribute(self, data, context):
        """Merge the posted per-role user selections into *context*."""
        context = super(UpdateDomainUsers, self).contribute(data, context)
        if not data:
            return context

        request = self.workflow.request
        try:
            roles = api.keystone.role_list(request)
        except Exception:
            exceptions.handle(request,
                              _('Unable to retrieve role list.'),
                              redirect=reverse(constants.DOMAINS_INDEX_URL))

        post = request.POST
        for role in roles:
            field_name = self.get_member_field_name(role.id)
            context[field_name] = post.getlist(field_name)
        return context
class UpdateDomainGroupsAction(workflows.MembershipAction):
    """Membership form assigning roles to groups within a domain.

    Builds one MultipleChoiceField per Keystone role, listing the
    domain's groups, pre-selected according to current role assignments.
    """

    def __init__(self, request, *args, **kwargs):
        super(UpdateDomainGroupsAction, self).__init__(request,
                                                       *args,
                                                       **kwargs)
        err_msg = _('Unable to retrieve group list. Please try again later.')
        domain_id = self.initial.get("domain_id", '')

        # Get the default role
        try:
            default_role = api.keystone.get_default_role(self.request)
            # Default role is necessary to add members to a domain
            if default_role is None:
                default = getattr(settings,
                                  "OPENSTACK_KEYSTONE_DEFAULT_ROLE", None)
                msg = (_('Could not find default role "%s" in Keystone') %
                       default)
                raise exceptions.NotFound(msg)
        except Exception:
            exceptions.handle(self.request,
                              err_msg,
                              redirect=reverse(constants.DOMAINS_INDEX_URL))
        default_role_name = self.get_default_role_field_name()
        self.fields[default_role_name] = forms.CharField(required=False)
        self.fields[default_role_name].initial = default_role.id

        # Get list of available groups
        all_groups = []
        try:
            all_groups = api.keystone.group_list(request,
                                                 domain=domain_id)
        except Exception:
            exceptions.handle(request, err_msg)
        groups_list = [(group.id, group.name) for group in all_groups]

        # Get list of roles; one selection field is registered per role.
        role_list = []
        try:
            role_list = api.keystone.role_list(request)
        except Exception:
            exceptions.handle(request,
                              err_msg,
                              redirect=reverse(constants.DOMAINS_INDEX_URL))
        for role in role_list:
            field_name = self.get_member_field_name(role.id)
            label = role.name
            self.fields[field_name] = forms.MultipleChoiceField(required=False,
                                                                label=label)
            self.fields[field_name].choices = groups_list
            self.fields[field_name].initial = []

        # Figure out groups & roles: pre-select each group in the field
        # of every role it currently holds on the domain.
        if domain_id:
            for group in all_groups:
                try:
                    roles = api.keystone.roles_for_group(self.request,
                                                         group=group.id,
                                                         domain=domain_id)
                except Exception:
                    exceptions.handle(request,
                                      err_msg,
                                      redirect=reverse(
                                          constants.DOMAINS_INDEX_URL))
                for role in roles:
                    field_name = self.get_member_field_name(role.id)
                    self.fields[field_name].initial.append(group.id)

    class Meta(object):
        name = _("Domain Groups")
        slug = constants.DOMAIN_GROUP_MEMBER_SLUG
class UpdateDomainGroups(workflows.UpdateMembersStep):
    """Workflow step managing group-role membership of a domain."""

    action_class = UpdateDomainGroupsAction
    available_list_title = _("All Groups")
    members_list_title = _("Domain Groups")
    no_available_text = _("No groups found.")
    no_members_text = _("No groups.")

    def contribute(self, data, context):
        """Merge the posted per-role group selections into *context*."""
        context = super(UpdateDomainGroups, self).contribute(data, context)
        if data:
            try:
                roles = api.keystone.role_list(self.workflow.request)
            except Exception:
                # Redirect on failure, consistent with
                # UpdateDomainUsers.contribute; without it 'roles' would
                # be unbound below if handle() returned normally.
                exceptions.handle(self.workflow.request,
                                  _('Unable to retrieve role list.'),
                                  redirect=reverse(
                                      constants.DOMAINS_INDEX_URL))
            post = self.workflow.request.POST
            for role in roles:
                field = self.get_member_field_name(role.id)
                context[field] = post.getlist(field)
        return context
class CreateDomain(workflows.Workflow):
    """Workflow that creates a new Keystone domain."""

    slug = "create_domain"
    name = _("Create Domain")
    finalize_button_name = _("Create Domain")
    success_message = _('Created new domain "%s".')
    failure_message = _('Unable to create domain "%s".')
    success_url = constants.DOMAINS_INDEX_URL
    default_steps = (CreateDomainInfo, )

    def format_status_message(self, message):
        """Interpolate the domain name into a status message template."""
        domain_name = self.context.get('name', 'unknown domain')
        return message % domain_name

    def handle(self, request, data):
        """Create the domain; return True on success, False on failure."""
        try:
            LOG.info('Creating domain with name "%s"', data['name'])
            api.keystone.domain_create(request,
                                       name=data['name'],
                                       description=data['description'],
                                       enabled=data['enabled'])
        except Exception:
            exceptions.handle(request, ignore=True)
            return False
        return True
class UpdateDomainInfoAction(CreateDomainInfoAction):
    """Same form fields as creation, reused for editing a domain."""

    class Meta(object):
        name = _("Domain Information")
        slug = 'update_domain'
        help_text = _("Domains provide separation between users and "
                      "infrastructure used by different organizations. "
                      "Edit the domain details to add or remove "
                      "groups in the domain.")
class UpdateDomainInfo(workflows.Step):
    """Workflow step wrapping UpdateDomainInfoAction for the edit flow."""

    action_class = UpdateDomainInfoAction
    depends_on = ("domain_id",)
    contributes = ("name",
                   "description",
                   "enabled")
class UpdateDomain(workflows.Workflow):
    """Workflow editing a domain's details, user roles and group roles."""

    slug = "update_domain"
    name = _("Edit Domain")
    finalize_button_name = _("Save")
    success_message = _('Modified domain "%s".')
    failure_message = _('Unable to modify domain "%s".')
    success_url = constants.DOMAINS_INDEX_URL
    default_steps = (UpdateDomainInfo,
                     UpdateDomainUsers,
                     UpdateDomainGroups)

    def format_status_message(self, message):
        """Interpolate the domain name into a status message template."""
        return message % self.context.get('name', 'unknown domain')

    def _update_domain_members(self, request, domain_id, data):
        """Sync user role assignments on the domain with the posted data.

        Returns True on success, False if any Keystone call failed.
        """
        # update domain members
        users_to_modify = 0
        # Project-user member step
        member_step = self.get_step(constants.DOMAIN_USER_MEMBER_SLUG)
        try:
            # Get our role options
            available_roles = api.keystone.role_list(request)
            # Get the users currently associated with this domain so we
            # can diff against it.
            users_roles = api.keystone.get_domain_users_roles(request,
                                                              domain=domain_id)
            users_to_modify = len(users_roles)

            all_users = api.keystone.user_list(request,
                                               domain=domain_id)
            users_dict = {user.id: user.name for user in all_users}

            for user_id in users_roles.keys():
                # Don't remove roles if the user isn't in the domain
                if user_id not in users_dict:
                    users_to_modify -= 1
                    continue

                # Check if there have been any changes in the roles of
                # existing domain members.
                current_role_ids = list(users_roles[user_id])
                for role in available_roles:
                    field_name = member_step.get_member_field_name(role.id)
                    # Check if the user is in the list of users with this role.
                    if user_id in data[field_name]:
                        # Add it if necessary
                        if role.id not in current_role_ids:
                            # user role has changed
                            api.keystone.add_domain_user_role(
                                request,
                                domain=domain_id,
                                user=user_id,
                                role=role.id)
                        else:
                            # User role is unchanged, so remove it from the
                            # remaining roles list to avoid removing it later.
                            index = current_role_ids.index(role.id)
                            current_role_ids.pop(index)

                # Prevent admins from revoking their own admin role.
                is_current_user = user_id == request.user.id
                # TODO(lcheng) When Horizon moves to Domain scoped token for
                # invoking identity operation, replace this with:
                # domain_id == request.user.domain_id
                is_current_domain = True

                available_admin_role_ids = [
                    role.id for role in available_roles
                    if role.name.lower() in utils.get_admin_roles()
                ]
                admin_role_ids = [role for role in current_role_ids
                                  if role in available_admin_role_ids]
                if admin_role_ids:
                    removing_admin = any([role in current_role_ids
                                          for role in admin_role_ids])
                else:
                    removing_admin = False

                if is_current_user and is_current_domain and removing_admin:
                    # Cannot remove "admin" role on current(admin) domain
                    msg = _('You cannot revoke your administrative privileges '
                            'from the domain you are currently logged into. '
                            'Please switch to another domain with '
                            'administrative privileges or remove the '
                            'administrative role manually via the CLI.')
                    messages.warning(request, msg)
                # Otherwise go through and revoke any removed roles.
                else:
                    for id_to_delete in current_role_ids:
                        api.keystone.remove_domain_user_role(
                            request,
                            domain=domain_id,
                            user=user_id,
                            role=id_to_delete)
                    users_to_modify -= 1

            # Grant new roles on the domain.
            for role in available_roles:
                field_name = member_step.get_member_field_name(role.id)
                # Count how many users may be added for exception handling.
                users_to_modify += len(data[field_name])
            for role in available_roles:
                users_added = 0
                field_name = member_step.get_member_field_name(role.id)
                for user_id in data[field_name]:
                    if user_id not in users_roles:
                        api.keystone.add_domain_user_role(request,
                                                          domain=domain_id,
                                                          user=user_id,
                                                          role=role.id)
                        users_added += 1
                users_to_modify -= users_added
            return True
        except Exception:
            exceptions.handle(request,
                              _('Failed to modify %s project '
                                'members and update domain groups.')
                              % users_to_modify)
            return False

    def _update_domain_groups(self, request, domain_id, data):
        """Sync group role assignments on the domain with the posted data.

        Returns True on success, False if any Keystone call failed.
        """
        # update domain groups
        groups_to_modify = 0
        member_step = self.get_step(constants.DOMAIN_GROUP_MEMBER_SLUG)
        try:
            # Get our role options
            available_roles = api.keystone.role_list(request)
            # Get the groups currently associated with this domain so we
            # can diff against it.
            domain_groups = api.keystone.group_list(request,
                                                    domain=domain_id)
            groups_to_modify = len(domain_groups)
            for group in domain_groups:
                # Check if there have been any changes in the roles of
                # existing domain members.
                current_roles = api.keystone.roles_for_group(
                    self.request,
                    group=group.id,
                    domain=domain_id)
                current_role_ids = [role.id for role in current_roles]
                for role in available_roles:
                    # Check if the group is in the list of groups with
                    # this role.
                    field_name = member_step.get_member_field_name(role.id)
                    if group.id in data[field_name]:
                        # Add it if necessary
                        if role.id not in current_role_ids:
                            # group role has changed
                            api.keystone.add_group_role(
                                request,
                                role=role.id,
                                group=group.id,
                                domain=domain_id)
                        else:
                            # Group role is unchanged, so remove it from
                            # the remaining roles list to avoid removing it
                            # later
                            index = current_role_ids.index(role.id)
                            current_role_ids.pop(index)

                # Revoke any removed roles.
                for id_to_delete in current_role_ids:
                    api.keystone.remove_group_role(request,
                                                   role=id_to_delete,
                                                   group=group.id,
                                                   domain=domain_id)
                groups_to_modify -= 1

            # Grant new roles on the domain.
            for role in available_roles:
                field_name = member_step.get_member_field_name(role.id)
                # Count how many groups may be added for error handling.
                groups_to_modify += len(data[field_name])
            # BUG FIX: this previously used ``if not filter(lambda ...)``,
            # which is always False under Python 3 (filter() returns a
            # truthy iterator), so roles were never granted to newly
            # added groups.
            existing_group_ids = {group.id for group in domain_groups}
            for role in available_roles:
                groups_added = 0
                field_name = member_step.get_member_field_name(role.id)
                for group_id in data[field_name]:
                    if group_id not in existing_group_ids:
                        api.keystone.add_group_role(request,
                                                    role=role.id,
                                                    group=group_id,
                                                    domain=domain_id)
                        groups_added += 1
                groups_to_modify -= groups_added
            return True
        except Exception:
            exceptions.handle(request,
                              _('Failed to modify %s domain groups.')
                              % groups_to_modify)
            return False

    def handle(self, request, data):
        """Update the domain, then sync its user and group role assignments."""
        domain_id = data.pop('domain_id')

        try:
            LOG.info('Updating domain with name "%s"', data['name'])
            api.keystone.domain_update(request,
                                       domain_id,
                                       name=data['name'],
                                       description=data['description'],
                                       enabled=data['enabled'])
        except Exception:
            exceptions.handle(request, ignore=True)
            return False

        if not self._update_domain_members(request, domain_id, data):
            return False

        if not self._update_domain_groups(request, domain_id, data):
            return False

        return True
| 43.289628 | 79 | 0.526875 |
acf3db7343e2a53f402c87e6ff6985ccac05ef76 | 351 | py | Python | facedancer/__init__.py | mrh1997/Facedancer | 4c5aaa6a7d60abd121fe03d49beebdfad6212515 | [
"BSD-3-Clause"
] | 345 | 2019-03-12T23:36:36.000Z | 2022-02-02T00:26:27.000Z | facedancer/__init__.py | mrh1997/Facedancer | 4c5aaa6a7d60abd121fe03d49beebdfad6212515 | [
"BSD-3-Clause"
] | 35 | 2019-03-24T19:06:41.000Z | 2021-12-28T07:38:49.000Z | facedancer/__init__.py | mrh1997/Facedancer | 4c5aaa6a7d60abd121fe03d49beebdfad6212515 | [
"BSD-3-Clause"
] | 59 | 2019-04-18T14:22:47.000Z | 2022-01-18T14:34:50.000Z |
import logging

# Alias objects to make them easier to import.
from .core import FacedancerUSBApp, FacedancerUSBHostApp, FacedancerBasicScheduler
from .backends import *
from .USBProxy import USBProxyFilter, USBProxyDevice
from .devices import default_main as main

# Set up our extra log levels.
# TRACE (numeric level 5) is more verbose than logging.DEBUG (10).
logging.addLevelName(5, 'TRACE')
LOGLEVEL_TRACE = 5  # Numeric level matching the 'TRACE' name registered above.
acf3dc0868b00c43d758d33a88b803377b0c3e9f | 4,027 | py | Python | awwards/models.py | ClintonClin/awwardscole | d6b6f5bc6e08cdfa70ba345c5102562ebcbfb4d1 | [
"Unlicense"
] | null | null | null | awwards/models.py | ClintonClin/awwardscole | d6b6f5bc6e08cdfa70ba345c5102562ebcbfb4d1 | [
"Unlicense"
] | 6 | 2021-03-18T21:50:40.000Z | 2022-03-11T23:38:54.000Z | awwards/models.py | ClintonClin/awwardsclone | d6b6f5bc6e08cdfa70ba345c5102562ebcbfb4d1 | [
"Unlicense"
] | null | null | null | from django.db import models
from django.contrib.auth.models import User
from tinymce.models import HTMLField
from django.db.models import Q
import datetime as dt
# Create your models here.
class categories(models.Model):
    # Project category lookup table. NOTE(review): lowercase class name
    # breaks Django's PascalCase convention but renaming would require a
    # migration, so it is kept as-is.
    categories = models.CharField(max_length=100)

    def __str__(self):
        return self.categories

    def save_category(self):
        """Persist this category instance."""
        self.save()

    @classmethod
    def delete_category(cls, categories):
        """Delete every category row whose name matches *categories*."""
        cls.objects.filter(categories=categories).delete()
class technologies(models.Model):
    # Technology lookup table (lowercase name kept to avoid a rename
    # migration).
    technologies = models.CharField(max_length=100)

    def __str__(self):
        return self.technologies

    def save_technology(self):
        """Persist this technology instance."""
        self.save()

    @classmethod
    def delete_technology(cls, technologies):
        """Delete every technology row whose name matches *technologies*."""
        cls.objects.filter(technologies=technologies).delete()
class colors(models.Model):
    # Color lookup table (lowercase name kept to avoid a rename migration).
    colors = models.CharField(max_length=100)

    def __str__(self):
        return self.colors

    def save_color(self):
        """Persist this color instance."""
        self.save()

    @classmethod
    def delete_color(cls, colors):
        """Delete every color row whose name matches *colors*."""
        cls.objects.filter(colors=colors).delete()
class countries(models.Model):
    # Country lookup table (lowercase name kept to avoid a rename
    # migration).
    countries = models.CharField(max_length=100)

    def __str__(self):
        return self.countries

    class Meta:
        # Alphabetical ordering for dropdowns and listings.
        ordering = ['countries']

    def save_country(self):
        """Persist this country instance."""
        self.save()

    @classmethod
    def delete_country(cls, countries):
        """Delete every country row whose name matches *countries*."""
        cls.objects.filter(countries=countries).delete()
class Project(models.Model):
    """A submitted project/site that users can browse and rate."""

    title = models.CharField(max_length=150)
    landing_page = models.ImageField(upload_to='landingpage/')
    description = HTMLField()
    link = models.CharField(max_length=255)  # URL of the live project
    username = models.ForeignKey(User, on_delete=models.CASCADE)  # submitter
    screenshot1 = models.ImageField(upload_to='screenshots/')
    screenshot2 = models.ImageField(upload_to='screenshots/')
    screenshot3 = models.ImageField(upload_to='screenshots/')
    screenshot4 = models.ImageField(upload_to='screenshots/')
    # Aggregate rating values (default 0 until rated).
    design = models.IntegerField(blank=True, default=0)
    usability = models.IntegerField(blank=True, default=0)
    creativity = models.IntegerField(blank=True, default=0)
    content = models.IntegerField(blank=True, default=0)
    overall_score = models.IntegerField(blank=True, default=0)
    country = models.ForeignKey(countries, on_delete=models.CASCADE)
    technologies = models.ManyToManyField(technologies)
    categories = models.ManyToManyField(categories)
    colors = models.ManyToManyField(colors)
    post_date = models.DateTimeField(auto_now_add=True)
    avatar = models.ImageField(upload_to='avatars/')

    def __str__(self):
        return self.title

    @classmethod
    def search_project(cls, search_term):
        """Return projects matching *search_term* by submitter username,
        title substring, country name or overall-score substring."""
        # projects = cls.objects.filter(Q(username__username=search_term) | Q(title__icontains=search_term) | Q(colors__colors=search_term) | Q(technologies__technologies=search_term) | Q(categories__categories=search_term) | Q(country__countries=search_term))
        projects = cls.objects.filter(Q(username__username=search_term) | Q(title__icontains=search_term) | Q(
            country__countries=search_term) | Q(overall_score__icontains=search_term))
        return projects
class Profile(models.Model):
    """Extended profile information attached to a Django user."""

    avatar = models.ImageField(upload_to='avatars/')
    description = HTMLField()
    country = models.ForeignKey(countries, on_delete=models.CASCADE)
    username = models.ForeignKey(User, on_delete=models.CASCADE)
    name = models.CharField(max_length=100)
    email = models.EmailField()

    def __str__(self):
        return self.name
class Rating(models.Model):
    """A single user's rating of a project, one row per (project, profile)."""

    # Individual score components (0 = not rated).
    design = models.IntegerField(blank=True, default=0)
    usability = models.IntegerField(blank=True, default=0)
    creativity = models.IntegerField(blank=True, default=0)
    content = models.IntegerField(blank=True, default=0)
    overall_score = models.IntegerField(blank=True, default=0)
    # The rated project and the rating user's profile.
    project = models.ForeignKey(Project, on_delete=models.CASCADE)
    profile = models.ForeignKey(Profile, on_delete=models.CASCADE)
| 33.008197 | 260 | 0.726099 |
acf3dc65a179d40f09b54a1b10d1c4f83ee51736 | 1,230 | py | Python | industry_simulator/urls.py | lhy0807/industry_simulator | 75daadcce3d2c14f8cc226d5c92f310dff46dbf6 | [
"MIT"
] | null | null | null | industry_simulator/urls.py | lhy0807/industry_simulator | 75daadcce3d2c14f8cc226d5c92f310dff46dbf6 | [
"MIT"
] | null | null | null | industry_simulator/urls.py | lhy0807/industry_simulator | 75daadcce3d2c14f8cc226d5c92f310dff46dbf6 | [
"MIT"
] | null | null | null | """industry_simulator URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
# NOTE(review): the first four routes all include the same
# 'startscreen.urls' URLconf under different namespaces, and ``name=`` on a
# path that uses include() is ignored by Django — verify this wiring is
# intentional against the startscreen app's urls.py.
urlpatterns = [
    path('', include('startscreen.urls', namespace='index'), name='index'),
    path('create_robot/', include('startscreen.urls', namespace='startscreen'), name='create_robot'),
    path('create_group/<int:game_id>/<int:company_id>', include('startscreen.urls', namespace='create_group'), name='create_group'),
    path('join_group/', include('startscreen.urls', namespace='join_group'), name='join_group'),
    path('game/', include('game.urls'), name='game'),
    path('admin/', admin.site.urls),
]
acf3dcbdd44072954910955605f3a5c24c439fc3 | 2,381 | py | Python | FictionTools/amitools/amitools/util/BlkDevTools.py | polluks/Puddle-BuildTools | c1762d53a33002b62d8cffe3db129505a387bec3 | [
"BSD-2-Clause"
] | 38 | 2021-06-18T12:56:15.000Z | 2022-03-12T20:38:40.000Z | FictionTools/amitools/amitools/util/BlkDevTools.py | polluks/Puddle-BuildTools | c1762d53a33002b62d8cffe3db129505a387bec3 | [
"BSD-2-Clause"
] | 2 | 2021-06-20T16:28:12.000Z | 2021-11-17T21:33:56.000Z | FictionTools/amitools/amitools/util/BlkDevTools.py | polluks/Puddle-BuildTools | c1762d53a33002b62d8cffe3db129505a387bec3 | [
"BSD-2-Clause"
] | 6 | 2021-06-18T18:18:36.000Z | 2021-12-22T08:01:32.000Z | #!/usr/bin/env python3
# based heavily on "iops" by Benjamin Schweizer
# https://github.com/gopher/iops
import sys
import array
import struct
import os
def getblkdevsize(dev):
    """Return the size of the block device *dev* in bytes.

    Uses the platform-appropriate mechanism: disk ioctls on macOS and
    FreeBSD, a drive-geometry DeviceIoControl on Windows (needs pywin32),
    and seek-to-end on Linux and compatible systems.

    Raises an Exception if the size comes out as 0 (treated as an
    unsupported platform / failed query).
    """
    size = 0
    if sys.platform == "darwin":
        # mac os x ioctl from sys/disk.h
        import fcntl

        DKIOCGETBLOCKSIZE = 0x40046418  # _IOR('d', 24, uint32_t)
        DKIOCGETBLOCKCOUNT = 0x40086419  # _IOR('d', 25, uint64_t)

        # NOTE(review): the file object is only used for its fileno(); it is
        # opened in text mode but never read, so no decoding takes place.
        fh = open(dev, "r")
        # The array is just a writable 4-byte buffer the ioctl fills in.
        buf = array.array("B", list(range(0, 4)))  # uint32
        r = fcntl.ioctl(fh.fileno(), DKIOCGETBLOCKSIZE, buf, 1)
        blocksize = struct.unpack("I", buf)[0]
        buf = array.array("B", list(range(0, 8)))  # uint64
        r = fcntl.ioctl(fh.fileno(), DKIOCGETBLOCKCOUNT, buf, 1)
        blockcount = struct.unpack("Q", buf)[0]
        fh.close()
        size = blocksize * blockcount

    elif sys.platform.startswith("freebsd"):
        # freebsd ioctl from sys/disk.h
        import fcntl

        DIOCGMEDIASIZE = 0x40086481  # _IOR('d', 129, uint64_t)

        fh = open(dev, "r")
        buf = array.array("B", list(range(0, 8)))  # off_t / int64
        r = fcntl.ioctl(fh.fileno(), DIOCGMEDIASIZE, buf, 1)
        size = struct.unpack("q", buf)[0]
        fh.close()

    elif sys.platform == "win32":
        # win32 ioctl from winioctl.h, requires pywin32
        try:
            import win32file
        except ImportError:
            raise SystemExit(
                "Package pywin32 not found, see http://sf.net/projects/pywin32/"
            )

        IOCTL_DISK_GET_DRIVE_GEOMETRY = 0x00070000

        dh = win32file.CreateFile(
            dev, 0, win32file.FILE_SHARE_READ, None, win32file.OPEN_EXISTING, 0, None
        )
        info = win32file.DeviceIoControl(dh, IOCTL_DISK_GET_DRIVE_GEOMETRY, "", 24)
        win32file.CloseHandle(dh)
        # DISK_GEOMETRY: cylinders (64-bit, split), media type,
        # tracks/cylinder, sectors/track, bytes/sector.
        (cyl_lo, cyl_hi, media_type, tps, spt, bps) = struct.unpack("6L", info)
        size = ((cyl_hi << 32) + cyl_lo) * tps * spt * bps

    else:  # linux or compat
        # linux 2.6 lseek from fcntl.h
        fh = open(dev, "r")
        fh.seek(0, os.SEEK_END)
        size = fh.tell()
        fh.close()

    if not size:
        raise Exception("getblkdevsize: Unsupported platform")

    return size
# Manual test: print the size of each block device path given on the
# command line.
if __name__ == "__main__":
    for a in sys.argv[1:]:
        print(a, getblkdevsize(a))
| 30.525641 | 85 | 0.587568 |
acf3dccdce4ae061e5c4d99ace6f4575646172f4 | 841 | py | Python | build/android/gyp/ant.py | pozdnyakov/chromium-crosswalk | 0fb25c7278bf1d93e53a3b0bcb75aa8b99d4b26e | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 9 | 2018-09-21T05:36:12.000Z | 2021-11-15T15:14:36.000Z | build/android/gyp/ant.py | pozdnyakov/chromium-crosswalk | 0fb25c7278bf1d93e53a3b0bcb75aa8b99d4b26e | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | build/android/gyp/ant.py | pozdnyakov/chromium-crosswalk | 0fb25c7278bf1d93e53a3b0bcb75aa8b99d4b26e | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 16 | 2015-01-08T01:47:24.000Z | 2022-02-25T06:06:06.000Z | #!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""An Ant wrapper that suppresses useless Ant output
Ant build scripts output "BUILD SUCCESSFUL" and build timing at the end of
every build. In the Android build, this just adds a lot of useless noise to the
build output. This script forwards its arguments to ant, and prints Ant's
output up until the BUILD SUCCESSFUL line.
"""
import sys
from util import build_utils
def main(argv):
    # Run ant with the forwarded arguments, capturing its output.
    # CheckCallDie raises/dies on a non-zero exit status.
    stdout = build_utils.CheckCallDie(['ant'] + argv[1:], suppress_output=True)
    stdout = stdout.strip().split('\n')
    # Echo Ant's output only up to (and excluding) the 'BUILD SUCCESSFUL'
    # marker, dropping the success banner and timing noise that follow it.
    for line in stdout:
        if line.strip() == 'BUILD SUCCESSFUL':
            break
        print line
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main(sys.argv))
| 27.129032 | 79 | 0.728894 |
acf3dd2216c7b937353989991fa32a4b458d26f2 | 54,156 | py | Python | sfepy/terms/terms_multilinear.py | antonykamp/sfepy | 8213d3c8cc2825602b41dc65eb543b575856ca8c | [
"BSD-3-Clause"
] | null | null | null | sfepy/terms/terms_multilinear.py | antonykamp/sfepy | 8213d3c8cc2825602b41dc65eb543b575856ca8c | [
"BSD-3-Clause"
] | null | null | null | sfepy/terms/terms_multilinear.py | antonykamp/sfepy | 8213d3c8cc2825602b41dc65eb543b575856ca8c | [
"BSD-3-Clause"
] | null | null | null | import numpy as nm
try:
import dask.array as da
except ImportError:
da = None
try:
import opt_einsum as oe
except ImportError:
oe = None
try:
from jax.config import config
config.update("jax_enable_x64", True)
import jax
import jax.numpy as jnp
except ImportError:
jnp = jax = None
from pyparsing import (Word, Suppress, oneOf, OneOrMore, delimitedList,
Combine, alphas, alphanums, Literal)
from sfepy.base.base import output, Struct
from sfepy.base.timing import Timer
from sfepy.mechanics.tensors import dim2sym
from sfepy.terms.terms import Term
def _get_char_map(c1, c2):
mm = {}
for ic, char in enumerate(c1):
if char in mm:
print(char, '->eq?', mm[char], c2[ic])
if mm[char] != c2[ic]:
mm[char] += c2[ic]
else:
mm[char] = c2[ic]
return mm
def collect_modifiers(modifiers):
    """Create a parse action that records argument modifiers.

    The returned callable is meant to be attached via
    ``setParseAction()``. For a plain argument (a single token) it records
    ``None`` in `modifiers` and passes the token through. For a modifier
    call (``mod ( arg ) -> letter``, three tokens per group) it records the
    raw token list and rewrites each argument by substituting its first
    character with the target letter.
    """
    def _collect_modifiers(toks):
        if len(toks) <= 1:
            # Plain argument - no modifier applied.
            modifiers.append(None)
            return toks

        transformed = []
        modifiers.append([])
        for ig, _mod in enumerate(toks[::3]):
            arg_tok = toks[3 * ig + 1]
            # Replace the leading index character by the target letter.
            transformed.append(arg_tok.replace(arg_tok[0], toks[2]))
            modifiers[-1].append(list(toks))
        return transformed

    return _collect_modifiers
def parse_term_expression(texpr):
    """Split a term expression into per-argument index tokens and modifiers.

    Returns the pyparsing result (one combined index string per comma
    separated argument) together with the list of modifiers collected by
    :func:`collect_modifiers` (``None`` for unmodified arguments).
    """
    found_modifiers = []

    open_p, close_p = map(Suppress, '()')
    plain = Word(alphanums + '.:')
    one_letter = Word(alphas, exact=1)
    to_arrow = Literal('->').suppress()
    # Modifier call, e.g. 's(v.u)->r'; only the 's' modifier exists.
    modified = oneOf('s') + open_p + plain + close_p + to_arrow + one_letter

    token = OneOrMore(plain ^ modified)
    token.setParseAction(collect_modifiers(found_modifiers))

    grammar = delimitedList(Combine(token))
    parsed = grammar.parseString(texpr, parseAll=True)

    return parsed, found_modifiers
def append_all(seqs, item, ii=None):
    """Append `item` to every sequence in `seqs`, or only to ``seqs[ii]``
    when `ii` is given."""
    if ii is not None:
        seqs[ii].append(item)

    else:
        for one_seq in seqs:
            one_seq.append(item)
def get_sizes(indices, operands):
    """Map each subscript letter to the size of the operand axis it labels.

    `indices` are per-operand subscript strings; `operands` are the
    corresponding arrays. Later operands overwrite earlier entries for a
    repeated letter.
    """
    sizes = {}
    for labels, operand in zip(indices, operands):
        sizes.update(zip(labels, operand.shape))

    return sizes
def get_output_shape(out_subscripts, subscripts, operands):
    """Return the shape corresponding to `out_subscripts`, with letter
    sizes taken from the operands' axes as labeled by `subscripts`."""
    sizes = {}
    for labels, operand in zip(subscripts, operands):
        for label, size in zip(labels, operand.shape):
            sizes[label] = size

    return tuple(sizes[label] for label in out_subscripts)
def find_free_indices(indices):
    """Return the free (non-contracted) index letters of an expression.

    A letter is free if it occurs exactly once in the concatenation of all
    subscript strings in `indices`.

    The letters are returned in order of their first appearance. (The
    original iterated over ``set(ii)``, whose order depends on string hash
    randomization, making the output - and hence the generated einsum
    output subscripts - nondeterministic across interpreter runs.)
    """
    ii = ''.join(indices)
    # dict.fromkeys() preserves insertion order -> deterministic result.
    ifree = [c for c in dict.fromkeys(ii) if ii.count(c) == 1]
    return ifree
def get_loop_indices(subs, loop_index):
    """For each subscript string in `subs`, give the position of
    `loop_index` in it, or ``None`` when the letter is absent."""
    positions = []
    for labels in subs:
        if loop_index in labels:
            positions.append(labels.index(loop_index))

        else:
            positions.append(None)

    return positions
def get_einsum_ops(eargs, ebuilder, expr_cache):
    """Collect the numeric operand arrays for each additive einsum term.

    For every operand name recorded by `ebuilder` (``'<arg>.<value>'``),
    fetch the corresponding array from the matching :class:`ExpressionArg`
    in `eargs`: DOF vectors (via per-step caches), identity/symmetric
    gradient helper matrices, or a plain attribute (``bf``, ``bfg``,
    ``det``, ``arg``, ...). Fixed material components recorded in
    ``ebuilder.components`` are selected by indexing.

    Returns a list of operand lists, one per additive term.
    """
    dargs = {arg.name : arg for arg in eargs}

    operands = [[] for ia in range(ebuilder.n_add)]
    for ia in range(ebuilder.n_add):
        for io, oname in enumerate(ebuilder.operand_names[ia]):
            arg_name, val_name = oname.split('.')
            arg = dargs[arg_name]
            if val_name == 'dofs':
                # Per-evaluation-step cache of DOF arrays; step 0 only.
                step_cache = arg.arg.evaluate_cache.setdefault('dofs', {})
                cache = step_cache.setdefault(0, {})
                op = arg.get_dofs(cache, expr_cache, oname)

            elif val_name == 'I':
                # Identity matrix for the component axes.
                op = ebuilder.make_eye(arg.n_components)

            elif val_name == 'Psg':
                # Symmetric-gradient mapping matrix.
                op = ebuilder.make_psg(arg.dim)

            else:
                op = dargs[arg_name].get(
                    val_name,
                    msg_if_none='{} has no attribute {}!'
                    .format(arg_name, val_name)
                )

            # Select fixed (numeric) material components, keeping the
            # leading cell and quadrature axes intact.
            ics = ebuilder.components[ia][io]
            if len(ics):
                iis = [slice(None)] * 2
                iis += [slice(None) if ic is None else ic for ic in ics]
                op = op[tuple(iis)]

            operands[ia].append(op)

    return operands
def get_slice_ops(subs, ops, loop_index):
    """Make a function that fixes the loop axis of each operand.

    The returned ``slice_ops(ic)`` gives the list of operands with the axis
    labeled `loop_index` (per the subscripts `subs`) sliced at position
    `ic`; operands without that axis are passed through unchanged.
    """
    positions = get_loop_indices(subs, loop_index)

    def slice_ops(ic):
        sliced = []
        for op, pos in zip(ops, positions):
            if pos is None:
                sliced.append(op)

            else:
                sel = tuple(ic if ax == pos else slice(None, None)
                            for ax in range(op.ndim))
                sliced.append(op[sel])

        return sliced

    return slice_ops
class ExpressionArg(Struct):
    """A term argument wrapped for use in multilinear (einsum) expressions.

    Instances carry the data needed to build einsum operands: basis values
    (``bf``), basis gradients (``bfg``), mapping determinants (``det``) and,
    for state variables, connectivity for gathering DOFs. ``kind`` is one of
    'virtual', 'state' or 'ndarray' (material).
    """

    @staticmethod
    def from_term_arg(arg, term, cache):
        """Wrap a raw term argument (FieldVariable or ndarray).

        Basis arrays are shared through `cache`, keyed by ``id()`` of the
        underlying basis, and reduced to 2D/3D by dropping a singleton axis.
        """
        from sfepy.discrete import FieldVariable

        if isinstance(arg, ExpressionArg):
            return arg

        if isinstance(arg, FieldVariable):
            ag, _ = term.get_mapping(arg)
            bf = ag.bf
            # NOTE(review): keyed by id(bf) - valid only while the basis
            # array is alive; presumably bases persist for the term's
            # lifetime.
            key = 'bf{}'.format(id(bf))
            _bf = cache.get(key)
            if bf.shape[0] > 1: # cell-depending basis.
                if _bf is None:
                    _bf = bf[:, :, 0]
                    cache[key] = _bf
            else:
                if _bf is None:
                    _bf = bf[0, :, 0]
                    cache[key] = _bf

        if isinstance(arg, FieldVariable) and arg.is_virtual():
            ag, _ = term.get_mapping(arg)
            obj = ExpressionArg(name=arg.name, arg=arg, bf=_bf, bfg=ag.bfg,
                                det=ag.det[..., 0, 0],
                                n_components=arg.n_components,
                                dim=arg.dim,
                                kind='virtual')

        elif isinstance(arg, FieldVariable) and arg.is_state_or_parameter():
            ag, _ = term.get_mapping(arg)
            conn = arg.field.get_econn(term.get_dof_conn_type(),
                                       term.region)
            shape = (ag.n_el, arg.n_components, ag.bf.shape[-1])
            obj = ExpressionArg(name=arg.name, arg=arg, bf=_bf, bfg=ag.bfg,
                                det=ag.det[..., 0, 0],
                                region_name=term.region.name,
                                conn=conn, shape=shape,
                                n_components=arg.n_components,
                                dim=arg.dim,
                                kind='state')

        elif isinstance(arg, nm.ndarray):
            aux = term.get_args()
            # Find arg in term arguments using a loop (numpy arrays cannot be
            # compared) to get its name.
            ii = [ii for ii in range(len(term.args)) if aux[ii] is arg][0]
            obj = ExpressionArg(name='_'.join(term.arg_names[ii]), arg=arg,
                                kind='ndarray')

        else:
            raise ValueError('unknown argument type! ({})'
                             .format(type(arg)))

        return obj

    def get_dofs(self, cache, expr_cache, oname):
        """Gather this state variable's cell DOFs, shaped (cells, [comp,]
        nodes), caching the result and invalidating stale layout entries in
        `expr_cache` for this operand."""
        if self.kind != 'state': return

        key = (self.name, self.region_name)
        dofs = cache.get(key)
        if dofs is None:
            arg = self.arg
            dofs_vec = self.arg().reshape((-1, arg.n_components))
            # # axis 0: cells, axis 1: node, axis 2: component
            # dofs = dofs_vec[conn]
            # axis 0: cells, axis 1: component, axis 2: node
            dofs = dofs_vec[self.conn].transpose((0, 2, 1))
            if arg.n_components == 1:
                # Scalar variable -> drop the component axis.
                dofs.shape = (dofs.shape[0], -1)
            cache[key] = dofs

            # New dofs -> clear dofs from expression cache.
            for key in list(expr_cache.keys()):
                if isinstance(key, tuple) and (key[0] == oname):
                    expr_cache.pop(key)

        return dofs
class ExpressionBuilder(Struct):
    """Build einsum subscripts and operand bookkeeping for a term expression.

    One set of subscripts/operand names is maintained per additive term
    (``n_add`` of them, one per occurrence of the differentiated variable).
    Index letters: 'c' cells, 'q' quadrature points, 'defgh' DOF (node)
    axes, 'rstuvwxyz' auxiliary (component) axes.
    """
    # DOF-node index letters, assigned per argument position.
    letters = 'defgh'
    # Auxiliary (component) index letters, consumed on demand.
    _aux_letters = 'rstuvwxyz'

    def __init__(self, n_add, cache):
        self.n_add = n_add
        self.subscripts = [[] for ia in range(n_add)]
        self.operand_names = [[] for ia in range(n_add)]
        self.components = [[] for ia in range(n_add)]
        # Every output has at least the cell axis.
        self.out_subscripts = ['c' for ia in range(n_add)]
        # Index of the additive term currently being filled.
        self.ia = 0
        self.cache = cache
        self.aux_letters = iter(self._aux_letters)

    def make_eye(self, size):
        """Return a cached ``size x size`` identity matrix."""
        key = 'I{}'.format(size)
        ee = self.cache.get(key)
        if ee is None:
            ee = nm.eye(size)
            self.cache[key] = ee

        return ee

    def make_psg(self, dim):
        """Return the cached (dim, dim, sym) matrix mapping full gradients
        to symmetric-gradient vector storage."""
        key = 'Psg{}'.format(dim)
        psg = self.cache.get(key)
        if psg is None:
            sym = dim2sym(dim)
            psg = nm.zeros((dim, dim, sym))
            if dim == 3:
                psg[0, [0,1,2], [0,3,4]] = 1
                psg[1, [0,1,2], [3,1,5]] = 1
                psg[2, [0,1,2], [4,5,2]] = 1

            elif dim == 2:
                psg[0, [0,1], [0,2]] = 1
                psg[1, [0,1], [2,1]] = 1

            self.cache[key] = psg

        return psg

    def add_constant(self, name, cname):
        """Add a per-cell, per-qp constant operand (e.g. 'det')."""
        append_all(self.subscripts, 'cq')
        append_all(self.operand_names, '.'.join((name, cname)))
        append_all(self.components, [])

    def add_bfg(self, iin, ein, name):
        """Add a basis-gradient operand: (cell, qp, gradient, node)."""
        append_all(self.subscripts, 'cq{}{}'.format(ein[2], iin))
        append_all(self.operand_names, name + '.bfg')
        append_all(self.components, [])

    def add_bf(self, iin, ein, name, cell_dependent=False):
        """Add a basis operand: (qp, node), or (cell, qp, node) for a
        cell-dependent basis."""
        if cell_dependent:
            append_all(self.subscripts, 'cq{}'.format(iin))

        else:
            append_all(self.subscripts, 'q{}'.format(iin))

        append_all(self.operand_names, name + '.bf')
        append_all(self.components, [])

    def add_eye(self, iic, ein, name, iia=None):
        """Add an identity operand coupling a component axis pair."""
        append_all(self.subscripts, '{}{}'.format(ein[0], iic), ii=iia)
        append_all(self.operand_names, '{}.I'.format(name), ii=iia)
        append_all(self.components, [])

    def add_psg(self, iic, ein, name, iia=None):
        """Add a symmetric-gradient mapping operand."""
        append_all(self.subscripts, '{}{}{}'.format(iic, ein[2], ein[0]),
                   ii=iia)
        append_all(self.operand_names, name + '.Psg', ii=iia)
        append_all(self.components, [])

    def add_arg_dofs(self, iin, ein, name, n_components, iia=None):
        """Add a DOF-vector operand: (cell, [component,] node)."""
        if n_components > 1:
            #term = 'c{}{}'.format(iin, ein[0])
            term = 'c{}{}'.format(ein[0], iin)

        else:
            term = 'c{}'.format(iin)

        append_all(self.subscripts, term, ii=iia)
        append_all(self.operand_names, name + '.dofs', ii=iia)
        append_all(self.components, [])

    def add_virtual_arg(self, arg, ii, ein, modifier):
        """Add operands for the virtual (test) variable; its node (and
        component) letters are appended to every output subscript."""
        iin = self.letters[ii] # node (qs basis index)
        if ('.' in ein) or (':' in ein): # derivative, symmetric gradient
            self.add_bfg(iin, ein, arg.name)

        else:
            self.add_bf(iin, ein, arg.name)

        out_letters = iin

        if arg.n_components > 1:
            iic = next(self.aux_letters) # component
            if ':' not in ein:
                self.add_eye(iic, ein, arg.name)

            else: # symmetric gradient
                if modifier[0][0] == 's': # vector storage
                    self.add_psg(iic, ein, arg.name)

                else:
                    raise ValueError('unknown argument modifier! ({})'
                                     .format(modifier))

            out_letters = iic + out_letters

        for iia in range(self.n_add):
            self.out_subscripts[iia] += out_letters

    def add_state_arg(self, arg, ii, ein, modifier, diff_var):
        """Add operands for a state variable.

        When the variable is the differentiation variable, one additive
        term (``self.ia``) keeps its basis only (column DOF axis appears in
        the output), while all other terms get its DOF vector.
        """
        iin = self.letters[ii] # node (qs basis index)
        if ('.' in ein) or (':' in ein): # derivative, symmetric gradient
            self.add_bfg(iin, ein, arg.name)

        else:
            self.add_bf(iin, ein, arg.name)

        out_letters = iin

        if (diff_var != arg.name):
            if ':' not in ein:
                self.add_arg_dofs(iin, ein, arg.name, arg.n_components)

            else: # symmetric gradient
                if modifier[0][0] == 's': # vector storage
                    iic = next(self.aux_letters) # component
                    self.add_psg(iic, ein, arg.name)
                    self.add_arg_dofs(iin, [iic], arg.name, arg.n_components)

                else:
                    raise ValueError('unknown argument modifier! ({})'
                                     .format(modifier))

        else:
            if arg.n_components > 1:
                iic = next(self.aux_letters) # component
                if ':' in ein: # symmetric gradient
                    if modifier[0][0] != 's': # vector storage
                        raise ValueError('unknown argument modifier! ({})'
                                         .format(modifier))

                out_letters = iic + out_letters

            for iia in range(self.n_add):
                if iia != self.ia:
                    self.add_arg_dofs(iin, ein, arg.name, arg.n_components,
                                      iia)

                elif arg.n_components > 1:
                    if ':' not in ein:
                        self.add_eye(iic, ein, arg.name, iia)

                    else:
                        self.add_psg(iic, ein, arg.name, iia)

            self.out_subscripts[self.ia] += out_letters
            self.ia += 1

    def add_material_arg(self, arg, ii, ein):
        """Add a material operand; numeric index characters select fixed
        components, the remaining letters become einsum indices."""
        append_all(self.components, [])
        rein = []
        for ii, ie in enumerate(ein):
            if str.isnumeric(ie):
                # Fixed component - recorded for later indexing.
                for comp in self.components:
                    comp[-1].append(int(ie))

            else:
                for comp in self.components:
                    comp[-1].append(None)
                rein.append(ie)

        rein = ''.join(rein)

        append_all(self.subscripts, 'cq{}'.format(rein))
        append_all(self.operand_names, arg.name + '.arg')

    def build(self, texpr, *args, diff_var=None):
        """Parse `texpr` and register all arguments, then append any free
        indices to the output subscripts."""
        eins, modifiers = parse_term_expression(texpr)

        # Virtual variable must be the first variable.
        # Numpy arrays cannot be compared -> use a loop.
        for iv, arg in enumerate(args):
            if arg.kind == 'virtual':
                self.add_constant(arg.name, 'det')
                self.add_virtual_arg(arg, iv, eins[iv], modifiers[iv])
                break

        else:
            iv = -1
            for ip, arg in enumerate(args):
                if arg.kind == 'state':
                    self.add_constant(arg.name, 'det')
                    break
            else:
                raise ValueError('no FieldVariable in arguments!')

        for ii, ein in enumerate(eins):
            if ii == iv: continue
            arg = args[ii]

            if arg.kind == 'ndarray':
                self.add_material_arg(arg, ii, ein)

            elif arg.kind == 'state':
                self.add_state_arg(arg, ii, ein, modifiers[ii], diff_var)

            else:
                raise ValueError('unknown argument type! ({})'
                                 .format(type(arg)))

        for ia, subscripts in enumerate(self.subscripts):
            ifree = [ii for ii in find_free_indices(subscripts)
                     if ii not in self.out_subscripts[ia]]
            if ifree:
                self.out_subscripts[ia] += ''.join(ifree)

    @staticmethod
    def join_subscripts(subscripts, out_subscripts):
        """Join per-operand subscripts into a full einsum expression."""
        return ','.join(subscripts) + '->' + out_subscripts

    def get_expressions(self, subscripts=None):
        """Return the einsum expression string of each additive term."""
        if subscripts is None:
            subscripts = self.subscripts

        expressions = [self.join_subscripts(subscripts[ia],
                                            self.out_subscripts[ia])
                       for ia in range(self.n_add)]
        return tuple(expressions)

    def print_shapes(self, subscripts, operands):
        """Print (via output()) index sizes and operand shapes for
        debugging."""
        if subscripts is None:
            subscripts = self.subscripts

        output('number of expressions:', self.n_add)
        for onames, outs, subs, ops in zip(
                self.operand_names, self.out_subscripts, subscripts, operands,
        ):
            sizes = get_sizes(subs, ops)
            output(sizes)
            out_shape = get_output_shape(outs, subs, ops)
            output(outs, out_shape, '=')

            for name, ii, op in zip(onames, subs, ops):
                output('  {:10} {:8} {}'.format(name, ii, op.shape))

    def apply_layout(self, layout, operands, defaults=None, verbosity=0):
        """Reorder operand axes (and subscripts) to match `layout`.

        Transposed copies are cached in ``self.cache`` so repeated
        evaluations reuse them; DOF operands are keyed by operand name,
        other arrays by ``id()`` (valid only for the array's lifetime).
        """
        if layout == 'cqgvd0':
            # The default layout - nothing to do.
            return self.subscripts, operands

        if defaults is None: # Default layouts.
            defaults = {
                'det' : 'cq',
                'bf' : ('qd', 'cqd'),
                'bfg' : 'cqgd',
                'dofs' : ('cd', 'cvd'),
                'mat' : 'cq',
            }
        mat_range = ''.join([str(ii) for ii in range(10)])

        new_subscripts = [subs.copy() for subs in self.subscripts]
        new_operands = [ops.copy() for ops in operands]
        for ia in range(self.n_add):
            for io, (oname, subs, op) in enumerate(zip(self.operand_names[ia],
                                                       self.subscripts[ia],
                                                       operands[ia])):
                arg_name, val_name = oname.split('.')
                if val_name in ('det','bfg'):
                    default = defaults[val_name]

                elif val_name in ('bf', 'dofs'):
                    # Shape-dependent default (with/without extra axis).
                    default = defaults[val_name][op.ndim - 2]

                elif val_name in ('I', 'Psg'):
                    default = layout.replace('0', '') # -> Do nothing.

                else:
                    default = defaults['mat'] + mat_range[:(len(subs) - 2)]

                if '0' in default: # Material
                    inew = nm.array([default.find(il)
                                     for il in layout.replace('0', default[2:])
                                     if il in default])
                else:
                    inew = nm.array([default.find(il)
                                     for il in layout if il in default])

                new = ''.join([default[ii] for ii in inew])
                if verbosity > 2:
                    output(arg_name, val_name, subs, default, op.shape, layout)
                    output(inew, new)

                if new == default:
                    # Already in the requested layout.
                    new_subscripts[ia][io] = subs
                    new_operands[ia][io] = op

                else:
                    new_subs = ''.join([subs[ii] for ii in inew])
                    if val_name == 'dofs':
                        key = (oname,) + tuple(inew)

                    else:
                        # id is unique only during object lifetime!
                        key = (id(op),) + tuple(inew)

                    new_op = self.cache.get(key)
                    if new_op is None:
                        new_op = op.transpose(inew).copy()
                        self.cache[key] = new_op

                    new_subscripts[ia][io] = new_subs
                    new_operands[ia][io] = new_op
                    if verbosity > 2:
                        output('->', new_subscripts[ia][io])

        return new_subscripts, new_operands

    def transform(self, subscripts, operands, transformation='loop', **kwargs):
        """Transform expressions/operands for a loop or dask evaluation.

        'loop' removes the ``loop_index`` letter ('c' by default) from the
        subscripts and returns per-term (expressions, prototype operands,
        slicer functions, loop sizes). 'dask' wraps operands that have the
        loop axis in dask arrays, optionally chunked by ``c_chunk_size``.
        """
        if transformation == 'loop':
            expressions, poperands, all_slice_ops, loop_sizes = [], [], [], []
            loop_index = kwargs.get('loop_index', 'c')

            for ia, (subs, out_subscripts, ops) in enumerate(zip(
                    subscripts, self.out_subscripts, operands
            )):
                slice_ops = get_slice_ops(subs, ops, loop_index)
                tsubs = [ii.replace(loop_index, '') for ii in subs]
                tout_subs = out_subscripts.replace(loop_index, '')
                expr = self.join_subscripts(tsubs, tout_subs)
                # Prototype operands for path optimization.
                pops = slice_ops(0)
                expressions.append(expr)
                poperands.append(pops)
                all_slice_ops.append(slice_ops)
                loop_sizes.append(get_sizes(subs, ops)[loop_index])

            return tuple(expressions), poperands, all_slice_ops, loop_sizes

        elif transformation == 'dask':
            da_operands = []
            c_chunk_size = kwargs.get('c_chunk_size')
            loop_index = kwargs.get('loop_index', 'c')
            for ia in range(len(operands)):
                da_ops = []
                for name, ii, op in zip(self.operand_names[ia],
                                        subscripts[ia],
                                        operands[ia]):
                    if loop_index in ii:
                        if c_chunk_size is None:
                            chunks = 'auto'

                        else:
                            # Chunk only along the loop axis.
                            ic = ii.index(loop_index)
                            chunks = (op.shape[:ic]
                                      + (c_chunk_size,)
                                      + op.shape[ic + 1:])

                        da_op = da.from_array(op, chunks=chunks, name=name)

                    else:
                        da_op = op

                    da_ops.append(da_op)
                da_operands.append(da_ops)

            return da_operands

        else:
            raise ValueError('unknown transformation! ({})'
                             .format(transformation))
class ETermBase(Term):
"""
Reserved letters:
c .. cells
q .. quadrature points
d-h .. DOFs axes
r-z .. auxiliary axes
Layout specification letters:
c .. cells
q .. quadrature points
v .. variable component - matrix form (v, d) -> vector v*d
g .. gradient component
d .. local DOF (basis, node)
0 .. all material axes
"""
verbosity = 0
can_backend = {
'numpy' : nm,
'numpy_loop' : nm,
'numpy_qloop' : nm,
'opt_einsum' : oe,
'opt_einsum_loop' : oe,
'opt_einsum_qloop' : oe,
'jax' : jnp,
'jax_vmap' : jnp,
'dask_single' : da,
'dask_threads' : da,
'opt_einsum_dask_single' : oe and da,
'opt_einsum_dask_threads' : oe and da,
}
layout_letters = 'cqgvd0'
def __init__(self, *args, **kwargs):
Term.__init__(self, *args, **kwargs)
self.set_verbosity(kwargs.get('verbosity', 0))
self.set_backend(**kwargs)
@staticmethod
def function_timer(out, eval_einsum, *args):
tt = Timer('')
tt.start()
eval_einsum(out, *args)
output('eval_einsum: {} s'.format(tt.stop()))
return 0
@staticmethod
def function_silent(out, eval_einsum, *args):
eval_einsum(out, *args)
return 0
def set_verbosity(self, verbosity=None):
if verbosity is not None:
self.verbosity = verbosity
if self.verbosity > 0:
self.function = self.function_timer
else:
self.function = self.function_silent
def set_backend(self, backend='numpy', optimize=True, layout=None,
**kwargs):
if backend not in self.can_backend.keys():
raise ValueError('backend {} not in {}!'
.format(self.backend, self.can_backend.keys()))
if not self.can_backend[backend]:
raise ValueError('backend {} is not available!'.format(backend))
if (hasattr(self, 'backend')
and (backend == self.backend) and (optimize == self.optimize)
and (layout == self.layout) and (kwargs == self.backend_kwargs)):
return
if layout is not None:
if set(layout) != set(self.layout_letters):
raise ValueError('layout can contain only "{}" letters! ({})'
.format(self.layout_letters, layout))
self.layout = layout
else:
self.layout = self.layout_letters
self.backend = backend
self.optimize = optimize
self.backend_kwargs = kwargs
self.einfos = {}
self.clear_cache()
def clear_cache(self):
self.expr_cache = {}
def build_expression(self, texpr, *eargs, diff_var=None):
timer = Timer('')
timer.start()
if diff_var is not None:
n_add = len([arg.name for arg in eargs
if (arg.kind == 'state')
and (arg.name == diff_var)])
else:
n_add = 1
ebuilder = ExpressionBuilder(n_add, self.expr_cache)
ebuilder.build(texpr, *eargs, diff_var=diff_var)
if self.verbosity:
output('build expression: {} s'.format(timer.stop()))
return ebuilder
def make_function(self, texpr, *args, diff_var=None):
timer = Timer('')
timer.start()
einfo = self.einfos.setdefault(diff_var, Struct(
eargs=None,
ebuilder=None,
paths=None,
path_infos=None,
eval_einsum=None,
))
if einfo.eval_einsum is not None:
if self.verbosity:
output('einsum setup: {} s'.format(timer.stop()))
return einfo.eval_einsum
if einfo.eargs is None:
einfo.eargs = [
ExpressionArg.from_term_arg(arg, self, self.expr_cache)
for arg in args
]
if einfo.ebuilder is None:
einfo.ebuilder = self.build_expression(texpr, *einfo.eargs,
diff_var=diff_var)
n_add = einfo.ebuilder.n_add
if self.backend in ('numpy', 'opt_einsum'):
contract = nm.einsum if self.backend == 'numpy' else oe.contract
def eval_einsum_orig(out, eshape, expressions, operands, paths):
if operands[0][0].flags.c_contiguous:
# This is very slow if vout layout differs from operands
# layout.
vout = out.reshape(eshape)
contract(expressions[0], *operands[0], out=vout,
optimize=paths[0])
else:
aux = contract(expressions[0], *operands[0],
optimize=paths[0])
out[:] += aux.reshape(out.shape)
for ia in range(1, n_add):
aux = contract(expressions[ia], *operands[ia],
optimize=paths[ia])
out[:] += aux.reshape(out.shape)
def eval_einsum0(out, eshape, expressions, operands, paths):
aux = contract(expressions[0], *operands[0],
optimize=paths[0])
out[:] = aux.reshape(out.shape)
for ia in range(1, n_add):
aux = contract(expressions[ia], *operands[ia],
optimize=paths[ia])
out[:] += aux.reshape(out.shape)
def eval_einsum1(out, eshape, expressions, operands, paths):
out.reshape(-1)[:] = contract(
expressions[0], *operands[0], optimize=paths[0],
).reshape(-1)
for ia in range(1, n_add):
out.reshape(-1)[:] += contract(
expressions[ia], *operands[ia], optimize=paths[ia],
).reshape(-1)
def eval_einsum2(out, eshape, expressions, operands, paths):
out.flat = contract(
expressions[0], *operands[0], optimize=paths[0],
)
for ia in range(1, n_add):
out.ravel()[...] += contract(
expressions[ia], *operands[ia], optimize=paths[ia],
).ravel()
def eval_einsum3(out, eshape, expressions, operands, paths):
out.ravel()[:] = contract(
expressions[0], *operands[0], optimize=paths[0],
).ravel()
for ia in range(1, n_add):
out.ravel()[:] += contract(
expressions[ia], *operands[ia], optimize=paths[ia],
).ravel()
def eval_einsum4(out, eshape, expressions, operands, paths):
vout = out.reshape(eshape)
contract(expressions[0], *operands[0], out=vout,
optimize=paths[0])
for ia in range(1, n_add):
aux = contract(expressions[ia], *operands[ia],
optimize=paths[ia])
out[:] += aux.reshape(out.shape)
eval_fun = self.backend_kwargs.get('eval_fun', 'eval_einsum0')
eval_einsum = locals()[eval_fun]
elif self.backend in ('numpy_loop', 'opt_einsum_loop'):
contract = nm.einsum if self.backend == 'numpy_loop' else oe.contract
def eval_einsum(out, eshape, expressions, all_slice_ops, paths):
n_cell = out.shape[0]
vout = out.reshape(eshape)
slice_ops = all_slice_ops[0]
if vout.ndim > 1:
for ic in range(n_cell):
ops = slice_ops(ic)
contract(expressions[0], *ops, out=vout[ic],
optimize=paths[0])
else: # vout[ic] can be scalar in eval mode.
for ic in range(n_cell):
ops = slice_ops(ic)
vout[ic] = contract(expressions[0], *ops,
optimize=paths[0])
for ia in range(1, n_add):
slice_ops = all_slice_ops[ia]
for ic in range(n_cell):
ops = slice_ops(ic)
vout[ic] += contract(expressions[ia], *ops,
optimize=paths[ia])
elif self.backend in ('numpy_qloop', 'opt_einsum_qloop'):
contract = (nm.einsum if self.backend == 'numpy_qloop'
else oe.contract)
def eval_einsum(out, eshape, expressions, all_slice_ops,
loop_sizes, paths):
n_qp = loop_sizes[0]
vout = out.reshape(eshape)
slice_ops = all_slice_ops[0]
ops = slice_ops(0)
vout[:] = contract(expressions[0], *ops,
optimize=paths[0])
for iq in range(1, n_qp):
ops = slice_ops(iq)
vout[:] += contract(expressions[0], *ops,
optimize=paths[0])
for ia in range(1, n_add):
n_qp = loop_sizes[ia]
slice_ops = all_slice_ops[ia]
for iq in range(n_qp):
ops = slice_ops(iq)
vout[:] += contract(expressions[ia], *ops,
optimize=paths[ia])
elif self.backend == 'jax':
@jax.partial(jax.jit, static_argnums=(0, 1, 2))
def _eval_einsum(expressions, paths, n_add, operands):
val = jnp.einsum(expressions[0], *operands[0],
optimize=paths[0])
for ia in range(1, n_add):
val += jnp.einsum(expressions[ia], *operands[ia],
optimize=paths[ia])
return val
def eval_einsum(out, eshape, expressions, operands, paths):
aux = _eval_einsum(expressions, paths, n_add, operands)
out[:] = nm.asarray(aux.reshape(out.shape))
elif self.backend == 'jax_vmap':
def _eval_einsum_cell(expressions, paths, n_add, operands):
val = jnp.einsum(expressions[0], *operands[0],
optimize=paths[0])
for ia in range(1, n_add):
val += jnp.einsum(expressions[ia], *operands[ia],
optimize=paths[ia])
return val
def eval_einsum(out, vmap_eval_cell, eshape, expressions, operands,
paths):
aux = vmap_eval_cell(expressions, paths, n_add,
operands)
out[:] = nm.asarray(aux.reshape(out.shape))
eval_einsum = (eval_einsum, _eval_einsum_cell)
elif self.backend.startswith('dask'):
scheduler = {'dask_single' : 'single-threaded',
'dask_threads' : 'threads'}[self.backend]
def eval_einsum(out, eshape, expressions, operands, paths):
_out = da.einsum(expressions[0], *operands[0],
optimize=paths[0])
for ia in range(1, n_add):
aux = da.einsum(expressions[ia],
*operands[ia],
optimize=paths[ia])
_out += aux
out[:] = _out.compute(scheduler=scheduler).reshape(out.shape)
elif self.backend.startswith('opt_einsum_dask'):
scheduler = {'opt_einsum_dask_single' : 'single-threaded',
'opt_einsum_dask_threads' : 'threads'}[self.backend]
def eval_einsum(out, eshape, expressions, operands, paths):
_out = oe.contract(expressions[0], *operands[0],
optimize=paths[0],
backend='dask')
for ia in range(1, n_add):
aux = oe.contract(expressions[ia],
*operands[ia],
optimize=paths[ia],
backend='dask')
_out += aux
out[:] = _out.compute(scheduler=scheduler).reshape(out.shape)
else:
raise ValueError('unsupported backend! ({})'.format(self.backend))
einfo.eval_einsum = eval_einsum
if self.verbosity:
output('einsum setup: {} s'.format(timer.stop()))
return eval_einsum
def get_operands(self, diff_var):
einfo = self.einfos[diff_var]
return get_einsum_ops(einfo.eargs, einfo.ebuilder, self.expr_cache)
def get_paths(self, expressions, operands):
memory_limit = self.backend_kwargs.get('memory_limit')
if ('numpy' in self.backend) or self.backend.startswith('dask'):
optimize = (self.optimize if memory_limit is None
else (self.optimize, memory_limit))
paths, path_infos = zip(*[nm.einsum_path(
expressions[ia], *operands[ia],
optimize=optimize,
) for ia in range(len(operands))])
elif 'opt_einsum' in self.backend:
paths, path_infos = zip(*[oe.contract_path(
expressions[ia], *operands[ia],
optimize=self.optimize,
memory_limit=memory_limit,
) for ia in range(len(operands))])
elif 'jax' in self.backend:
paths, path_infos = [], []
for ia in range(len(operands)):
path, info = jnp.einsum_path(
expressions[ia], *operands[ia],
optimize=self.optimize,
)
paths.append(tuple(path))
path_infos.append(info)
paths = tuple(paths)
path_infos = tuple(path_infos)
else:
raise ValueError('unsupported backend! ({})'.format(self.backend))
return paths, path_infos
    def get_fargs(self, *args, **kwargs):
        """
        Build the argument list passed to the einsum evaluation function.

        The last three positional arguments are ``mode``, ``term_mode`` and
        ``diff_var`` (the variable to differentiate w.r.t., or None).
        Returns ``[eval_einsum, eshape, expressions, operands-like, ...,
        paths]``; the exact middle entries depend on the backend.
        """
        mode, term_mode, diff_var = args[-3:]

        # Build (or fetch) the backend-specific evaluation function and the
        # raw einsum operands for this differentiation variant.
        eval_einsum = self.get_function(*args, **kwargs)
        operands = self.get_operands(diff_var)

        einfo = self.einfos[diff_var]
        ebuilder = einfo.ebuilder

        eshape = get_output_shape(ebuilder.out_subscripts[0],
                                  ebuilder.subscripts[0], operands[0])
        out = [eval_einsum, eshape]

        # Reorder subscripts/operands according to the chosen memory layout.
        subscripts, operands = ebuilder.apply_layout(
            self.layout, operands, verbosity=self.verbosity,
        )

        self.parsed_expressions = ebuilder.get_expressions(subscripts)
        if self.verbosity:
            output('parsed expressions:', self.parsed_expressions)

        # Loop-based backends iterate over cells ('c') or quadrature points
        # ('q') instead of contracting over them.
        cloop = self.backend in ('numpy_loop', 'opt_einsum_loop', 'jax_vmap')
        qloop = self.backend in ('numpy_qloop', 'opt_einsum_qloop')
        if cloop or qloop:
            loop_index = 'c' if cloop else 'q'
            transform = ebuilder.transform(subscripts, operands,
                                           transformation='loop',
                                           loop_index=loop_index)
            expressions, poperands, all_slice_ops, loop_sizes = transform
            if self.backend == 'jax_vmap':
                all_ics = [get_loop_indices(subs, loop_index)
                           for subs in subscripts]
                # vmap in-axes: only the last argument (operands) is mapped.
                vms = (None, None, None, all_ics)
                vmap_eval_cell = jax.jit(jax.vmap(eval_einsum[1], vms, 0),
                                         static_argnums=(0, 1, 2))
                out += [expressions, operands]
                # Replace the plain eval function with the jitted/vmapped one.
                out[:1] = [eval_einsum[0], vmap_eval_cell]

            else:
                out += [expressions, all_slice_ops]
                if qloop:
                    out.append(loop_sizes)

        elif (self.backend.startswith('dask')
              or self.backend.startswith('opt_einsum_dask')):
            c_chunk_size = self.backend_kwargs.get('c_chunk_size')
            da_operands = ebuilder.transform(subscripts, operands,
                                             transformation='dask',
                                             c_chunk_size=c_chunk_size)
            # Path finding is done on the plain (non-dask) operands.
            poperands = operands
            expressions = self.parsed_expressions
            out += [expressions, da_operands]

        else:
            poperands = operands
            expressions = self.parsed_expressions
            out += [expressions, operands]

        # Contraction paths are computed once and cached on einfo.
        if einfo.paths is None:
            if self.verbosity > 1:
                ebuilder.print_shapes(subscripts, operands)

            einfo.paths, einfo.path_infos = self.get_paths(
                expressions,
                poperands,
            )
            if self.verbosity > 2:
                for path, path_info in zip(einfo.paths, einfo.path_infos):
                    output('path:', path)
                    output(path_info)

        out += [einfo.paths]

        return out
def get_eval_shape(self, *args, **kwargs):
mode, term_mode, diff_var = args[-3:]
if diff_var is not None:
raise ValueError('cannot differentiate in {} mode!'
.format(mode))
self.get_function(*args, **kwargs)
operands = self.get_operands(diff_var)
ebuilder = self.einfos[diff_var].ebuilder
out_shape = get_output_shape(ebuilder.out_subscripts[0],
ebuilder.subscripts[0], operands[0])
dtype = nm.find_common_type([op.dtype for op in operands[0]], [])
return out_shape, dtype
def get_normals(self, arg):
normals = self.get_mapping(arg)[0].normal
if normals is not None:
normals = ExpressionArg(name='n({})'.format(arg.name),
arg=normals[..., 0],
kind='ndarray')
return normals
class EIntegrateVolumeOperatorTerm(ETermBase):
    r"""
    Volume integral of a test function weighted by a scalar function
    :math:`c`.

    :Definition:

    .. math::
        \int_\Omega q \mbox{ or } \int_\Omega c q

    :Arguments:
        - material : :math:`c` (optional)
        - virtual : :math:`q`
    """
    name = 'de_volume_integrate'
    arg_types = ('opt_material', 'virtual')
    arg_shapes = [{'opt_material' : '1, 1', 'virtual' : (1, None)},
                  {'opt_material' : None}]

    def get_function(self, mat, virtual, mode=None, term_mode=None,
                     diff_var=None, **kwargs):
        # Prepend the '00' material factor only when a material is given.
        if mat is None:
            sexpr, eargs = '0', (virtual,)

        else:
            sexpr, eargs = '00,0', (mat, virtual)

        return self.make_function(sexpr, *eargs, diff_var=diff_var)
class ELaplaceTerm(ETermBase):
    r"""
    Laplace term with :math:`c` coefficient. Can be
    evaluated. Can use derivatives.

    :Definition:

    .. math::
        \int_{\Omega} c \nabla q \cdot \nabla p \mbox{ , } \int_{\Omega}
        c \nabla \bar{p} \cdot \nabla r

    :Arguments 1:
        - material : :math:`c`
        - virtual : :math:`q`
        - state : :math:`p`

    :Arguments 2:
        - material : :math:`c`
        - parameter_1 : :math:`\bar{p}`
        - parameter_2 : :math:`r`
    """
    name = 'de_laplace'
    arg_types = (('opt_material', 'virtual', 'state'),
                 ('opt_material', 'parameter_1', 'parameter_2'))
    arg_shapes = [{'opt_material' : '1, 1', 'virtual' : (1, 'state'),
                   'state' : 1, 'parameter_1' : 1, 'parameter_2' : 1},
                  {'opt_material' : None}]
    modes = ('weak', 'eval')

    def get_function(self, mat, virtual, state, mode=None, term_mode=None,
                     diff_var=None, **kwargs):
        # grad . grad contraction; the '00' factor carries the optional
        # scalar coefficient.
        if mat is None:
            sexpr, eargs = '0.j,0.j', (virtual, state)

        else:
            sexpr, eargs = '00,0.j,0.j', (mat, virtual, state)

        return self.make_function(sexpr, *eargs, diff_var=diff_var)
class EVolumeDotTerm(ETermBase):
    r"""
    Volume :math:`L^2(\Omega)` weighted dot product for both scalar and vector
    fields. Can be evaluated. Can use derivatives.

    :Definition:

    .. math::
        \int_\Omega q p \mbox{ , } \int_\Omega \ul{v} \cdot \ul{u}
        \mbox{ , }
        \int_\Omega p r \mbox{ , } \int_\Omega \ul{u} \cdot \ul{w} \\
        \int_\Omega c q p \mbox{ , } \int_\Omega c \ul{v} \cdot \ul{u}
        \mbox{ , }
        \int_\Omega c p r \mbox{ , } \int_\Omega c \ul{u} \cdot \ul{w} \\
        \int_\Omega \ul{v} \cdot \ull{M} \cdot \ul{u}
        \mbox{ , }
        \int_\Omega \ul{u} \cdot \ull{M} \cdot \ul{w}

    :Arguments 1:
        - material : :math:`c` or :math:`\ull{M}` (optional)
        - virtual : :math:`q` or :math:`\ul{v}`
        - state : :math:`p` or :math:`\ul{u}`

    :Arguments 2:
        - material : :math:`c` or :math:`\ull{M}` (optional)
        - parameter_1 : :math:`p` or :math:`\ul{u}`
        - parameter_2 : :math:`r` or :math:`\ul{w}`
    """
    name = 'de_volume_dot'
    arg_types = (('opt_material', 'virtual', 'state'),
                 ('opt_material', 'parameter_1', 'parameter_2'))
    arg_shapes = [{'opt_material' : '1, 1', 'virtual' : (1, 'state'),
                   'state' : 1, 'parameter_1' : 1, 'parameter_2' : 1},
                  {'opt_material' : None},
                  {'opt_material' : '1, 1', 'virtual' : ('D', 'state'),
                   'state' : 'D', 'parameter_1' : 'D', 'parameter_2' : 'D'},
                  {'opt_material' : 'D, D'},
                  {'opt_material' : None}]
    modes = ('weak', 'eval')

    def get_function(self, mat, virtual, state, mode=None, term_mode=None,
                     diff_var=None, **kwargs):
        if mat is None:
            return self.make_function('i,i', virtual, state,
                                      diff_var=diff_var)

        # A matrix-valued material uses the full 'ij' contraction; a scalar
        # one ('1, 1' shape) is a plain '00' multiplicative factor.
        sexpr = 'ij,i,j' if mat.shape[-1] > 1 else '00,i,i'

        return self.make_function(sexpr, mat, virtual, state,
                                  diff_var=diff_var)
class ESurfaceDotTerm(EVolumeDotTerm):
    r"""
    Surface :math:`L^2(\Gamma)` dot product for both scalar and vector
    fields.

    :Definition:

    .. math::
        \int_\Gamma q p \mbox{ , } \int_\Gamma \ul{v} \cdot \ul{u}
        \mbox{ , }
        \int_\Gamma p r \mbox{ , } \int_\Gamma \ul{u} \cdot \ul{w} \\
        \int_\Gamma c q p \mbox{ , } \int_\Gamma c \ul{v} \cdot \ul{u}
        \mbox{ , }
        \int_\Gamma c p r \mbox{ , } \int_\Gamma c \ul{u} \cdot \ul{w} \\
        \int_\Gamma \ul{v} \cdot \ull{M} \cdot \ul{u}
        \mbox{ , }
        \int_\Gamma \ul{u} \cdot \ull{M} \cdot \ul{w}

    :Arguments 1:
        - material : :math:`c` or :math:`\ull{M}` (optional)
        - virtual : :math:`q` or :math:`\ul{v}`
        - state : :math:`p` or :math:`\ul{u}`

    :Arguments 2:
        - material : :math:`c` or :math:`\ull{M}` (optional)
        - parameter_1 : :math:`p` or :math:`\ul{u}`
        - parameter_2 : :math:`r` or :math:`\ul{w}`
    """
    # Inherits get_function(), arg_types, arg_shapes and modes from
    # EVolumeDotTerm; only the name and the integration domain differ.
    name = 'de_surface_dot'
    integration = 'surface'
class EScalarDotMGradScalarTerm(ETermBase):
    r"""
    Volume dot product of a scalar gradient dotted with a material vector with
    a scalar.

    :Definition:

    .. math::
        \int_{\Omega} q \ul{y} \cdot \nabla p \mbox{ , }
        \int_{\Omega} p \ul{y} \cdot \nabla q

    :Arguments 1:
        - material : :math:`\ul{y}`
        - virtual : :math:`q`
        - state : :math:`p`

    :Arguments 2:
        - material : :math:`\ul{y}`
        - state : :math:`p`
        - virtual : :math:`q`
    """
    name = 'de_s_dot_mgrad_s'
    arg_types = (('material', 'virtual', 'state'),
                 ('material', 'state', 'virtual'))
    arg_shapes = [{'material' : 'D, 1',
                   'virtual/grad_state' : (1, None),
                   'state/grad_state' : 1,
                   'virtual/grad_virtual' : (1, None),
                   'state/grad_virtual' : 1}]
    modes = ('grad_state', 'grad_virtual')

    def get_function(self, mat, var1, var2, mode=None, term_mode=None,
                     diff_var=None, **kwargs):
        # One expression serves both modes: material vector times var1 times
        # the gradient of var2.
        sexpr = 'i0,0,0.i'

        return self.make_function(sexpr, mat, var1, var2, diff_var=diff_var)
class ENonPenetrationPenaltyTerm(ETermBase):
    r"""
    Non-penetration condition in the weak sense using a penalty.

    :Definition:

    .. math::
        \int_{\Gamma} c (\ul{n} \cdot \ul{v}) (\ul{n} \cdot \ul{u})

    :Arguments:
        - material : :math:`c`
        - virtual : :math:`\ul{v}`
        - state : :math:`\ul{u}`
    """
    name = 'de_non_penetration_p'
    arg_types = ('material', 'virtual', 'state')
    arg_shapes = {'material' : '1, 1',
                  'virtual' : ('D', 'state'), 'state' : 'D'}
    integration = 'surface'

    def get_function(self, mat, virtual, state, mode=None, term_mode=None,
                     diff_var=None, **kwargs):
        # Both the test and the state variables are projected onto the same
        # surface normals.
        nvec = self.get_normals(state)
        eargs = (mat, virtual, nvec, state, nvec)

        return self.make_function('00,i,i,j,j', *eargs, diff_var=diff_var)
class EDivGradTerm(ETermBase):
    r"""
    Vector field diffusion term.

    :Definition:

    .. math::
        \int_{\Omega} \nu\ \nabla \ul{v} : \nabla \ul{u} \mbox{ , }
        \int_{\Omega} \nu\ \nabla \ul{u} : \nabla \ul{w} \\
        \int_{\Omega} \nabla \ul{v} : \nabla \ul{u} \mbox{ , }
        \int_{\Omega} \nabla \ul{u} : \nabla \ul{w}

    :Arguments 1:
        - material : :math:`\nu` (viscosity, optional)
        - virtual : :math:`\ul{v}`
        - state : :math:`\ul{u}`

    :Arguments 2:
        - material : :math:`\nu` (viscosity, optional)
        - parameter_1 : :math:`\ul{u}`
        - parameter_2 : :math:`\ul{w}`
    """
    name = 'de_div_grad'
    arg_types = (('opt_material', 'virtual', 'state'),
                 ('opt_material', 'parameter_1', 'parameter_2'))
    arg_shapes = [{'opt_material' : '1, 1', 'virtual' : ('D', 'state'),
                   'state' : 'D', 'parameter_1' : 'D', 'parameter_2' : 'D'},
                  {'opt_material' : None}]
    modes = ('weak', 'eval')

    def get_function(self, mat, virtual, state, mode=None, term_mode=None,
                     diff_var=None, **kwargs):
        # Full gradient : gradient contraction; '00' carries the optional
        # viscosity coefficient.
        if mat is None:
            sexpr, eargs = 'i.j,i.j', (virtual, state)

        else:
            sexpr, eargs = '00,i.j,i.j', (mat, virtual, state)

        return self.make_function(sexpr, *eargs, diff_var=diff_var)
class EConvectTerm(ETermBase):
    r"""
    Nonlinear convective term.

    :Definition:

    .. math::
        \int_{\Omega} ((\ul{u} \cdot \nabla) \ul{u}) \cdot \ul{v}

    :Arguments:
        - virtual : :math:`\ul{v}`
        - state : :math:`\ul{u}`
    """
    name = 'de_convect'
    arg_types = ('virtual', 'state')
    arg_shapes = {'virtual' : ('D', 'state'), 'state' : 'D'}

    def get_function(self, virtual, state, mode=None, term_mode=None,
                     diff_var=None, **kwargs):
        # The state enters twice: as the advected field and as the advecting
        # velocity.
        eargs = (virtual, state, state)

        return self.make_function('i,i.j,j', *eargs, diff_var=diff_var)
class EDivTerm(ETermBase):
    r"""
    Weighted divergence term.

    :Definition:

    .. math::
        \int_{\Omega} \nabla \cdot \ul{v} \mbox { , } \int_{\Omega} \nabla
        \cdot \ul{u} \\
        \int_{\Omega} c \nabla \cdot \ul{v} \mbox { , } \int_{\Omega} c \nabla
        \cdot \ul{u}

    :Arguments 1:
        - material : :math:`c` (optional)
        - virtual : :math:`\ul{v}`

    :Arguments 2:
        - material : :math:`c` (optional)
        - parameter : :math:`\ul{u}`
    """
    name = 'de_div'
    arg_types = (('opt_material', 'virtual'),
                 ('opt_material', 'parameter'),)
    arg_shapes = [{'opt_material' : '1, 1', 'virtual' : ('D', None),
                   'parameter' : 'D'},
                  {'opt_material' : None}]
    modes = ('weak', 'eval')

    def get_function(self, mat, virtual, mode=None, term_mode=None,
                     diff_var=None, **kwargs):
        # 'i.i' is the divergence; '00' carries the optional coefficient.
        if mat is None:
            sexpr, eargs = 'i.i', (virtual,)

        else:
            sexpr, eargs = '00,i.i', (mat, virtual)

        return self.make_function(sexpr, *eargs, diff_var=diff_var)
class EStokesTerm(ETermBase):
    r"""
    Stokes problem coupling term. Corresponds to weak forms of gradient and
    divergence terms.

    :Definition:

    .. math::
        \int_{\Omega} p\ \nabla \cdot \ul{v} \mbox{ , }
        \int_{\Omega} q\ \nabla \cdot \ul{u}
        \mbox{ or }
        \int_{\Omega} c\ p\ \nabla \cdot \ul{v} \mbox{ , }
        \int_{\Omega} c\ q\ \nabla \cdot \ul{u} \\
        \int_{\Omega} r\ \nabla \cdot \ul{w} \mbox{ , }
        \int_{\Omega} c r\ \nabla \cdot \ul{w}

    :Arguments 1:
        - material : :math:`c` (optional)
        - virtual : :math:`\ul{v}`
        - state : :math:`p`

    :Arguments 2:
        - material : :math:`c` (optional)
        - state : :math:`\ul{u}`
        - virtual : :math:`q`

    :Arguments 3:
        - material : :math:`c` (optional)
        - parameter_v : :math:`\ul{u}`
        - parameter_s : :math:`p`
    """
    name = 'de_stokes'
    arg_types = (('opt_material', 'virtual', 'state'),
                 ('opt_material', 'state', 'virtual'),
                 ('opt_material', 'parameter_v', 'parameter_s'))
    arg_shapes = [{'opt_material' : '1, 1',
                   'virtual/grad' : ('D', None), 'state/grad' : 1,
                   'virtual/div' : (1, None), 'state/div' : 'D',
                   'parameter_v' : 'D', 'parameter_s' : 1},
                  {'opt_material' : None}]
    modes = ('grad', 'div', 'eval')

    def get_function(self, coef, vvar, svar, mode=None, term_mode=None,
                     diff_var=None, **kwargs):
        # Divergence of the vector variable times the scalar variable; the
        # same expression serves all modes via the argument ordering.
        if coef is None:
            sexpr, eargs = 'i.i,0', (vvar, svar)

        else:
            sexpr, eargs = '00,i.i,0', (coef, vvar, svar)

        return self.make_function(sexpr, *eargs, diff_var=diff_var)
class ELinearElasticTerm(ETermBase):
    r"""
    General linear elasticity term, with :math:`D_{ijkl}` given in
    the usual matrix form exploiting symmetry: in 3D it is :math:`6\times6`
    with the indices ordered as :math:`[11, 22, 33, 12, 13, 23]`, in 2D it is
    :math:`3\times3` with the indices ordered as :math:`[11, 22, 12]`.

    :Definition:

    .. math::
        \int_{\Omega} D_{ijkl}\ e_{ij}(\ul{v}) e_{kl}(\ul{u})
        \mbox{ , }
        \int_{\Omega} D_{ijkl}\ e_{ij}(\ul{w}) e_{kl}(\ul{u})

    :Arguments 1:
        - material : :math:`D_{ijkl}`
        - virtual : :math:`\ul{v}`
        - state : :math:`\ul{u}`

    :Arguments 2:
        - material : :math:`D_{ijkl}`
        - parameter_1 : :math:`\ul{w}`
        - parameter_2 : :math:`\ul{u}`
    """
    name = 'de_lin_elastic'
    arg_types = (('material', 'virtual', 'state'),
                 ('material', 'parameter_1', 'parameter_2'))
    arg_shapes = {'material' : 'S, S', 'virtual' : ('D', 'state'),
                  'state' : 'D', 'parameter_1' : 'D', 'parameter_2' : 'D'}
    modes = ('weak', 'eval')

    def get_function(self, mat, virtual, state, mode=None, term_mode=None,
                     diff_var=None, **kwargs):
        # 's(i:j)->I' maps the symmetric strain indices onto the packed
        # (Voigt-like) index of the material matrix.
        sexpr = 'IK,s(i:j)->I,s(k:l)->K'

        return self.make_function(sexpr, mat, virtual, state,
                                  diff_var=diff_var)
class ECauchyStressTerm(ETermBase):
    r"""
    Evaluate Cauchy stress tensor.

    It is given in the usual vector form exploiting symmetry: in 3D it has 6
    components with the indices ordered as :math:`[11, 22, 33, 12, 13, 23]`, in
    2D it has 3 components with the indices ordered as :math:`[11, 22, 12]`.

    :Definition:

    .. math::
        \int_{\Omega} D_{ijkl} e_{kl}(\ul{w})

    :Arguments:
        - material : :math:`D_{ijkl}`
        - parameter : :math:`\ul{w}`
    """
    name = 'de_cauchy_stress'
    arg_types = ('material', 'parameter')
    arg_shapes = {'material' : 'S, S', 'parameter' : 'D'}

    def get_function(self, mat, parameter, mode=None, term_mode=None,
                     diff_var=None, **kwargs):
        # Packed material matrix contracted with the symmetric gradient of
        # the parameter variable.
        sexpr = 'IK,s(k:l)->K'

        return self.make_function(sexpr, mat, parameter, diff_var=diff_var)
| 34.961911 | 81 | 0.501828 |
acf3de090efcb928cd1049043021c794e19f9ac9 | 7,301 | py | Python | tests/pretrained_embeds_test.py | liaimi/pytext-1 | ac40f3956bb2ef0d05f07e2d6bf18bc808df82e4 | [
"BSD-3-Clause"
] | 2 | 2019-01-24T00:40:45.000Z | 2019-02-09T00:56:43.000Z | tests/pretrained_embeds_test.py | liaimi/pytext-1 | ac40f3956bb2ef0d05f07e2d6bf18bc808df82e4 | [
"BSD-3-Clause"
] | null | null | null | tests/pretrained_embeds_test.py | liaimi/pytext-1 | ac40f3956bb2ef0d05f07e2d6bf18bc808df82e4 | [
"BSD-3-Clause"
] | 1 | 2019-04-08T08:46:08.000Z | 2019-04-08T08:46:08.000Z | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import tempfile
import unittest
import numpy as np
from pytext.common.constants import DatasetFieldName
from pytext.config.field_config import (
DocLabelConfig,
EmbedInitStrategy,
FeatureConfig,
WordFeatConfig,
WordLabelConfig,
)
from pytext.data import JointModelDataHandler
from pytext.data.featurizer import SimpleFeaturizer
from pytext.utils.embeddings_utils import PretrainedEmbedding
from pytext.utils.test_utils import import_tests_module
tests_module = import_tests_module()
# Tiny dataset fixtures and pretrained-embedding files (raw and cached
# variants) used throughout the tests below; all resolved relative to the
# tests module's data directory.
TRAIN_FILE = tests_module.test_file("train_data_tiny.tsv")
EVAL_FILE = tests_module.test_file("test_data_tiny.tsv")
TEST_FILE = tests_module.test_file("test_data_tiny.tsv")
EMBED_RAW_PATH = tests_module.test_file("pretrained_embed_raw")
EMBED_CACHED_PATH = tests_module.test_file("test_embed.cached")
EMBED_XLU_CACHED_PATH = tests_module.test_file("test_embed_xlu.cached")
class PretrainedEmbedsTest(unittest.TestCase):
    """Tests for loading, caching and assigning pretrained word embeddings.

    NOTE(review): 'assing' and 'intializing' in the method names below are
    typos; they are kept as-is because renaming them would change the test
    IDs reported by the runner.
    """

    # Round-trips raw embeddings through the cache format and checks the
    # cached copy matches the reference vocab and vectors.
    def test_cache_embeds(self):
        embeddings_ref = PretrainedEmbedding()
        embeddings_ref.load_pretrained_embeddings(EMBED_RAW_PATH)
        with tempfile.NamedTemporaryFile(
            delete=False, suffix=".{}".format("cached")
        ) as cached_path:
            embeddings_ref.cache_pretrained_embeddings(cached_path.name)
            embeddings_cached = PretrainedEmbedding()
            embeddings_cached.load_cached_embeddings(cached_path.name)
            np.testing.assert_array_equal(
                sorted(embeddings_cached.stoi.keys()), sorted(embeddings_ref.stoi.keys())
            )
            np.testing.assert_array_equal(
                embeddings_cached.embed_vocab, embeddings_ref.embed_vocab
            )
            np.testing.assert_array_equal(
                sorted(embeddings_cached.stoi.values()),
                sorted(embeddings_ref.stoi.values()),
            )
            for word_idx in embeddings_ref.stoi.values():
                np.testing.assert_array_almost_equal(
                    embeddings_cached.embedding_vectors[word_idx],
                    embeddings_ref.embedding_vectors[word_idx],
                )

    # Checks that known words receive their pretrained vectors when weights
    # are initialized for a small vocab.
    def test_assing_pretrained_weights(self):
        embeddings_ref = PretrainedEmbedding()
        embeddings_ref.load_cached_embeddings(EMBED_CACHED_PATH)
        VOCAB = ["UNK", "aloha", "the"]
        EMBED_DIM = 5
        embed_vocab_to_idx = {tok: i for i, tok in enumerate(VOCAB)}
        pretrained_embeds = embeddings_ref.initialize_embeddings_weights(
            embed_vocab_to_idx, "UNK", EMBED_DIM, EmbedInitStrategy.RANDOM
        )
        assert pretrained_embeds.shape[0] == len(VOCAB)
        assert pretrained_embeds.shape[1] == EMBED_DIM
        np.testing.assert_array_almost_equal(
            pretrained_embeds[1].numpy(),
            [-0.43124, 0.014934, -0.50635, 0.60506, 0.56051],
        )  # embedding vector for 'aloha'
        np.testing.assert_array_almost_equal(
            pretrained_embeds[2].numpy(),
            [-0.39153, -0.19803, 0.2573, -0.18617, 0.25551],
        )  # embedding vector for 'the'

    # Same cache round-trip as above, but with per-dialect (xlu) appends of
    # the same raw embedding file.
    def test_cache_xlu_embeds(self):
        embeddings_ref = PretrainedEmbedding()
        dialects = ["en_US", "en_UK", "es_XX"]
        for dialect in dialects:
            embeddings_ref.load_pretrained_embeddings(
                EMBED_RAW_PATH, append=True, dialect=dialect
            )
        with tempfile.NamedTemporaryFile(
            delete=False, suffix=".{}".format("cached")
        ) as cached_path:
            embeddings_ref.cache_pretrained_embeddings(cached_path.name)
            embeddings_cached = PretrainedEmbedding()
            embeddings_cached.load_cached_embeddings(cached_path.name)
            np.testing.assert_array_equal(
                sorted(embeddings_cached.stoi.keys()), sorted(embeddings_ref.stoi.keys())
            )
            np.testing.assert_array_equal(
                embeddings_cached.embed_vocab, embeddings_ref.embed_vocab
            )
            np.testing.assert_array_equal(
                sorted(embeddings_cached.stoi.values()),
                sorted(embeddings_ref.stoi.values()),
            )
            for word_idx in embeddings_ref.stoi.values():
                np.testing.assert_array_almost_equal(
                    embeddings_cached.embedding_vectors[word_idx],
                    embeddings_ref.embedding_vectors[word_idx],
                )

    # Dialect-suffixed tokens ('word-dialect') must map to the same vectors
    # as their plain counterparts in the non-xlu test above.
    def test_assing_pretrained_xlu_weights(self):
        embeddings_ref = PretrainedEmbedding()
        embeddings_ref.load_cached_embeddings(EMBED_XLU_CACHED_PATH)
        VOCAB = ["UNK", "aloha-en_US", "the-es_XX"]
        EMBED_DIM = 5
        embed_vocab_to_idx = {tok: i for i, tok in enumerate(VOCAB)}
        pretrained_embeds = embeddings_ref.initialize_embeddings_weights(
            embed_vocab_to_idx, "UNK", EMBED_DIM, EmbedInitStrategy.RANDOM
        )
        assert pretrained_embeds.shape[0] == len(VOCAB)
        assert pretrained_embeds.shape[1] == EMBED_DIM
        np.testing.assert_array_almost_equal(
            pretrained_embeds[1].numpy(),
            [-0.43124, 0.014934, -0.50635, 0.60506, 0.56051],
        )  # embedding vector for 'aloha-en_US'
        np.testing.assert_array_almost_equal(
            pretrained_embeds[2].numpy(),
            [-0.39153, -0.19803, 0.2573, -0.18617, 0.25551],
        )  # embedding vector for 'the-es_XX'

    # End-to-end: RANDOM init must yield non-zero vectors for OOV words,
    # ZERO init must yield all-zero vectors.
    def test_intializing_embeds_from_config(self):
        feature_config = FeatureConfig(
            word_feat=WordFeatConfig(
                embedding_init_strategy=EmbedInitStrategy.RANDOM,
                embed_dim=5,
                pretrained_embeddings_path=tests_module.TEST_BASE_DIR,
            )
        )
        data_handler = JointModelDataHandler.from_config(
            JointModelDataHandler.Config(),
            feature_config,
            [DocLabelConfig(), WordLabelConfig()],
            featurizer=SimpleFeaturizer.from_config(
                SimpleFeaturizer.Config(), feature_config
            ),
        )
        data_handler.init_metadata_from_path(TRAIN_FILE, EVAL_FILE, TEST_FILE)
        pretrained_embeds = data_handler.metadata.features[
            DatasetFieldName.TEXT_FIELD
        ].pretrained_embeds_weight
        # test random initialization (values should be non-0)
        np.testing.assert_array_less(
            [0, 0, 0, 0, 0], np.absolute(pretrained_embeds[11].numpy())
        )
        feature_config = FeatureConfig(
            word_feat=WordFeatConfig(
                embedding_init_strategy=EmbedInitStrategy.ZERO,
                embed_dim=5,
                pretrained_embeddings_path=tests_module.TEST_BASE_DIR,
            )
        )
        data_handler = JointModelDataHandler.from_config(
            JointModelDataHandler.Config(),
            feature_config,
            [DocLabelConfig(), WordLabelConfig()],
            featurizer=SimpleFeaturizer.from_config(
                SimpleFeaturizer.Config(), feature_config
            ),
        )
        data_handler.init_metadata_from_path(TRAIN_FILE, EVAL_FILE, TEST_FILE)
        pretrained_embeds = data_handler.metadata.features[
            DatasetFieldName.TEXT_FIELD
        ].pretrained_embeds_weight
        # test zero initialization (values should all be 0)
        np.testing.assert_array_equal([0, 0, 0, 0, 0], pretrained_embeds[11].numpy())
| 40.561111 | 85 | 0.666484 |
acf3de8d31c0010d3de7f9d24086d1126f69a047 | 10,914 | py | Python | piqe_ocp_lib/api/resources/ocp_nodes.py | vikasmulaje/piqe-ocp-lib | c7f98e2728db457d05b970d5bc5521d60271bca1 | [
"Apache-2.0"
] | null | null | null | piqe_ocp_lib/api/resources/ocp_nodes.py | vikasmulaje/piqe-ocp-lib | c7f98e2728db457d05b970d5bc5521d60271bca1 | [
"Apache-2.0"
] | null | null | null | piqe_ocp_lib/api/resources/ocp_nodes.py | vikasmulaje/piqe-ocp-lib | c7f98e2728db457d05b970d5bc5521d60271bca1 | [
"Apache-2.0"
] | null | null | null | import logging
import subprocess
from kubernetes.client.rest import ApiException
from piqe_ocp_lib import __loggername__
from piqe_ocp_lib.api.resources.ocp_base import OcpBase
logger = logging.getLogger(__loggername__)
class OcpNodes(OcpBase):
    """
    OcpNodes Class extends OcpBase and encapsulates all methods
    related to managing Openshift nodes.
    :param kube_config_file: A kubernetes config file.
    :return: None
    """

    def __init__(self, kube_config_file=None):
        self.kube_config_file = kube_config_file
        OcpBase.__init__(self, kube_config_file=self.kube_config_file)
        self.api_version = "v1"
        self.kind = "Node"
        # Dynamic-client handle for the cluster's Node resources.
        self.ocp_nodes = self.dyn_client.resources.get(api_version=self.api_version, kind=self.kind)

    def get_all_nodes(self, label_selector=None):
        """
        Method that returns a list of node objects
        :param label_selector: Used to return a list of nodes based on the provided label(s)
        :return: V1NodeList on success. None on failure.
        """
        node_object_list = None
        try:
            node_object_list = self.ocp_nodes.get(label_selector=label_selector)
        except ApiException as e:
            logger.error("Exception when calling method list_node: %s\n", e)
        return node_object_list

    def get_all_node_names(self):
        """
        Method that returns a list of all node names based on node objects
        :return: List of unfiltered node names on success. Empty list on failure.
        """
        node_names = []
        # BUGFIX: get_all_nodes() returns None on ApiException, so the
        # previous version crashed with an uncaught AttributeError when
        # iterating `None.items`; guard against the None case instead.
        node_object_list = self.get_all_nodes(label_selector=None)
        if node_object_list:
            node_names = [node.metadata.name for node in node_object_list.items]
        return node_names

    def get_a_node(self, node_name):
        """
        Method returns a node object by name
        :param node_name: The name of the node.
        :return: V1Node on success. None on failure.
        """
        node_object = None
        try:
            node_object = self.ocp_nodes.get(name=node_name)
        except ApiException as e:
            logger.error("Exception encountered while getting a node by name: %s\n", e)
        return node_object

    @staticmethod
    def _capacity_to_bytes(capacity):
        """
        Convert a kubernetes capacity quantity string to bytes.
        Only the binary suffixes Ki, Mi and Gi are recognized; any other
        form contributes 0, matching the original behaviour of skipping
        unrecognized units.
        :param capacity: (str) e.g. "16418112Ki"
        :return: (int) capacity in bytes, or 0 for unrecognized units
        """
        multipliers = {"Ki": 1024, "Mi": 1024 ** 2, "Gi": 1024 ** 3}
        suffix = capacity[-2:]
        if suffix in multipliers:
            return int(capacity[:-2]) * multipliers[suffix]
        return 0

    def get_total_memory_in_bytes(self):
        """
        Get total cluster memory by adding memory from all Nodes
        :return: (int) Total memory in byte on success OR 0 on Failure
        """
        total_memory_in_bytes = 0
        node_response = self.get_all_nodes()
        if node_response:
            for node in node_response.items:
                # BUGFIX: the original "Gi" branch multiplied by 1024 * 1024
                # (the Mi factor); 1 Gi is 1024 ** 3 bytes. The conversion is
                # now centralized in _capacity_to_bytes().
                total_memory_in_bytes += self._capacity_to_bytes(node["status"]["capacity"]["memory"])
        logger.info("Total memory in bytes : %s", total_memory_in_bytes)
        return total_memory_in_bytes

    def is_node_ready(self, node_name, timeout=300):
        """
        Check if a node has reached a Ready state
        :param node_name: (str) The node name
        :param timeout: (int) The time limit for polling status. Defaults to 300
        :return: (bool) True if it's Ready OR False otherwise
        """
        field_selector = "metadata.name={}".format(node_name)
        for event in self.ocp_nodes.watch(field_selector=field_selector, timeout=timeout):
            conditions_list = event["object"]["status"]["conditions"]
            # BUGFIX: check for an empty condition list *before* indexing it;
            # the original indexed [-1] first and would raise IndexError.
            if not conditions_list:
                continue
            latest_event = conditions_list[-1]
            if latest_event["type"] == "Ready" and latest_event["status"] == "True":
                logger.debug("Node {} has reached 'Ready' state".format(node_name))
                return True
            logger.debug(
                "Waiting for node {} to reach 'Ready' state."
                "Reason: {}".format(node_name, latest_event["message"])
            )
        return False

    def is_node_deleted(self, node_name, timeout=300):
        """
        Check if a node was successfully deleted
        :param node_name: (str) The node name
        :param timeout: (int) The time limit for polling status. Defaults to 300
        :return: (bool) True if it's deleted OR False otherwise
        """
        if self.get_a_node(node_name) is None:
            logger.info("Node {} is not present".format(node_name))
            return True
        logger.debug("Node seems to be present, let's watch")
        field_selector = "metadata.name={}".format(node_name)
        for event in self.ocp_nodes.watch(field_selector=field_selector, timeout=timeout):
            if self.get_a_node(node_name):
                logger.debug("Node is still present")
                logger.debug("Node state is: {}".format(event["object"]["status"]["conditions"][-1]["message"]))
                continue
            logger.debug("Node is no longer here")
            return True
        return False

    def label_a_node(self, node_name, labels):
        """
        Method that patches a node as a means to apply a label to it.
        :param node_name: The name of the node to patch
        :param labels: A dictionary containing the key,val labels
        :return: The patched V1Node object on success. None on failure.
        """
        # (Docstring fixed: previously claimed a V1DeploymentConfig return.)
        body = {"metadata": {"labels": labels}}
        api_response = None
        try:
            api_response = self.ocp_nodes.patch(name=node_name, body=body)
        except ApiException as e:
            logger.error("Exception while patching nodes: %s\n", e)
        return api_response

    def get_node_status(self, node_name):
        """
        Return the status of a node based on the condition type Ready.
        :param node_name:
        :return: The status for the condition ("True"/"False"). None when the
                 node lookup fails.
        """
        # NOTE(review): when the node exists but carries no 'Ready' condition,
        # this falls through and returns the whole node object (truthy);
        # preserved for backward compatibility with existing callers.
        node_object = None
        try:
            node_object = self.ocp_nodes.get(name=node_name)
            for condition in node_object.status.conditions:
                if condition.get("type") == "Ready":
                    return condition.get("status")
        except ApiException as e:
            logger.error("Exception encountered while determining the node condition: %s\n", e)
        return node_object

    def get_node_roles(self, node_name):
        """
        Return the roles assigned to the nodes by looking for the following:
        node-role.kubernetes.io/master: ''
        node-role.kubernetes.io/worker: ''
        :param node_name: The node to examine
        :return: List containing the assigned roles. Currently Master and/or Worker.
        """
        node_role = []
        role_labels = {
            "node-role.kubernetes.io/master": "Master",
            "node-role.kubernetes.io/worker": "Worker",
        }
        try:
            node_object = self.ocp_nodes.get(name=node_name)
            # labels are returned as (key, value) tuples
            for label in node_object.metadata.labels:
                if label[0] in role_labels:
                    node_role.append(role_labels[label[0]])
        except ApiException as e:
            logger.error("Exception encountered while getting a node by name: %s\n", e)
        return node_role

    @staticmethod
    def _run_shell(command):
        """
        Run a command through the shell and return its outcome.
        NOTE(security): callers interpolate node/pod names into the command
        string and this runs with shell=True; only trusted values must be
        passed in.
        :param command: (str) the shell command line to run
        :return: (returncode, stdout, stderr) of the command
        """
        logger.info("Executing command: %s", command)
        subp = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = subp.communicate()
        ret = subp.returncode
        logger.info(
            "Command - %s - execution status:\n" "RETCODE: %s\nSTDOUT: %s\nSTDERR: %s\n", command, ret, out, err
        )
        return ret, out, err

    def execute_command_on_a_node(self, node_name, command_to_execute):
        """
        Executes the provided command on the specified node_name.
        A privileged helper pod ("execute-on-<node>") is created on demand on
        the target node and the command is run inside it via nsenter.
        :param node_name: The name of the node on which command gets executed
        :param command_to_execute:
        :return: return code, stdout, stderr of the command executed
        """
        # Pod Name
        pod_name = "execute-on-%s" % node_name
        # Check if the helper pod already exists.
        ret, out, err = self._run_shell("kubectl get pods | grep %s" % pod_name)
        if ret != 0:
            # Spin a new container for the node
            container_definition = (
                """
            {
                "spec": {
                    "hostPID": true,
                    "hostNetwork": true,
                    "nodeSelector": { "kubernetes.io/hostname": "%s" },
                    "tolerations": [{
                        "operator": "Exists"
                    }],
                    "containers": [
                        {
                        "name": "nsenter",
                        "image": "alexeiled/nsenter:2.34",
                        "command": [
                            "/nsenter", "--all", "--target=1", "--", "su", "-"
                        ],
                        "stdin": true,
                        "tty": true,
                        "securityContext": {
                            "privileged": true
                        },
                        "resources": {
                            "requests": {
                            "cpu": "10m"
                            }
                        }
                        }
                    ]
                }
            }"""
                % node_name
            )
            command = "kubectl run %s --restart=Never --image " "overriden --overrides '%s'" % (
                pod_name,
                container_definition,
            )
            ret, out, err = self._run_shell(command)
            if ret != 0:
                return ret, out, err
        # Execute the command inside the helper pod.
        return self._run_shell("kubectl exec %s -- %s" % (pod_name, command_to_execute))
| 41.976923 | 116 | 0.560473 |
acf3dffaf01b7a4ff4ca66444b53bd8870914539 | 19,258 | py | Python | nettests/experimental/bridge_reachability/bridget.py | meejah/ooni-probe | f46dc5879da409763718cfa5aa2635ddf5332a54 | [
"BSD-2-Clause-FreeBSD"
] | 24 | 2018-12-23T12:53:57.000Z | 2022-01-19T19:50:53.000Z | ooni/nettests/experimental/bridge_reachability/bridget.py | Acidburn0zzz/ooni-probe | 22720e383093c0d30fd1b6ca44794d3b29031f91 | [
"BSD-2-Clause"
] | 64 | 2018-09-13T08:26:20.000Z | 2022-02-11T03:48:34.000Z | ooni/nettests/experimental/bridge_reachability/bridget.py | Acidburn0zzz/ooni-probe | 22720e383093c0d30fd1b6ca44794d3b29031f91 | [
"BSD-2-Clause"
] | 9 | 2019-06-10T01:41:48.000Z | 2022-01-08T08:46:25.000Z | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# +-----------+
# | BRIDGET |
# | +--------------------------------------------+
# +--------| Use a Tor process to test making a Tor |
# | connection to a list of bridges or relays. |
# +--------------------------------------------+
#
# :authors: Isis Lovecruft, Arturo Filasto
# :licence: see included LICENSE
# :version: 0.1.0-alpha
from __future__ import with_statement
from functools import partial
from random import randint
import os
import sys
from twisted.python import usage
from twisted.internet import defer, error, reactor
from ooni import nettest
from ooni.utils import log, date
from ooni.utils.config import ValueChecker
from ooni.utils.onion import TxtorconImportError
from ooni.utils.onion import PTNoBridgesException, PTNotFoundException
try:
from ooni.utils.onion import parse_data_dir
except:
log.msg("Please go to /ooni/lib and do 'make txtorcon' to run this test!")
class MissingAssetException(Exception):
    """Raised when neither a bridge file nor a relay file was supplied."""
class RandomPortException(Exception):
    """Raised when using a random port conflicts with configured ports."""
    # NOTE(review): constructing this exception terminates the process:
    # sys.exit() raises SystemExit inside __init__, so a plain
    # ``raise RandomPortException`` propagates SystemExit, never this type.
    # The ``return`` of sys.exit()'s result is unreachable in practice (and
    # __init__ returning non-None would be a TypeError anyway) — confirm
    # whether exiting here is really intended before relying on it.
    def __init__(self):
        log.msg("Unable to use random and specific ports simultaneously")
        return sys.exit()
class BridgetArgs(usage.Options):
    """Commandline options."""
    # %s is filled with the option name so both checkers share one message.
    allowed = "Port to use for Tor's %s, must be between 1024 and 65535."
    sock_check = ValueChecker(allowed % "SocksPort").port_check
    ctrl_check = ValueChecker(allowed % "ControlPort").port_check

    # twisted usage.Options format: [long, short, default, description]
    # or [long, short, default, description, coerce-callable].
    optParameters = [
        ['bridges', 'b', None,
         'File listing bridge IP:ORPorts to test'],
        ['relays', 'f', None,
         'File listing relay IPs to test'],
        ['socks', 's', 9049, None, sock_check],
        ['control', 'c', 9052, None, ctrl_check],
        ['torpath', 'p', None,
         'Path to the Tor binary to use'],
        ['datadir', 'd', None,
         'Tor DataDirectory to use'],
        ['transport', 't', None,
         'Tor ClientTransportPlugin'],
        ['resume', 'r', 0,
         'Resume at this index']]
    optFlags = [['random', 'x', 'Use random ControlPort and SocksPort']]

    def postOptions(self):
        # At least one asset file (bridges or relays) is mandatory.
        if not self['bridges'] and not self['relays']:
            raise MissingAssetException(
                "Bridget can't run without bridges or relays to test!")
        if self['transport']:
            # Pluggable transports must not run as root and need bridges.
            ValueChecker.uid_check(
                "Can't run bridget as root with pluggable transports!")
            if not self['bridges']:
                raise PTNoBridgesException
        # NOTE(review): 'socks' and 'control' have non-None defaults above,
        # so this condition is always truthy and --random always conflicts —
        # confirm whether that is the intended behaviour.
        if self['socks'] or self['control']:
            if self['random']:
                raise RandomPortException
        if self['datadir']:
            ValueChecker.dir_check(self['datadir'])
        if self['torpath']:
            ValueChecker.file_check(self['torpath'])
class BridgetTest(nettest.NetTestCase):
    """
    Test making a Tor connection to a list of bridges and/or relays, using
    a single child Tor process that is reconfigured (SETCONF/SIGHUP) for
    each candidate.

    :ivar config:
        An :class:`ooni.lib.txtorcon.TorConfig` instance.
    :ivar relays:
        A list of all provided relays to test.
    :ivar bridges:
        A list of all provided bridges to test.
    :ivar socks_port:
        Integer for Tor's SocksPort.
    :ivar control_port:
        Integer for Tor's ControlPort.
    :ivar transport:
        String defining the Tor's ClientTransportPlugin, for testing
        a bridge's pluggable transport functionality.
    :ivar tor_binary:
        Path to the Tor binary to use, e.g. \'/usr/sbin/tor\'
    """
    name = "bridget"
    author = "Isis Lovecruft <isis@torproject.org>"
    version = "0.1"
    description = "Use a Tor process to test connecting to bridges or relays"
    usageOptions = BridgetArgs
    def setUp(self):
        """
        Extra initialization steps. We only want one child Tor process
        running, so we need to deal with most of the TorConfig() only once,
        before the experiment runs.
        """
        # Defaults; may be overridden below from self.localOptions.
        self.socks_port = 9049
        self.control_port = 9052
        self.circuit_timeout = 90
        self.tor_binary = '/usr/sbin/tor'
        self.data_directory = None
        def read_from_file(filename):
            # Read one entry per line, skipping '#' comment lines.
            # NOTE(review): 'opt' is undefined in this scope -- this log
            # call would raise NameError; it presumably meant 'filename'.
            log.msg("Loading information from %s ..." % opt)
            with open(filename) as fp:
                lst = []
                for line in fp.readlines():
                    if line.startswith('#'):
                        continue
                    else:
                        lst.append(line.replace('\n',''))
                return lst
        def __count_remaining__(which):
            # remaining = total - reachable() - unreachable(); 'reachable'
            # and 'unreachable' are stored as callables in the dict.
            total, reach, unreach = map(lambda x: which[x],
                                        ['all', 'reachable', 'unreachable'])
            count = len(total) - reach() - unreach()
            return count
        ## XXX should we do report['bridges_up'].append(self.bridges['current'])
        # Bridge bookkeeping: lists of all/up/down plus helper callables.
        self.bridges = {}
        self.bridges['all'], self.bridges['up'], self.bridges['down'] = \
            ([] for i in range(3))
        self.bridges['reachable'] = lambda: len(self.bridges['up'])
        self.bridges['unreachable'] = lambda: len(self.bridges['down'])
        self.bridges['remaining'] = lambda: __count_remaining__(self.bridges)
        self.bridges['current'] = None
        self.bridges['pt_type'] = None
        self.bridges['use_pt'] = False
        # Relay bookkeeping, mirroring the bridge dict (minus PT fields).
        self.relays = {}
        self.relays['all'], self.relays['up'], self.relays['down'] = \
            ([] for i in range(3))
        self.relays['reachable'] = lambda: len(self.relays['up'])
        self.relays['unreachable'] = lambda: len(self.relays['down'])
        self.relays['remaining'] = lambda: __count_remaining__(self.relays)
        self.relays['current'] = None
        if self.localOptions:
            try:
                from txtorcon import TorConfig
            except ImportError:
                raise TxtorconImportError
            else:
                self.config = TorConfig()
            finally:
                # NOTE(review): this finally-block runs even when the
                # import failed and TxtorconImportError is propagating;
                # in that case self.config does not exist.
                options = self.localOptions
                if options['bridges']:
                    self.config.UseBridges = 1
                    self.bridges['all'] = read_from_file(options['bridges'])
                if options['relays']:
                    ## first hop must be in TorState().guards
                    # XXX where is this defined?
                    # NOTE(review): 'relay_list' is undefined here
                    # (NameError when --relays is used); likely meant the
                    # list read on the next line.
                    self.config.EntryNodes = ','.join(relay_list)
                    self.relays['all'] = read_from_file(options['relays'])
                if options['socks']:
                    self.socks_port = options['socks']
                if options['control']:
                    self.control_port = options['control']
                if options['random']:
                    # NOTE(review): randint is inclusive, so 2**16 (65536)
                    # can exceed the maximum valid port 65535.
                    log.msg("Using randomized ControlPort and SocksPort ...")
                    self.socks_port = randint(1024, 2**16)
                    self.control_port = randint(1024, 2**16)
                if options['torpath']:
                    self.tor_binary = options['torpath']
                if options['datadir']:
                    self.data_directory = parse_data_dir(options['datadir'])
                if options['transport']:
                    ## ClientTransportPlugin transport exec pathtobinary [options]
                    ## XXX we need a better way to deal with all PTs
                    log.msg("Using ClientTransportPlugin %s" % options['transport'])
                    self.bridges['use_pt'] = True
                    [self.bridges['pt_type'], pt_exec] = \
                        options['transport'].split(' ', 1)
                    # Only obfs2 is supported at the moment.
                    if self.bridges['pt_type'] == "obfs2":
                        self.config.ClientTransportPlugin = \
                            self.bridges['pt_type'] + " " + pt_exec
                    else:
                        raise PTNotFoundException
                self.config.SocksPort = self.socks_port
                self.config.ControlPort = self.control_port
                self.config.CookieAuthentication = 1
    def test_bridget(self):
        """
        if bridges:
            1. configure first bridge line
            2a. configure data_dir, if it doesn't exist
            2b. write torrc to a tempfile in data_dir
            3. start tor } if any of these
            4. remove bridges which are public relays } fail, add current
            5. SIGHUP for each bridge } bridge to unreach-
                                      } able bridges.
        if relays:
            1a. configure the data_dir, if it doesn't exist
            1b. write torrc to a tempfile in data_dir
            2. start tor
            3. remove any of our relays which are already part of current
               circuits
            4a. attach CustomCircuit() to self.state
            4b. RELAY_EXTEND for each relay } if this fails, add
                                            } current relay to list
                                            } of unreachable relays
            5.
        if bridges and relays:
            1. configure first bridge line
            2a. configure data_dir if it doesn't exist
            2b. write torrc to a tempfile in data_dir
            3. start tor
            4. remove bridges which are public relays
            5. remove any of our relays which are already part of current
               circuits
            6a. attach CustomCircuit() to self.state
            6b. for each bridge, build three circuits, with three
                relays each
            6c. RELAY_EXTEND for each relay } if this fails, add
                                            } current relay to list
                                            } of unreachable relays
        :param args:
            The :class:`BridgetAsset` line currently being used. Except that it
            in Bridget it doesn't, so it should be ignored and avoided.
        """
        try:
            from ooni.utils import process
            from ooni.utils.onion import remove_public_relays, start_tor
            from ooni.utils.onion import start_tor_filter_nodes
            from ooni.utils.onion import setup_fail, setup_done
            from ooni.utils.onion import CustomCircuit
            from ooni.utils.timer import deferred_timeout, TimeoutError
            from ooni.lib.txtorcon import TorConfig, TorState
        except ImportError:
            raise TxtorconImportError
        except TxtorconImportError, tie:
            log.err(tie)
            sys.exit()
        def reconfigure_done(state, bridges):
            """
            Append :ivar:`bridges['current']` to the list
            :ivar:`bridges['up'].
            """
            log.msg("Reconfiguring with 'Bridge %s' successful"
                    % bridges['current'])
            bridges['up'].append(bridges['current'])
            return state
        def reconfigure_fail(state, bridges):
            """
            Append :ivar:`bridges['current']` to the list
            :ivar:`bridges['down'].
            """
            log.msg("Reconfiguring TorConfig with parameters %s failed"
                    % state)
            bridges['down'].append(bridges['current'])
            return state
        @defer.inlineCallbacks
        def reconfigure_bridge(state, bridges):
            """
            Rewrite the Bridge line in our torrc. If use of pluggable
            transports was specified, rewrite the line as:
                Bridge <transport_type> <IP>:<ORPort>
            Otherwise, rewrite in the standard form:
                Bridge <IP>:<ORPort>
            :param state:
                A fully bootstrapped instance of
                :class:`ooni.lib.txtorcon.TorState`.
            :param bridges:
                A dictionary of bridges containing the following keys:
                bridges['remaining'] :: A function returning and int for the
                                        number of remaining bridges to test.
                bridges['current'] :: A string containing the <IP>:<ORPort>
                                      of the current bridge.
                bridges['use_pt'] :: A boolean, True if we're testing
                                     bridges with a pluggable transport;
                                     False otherwise.
                bridges['pt_type'] :: If :ivar:`bridges['use_pt'] is True,
                                      this is a string containing the type
                                      of pluggable transport to test.
            :return:
                :param:`state`
            """
            log.msg("Current Bridge: %s" % bridges['current'])
            log.msg("We now have %d bridges remaining to test..."
                    % bridges['remaining']())
            try:
                if bridges['use_pt'] is False:
                    controller_response = yield state.protocol.set_conf(
                        'Bridge', bridges['current'])
                elif bridges['use_pt'] and bridges['pt_type'] is not None:
                    # NOTE(review): 'controller_reponse' is misspelled, so
                    # the check below reads an unbound name in the PT
                    # branch; the resulting NameError is swallowed by the
                    # broad except, misreporting PT bridges as failures.
                    controller_reponse = yield state.protocol.set_conf(
                        'Bridge', bridges['pt_type'] +' '+ bridges['current'])
                else:
                    raise PTNotFoundException
                if controller_response == 'OK':
                    finish = yield reconfigure_done(state, bridges)
                else:
                    log.err("SETCONF for %s responded with error:\n %s"
                            % (bridges['current'], controller_response))
                    finish = yield reconfigure_fail(state, bridges)
                defer.returnValue(finish)
            except Exception, e:
                log.err("Reconfiguring torrc with Bridge line %s failed:\n%s"
                        % (bridges['current'], e))
                defer.returnValue(None)
        def attacher_extend_circuit(attacher, deferred, router):
            ## XXX todo write me
            ## state.attacher.extend_circuit
            raise NotImplemented
            #attacher.extend_circuit
        def state_attach(state, path):
            log.msg("Setting up custom circuit builder...")
            attacher = CustomCircuit(state)
            state.set_attacher(attacher, reactor)
            state.add_circuit_listener(attacher)
            return state
            ## OLD
            #for circ in state.circuits.values():
            #    for relay in circ.path:
            #        try:
            #            relay_list.remove(relay)
            #        except KeyError:
            #            continue
            ## XXX how do we attach to circuits with bridges?
            # NOTE(review): unreachable -- the 'return state' above exits
            # before this deferred is ever built.
            d = defer.Deferred()
            attacher.request_circuit_build(d)
            return d
        def state_attach_fail(state):
            log.err("Attaching custom circuit builder failed: %s" % state)
        log.msg("Bridget: initiating test ... ")     ## Start the experiment
        ## if we've at least one bridge, and our config has no 'Bridge' line
        if self.bridges['remaining']() >= 1 \
                and not 'Bridge' in self.config.config:
            ## configure our first bridge line
            self.bridges['current'] = self.bridges['all'][0]
            self.config.Bridge = self.bridges['current']
            ## avoid starting several
            self.config.save()                       ## processes
            assert self.config.config.has_key('Bridge'), "No Bridge Line"
            ## start tor and remove bridges which are public relays
            from ooni.utils.onion import start_tor_filter_nodes
            state = start_tor_filter_nodes(reactor, self.config,
                                           self.control_port, self.tor_binary,
                                           self.data_directory, self.bridges)
            #controller = defer.Deferred()
            #controller.addCallback(singleton_semaphore, tor)
            #controller.addErrback(setup_fail)
            #bootstrap = defer.gatherResults([controller, filter_bridges],
            #                                consumeErrors=True)
            if state is not None:
                log.debug("state:\n%s" % state)
                log.debug("Current callbacks on TorState():\n%s"
                          % state.callbacks)
        ## if we've got more bridges
        if self.bridges['remaining']() >= 2:
            #all = []
            # Chain one reconfiguration callback per remaining bridge.
            for bridge in self.bridges['all'][1:]:
                self.bridges['current'] = bridge
                #new = defer.Deferred()
                #new.addCallback(reconfigure_bridge, state, self.bridges)
                #all.append(new)
                #check_remaining = defer.DeferredList(all, consumeErrors=True)
                #state.chainDeferred(check_remaining)
                state.addCallback(reconfigure_bridge, self.bridges)
        if self.relays['remaining']() > 0:
            while self.relays['remaining']() >= 3:
                #path = list(self.relays.pop() for i in range(3))
                #log.msg("Trying path %s" % '->'.join(map(lambda node:
                #                                         node, path)))
                self.relays['current'] = self.relays['all'].pop()
                for circ in state.circuits.values():
                    for node in circ.path:
                        if node == self.relays['current']:
                            self.relays['up'].append(self.relays['current'])
                    if len(circ.path) < 3:
                        try:
                            ext = attacher_extend_circuit(state.attacher, circ,
                                                          self.relays['current'])
                            ext.addCallback(attacher_extend_circuit_done,
                                            state.attacher, circ,
                                            self.relays['current'])
                        except Exception, e:
                            log.err("Extend circuit failed: %s" % e)
                    else:
                        continue
        #state.callback(all)
        #self.reactor.run()
        return state
    def disabled_startTest(self, args):
        """
        Local override of :meth:`OONITest.startTest` to bypass calling
        self.control.
        :param args:
            The current line of :class:`Asset`, not used but kept for
            compatibility reasons.
        :return:
            A fired deferred which callbacks :meth:`experiment` and
            :meth:`OONITest.finished`.
        """
        self.start_time = date.now()
        self.d = self.experiment(args)
        self.d.addErrback(log.err)
        self.d.addCallbacks(self.finished, log.err)
        return self.d
## ISIS' NOTES
## -----------
## TODO:
## x cleanup documentation
## x add DataDirectory option
## x check if bridges are public relays
## o take bridge_desc file as input, also be able to give same
## format as output
## x Add asynchronous timeout for deferred, so that we don't wait
## o Add assychronous timout for deferred, so that we don't wait
## forever for bridges that don't work.
| 41.593952 | 81 | 0.529442 |
acf3e202292bb337adc65c2ca0b965d368293c72 | 9,488 | py | Python | molyso/mm/cell_detection.py | csachs/molyso | 9ee9eaef5751e6e53c2673b62c94c8ddf5df692e | [
"BSD-2-Clause"
] | 9 | 2016-12-14T11:33:18.000Z | 2021-12-15T01:47:55.000Z | molyso/mm/cell_detection.py | csachs/molyso | 9ee9eaef5751e6e53c2673b62c94c8ddf5df692e | [
"BSD-2-Clause"
] | 4 | 2016-11-16T11:29:26.000Z | 2020-03-23T11:35:53.000Z | molyso/mm/cell_detection.py | csachs/molyso | 9ee9eaef5751e6e53c2673b62c94c8ddf5df692e | [
"BSD-2-Clause"
] | 5 | 2016-05-27T11:13:02.000Z | 2021-07-12T16:26:02.000Z | # -*- coding: utf-8 -*-
"""
documentation
"""
from __future__ import division, unicode_literals, print_function
import numpy as np
from ..generic.otsu import threshold_otsu
from ..generic.signal import hamming_smooth, simple_baseline_correction, find_extrema_and_prominence, \
vertical_mean, threshold_outliers
from ..debugging import DebugPlot
from ..generic.tunable import tunable
class Cell(object):
    """
    A single detected cell inside a channel.

    Vertical positions are stored relative to the owning channel
    (``local_top`` / ``local_bottom``); the ``top`` / ``bottom`` properties
    translate them into absolute coordinates on the rotated image by adding
    the channel's own top offset.

    :param top: coordinate of the 'top' of the cell, in channel coordinates
    :param bottom: coordinate of the 'bottom' of the cell, in channel coordinates
    :param channel: Channel object the cell belongs to
    """
    __slots__ = ['local_top', 'local_bottom', 'channel']

    def __init__(self, top, bottom, channel):
        # Channel-relative coordinates, normalized to float.
        self.local_top, self.local_bottom = float(top), float(bottom)
        self.channel = channel

    @property
    def top(self):
        """Absolute (rotated-image) coordinate of the cell top."""
        return self.local_top + self.channel.top

    @property
    def bottom(self):
        """Absolute (rotated-image) coordinate of the cell bottom."""
        return self.local_bottom + self.channel.top

    @property
    def length(self):
        """Cell length (absolute distance between top and bottom)."""
        return abs(self.bottom - self.top)

    @property
    def centroid_1d(self):
        """One-dimensional centroid (absolute coordinate), as a float."""
        return 0.5 * (self.top + self.bottom)

    @property
    def centroid(self):
        """Two-dimensional centroid ``[channel x, absolute y]`` as a list."""
        return [self.channel.centroid[0], self.centroid_1d]

    @property
    def cell_image(self):
        """This cell's image, cropped out of the channel image (ndarray)."""
        return self.crop_out_of_channel_image(self.channel.channel_image)

    def crop_out_of_channel_image(self, channel_image):
        """
        Crop this cell's rows out of the given channel image.

        Used internally for :py:meth:`Cell.cell_image`, and to crop cells
        out of fluorescence channel images.

        :param channel_image: 2D array in channel coordinates
        :type channel_image: numpy.ndarray
        :return: the sub-image covering this cell
        :rtype: numpy.ndarray
        """
        start, stop = int(self.local_top), int(self.local_bottom)
        return channel_image[start:stop, :]

    def __lt__(self, other_cell):
        # Cells order by their channel-relative top coordinate.
        return self.local_top < other_cell.local_top
class Cells(object):
    """
    A Cells object, a collection of Cell objects.

    When ``bootstrap`` is True, cells are detected in the channel image and
    filtered by the configured minimum physical length.
    """
    __slots__ = ['cells_list', 'channel', 'nearest_tree']
    cell_type = Cell
    def __init__(self, channel, bootstrap=True):
        self.cells_list = []
        self.channel = channel
        self.nearest_tree = None
        # bootstrap=False yields an empty container (e.g. for manual filling).
        if not bootstrap:
            return
        for b, e in find_cells_in_channel(self.channel.channel_image):
            # ... this is the actual minimal size filtering
            if self.channel.image.mu_to_pixel(
                    tunable('cells.minimal_length.in_mu', 1.0,
                            description="The minimal allowed cell size (Smaller cells will be filtered out).")
            ) < e - b:
                self.cells_list.append(self.__class__.cell_type(b, e, self.channel))
    def __len__(self):
        return len(self.cells_list)
    def __iter__(self):
        return iter(self.cells_list)
    def clean(self):
        """
        Performs clean-up. Intentionally a no-op here; subclasses may override.
        """
        pass
    @property
    def centroids(self):
        """
        Returns the centroids of the cells.
        :return: centroids, one ``[x, y]`` pair per cell
        :rtype: list
        """
        return [cell.centroid for cell in self.cells_list]
def find_cells_in_channel(image):
    """
    Dispatch cell detection on a channel image to the configured method.

    :param image: channel image to search for cells
    :return: list of ``[begin, end]`` cell positions
    :raises RuntimeError: if an unknown detection method is configured
    """
    method = tunable('cells.detectionmethod', 'classic', description="Cell detection method to use.")
    if method != 'classic':
        raise RuntimeError('Unsupported cell detection method passed.')
    return find_cells_in_channel_classic(image)
def find_cells_in_channel_classic(image):
    # processing is as always mainly performed on the intensity profile
    """
    Detect cells in a channel image via its vertical intensity profile.

    The profile is baseline-corrected and smoothed, local extrema are
    located, and pairs of maxima are accepted as cell boundaries based on
    darkness (Otsu binarization) and prominence criteria.

    :param image: 2D channel image
    :return: list of ``[begin, end]`` positions of detected cells
    """
    profile = vertical_mean(image)
    # empty channel detection
    thresholded_profile = threshold_outliers(
        profile,
        tunable('cells.empty_channel.skipping.outlier_times_sigma', 2.0,
                description="For empty channel detection, maximum sigma used for thresholding the profile."
                )
    )
    if tunable('cells.empty_channel.skipping', False,
               description="For empty channel detection, whether it is enabled."):
        # if active, a non-empty channel must have a certain dynamic range min/max
        if ((thresholded_profile.max() - thresholded_profile.min()) / thresholded_profile.max()) < \
                tunable(
                    'cells.empty_channel.skipping.intensity_range_quotient',
                    0.5,
                    description="For empty channel detection, the minimum relative difference between max and min."):
            return []
    # for cell detection, another intensity profile based on an Otsu binarization is used as well
    binary_image = image > (threshold_otsu(image) *
                            tunable(
                                'cells.otsu_bias',
                                1.0,
                                description="Bias factor for the cell detection Otsu image."
                            ))
    profile_of_binary_image = vertical_mean(binary_image.astype(float))
    # the profile is first baseline corrected and smoothed ...
    profile = simple_baseline_correction(profile)
    profile = hamming_smooth(profile, tunable(
        'cells.smoothing.length',
        10,
        description="Length of smoothing Hamming window for cell detection."))
    # the the smoothing steps above seem to subtly change the profile
    # in a python2 vs. python3 different way
    # thus we round them to get a reproducible workflow
    profile = profile.round(8)
    # ... then local extrema are searched
    extrema = find_extrema_and_prominence(
        profile,
        order=tunable(
            'cells.extrema.order',
            15,
            description="For cell detection, window width of the local extrema detector."
        )
    )
    # based on the following filter function,
    # it will be decided whether a pair of extrema marks a cell or not
    # #1# size must be larger than zero
    # #2# the cell must have a certain 'blackness' (based on the Otsu binarization)
    # #3# the cell must have a certain prominence (difference from background brightness)
    # please note, while #1# looks like the minimum size criterion as described in the paper,
    # it is just a pre-filter, the actual minimal size filtering is done in the Cells class!
    # that way, the cell detection routine here is independent of more mundane aspects like calibration,
    # and changes in cell detection routine will still profit from the size-postprocessing
    def is_a_cell(last_pos, pos):
        """
        Decide whether the span ``(last_pos, pos)`` satisfies the cell
        criteria #1#-#3# described above.

        :param last_pos: left boundary index in the profile
        :param pos: right boundary index in the profile
        :return: True if the span qualifies as a cell
        """
        return \
            pos - last_pos > 2 and \
            profile_of_binary_image[last_pos:pos].mean() < \
            tunable('cells.filtering.maximum_brightness', 0.5,
                    description="For cell detection, maximum brightness a cell may have.") and \
            extrema.prominence[last_pos:pos].mean() > \
            tunable('cells.filtering.minimum_prominence', 10.0,
                    description="For cell detection, minimum prominence a cell must have.")
    # possible positions are constructed, and a cell list is generated by checking them with the is_a_cell function
    if tunable('cells.split.use_brightness', 0, description="For cell splitting, use threshold") == 0:
        positions = [_pos for _pos in extrema.maxima if extrema.prominence[_pos] > 0] + [profile.size]
    else:
        # brightness-based splitting: only keep maxima bright enough to split at
        positions = [_pos for _pos in extrema.maxima if extrema.prominence[_pos] > 0]
        positions = [_pos for _pos in positions if
                     profile_of_binary_image[_pos] >
                     tunable('cells.split.minimum_brightness', 0.5,
                             description="For cell detection, minimum brightness a split point must have.")
                     ]
        positions = [0] + positions + [profile.size]
    # pair up consecutive candidate positions; shrink each span by one pixel
    # on both sides to exclude the boundary maxima themselves
    cells = [
        [_last_pos + 1, _pos - 1] for _last_pos, _pos in zip([0] + positions, positions)
        if is_a_cell(_last_pos, _pos)
    ]
    with DebugPlot('cell_detection', 'channel', 'graph') as p:
        p.title("Cell detection")
        p.imshow(np.transpose(image), aspect='auto', extent=(0, image.shape[0], 10 * image.shape[1], 0))
        p.imshow(np.transpose(binary_image), aspect='auto', extent=(0, image.shape[0], 0, -10 * image.shape[1]))
        p.plot(profile)
        p.plot(thresholded_profile)
        cell_lines = [__pos for __pos in cells for __pos in __pos]
        p.vlines(cell_lines,
                 [image.shape[1] * -10] * len(cell_lines),
                 [image.shape[1] * 10] * len(cell_lines),
                 colors='yellow')
    return cells
| 33.174825 | 117 | 0.619414 |
acf3e2333e941accdeb12af5fe26981aa0beaf4f | 5,077 | py | Python | lifelogger/events/models.py | radusqrt/lifelogger | 0334464e1a01dd17cc34da94e2f0da91eadde697 | [
"MIT"
] | null | null | null | lifelogger/events/models.py | radusqrt/lifelogger | 0334464e1a01dd17cc34da94e2f0da91eadde697 | [
"MIT"
] | null | null | null | lifelogger/events/models.py | radusqrt/lifelogger | 0334464e1a01dd17cc34da94e2f0da91eadde697 | [
"MIT"
] | null | null | null | from django.db import models
class Sleep(models.Model):
    """One recorded sleep session with optional phase breakdown."""
    # Keys
    sleep_id = models.AutoField(primary_key=True)
    # Attributes
    start_time = models.DateTimeField()
    end_time = models.DateTimeField()
    # Phase durations; unit not stated in the model -- presumably minutes,
    # TODO confirm against the data source.
    deep_period = models.IntegerField(null=True)
    rem_period = models.IntegerField(null=True)
    light_period = models.IntegerField(null=True)
    awake_period = models.IntegerField(null=True)
    def __str__(self):
        return "[Sleep: id: {0}, start_time: {1} end_time: {2} deep_period: {3} rem_period: {4} light_period: {5} awake_period: {6}]".format(
            self.sleep_id,
            self.start_time,
            self.end_time,
            self.deep_period,
            self.rem_period,
            self.light_period,
            self.awake_period,
        )
class SymptomGroup(models.Model):
    """A set of self-reported symptoms attached to one point in time."""
    # Keys
    symptom_group_id = models.AutoField(primary_key=True)
    # Attributes
    target_time = models.DateTimeField()
    has_ectopic_beats = models.BooleanField()
    has_dizziness = models.BooleanField()
    has_chest_pain = models.BooleanField()
    # BUGFIX: this previously read "has_shortness_of_breath: models.BooleanField()"
    # (a bare annotation, not an assignment), so Django never created the
    # field and __str__ raised AttributeError.
    has_shortness_of_breath = models.BooleanField()
    def __str__(self):
        return "[SymptomGroup: id: {0}, target_time: {1}, has_ectopic_beats: {2}, has_dizziness: {3}, has_chest_pain: {4}, has_shortness_of_breath: {5}]".format(
            self.symptom_group_id,
            self.target_time,
            self.has_ectopic_beats,
            self.has_dizziness,
            self.has_chest_pain,
            self.has_shortness_of_breath,
        )
class SportType(models.TextChoices):
    """Closed set of sport categories stored on :class:`Sport`."""
    WORKOUT = "WORKOUT"
    CARDIO = "CARDIO"
class Sport(models.Model):
    """One sport session (workout/cardio) with optional calorie count."""
    # Keys
    sport_id = models.AutoField(primary_key=True)
    # Attribute
    start_time = models.DateTimeField()
    end_time = models.DateTimeField()
    type = models.CharField(choices=SportType.choices, max_length=20, null=True)
    calories = models.IntegerField(null=True)
    def __str__(self):
        return "[Sport: id: {0}, start_time: {1}, end_time: {2}, type: {3}, calories: {4}]".format(
            self.sport_id, self.start_time, self.end_time, self.type, self.calories
        )
    class Meta:
        constraints = [
            models.CheckConstraint(
                # BUGFIX: Django's placeholder syntax is "%(app_label)s";
                # the original "$(app_label)s" was a literal (shell-style)
                # typo that ends up verbatim in the constraint name.
                name="%(app_label)s_%(class)s_type_valid",
                check=models.Q(type__in=SportType.values),
            )
        ]
class Recipe(models.Model):
    """A named recipe with its ingredient list stored as JSON."""
    # Keys
    recipe_id = models.AutoField(primary_key=True)
    # Attributes
    # JSON structure of 'ingredients' is not constrained here -- presumably
    # a list/mapping of ingredient entries; verify against the writers.
    ingredients = models.JSONField()
    name = models.CharField(max_length=50)
    def __str__(self):
        return "[Recipe: name: {0}, ingredients: {1}, id: {2}]".format(
            self.name, self.ingredients, self.recipe_id
        )
class Food(models.Model):
    """A food intake event, either referencing a recipe or ad-hoc ingredients."""
    # Keys
    food_id = models.AutoField(
        primary_key=True,
    )
    # Optional link to a recipe; deleting the recipe deletes this record.
    recipe = models.ForeignKey(
        Recipe,
        on_delete=models.CASCADE,
        null=True,
        blank=True,
    )
    start_time = models.DateTimeField(
        null=False,
    )
    # Attributes
    # Free-form ingredients for meals not backed by a recipe.
    ingredients = models.JSONField(null=True, blank=True)
    def __str__(self):
        return "[Food: id: {0}, recipe: {1}, ingredients: {2}]".format(
            self.food_id, self.recipe, self.ingredients
        )
class ActivityType(models.TextChoices):
    """Closed set of activity categories stored on :class:`Activity`."""
    MEDITATION = "MEDITATION"
class Activity(models.Model):
    """A generic timed activity (e.g. meditation)."""
    # Keys
    activity_id = models.AutoField(primary_key=True)
    # Attributes
    start_time = models.DateTimeField(null=True)
    end_time = models.DateTimeField(null=True)
    # BUGFIX: choices previously referenced SportType.choices (copy-paste
    # from Sport); ActivityType is defined directly above and was otherwise
    # unused -- it is clearly the intended choice set. Choices only affect
    # validation/forms, not the database schema, so this is backward-safe.
    type = models.CharField(choices=ActivityType.choices, max_length=20, null=True)
class EventType(models.TextChoices):
    """Discriminator for which sub-record an :class:`Event` points to."""
    SLEEP = "SLEEP"
    SYMPTOM_GROUP = "SYMPTOM_GROUP"
    SPORT = "SPORT"
    FOOD = "FOOD"
    ACTIVITY = "ACTIVITY"
class Event(models.Model):
    """
    The central event record: exactly one of the foreign keys below is
    expected to be set, as indicated by ``type`` (see :class:`EventType`).
    """
    # Keys
    event_id = models.AutoField(primary_key=True)
    sleep = models.ForeignKey(Sleep, on_delete=models.CASCADE, null=True, blank=True)
    symptom_group = models.ForeignKey(
        SymptomGroup, on_delete=models.CASCADE, null=True, blank=True
    )
    sport = models.ForeignKey(Sport, on_delete=models.CASCADE, null=True, blank=True)
    food = models.ForeignKey(Food, on_delete=models.CASCADE, null=True, blank=True)
    activity = models.ForeignKey(
        Activity, on_delete=models.CASCADE, null=True, blank=True
    )
    # Attributes
    creation_time = models.DateTimeField()
    type = models.CharField(
        choices=EventType.choices,
        max_length=20,
    )
    def __str__(self) -> str:
        # NOTE: 'activity' is not included in the representation; kept
        # as-is to preserve the existing output format.
        return "[Event: id: {0}, creation_time: {1}, type: {2}, sleep: {3}, symptom_group: {4}, sport: {5}, food: {6}]".format(
            self.event_id,
            self.creation_time,
            self.type,
            self.sleep,
            self.symptom_group,
            self.sport,
            self.food,
        )
    class Meta:
        constraints = [
            models.CheckConstraint(
                # BUGFIX: "%(app_label)s" is the Django placeholder syntax;
                # the original "$(app_label)s" was a literal typo.
                name="%(app_label)s_%(class)s_type_valid",
                check=models.Q(type__in=EventType.values),
            )
        ]
acf3e290c0dc5c5579016ed92b4a528c93eaed79 | 6,613 | py | Python | tests/usecase/test_access_restrictions.py | diogomatsubara/cf-mendix-buildpack | 48ab483585785e55dc7b94e2a8017fa70f0ef4a9 | [
"Apache-2.0"
] | null | null | null | tests/usecase/test_access_restrictions.py | diogomatsubara/cf-mendix-buildpack | 48ab483585785e55dc7b94e2a8017fa70f0ef4a9 | [
"Apache-2.0"
] | null | null | null | tests/usecase/test_access_restrictions.py | diogomatsubara/cf-mendix-buildpack | 48ab483585785e55dc7b94e2a8017fa70f0ef4a9 | [
"Apache-2.0"
] | 1 | 2019-03-25T07:57:57.000Z | 2019-03-25T07:57:57.000Z | import basetest
import requests
import json
# Paths inside the deployed sample app, one per access-restriction scenario
# exercised by the test below.
BLOCK_ALL = "/widgets/GoogleMaps/"
BLOCK_ALL_BUT_SUB_PATH_WIDE_OPEN = "/widgets/GoogleMaps/widget/template/"
MY_IP_FILTER = "/widgets/GeoLocationForPhoneGap/"
OTHER_IP_FILTER = "/widgets/ProfileMenu/"
BASIC_AUTH = "/widgets/CameraWidgetForPhoneGap/widget/ui/"
BASIC_AUTH_AND_MY_IP_FILTER = "/styles/sass/lib/buildingblocks/"
# NOTE(review): trailing space in this path looks accidental -- confirm it
# is intentional before relying on it (it is used verbatim as a key below).
BASIC_AUTH_AND_OTHER_IP_FILTER = "/styles/sass/lib/components/ "
BASIC_AUTH_OR_MY_IP_FILTER = "/styles/sass/lib/base/"
BASIC_AUTH_OR_OTHER_IP_FILTER = "/styles/sass/custom/pagetemplates/tablet/"
# Concrete resources under each restricted path, fetched by the test.
BLOCK_ALL_RESOURCE = BLOCK_ALL + "GoogleMaps.xml"
BLOCK_ALL_BUT_SUB_PATH_WIDE_OPEN_RESOURCE = (
    BLOCK_ALL_BUT_SUB_PATH_WIDE_OPEN + "GoogleMaps.html"
)
MY_IP_FILTER_RESOURCE = MY_IP_FILTER + "GeoLocationForPhoneGap.xml"
OTHER_IP_FILTER_RESOURCE = OTHER_IP_FILTER + "ProfileMenu.js"
BASIC_AUTH_RESOURCE = BASIC_AUTH + "CameraWidgetForPhoneGap.css"
BASIC_AUTH_AND_MY_IP_FILTER_RESOURCE = (
    BASIC_AUTH_AND_MY_IP_FILTER + "_wizard.scss"
)
BASIC_AUTH_AND_OTHER_IP_FILTER_RESOURCE = (
    BASIC_AUTH_AND_OTHER_IP_FILTER + "_alerts.scss"
)
BASIC_AUTH_OR_MY_IP_FILTER_RESOURCE = BASIC_AUTH_OR_MY_IP_FILTER + "_base.scss"
BASIC_AUTH_OR_OTHER_IP_FILTER_RESOURCE = (
    BASIC_AUTH_OR_OTHER_IP_FILTER + "_tablet-page-wizard.scss"
)
class TestCaseAccessRestrictions(basetest.BaseTest):
    """
    Integration test: deploy a sample app with ACCESS_RESTRICTIONS covering
    IP filters, HTTP basic auth, and their AND/OR ('satisfy') combinations,
    then verify each restricted resource returns the expected status code.
    """
    def setUp(self):
        """
        Deploy the sample app with one restriction per test scenario.

        'myips' is filled with this test runner's own public addresses (so
        the MY_IP scenarios match), while 'other_ips' are addresses that
        will never match the runner.
        """
        super().setUp()
        myips = set()
        wide_open_ips = ["0.0.0.0/0", "::/0"]
        other_ips = ["1.2.3.4/32", "1::2/128"]
        # https://docs.travis-ci.com/user/ip-addresses/
        r = requests.get("https://dnsjson.com/nat.travisci.net/A.json")
        r.raise_for_status()
        for ip in r.json()["results"]["records"]:
            myips.add("%s/32" % ip)
        # Best effort: also add our public IPv6 address, if reachable.
        try:
            myips.add(requests.get("https://myipv6.mendix.com/").text + "/128")
        except Exception:
            pass
        myips = list(myips)
        print("my ip ranges are", ",".join(myips))
        self.setUpCF(
            "sample-6.2.0.mda",
            env_vars={
                "ACCESS_RESTRICTIONS": json.dumps(
                    {
                        BLOCK_ALL: {"ipfilter": []},
                        BLOCK_ALL_BUT_SUB_PATH_WIDE_OPEN: {
                            "ipfilter": wide_open_ips
                        },
                        MY_IP_FILTER: {"ipfilter": myips},
                        OTHER_IP_FILTER: {"ipfilter": other_ips},
                        BASIC_AUTH: {"basic_auth": {"user": "password"}},
                        BASIC_AUTH_AND_MY_IP_FILTER: {
                            "ipfilter": myips,
                            "basic_auth": {"user": "password"},
                            "satisfy": "all",
                        },
                        BASIC_AUTH_AND_OTHER_IP_FILTER: {
                            "ipfilter": other_ips,
                            "basic_auth": {"user": "password"},
                            "satisfy": "all",
                        },
                        BASIC_AUTH_OR_MY_IP_FILTER: {
                            "ipfilter": myips,
                            "basic_auth": {"user": "password"},
                            "satisfy": "any",
                        },
                        BASIC_AUTH_OR_OTHER_IP_FILTER: {
                            "ipfilter": other_ips,
                            "basic_auth": {"user": "password"},
                            "satisfy": "any",
                        },
                    }
                )
            },
        )
        self.startApp()
    def test_access_is_restricted(self):
        """
        Fetch every restricted resource and check the status code:
        403 = blocked by IP filter, 401 = basic auth required/failed,
        200 = allowed. All sub-checks run; failures are printed by
        _httpget and the test asserts that all of them passed.
        """
        auth = ("user", "password")
        auth_wrong_user = ("user1", "password")
        auth_wrong_pass = ("user", "password1")
        auth_wrong_pass2 = ("user", "somethingelse")
        success = all(
            [
                self._httpget(BLOCK_ALL_RESOURCE, 403),
                self._httpget(BLOCK_ALL_BUT_SUB_PATH_WIDE_OPEN_RESOURCE, 200),
                self._httpget(MY_IP_FILTER_RESOURCE, 200),
                self._httpget(OTHER_IP_FILTER_RESOURCE, 403),
                self._httpget(OTHER_IP_FILTER_RESOURCE, 403, auth=auth),
                self._httpget(BASIC_AUTH_RESOURCE, 200, auth=auth),
                self._httpget(BASIC_AUTH_RESOURCE, 401),
                self._httpget(BASIC_AUTH_RESOURCE, 401, auth=auth_wrong_user),
                self._httpget(BASIC_AUTH_RESOURCE, 401, auth=auth_wrong_pass),
                self._httpget(BASIC_AUTH_RESOURCE, 401, auth=auth_wrong_pass2),
                self._httpget(
                    BASIC_AUTH_AND_MY_IP_FILTER_RESOURCE, 200, auth=auth
                ),
                self._httpget(BASIC_AUTH_AND_MY_IP_FILTER_RESOURCE, 401),
                self._httpget(
                    BASIC_AUTH_AND_MY_IP_FILTER_RESOURCE,
                    401,
                    auth=auth_wrong_user,
                ),
                self._httpget(
                    BASIC_AUTH_AND_OTHER_IP_FILTER_RESOURCE, 403, auth=auth
                ),
                self._httpget(
                    BASIC_AUTH_OR_MY_IP_FILTER_RESOURCE, 200, auth=auth
                ),
                self._httpget(BASIC_AUTH_OR_MY_IP_FILTER_RESOURCE, 200),
                self._httpget(
                    BASIC_AUTH_OR_MY_IP_FILTER_RESOURCE,
                    200,
                    auth=auth_wrong_user,
                ),
                self._httpget(
                    BASIC_AUTH_OR_OTHER_IP_FILTER_RESOURCE, 200, auth=auth
                ),
                self._httpget(
                    BASIC_AUTH_OR_OTHER_IP_FILTER_RESOURCE,
                    401,
                    auth=auth_wrong_user,
                ),
                self._httpget(
                    BASIC_AUTH_OR_OTHER_IP_FILTER_RESOURCE,
                    401,
                    auth=auth_wrong_pass,
                ),
                self._httpget(
                    BASIC_AUTH_OR_OTHER_IP_FILTER_RESOURCE,
                    401,
                    auth=auth_wrong_pass2,
                ),
                self._httpget(BASIC_AUTH_OR_OTHER_IP_FILTER_RESOURCE, 401),
            ]
        )
        assert success
    def _httpget(self, path, expected_code, auth=None):
        """
        GET ``path`` on the deployed app and compare the response status.

        :param path: resource path, appended to the app hostname
        :param expected_code: HTTP status code that should be returned
        :param auth: optional (user, password) tuple for basic auth
        :return: True if the status matched, False otherwise (non-fatal;
                 a diagnostic line is printed for mismatches)
        """
        r = requests.get("https://" + self.app_name + path, auth=auth)
        if r.status_code == expected_code:
            print("OK")
        else:
            print(
                "NOK {} expected {} got {} authentication {}".format(
                    path, expected_code, r.status_code, auth
                )
            )
        return r.status_code == expected_code
| 39.837349 | 79 | 0.535007 |
acf3e2af9f4b83c5b8144150afb52c93511a4f03 | 421 | py | Python | tasks/mws/__init__.py | WildMeOrg/houston | 8102229421388e44234c07ee6cb73bf705b6fba0 | [
"Apache-2.0"
] | 6 | 2021-04-06T19:50:52.000Z | 2022-01-19T17:42:33.000Z | tasks/mws/__init__.py | WildMeOrg/houston | 8102229421388e44234c07ee6cb73bf705b6fba0 | [
"Apache-2.0"
] | 491 | 2021-01-20T01:10:00.000Z | 2022-03-31T19:30:48.000Z | tasks/mws/__init__.py | WildMeOrg/houston | 8102229421388e44234c07ee6cb73bf705b6fba0 | [
"Apache-2.0"
] | 2 | 2021-03-12T02:33:55.000Z | 2021-03-16T20:18:43.000Z | # -*- coding: utf-8 -*-
"""
Application related tasks for Invoke.
"""
from invoke import Collection
from config import get_preliminary_config
from tasks.app import run
from tasks.mws import (
consistency,
initialize,
integrations,
)
# Root Invoke namespace for the MWS app: bundles this package's task
# collections plus the shared `run` task from tasks.app.
namespace = Collection(
    consistency,
    initialize,
    integrations,
    run,
)
# Expose the static assets root to every task through the `app` config key.
namespace.configure({'app': {'static_root': get_preliminary_config().STATIC_ROOT}})
| 16.192308 | 83 | 0.707838 |
acf3e2e111ce9c35905dc255da528e3395e288ed | 4,781 | py | Python | src/data/constants.py | Pbatch/ClashRoyaleAI | 56fd030ddbdc08ec4924d2c3ecebec19c1a3810e | [
"MIT"
] | 20 | 2021-10-14T14:29:25.000Z | 2022-03-30T16:13:55.000Z | src/data/constants.py | Pbatch/ClashRoyaleAI | 56fd030ddbdc08ec4924d2c3ecebec19c1a3810e | [
"MIT"
] | 11 | 2021-12-19T18:13:10.000Z | 2022-03-27T20:40:47.000Z | src/data/constants.py | Pbatch/ClashRoyaleAI | 56fd030ddbdc08ec4924d2c3ecebec19c1a3810e | [
"MIT"
] | 2 | 2022-02-11T18:24:39.000Z | 2022-02-12T08:31:12.000Z | import os
"""
Miscellaneous
"""
# Directories
SRC_DIR = os.path.dirname(os.path.dirname(__file__))
DATA_DIR = os.path.join(SRC_DIR, 'data')
SCREENSHOTS_DIR = os.path.join(SRC_DIR, 'screenshots')
"""
Click config
"""
# Display dimensions
DISPLAY_WIDTH = 720
DISPLAY_HEIGHT = 1280
# Screenshot dimensions
SCREENSHOT_WIDTH = 368
SCREENSHOT_HEIGHT = 652
# Screen ID
CHEST_SIZE = 62
CHEST_X = 0
CHEST_Y = 590
OK_X = 143
OK_Y = 558
OK_WIDTH = 82
OK_HEIGHT = 30
SCREEN_CONFIG = {
'lobby': {'bbox': (CHEST_X,
CHEST_Y,
CHEST_X + CHEST_SIZE,
CHEST_Y + CHEST_SIZE),
'click_coordinates': (220, 830)},
'end_of_game': {'bbox': (OK_X,
OK_Y,
OK_X + OK_WIDTH,
OK_Y + OK_HEIGHT),
'click_coordinates': (360, 1125)}
}
# Playable tiles
TILE_HEIGHT = 27.6
TILE_WIDTH = 34
N_HEIGHT_TILES = 15
N_WIDE_TILES = 18
TILE_INIT_X = 52
TILE_INIT_Y = 296
ALLY_TILES = [[x, 0]
for x in range(N_WIDE_TILES // 3, 2 * N_WIDE_TILES // 3)]
ALLY_TILES += [[x, y]
for x in range(N_WIDE_TILES)
for y in range(1, N_HEIGHT_TILES)]
LEFT_PRINCESS_TILES = [[3, N_HEIGHT_TILES], [3, N_HEIGHT_TILES + 1]]
LEFT_PRINCESS_TILES += [[x, y]
for x in range(N_WIDE_TILES // 2)
for y in range(N_HEIGHT_TILES + 2, N_HEIGHT_TILES + 6)]
RIGHT_PRINCESS_TILES = [[14, N_HEIGHT_TILES], [14, N_HEIGHT_TILES + 1]]
RIGHT_PRINCESS_TILES += [[x, y]
for x in range(N_WIDE_TILES // 2, N_WIDE_TILES)
for y in range(N_HEIGHT_TILES + 2, N_HEIGHT_TILES + 6)]
DISPLAY_CARD_Y = 1067
DISPLAY_CARD_INIT_X = 164
DISPLAY_CARD_WIDTH = 117
DISPLAY_CARD_HEIGHT = 147
DISPLAY_CARD_DELTA_X = 136
"""
Detector config
"""
# Cards
HAND_SIZE = 5
DECK_SIZE = 8
CARD_Y = 545
CARD_INIT_X = 87
CARD_WIDTH = 55
CARD_HEIGHT = 65
CARD_DELTA_X = 69
CARD_CONFIG = [
(19, 605, 51, 645),
(CARD_INIT_X, CARD_Y, CARD_INIT_X + CARD_WIDTH, CARD_Y + CARD_HEIGHT),
(CARD_INIT_X + CARD_DELTA_X, CARD_Y, CARD_INIT_X + CARD_WIDTH + CARD_DELTA_X, CARD_Y + CARD_HEIGHT),
(CARD_INIT_X + 2 * CARD_DELTA_X, CARD_Y, CARD_INIT_X + CARD_WIDTH + 2 * CARD_DELTA_X, CARD_Y + CARD_HEIGHT),
(CARD_INIT_X + 3 * CARD_DELTA_X, CARD_Y, CARD_INIT_X + CARD_WIDTH + 3 * CARD_DELTA_X, CARD_Y + CARD_HEIGHT),
]
# Numbers
_W = 28
_H = 7
KING_HP_X = 188
LEFT_PRINCESS_HP_X = 74
RIGHT_PRINCESS_HP_X = 266
ALLY_PRINCESS_HP_Y = 401
ENEMY_PRINCESS_HP_Y = 93
ALLY_KING_LEVEL_Y = 487
ENEMY_KING_LEVEL_Y = 19
KING_LEVEL_X = 134
KING_LEVEL_2_X = KING_LEVEL_X + _W
ELIXIR_BOUNDING_BOX = (100, 628, 350, 643)
NUMBER_CONFIG = [
['enemy_king_level', (KING_LEVEL_X, ENEMY_KING_LEVEL_Y, KING_LEVEL_X + _W, ENEMY_KING_LEVEL_Y + _H)],
['enemy_king_level_2', (KING_LEVEL_2_X, ENEMY_KING_LEVEL_Y, KING_LEVEL_2_X + _W, ENEMY_KING_LEVEL_Y + _H)],
['ally_king_level', (KING_LEVEL_X, ALLY_KING_LEVEL_Y, KING_LEVEL_X + _W, ALLY_KING_LEVEL_Y + _H)],
['ally_king_level_2', (KING_LEVEL_2_X, ALLY_KING_LEVEL_Y, KING_LEVEL_2_X + _W, ALLY_KING_LEVEL_Y + _H)],
['enemy_king_hp', (KING_HP_X, 15, 188 + _W, 15 + _H)],
['ally_king_hp', (KING_HP_X, 495, 188 + _W, 495 + _H)],
['right_ally_princess_hp',
(RIGHT_PRINCESS_HP_X, ALLY_PRINCESS_HP_Y, RIGHT_PRINCESS_HP_X + _W, ALLY_PRINCESS_HP_Y + _H)],
['left_ally_princess_hp',
(LEFT_PRINCESS_HP_X, ALLY_PRINCESS_HP_Y, LEFT_PRINCESS_HP_X + _W, ALLY_PRINCESS_HP_Y + _H)],
['right_enemy_princess_hp',
(RIGHT_PRINCESS_HP_X, ENEMY_PRINCESS_HP_Y, RIGHT_PRINCESS_HP_X + _W, ENEMY_PRINCESS_HP_Y + _H)],
['left_enemy_princess_hp',
(LEFT_PRINCESS_HP_X, ENEMY_PRINCESS_HP_Y, LEFT_PRINCESS_HP_X + _W, ENEMY_PRINCESS_HP_Y + _H)],
]
NUMBER_HEIGHT = 16
NUMBER_WIDTH = 64
NUMBER_MIN_CONFIDENCE = 0.5
# HP
KING_HP = [2400, 2568, 2736, 2904, 3096, 3312, 3528, 3768, 4008, 4392, 4824, 5304, 5832, 6408]
PRINCESS_HP = [1400, 1512, 1624, 1750, 1890, 2030, 2184, 2352, 2534, 2786, 3052, 3346, 3668, 4032]
# Units
UNIT_SIZE = 416
UNIT_Y_START = 0.05
UNIT_Y_END = 0.80
UNITS = [
"ally_archer",
"ally_brawler",
"ally_giant",
"ally_goblin_cage",
"ally_hungry_dragon",
"ally_knight",
"ally_minion",
"ally_minipekka",
"ally_musketeer",
"ally_prince",
"ally_valkyrie",
"enemy_archer",
"enemy_brawler",
"enemy_giant",
"enemy_goblin",
"enemy_goblin_cage",
"enemy_hungry_dragon",
"enemy_knight",
"enemy_minion",
"enemy_minipekka",
"enemy_muskateer",
"enemy_prince",
"enemy_skeleton",
"enemy_spear_goblin",
"enemy_valkyrie"
]
# Multihash coefficients
MULTI_HASH_SCALE = 0.355
MULTI_HASH_INTERCEPT = 163
| 29.88125 | 112 | 0.666597 |
acf3e44555e4959fb6f1011e93033b4d9bc11e41 | 9,785 | py | Python | wcf/records/base.py | InLefter/Air_In_China | 1a6c59a1b2576ad693b20633bd81573ebe1dbcde | [
"MIT"
] | 28 | 2017-03-14T12:46:54.000Z | 2021-09-17T03:02:32.000Z | wcf/records/base.py | InLefter/Air_In_China | 1a6c59a1b2576ad693b20633bd81573ebe1dbcde | [
"MIT"
] | null | null | null | wcf/records/base.py | InLefter/Air_In_China | 1a6c59a1b2576ad693b20633bd81573ebe1dbcde | [
"MIT"
] | 12 | 2017-04-10T16:29:19.000Z | 2022-01-10T15:16:09.000Z | # vim: set ts=4 sw=4 tw=79 fileencoding=utf-8:
# Copyright (c) 2011, Timo Schmid <tschmid@ernw.de>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the ERMW GmbH nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import unicode_literals
from builtins import str, bytes
import struct
import logging
from wcf.datatypes import *
log = logging.getLogger(__name__)
# NOTE(review): calling basicConfig() at import time from a library module
# configures the root logger for every application that imports this
# package -- consider moving this to the application/CLI entry point.
logging.basicConfig(level=logging.INFO)
class Record(object):
    """Base class for every WCF binary-XML record.

    Maintains a registry (``Record.records``) mapping a record-type byte to
    the subclass able to parse it, and implements the generic stream parser
    that turns WCF binary data into a tree of Record objects.
    """

    records = dict()

    @classmethod
    def add_records(cls, records):
        """adds records to the lookup table

        :param records: list of Record subclasses
        :type records: list(Record)
        """
        for r in records:
            Record.records[r.type] = r

    def __init__(self, type=None):
        """Create a record; *type* optionally overrides the class type byte."""
        if type:
            self.type = type

    def to_bytes(self):
        """
        Generates the representing bytes of the record

        >>> from wcf.records import *
        >>> Record(0xff).to_bytes()
        b'\\xff'
        >>> ElementRecord('a', 'test').to_bytes()
        b'A\\x01a\\x04test'
        """
        return bytes(struct.pack(b'<B', self.type))

    def __repr__(self):
        """Return ``<ClassName(type=0x..)>`` for debugging."""
        args = ['type=0x%X' % self.type]
        return '<%s(%s)>' % (type(self).__name__, ','.join(args))

    @classmethod
    def parse(cls, fp):
        """
        Parses the binary data from fp into Record objects

        :param fp: file like object to read from
        :returns: a root Record object with its child Records
        :rtype: Record

        >>> from wcf.records import *
        >>> from io import BytesIO
        >>> buf = BytesIO(b'A\\x01a\\x04test\\x01')
        >>> r = Record.parse(buf)
        >>> r
        [<ElementRecord(type=0x41)>]
        >>> str(r[0])
        '<a:test>'
        >>> dump_records(r)
        b'A\\x01a\\x04test\\x01'
        >>> _ = print_records(r)
        <a:test></a:test>
        """
        # Subclasses without a payload of their own just instantiate.
        if cls != Record:
            return cls()
        root = []
        records = root
        parents = []
        last_el = None
        record_type = True
        while record_type:
            record_type = fp.read(1)
            if record_type:
                record_type = struct.unpack(b'<B', record_type)[0]
                if record_type in Record.records:
                    log.debug('%s found' % Record.records[record_type].__name__)
                    obj = Record.records[record_type].parse(fp)
                    if isinstance(obj, EndElementRecord):
                        # Close the current element: pop back to its parent.
                        # End-element records are consumed, not stored.
                        if len(parents) > 0:
                            records = parents.pop()
                    elif isinstance(obj, Element):
                        # Open a new element and descend into its child list.
                        last_el = obj
                        records.append(obj)
                        parents.append(records)
                        obj.childs = []
                        records = obj.childs
                    elif isinstance(obj, Attribute) and last_el:
                        last_el.attributes.append(obj)
                    else:
                        records.append(obj)
                        log.debug('Value: %s' % str(obj))
                elif record_type - 1 in Record.records:
                    # "...WithEndElement" variant: parse the base record and
                    # honour the implicit end element by popping one level.
                    log.debug('%s with end element found (0x%x)' %
                              (Record.records[record_type - 1].__name__, record_type))
                    records.append(Record.records[record_type - 1].parse(fp))
                    last_el = None
                    if len(parents) > 0:
                        records = parents.pop()
                else:
                    # log.warn() is a deprecated alias of log.warning()
                    log.warning('type 0x%x not found' % record_type)
        return root
class Element(Record):
    """Marker base class for records that open an XML element."""
    pass
class Attribute(Record):
    """Marker base class for records that carry an XML attribute."""
    pass
class Text(Record):
    """Marker base class for records that carry text content."""
    pass
class EndElementRecord(Element):
    """Record 0x01: closes the innermost open element."""
    type = 0x01
class CommentRecord(Record):
    """Record 0x02: an XML comment whose payload is a single Utf8String."""
    type = 0x02
    def __init__(self, comment, *args, **kwargs):
        # Extra positional/keyword arguments are accepted (and ignored) so
        # record constructors can be invoked uniformly by the parser.
        self.comment = comment
    def to_bytes(self):
        """
        >>> CommentRecord('test').to_bytes()
        b'\\x02\\x04test'
        """
        string = Utf8String(self.comment)
        return bytes(super(CommentRecord, self).to_bytes() +
                     string.to_bytes())
    def __str__(self):
        """
        >>> str(CommentRecord('test'))
        '<!-- test -->'
        """
        return '<!-- %s -->' % self.comment
    @classmethod
    def parse(cls, fp):
        # Payload is one length-prefixed UTF-8 string.
        data = Utf8String.parse(fp).value
        return cls(data)
class ArrayRecord(Record):
    """Record 0x03: a homogeneous array of "...WithEndElement" records.

    Wire layout: the element record, any attribute records, an end element,
    the shared item record-type byte, the item count (MultiByteInt31), and
    finally each item's payload without its own type byte.
    """

    type = 0x03

    # item record-type byte -> (name, payload size in bytes, struct format char)
    datatypes = {
        0xB5: ('BoolTextWithEndElement', 1, '?'),
        0x8B: ('Int16TextWithEndElement', 2, 'h'),
        0x8D: ('Int32TextWithEndElement', 4, 'i'),
        0x8F: ('Int64TextWithEndElement', 8, 'q'),
        0x91: ('FloatTextWithEndElement', 4, 'f'),
        0x93: ('DoubleTextWithEndElement', 8, 'd'),
        0x95: ('DecimalTextWithEndElement', 16, ''),
        0x97: ('DateTimeTextWithEndElement', 8, ''),
        0xAF: ('TimeSpanTextWithEndElement', 8, ''),
        0xB1: ('UuidTextWithEndElement', 16, ''),
    }

    def __init__(self, element, data, attributes):
        """Build an array record.

        :param element: element record naming each item
        :param data: list of text records, all of the same record type
        :param attributes: attribute records attached to the element
        """
        self.element = element
        self.count = len(data)
        self.data = data
        # All items must share one record type; type+1 is stored because the
        # items are serialized in their "WithEndElement" variant.
        recordtype = None
        for item in self.data:
            if recordtype is None:
                recordtype = item.type + 1
            else:
                assert recordtype == item.type + 1
        self.recordtype = recordtype
        # BUG FIX: the attributes argument used to be discarded
        # (self.attributes = []), losing attributes on a parse/serialize
        # round trip; keep what the caller provided instead.
        self.attributes = attributes

    def to_bytes(self):
        """
        >>> from wcf.records.text import Int32TextRecord
        >>> from wcf.records.elements import ShortElementRecord
        >>> ArrayRecord(ShortElementRecord('item'), [Int32TextRecord(1), Int32TextRecord(2), Int32TextRecord(3)], []).to_bytes()
        b'\\x03@\\x04item\\x01\\x8d\\x03\\x01\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x03\\x00\\x00\\x00'
        """
        bt = super(ArrayRecord, self).to_bytes()
        bt += self.element.to_bytes()
        for attrib in self.attributes:
            bt += attrib.to_bytes()
        bt += EndElementRecord().to_bytes()
        bt += bytes(struct.pack(b'<B', self.recordtype))
        bt += MultiByteInt31(self.count).to_bytes()
        for item in self.data:
            # strip each item's type byte -- the shared type was written once
            bt += item.to_bytes()[1:]
        return bytes(bt)

    @classmethod
    def parse(cls, fp):
        """
        >>> from wcf.records import *
        >>> from io import BytesIO
        >>> buf = BytesIO(b'@\\x04item\\x01\\x8d\\x03\\x01\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x03\\x00\\x00\\x00')
        >>> r = ArrayRecord.parse(buf)
        >>> r
        <ArrayRecord(type=0x3)>
        >>> r.to_bytes()
        b'\\x03@\\x04item\\x01\\x8d\\x03\\x01\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x03\\x00\\x00\\x00'
        """
        element = struct.unpack(b'<B', fp.read(1))[0]
        element = Record.records[element].parse(fp)
        attributes = []
        # Collect attribute records until the end-element marker.
        while True:
            rec_type = struct.unpack(b'<B', fp.read(1))[0]
            obj = Record.records[rec_type].parse(fp)
            if isinstance(obj, EndElementRecord):
                break
            elif isinstance(obj, Attribute):
                attributes.append(obj)
            else:
                raise ValueError('unknown type: %s' % hex(rec_type))
        recordtype = struct.unpack(b'<B', fp.read(1))[0]
        count = MultiByteInt31.parse(fp).value
        data = []
        for _ in range(count):
            # Items are stored without their type byte; parse with the base
            # (non-end-element) record class.
            data.append(Record.records[recordtype - 1].parse(fp))
        return cls(element, data, attributes)

    def __str__(self):
        """
        >>> from wcf.records.elements import ShortElementRecord
        >>> from wcf.records.text import Int32TextRecord
        >>> str(ArrayRecord(ShortElementRecord('item'), [Int32TextRecord(1), Int32TextRecord(2), Int32TextRecord(3)], []))
        '<item>1</item><item>2</item><item>3</item>'
        """
        string = ''
        for item in self.data:
            string += str(self.element)
            string += str(item)
            string += '</%s>' % self.element.name
        return string
# Register the record types defined in this module so Record.parse can
# dispatch on their type bytes.
Record.add_records((EndElementRecord,
                    CommentRecord,
                    ArrayRecord,))
| 34.698582 | 129 | 0.549515 |
acf3e44b68bcfa88ca1c7c63f788d396b0bf9f5e | 16,924 | py | Python | pay-api/src/pay_api/services/invoice.py | stevenc987/sbc-pay | 04f02f362f88a30c082b0643583b8d0ebff6063f | [
"Apache-2.0"
] | null | null | null | pay-api/src/pay_api/services/invoice.py | stevenc987/sbc-pay | 04f02f362f88a30c082b0643583b8d0ebff6063f | [
"Apache-2.0"
] | null | null | null | pay-api/src/pay_api/services/invoice.py | stevenc987/sbc-pay | 04f02f362f88a30c082b0643583b8d0ebff6063f | [
"Apache-2.0"
] | null | null | null | # Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Service to manage Invoice."""
from __future__ import annotations
from datetime import datetime
from typing import Dict, List, Tuple
from flask import current_app
from pay_api.exceptions import BusinessException
from pay_api.models import Invoice as InvoiceModel, CfsAccount as CfsAccountModel, PaymentAccount as PaymentAccountModel
from pay_api.models import InvoiceSchema
from pay_api.services.auth import check_auth
from pay_api.utils.constants import ALL_ALLOWED_ROLES
from pay_api.utils.enums import AuthHeaderType, ContentType, InvoiceStatus, PaymentMethod, Code
from pay_api.utils.errors import Error
from pay_api.utils.user_context import user_context
from pay_api.utils.util import generate_transaction_number, get_local_formatted_date
from .code import Code as CodeService
from .oauth_service import OAuthService
class Invoice: # pylint: disable=too-many-instance-attributes,too-many-public-methods
    """Service to manage Invoice related operations.

    Wraps an InvoiceModel DAO: every public property below is a simple
    passthrough whose setter writes both the service attribute and the
    corresponding DAO column.
    """
    def __init__(self):
        """Initialize the service."""
        self.__dao = None
        self._id: int = None
        self._invoice_status_code: str = None
        self._payment_account_id: str = None
        self._bcol_account: str = None
        self._total: float = None
        self._paid: float = None
        self._refund: float = None
        self._payment_date: datetime = None
        self._payment_line_items = None
        self._corp_type_code = None
        self._receipts = None
        self._routing_slip: str = None
        self._filing_id: str = None
        self._folio_number: str = None
        self._service_fees: float = None
        self._business_identifier: str = None
        self._dat_number: str = None
        self._cfs_account_id: int
        self._payment_method_code: str = None
        self._details: Dict = None
    @property
    def _dao(self):
        # Lazily create an empty DAO on first access.
        if not self.__dao:
            self.__dao = InvoiceModel()
        return self.__dao
    @_dao.setter
    def _dao(self, value):
        # Adopting a DAO mirrors every column onto this service instance.
        self.__dao = value
        self.id: int = self._dao.id
        self.payment_method_code: int = self._dao.payment_method_code
        self.invoice_status_code: str = self._dao.invoice_status_code
        self.bcol_account: str = self._dao.bcol_account
        self.payment_account_id: str = self._dao.payment_account_id
        self.refund: float = self._dao.refund
        self.payment_date: datetime = self._dao.payment_date
        self.total: float = self._dao.total
        self.paid: float = self._dao.paid
        self.payment_line_items = self._dao.payment_line_items
        self.corp_type_code = self._dao.corp_type_code
        self.receipts = self._dao.receipts
        self.routing_slip: str = self._dao.routing_slip
        self.filing_id: str = self._dao.filing_id
        self.folio_number: str = self._dao.folio_number
        self.service_fees: float = self._dao.service_fees
        self.business_identifier: str = self._dao.business_identifier
        self.dat_number: str = self._dao.dat_number
        self.cfs_account_id: int = self._dao.cfs_account_id
        self.details: Dict = self._dao.details
    @property
    def id(self):
        """Return the _id."""
        return self._id
    @id.setter
    def id(self, value: int):
        """Set the id."""
        self._id = value
        self._dao.id = value
    @property
    def payment_method_code(self):
        """Return the payment_method_code."""
        return self._payment_method_code
    @payment_method_code.setter
    def payment_method_code(self, value: str):
        """Set the payment_method_code."""
        self._payment_method_code = value
        self._dao.payment_method_code = value
    @property
    def invoice_status_code(self):
        """Return the invoice_status_code."""
        return self._invoice_status_code
    @invoice_status_code.setter
    def invoice_status_code(self, value: str):
        """Set the invoice_status_code."""
        self._invoice_status_code = value
        self._dao.invoice_status_code = value
    @property
    def payment_account_id(self):
        """Return the payment_account_id."""
        return self._payment_account_id
    @payment_account_id.setter
    def payment_account_id(self, value: str):
        """Set the payment_account_id."""
        self._payment_account_id = value
        self._dao.payment_account_id = value
    @property
    def bcol_account(self):
        """Return the bcol_account."""
        return self._bcol_account
    @bcol_account.setter
    def bcol_account(self, value: str):
        """Set the bcol_account."""
        self._bcol_account = value
        self._dao.bcol_account = value
    @property
    def refund(self):
        """Return the refund."""
        return self._refund
    @refund.setter
    def refund(self, value: float):
        """Set the refund."""
        self._refund = value
        self._dao.refund = value
    @property
    def payment_date(self):
        """Return the payment_date."""
        return self._payment_date
    @payment_date.setter
    def payment_date(self, value: datetime):
        """Set the payment_date."""
        self._payment_date = value
        self._dao.payment_date = value
    @property
    def total(self):
        """Return the total."""
        return self._total
    @total.setter
    def total(self, value: float):
        """Set the total."""
        self._total = value
        self._dao.total = value
    @property
    def paid(self):
        """Return the paid."""
        return self._paid
    @paid.setter
    def paid(self, value: float):
        """Set the paid."""
        self._paid = value
        self._dao.paid = value
    @property
    def payment_line_items(self):
        """Return the payment_line_items."""
        return self._payment_line_items
    @payment_line_items.setter
    def payment_line_items(self, value):
        """Set the payment_line_items."""
        self._payment_line_items = value
        self._dao.payment_line_items = value
    @property
    def corp_type_code(self):
        """Return the corp_type_code."""
        return self._corp_type_code
    @corp_type_code.setter
    def corp_type_code(self, value):
        """Set the corp_type_code."""
        self._corp_type_code = value
        self._dao.corp_type_code = value
    @property
    def receipts(self):
        """Return the receipts."""
        return self._receipts
    @receipts.setter
    def receipts(self, value):
        """Set the receipts."""
        self._receipts = value
        self._dao.receipts = value
    @property
    def routing_slip(self):
        """Return the routing_slip."""
        return self._routing_slip
    @routing_slip.setter
    def routing_slip(self, value: str):
        """Set the routing_slip."""
        self._routing_slip = value
        self._dao.routing_slip = value
    @property
    def filing_id(self):
        """Return the filing_id."""
        return self._filing_id
    @filing_id.setter
    def filing_id(self, value: str):
        """Set the filing_id."""
        self._filing_id = value
        self._dao.filing_id = value
    @property
    def folio_number(self):
        """Return the folio_number."""
        return self._folio_number
    @folio_number.setter
    def folio_number(self, value: str):
        """Set the folio_number."""
        self._folio_number = value
        self._dao.folio_number = value
    @property
    def service_fees(self):
        """Return the service_fees."""
        return self._service_fees
    @service_fees.setter
    def service_fees(self, value: float):
        """Set the service_fees."""
        self._service_fees = value
        self._dao.service_fees = value
    @property
    def business_identifier(self):
        """Return the business_identifier."""
        return self._business_identifier
    @business_identifier.setter
    def business_identifier(self, value: str):
        """Set the business_identifier."""
        self._business_identifier = value
        self._dao.business_identifier = value
    @property
    def dat_number(self):
        """Return the dat_number."""
        return self._dat_number
    @dat_number.setter
    def dat_number(self, value: str):
        """Set the dat_number."""
        self._dat_number = value
        self._dao.dat_number = value
    @property
    def cfs_account_id(self):
        """Return the cfs_account_id."""
        return self._cfs_account_id
    @cfs_account_id.setter
    def cfs_account_id(self, value: int):
        """Set the cfs_account_id."""
        self._cfs_account_id = value
        self._dao.cfs_account_id = value
    @property
    def details(self):
        """Return the details."""
        return self._details
    @details.setter
    def details(self, value: str):
        """Set the details."""
        self._details = value
        self._dao.details = value
    def commit(self):
        """Save the information to the DB."""
        return self._dao.commit()
    def rollback(self):
        """Rollback."""
        return self._dao.rollback()
    def flush(self):
        """Save the information to the DB."""
        return self._dao.flush()
    def save(self):
        """Save the information to the DB."""
        return self._dao.save()
    def asdict(self, include_dynamic_fields: bool = False):
        """Return the invoice as a python dict."""
        invoice_schema = InvoiceSchema()
        d = invoice_schema.dump(self._dao)
        # _add_dynamic_fields mutates d in place; no-op unless requested.
        self._add_dynamic_fields(d, include_dynamic_fields)
        return d
    @staticmethod
    def populate(value):
        """Populate invoice service."""
        invoice: Invoice = Invoice()
        invoice._dao = value # pylint: disable=protected-access
        return invoice
    @staticmethod
    def find_by_id(identifier: int, skip_auth_check: bool = False, one_of_roles=ALL_ALLOWED_ROLES):
        """Find invoice by id.

        :raises BusinessException: INVALID_INVOICE_ID when no invoice exists.
        """
        invoice_dao = InvoiceModel.find_by_id(identifier)
        if not invoice_dao:
            raise BusinessException(Error.INVALID_INVOICE_ID)
        if not skip_auth_check:
            Invoice._check_for_auth(invoice_dao, one_of_roles)
        invoice = Invoice()
        invoice._dao = invoice_dao # pylint: disable=protected-access
        current_app.logger.debug('>find_by_id')
        return invoice
    @staticmethod
    def find_invoices_for_payment(payment_id: int) -> [Invoice]:
        """Find invoices by payment id."""
        invoices: [Invoice] = []
        invoice_daos: [InvoiceModel] = InvoiceModel.find_invoices_for_payment(payment_id)
        for invoice_dao in invoice_daos:
            invoice = Invoice()
            invoice._dao = invoice_dao # pylint: disable=protected-access
            invoices.append(invoice)
        current_app.logger.debug('>find_by_id')
        return invoices
    @staticmethod
    @user_context
    def create_invoice_pdf(identifier: int, **kwargs) -> Tuple:
        """Render the invoice as a PDF via the report service.

        Gathers the invoice, account and org contact details, builds the
        report payload, and returns (pdf_response, report_name).

        :raises BusinessException: INVALID_INVOICE_ID when no invoice exists.
        """
        invoice_dao: InvoiceModel = InvoiceModel.find_by_id(identifier)
        if not invoice_dao:
            raise BusinessException(Error.INVALID_INVOICE_ID)
        payment_account: PaymentAccountModel = PaymentAccountModel.find_by_id(invoice_dao.payment_account_id)
        cfs_account: CfsAccountModel = CfsAccountModel.find_by_id(invoice_dao.cfs_account_id)
        # Fetch the org name and mailing contact from the auth service.
        org_response = OAuthService.get(
            current_app.config.get('AUTH_API_ENDPOINT') + f'orgs/{payment_account.auth_account_id}',
            kwargs['user'].bearer_token, AuthHeaderType.BEARER,
            ContentType.JSON).json()
        org_contact_response = OAuthService.get(
            current_app.config.get(
                'AUTH_API_ENDPOINT') + f'orgs/{payment_account.auth_account_id}/contacts',
            kwargs['user'].bearer_token, AuthHeaderType.BEARER,
            ContentType.JSON).json()
        org_contact = org_contact_response.get('contacts')[0] if org_contact_response.get('contacts', None) else {}
        invoice_number: str = invoice_dao.references[0].invoice_number if invoice_dao.references \
            else generate_transaction_number(invoice_dao.id)
        filing_types: List[Dict[str, str]] = []
        for line_item in invoice_dao.payment_line_items:
            # Temporary business identifiers (prefixed 'T') are not shown.
            business_identifier = invoice_dao.business_identifier \
                if not invoice_dao.business_identifier.startswith('T') \
                else ''
            filing_types.append({
                'folioNumber': invoice_dao.folio_number,
                'description': line_item.description,
                'businessIdentifier': business_identifier,
                'createdOn': get_local_formatted_date(invoice_dao.created_on),
                'filingTypeCode': line_item.fee_schedule.filing_type_code,
                'fee': line_item.total,
                'gst': line_item.gst,
                'serviceFees': line_item.service_fees,
                'total': line_item.total + line_item.service_fees
            })
        template_vars: Dict[str, any] = {
            'invoiceNumber': invoice_number,
            'createdOn': get_local_formatted_date(invoice_dao.created_on),
            'accountNumber': cfs_account.cfs_account if cfs_account else None,
            'total': invoice_dao.total,
            'gst': 0,
            'serviceFees': invoice_dao.service_fees,
            'fees': invoice_dao.total - invoice_dao.service_fees,
            'filingTypes': filing_types,
            'accountContact': {
                'name': org_response.get('name'),
                'city': org_contact.get('city', None),
                'country': org_contact.get('country', None),
                'postalCode': org_contact.get('postalCode', None),
                'region': org_contact.get('region', None),
                'street': org_contact.get('street', None),
                'streetAdditional': org_contact.get('streetAdditional', None)
            }
        }
        invoice_pdf_dict = {
            'templateName': 'invoice',
            'reportName': invoice_number,
            'templateVars': template_vars
        }
        current_app.logger.info('Invoice PDF Dict %s', invoice_pdf_dict)
        pdf_response = OAuthService.post(current_app.config.get('REPORT_API_BASE_URL'),
                                         kwargs['user'].bearer_token, AuthHeaderType.BEARER,
                                         ContentType.JSON, invoice_pdf_dict)
        current_app.logger.debug('<OAuthService responded to receipt.py')
        return pdf_response, invoice_pdf_dict.get('reportName')
    @staticmethod
    def _check_for_auth(dao, one_of_roles=ALL_ALLOWED_ROLES):
        # Check if user is authorized to perform this action
        check_auth(dao.business_identifier, one_of_roles=one_of_roles)
    @staticmethod
    def _add_dynamic_fields(invoice: Dict[str, any], calculate_dynamic_fields: bool = False) -> Dict[str, any]:
        """Add calculated fields to the schema json."""
        if calculate_dynamic_fields:
            # Include redirect_for_payment flag
            redirect_for_payment: bool = False
            action_required_types = (
                PaymentMethod.DIRECT_PAY.value,
                PaymentMethod.CC.value,
                PaymentMethod.ONLINE_BANKING.value
            )
            if invoice.get('status_code') == InvoiceStatus.CREATED.value and \
                    invoice.get('payment_method') in action_required_types:
                redirect_for_payment = True
            invoice['is_payment_action_required'] = redirect_for_payment
            # Include is online banking allowed
            if invoice.get('payment_method') == PaymentMethod.ONLINE_BANKING.value:
                online_banking_allowed = CodeService.find_code_value_by_type_and_code(
                    Code.CORP_TYPE.value, invoice.get('corp_type_code')
                ).get('is_online_banking_allowed', True)
                if online_banking_allowed: # Check if it's a future effective filing
                    for line_item in invoice.get('line_items'):
                        if line_item.get('future_effective_fees', 0) != 0:
                            online_banking_allowed = False
                invoice['is_online_banking_allowed'] = online_banking_allowed
        return invoice
| 34.823045 | 120 | 0.643465 |
acf3e4e5fd24ce038d51de187553a3327e8b72d2 | 2,235 | py | Python | docs/versionutils.py | camilledevalk/qiskit-finance | 24b2737dbb66564d02876d2d59e1891fb827346e | [
"Apache-2.0"
] | null | null | null | docs/versionutils.py | camilledevalk/qiskit-finance | 24b2737dbb66564d02876d2d59e1891fb827346e | [
"Apache-2.0"
] | null | null | null | docs/versionutils.py | camilledevalk/qiskit-finance | 24b2737dbb66564d02876d2d59e1891fb827346e | [
"Apache-2.0"
] | 1 | 2022-02-19T12:42:11.000Z | 2022-02-19T12:42:11.000Z | # This code is part of Qiskit.
#
# (C) Copyright IBM 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
import os
import re
import subprocess
import tempfile
from functools import partial
from docutils import nodes
from docutils.parsers.rst.directives.tables import Table
from docutils.parsers.rst import Directive, directives
from sphinx.util import logging
logger = logging.getLogger(__name__)
translations_list = [
("en", "English"),
]
default_language = "en"
def setup(app):
app.connect("config-inited", _extend_html_context)
app.add_config_value("content_prefix", "documentation/finance", "")
app.add_config_value("translations", True, "html")
def _extend_html_context(app, config):
context = config.html_context
context["translations"] = config.translations
context["translations_list"] = translations_list
context["current_translation"] = _get_current_translation(config) or config.language
context["translation_url"] = partial(_get_translation_url, config)
context["version_label"] = _get_version_label(config)
def _get_current_translation(config):
language = config.language or default_language
try:
found = next(v for k, v in translations_list if k == language)
except StopIteration:
found = None
return found
def _get_translation_url(config, code, pagename):
base = "/locale/%s" % code if code and code != default_language else ""
return _get_url(config, base, pagename)
def _get_version_label(config):
return "%s" % (_get_current_translation(config) or config.language,)
def _get_url(config, base, pagename):
return _add_content_prefix(config, "%s/%s.html" % (base, pagename))
def _add_content_prefix(config, url):
prefix = ""
if config.content_prefix:
prefix = "/%s" % config.content_prefix
return "%s%s" % (prefix, url)
| 29.407895 | 88 | 0.736913 |
acf3e5bccd40f0b64f13f30e105d6ac2afe2d5e0 | 1,494 | py | Python | test/integration/test_object_package.py | jonaslalin/label-maker | c271189fbfd0f0c198184ef45032e16546e25243 | [
"MIT"
] | 428 | 2018-01-10T19:22:01.000Z | 2022-03-29T06:25:53.000Z | test/integration/test_object_package.py | jonaslalin/label-maker | c271189fbfd0f0c198184ef45032e16546e25243 | [
"MIT"
] | 136 | 2018-01-10T20:25:31.000Z | 2022-01-13T00:48:57.000Z | test/integration/test_object_package.py | jonaslalin/label-maker | c271189fbfd0f0c198184ef45032e16546e25243 | [
"MIT"
] | 105 | 2018-01-10T19:57:50.000Z | 2022-02-26T10:40:41.000Z | """Test that the following CLI command returns the expected outputs
label-maker package -d integration-od -c test/fixtures/integration/config.integration.object_detection.json"""
import unittest
from os import makedirs
from shutil import copyfile, copytree, rmtree
import subprocess
import numpy as np
class TestObjectDetectionPackage(unittest.TestCase):
    """Tests for object detection package creation"""
    @classmethod
    def setUpClass(cls):
        # Stage the fixture labels and tiles into a scratch project folder.
        makedirs('integration-od')
        copyfile('test/fixtures/integration/labels-od.npz', 'integration-od/labels.npz')
        copytree('test/fixtures/integration/tiles', 'integration-od/tiles')
    @classmethod
    def tearDownClass(cls):
        # Remove the scratch project folder created in setUpClass.
        rmtree('integration-od')
    def test_cli(self):
        """Verify data.npz produced by CLI"""
        cmd = 'label-maker package -d integration-od -c test/fixtures/integration/config.integration.object_detection.json'
        cmd = cmd.split(' ')
        subprocess.run(cmd, universal_newlines=True)
        data = np.load('integration-od/data.npz')
        # Pixel checksums pin the exact image content of the train/test splits.
        self.assertEqual(np.sum(data['x_train']), 144752757)
        self.assertEqual(np.sum(data['x_test']), 52758414)
        self.assertEqual(data['x_train'].shape, (6, 256, 256, 3))
        self.assertEqual(data['x_test'].shape, (2, 256, 256, 3))
        # validate our label data with exact matches in shape
        self.assertEqual(data['y_train'].shape, (6, 16, 5))
        self.assertEqual(data['y_test'].shape, (2, 16, 5))
| 39.315789 | 123 | 0.690763 |
acf3e68b1bab92f4d5ce7c9a8537b48944ce54fb | 7,492 | py | Python | __pycache__/tiramisu.py | lmycross/segmentation | 9b1f4bfba4c2933d87c49313234da6cfce1d8ad5 | [
"MIT"
] | 6 | 2018-03-16T16:57:14.000Z | 2019-05-02T16:32:06.000Z | models/tiramisu.py | lmycross/segmentation | 9b1f4bfba4c2933d87c49313234da6cfce1d8ad5 | [
"MIT"
] | null | null | null | models/tiramisu.py | lmycross/segmentation | 9b1f4bfba4c2933d87c49313234da6cfce1d8ad5 | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
class DenseLayer(nn.Sequential):
    """One Tiramisu layer: BN -> ReLU -> 3x3 conv -> 2D dropout.

    Maps `in_channels` feature maps to `growth_rate` new feature maps.
    """
    def __init__(self, in_channels, growth_rate):
        super().__init__()
        stages = [
            ('norm', nn.BatchNorm2d(in_channels)),
            ('relu', nn.ReLU(True)),
            ('conv', nn.Conv2d(in_channels, growth_rate, kernel_size=3,
                               stride=1, padding=1, bias=True)),
            ('drop', nn.Dropout2d(0.2)),
        ]
        for stage_name, module in stages:
            self.add_module(stage_name, module)

    def forward(self, x):
        return super().forward(x)
class DenseBlock(nn.Module):
    """Stack of DenseLayers with dense (concatenative) connectivity.

    With upsample=False the input is kept in the output, giving
    in_channels + n_layers * growth_rate channels; with upsample=True only
    the freshly computed feature maps are returned (n_layers * growth_rate
    channels), which keeps decoder channel counts bounded.
    """
    def __init__(self, in_channels, growth_rate, n_layers, upsample=False):
        super().__init__()
        self.upsample = upsample
        dense_layers = [DenseLayer(in_channels + i * growth_rate, growth_rate)
                        for i in range(n_layers)]
        self.layers = nn.ModuleList(dense_layers)

    def forward(self, x):
        if not self.upsample:
            for layer in self.layers:
                x = torch.cat([x, layer(x)], 1)  # 1 = channel axis
            return x
        # Each layer still sees all previous activations, but only the new
        # per-layer outputs are collected and returned.
        fresh_outputs = []
        for layer in self.layers:
            produced = layer(x)
            x = torch.cat([x, produced], 1)
            fresh_outputs.append(produced)
        return torch.cat(fresh_outputs, 1)
class TransitionDown(nn.Sequential):
    """Encoder transition: BN -> ReLU -> 1x1 conv -> dropout -> 2x2 max-pool.

    Preserves the channel count while halving spatial resolution.
    """
    def __init__(self, in_channels):
        super().__init__()
        stages = [
            ('norm', nn.BatchNorm2d(num_features=in_channels)),
            ('relu', nn.ReLU(inplace=True)),
            ('conv', nn.Conv2d(in_channels, in_channels,
                               kernel_size=1, stride=1,
                               padding=0, bias=True)),
            ('drop', nn.Dropout2d(0.2)),
            ('maxpool', nn.MaxPool2d(2)),
        ]
        for stage_name, module in stages:
            self.add_module(stage_name, module)

    def forward(self, x):
        return super().forward(x)
class TransitionUp(nn.Module):
    """Decoder transition: transposed conv upsample, center-crop to the
    skip tensor's spatial size, then channel-concatenate with the skip."""
    def __init__(self, in_channels, out_channels):
        super().__init__()
        self.convTrans = nn.ConvTranspose2d(
            in_channels=in_channels, out_channels=out_channels,
            kernel_size=3, stride=2, padding=0, bias=True)

    def forward(self, x, skip):
        upsampled = self.convTrans(x)
        cropped = center_crop(upsampled, skip.size(2), skip.size(3))
        return torch.cat([cropped, skip], 1)
class Bottleneck(nn.Sequential):
    """Dense block at the bottom of the U-path.

    Uses upsample=True so only the new feature maps
    (growth_rate * n_layers channels) are emitted.
    """
    def __init__(self, in_channels, growth_rate, n_layers):
        super().__init__()
        inner = DenseBlock(in_channels, growth_rate, n_layers, upsample=True)
        self.add_module('bottleneck', inner)

    def forward(self, x):
        # nn.Sequential runs the single child block.
        return super().forward(x)
def center_crop(layer, max_height, max_width):
    """Crop an (N, C, H, W) tensor to (max_height, max_width) about its center."""
    _, _, height, width = layer.size()
    left = (width - max_width) // 2
    top = (height - max_height) // 2
    return layer[:, :, top:top + max_height, left:left + max_width]
class FCDenseNet(nn.Module):
    """Fully-Convolutional DenseNet ("Tiramisu") for semantic segmentation.

    Encoder: dense blocks + transition-down; decoder: transition-up + dense
    blocks fed by skip connections; a final 1x1 conv maps to n_classes
    per-pixel logits (no softmax is applied in forward).
    """
    def __init__(self, in_channels=3, down_blocks=(5,5,5,5,5),
                 up_blocks=(5,5,5,5,5), bottleneck_layers=5,
                 growth_rate=16, out_chans_first_conv=48, n_classes=12):
        super().__init__()
        self.down_blocks = down_blocks
        self.up_blocks = up_blocks
        # Running channel count, updated as blocks are appended.
        cur_channels_count = 0
        skip_connection_channel_counts = []
        ## First Convolution ##
        self.add_module('firstconv', nn.Conv2d(in_channels=in_channels,
                  out_channels=out_chans_first_conv, kernel_size=3,
                  stride=1, padding=1, bias=True))
        cur_channels_count = out_chans_first_conv
        #####################
        # Downsampling path #
        #####################
        self.denseBlocksDown = nn.ModuleList([])
        self.transDownBlocks = nn.ModuleList([])
        for i in range(len(down_blocks)):
            self.denseBlocksDown.append(
                DenseBlock(cur_channels_count, growth_rate, down_blocks[i]))
            cur_channels_count += (growth_rate*down_blocks[i])
            # insert(0, ...) because skips are consumed deepest-first on the way up.
            skip_connection_channel_counts.insert(0,cur_channels_count)
            self.transDownBlocks.append(TransitionDown(cur_channels_count))
        #####################
        #     Bottleneck    #
        #####################
        self.add_module('bottleneck',Bottleneck(cur_channels_count,
                                     growth_rate, bottleneck_layers))
        # Bottleneck emits only its new features (upsample-style block).
        prev_block_channels = growth_rate*bottleneck_layers
        cur_channels_count += prev_block_channels
        #######################
        #   Upsampling path   #
        #######################
        self.transUpBlocks = nn.ModuleList([])
        self.denseBlocksUp = nn.ModuleList([])
        for i in range(len(up_blocks)-1):
            self.transUpBlocks.append(TransitionUp(prev_block_channels, prev_block_channels))
            cur_channels_count = prev_block_channels + skip_connection_channel_counts[i]
            self.denseBlocksUp.append(DenseBlock(
                cur_channels_count, growth_rate, up_blocks[i],
                    upsample=True))
            prev_block_channels = growth_rate*up_blocks[i]
        ## Final DenseBlock ##
        # The last decoder block keeps its input (upsample=False) so the head
        # sees both skip features and the new ones.
        self.transUpBlocks.append(TransitionUp(
            prev_block_channels, prev_block_channels))
        cur_channels_count = prev_block_channels + skip_connection_channel_counts[-1]
        self.denseBlocksUp.append(DenseBlock(
            cur_channels_count, growth_rate, up_blocks[-1],
                upsample=False))
        cur_channels_count += growth_rate*up_blocks[-1]
        ## Softmax ##
        self.finalConv = nn.Conv2d(in_channels=cur_channels_count,
               out_channels=n_classes, kernel_size=1, stride=1,
                   padding=0, bias=True)
        # NOTE(review): nn.init.kaiming_uniform / nn.init.constant (without a
        # trailing underscore) are the legacy spellings; newer PyTorch expects
        # the in-place *_ variants — confirm against the pinned torch version.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_uniform(m.weight, mode='fan_out')
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant(m.weight, 1)
                nn.init.constant(m.bias, 0)
            elif isinstance(m, nn.ConvTranspose2d):
                nn.init.kaiming_uniform(m.weight, mode='fan_out')
    def forward(self, x):
        """Return per-pixel class logits of shape (N, n_classes, H, W)."""
        out = self.firstconv(x)
        skip_connections = []
        for i in range(len(self.down_blocks)):
            out = self.denseBlocksDown[i](out)
            skip_connections.append(out)
            out = self.transDownBlocks[i](out)
        out = self.bottleneck(out)
        for i in range(len(self.up_blocks)):
            skip = skip_connections.pop()
            out = self.transUpBlocks[i](out, skip)
            out = self.denseBlocksUp[i](out)
        out = self.finalConv(out)
        return out
def _standard_tiramisu(n_classes, down, up, bottleneck, growth):
    """Shared factory for the standard FC-DenseNet configurations."""
    return FCDenseNet(
        in_channels=3, down_blocks=down, up_blocks=up,
        bottleneck_layers=bottleneck, growth_rate=growth,
        out_chans_first_conv=48, n_classes=n_classes)
def FCDenseNet57(n_classes):
    """FC-DenseNet-57: 4 layers per dense block, growth rate 12."""
    return _standard_tiramisu(n_classes, (4,) * 5, (4,) * 5, 4, 12)
def FCDenseNet67(n_classes):
    """FC-DenseNet-67: 5 layers per dense block, growth rate 16."""
    return _standard_tiramisu(n_classes, (5,) * 5, (5,) * 5, 5, 16)
def FCDenseNet103(n_classes):
    """FC-DenseNet-103: variable-depth dense blocks, growth rate 16."""
    return _standard_tiramisu(n_classes, (4, 5, 7, 10, 12),
                              (12, 10, 7, 5, 4), 15, 16)
| 36.193237 | 93 | 0.593833 |
acf3e705b979068bb1275027be710094dc951d2a | 12,669 | py | Python | utils.py | hugo-oliveira/CoDAGANs | e9980f27f829a67a634f83331bb070de24a4be57 | [
"MIT"
] | 11 | 2019-03-11T13:53:03.000Z | 2021-09-27T13:34:05.000Z | utils.py | hugo-oliveira/CoDAGANs | e9980f27f829a67a634f83331bb070de24a4be57 | [
"MIT"
] | null | null | null | utils.py | hugo-oliveira/CoDAGANs | e9980f27f829a67a634f83331bb070de24a4be57 | [
"MIT"
] | 3 | 2019-01-22T01:54:38.000Z | 2020-12-08T18:18:21.000Z | """
Copyright (C) 2018 NVIDIA Corporation. All rights reserved.
Licensed under the CC BY-NC-SA 4.0 license (https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode).
"""
from torch.utils.serialization import load_lua
from torch.utils.data import DataLoader
from networks import Vgg16
from torch.autograd import Variable
from torch import nn
import torch.nn.functional as F
from torch.optim import lr_scheduler
from torchvision import transforms
from data import ImageFilelist, ImageFolder
import torch
import os
import math
import torchvision.utils as vutils
import yaml
import numpy as np
import torch.nn.init as init
# Methods
# get_all_data_loaders : Primary data loader interface (load trainA, trainB, ..., testA, testB, ...).
# get_data_loader_list : List-based data loader.
# get_data_loader_folder : Folder-based data loader.
# get_config : Load yaml file.
# eformat :
# write_2images : Save output image.
# prepare_sub_folder : Create checkpoints and images folders for saving outputs.
# write_one_row_html : Write one row of the html file for output images.
# write_html : Create the html file.
# write_loss
# slerp
# get_slerp_interp
# get_model_list
# load_vgg16
# vgg_preprocess
# get_scheduler
# weights_init
# jaccard : Computing jaccard for two inputs.
# norm : Normalizes images to the interval [0, 1] for output.
def get_all_data_loaders(conf, n_datasets, samples, augmentation, trim):
    """Build one (train, test) DataLoader pair per dataset.

    Datasets are addressed by letter ('A', 'B', ...); per-dataset sampling
    rates come from `samples` and augmentation flags from `augmentation`.
    Returns (train_loader_list, test_loader_list).
    """
    batch_size = conf['batch_size']
    num_workers = conf['num_workers']
    dataset_letters = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I']
    train_loaders = []
    test_loaders = []
    for i in range(n_datasets):
        letter = dataset_letters[i]
        train_loaders.append(get_data_loader_folder(
            os.path.join(conf['data_root']), 'train', letter, batch_size,
            True, trim, num_workers, sample=samples[i],
            random_transform=augmentation[i], channels=conf['input_dim']))
        # Test loaders iterate every sample once, with paths, no augmentation.
        test_loaders.append(get_data_loader_folder(
            os.path.join(conf['data_root']), 'test', letter, 1, True, trim,
            num_workers, sample=1.0, return_path=True, random_transform=0,
            channels=conf['input_dim']))
    return train_loaders, test_loaders
def get_data_loader_folder(input_folder, fold, dataset_letter, batch_size, shuffle, trim, num_workers=4, sample=-1, return_path=False, random_transform=False, channels=1):
    """Build a DataLoader over one split (`fold` is 'train' or 'test') of the
    dataset identified by `dataset_letter`, dropping the last partial batch."""
    dataset = ImageFolder(input_folder, sample=sample, fold=fold, dataset_letter=dataset_letter, trim_bool=trim, return_path=return_path, random_transform=random_transform, channels=channels)
    loader = DataLoader(dataset=dataset, batch_size=batch_size, shuffle=shuffle, drop_last=True, num_workers=num_workers)
    return loader
def get_config(config):
    """Load a YAML configuration file and return its contents.

    Uses yaml.safe_load: the config is plain data, and bare yaml.load
    (without a Loader) is deprecated and can construct arbitrary Python
    objects from an untrusted file.
    """
    with open(config, 'r') as stream:
        return yaml.safe_load(stream)
def eformat(f, prec):
    """Format `f` in scientific notation with `prec` fraction digits,
    compacting the exponent (e.g. 1.2e+02 -> 1.2e2, 5.00e-01 -> 5.00e-1)."""
    mantissa, _, exponent = ("%.*e" % (prec, f)).partition('e')
    return "%se%d" % (mantissa, int(exponent))
def __write_images(image_outputs, display_image_num, file_name):
    """Tile the first `display_image_num` images from each tensor in
    `image_outputs` into a single normalized grid and save it to `file_name`."""
    # Broadcast gray-scale tensors to 3 channels so all rows share one grid.
    rgb_outputs = [images.expand(-1, 3, -1, -1) for images in image_outputs]
    stacked = torch.cat([images[:display_image_num] for images in rgb_outputs], 0)
    grid = vutils.make_grid(stacked.data, nrow=display_image_num,
                            padding=0, normalize=True)
    vutils.save_image(grid, file_name, nrow=1)
def write_2images(image_outputs, display_image_num, image_directory, postfix):
    """Save the a2b translations (first half of `image_outputs`) and the b2a
    translations (second half) as two grid images under `image_directory`."""
    half = len(image_outputs) // 2
    __write_images(image_outputs[0:half], display_image_num,
                   '%s/gen_a2b_%s.jpg' % (image_directory, postfix))
    __write_images(image_outputs[half:len(image_outputs)], display_image_num,
                   '%s/gen_b2a_%s.jpg' % (image_directory, postfix))
def prepare_sub_folder(output_directory):
    """Create the output folder layout for a training run.

    Ensures <output>/images (with originals/, labels/ and predictions/
    subfolders) and <output>/checkpoints exist, printing a message for each
    directory it has to create.

    Returns:
        (checkpoint_directory, image_directory) paths.
    """
    image_directory = os.path.join(output_directory, 'images')
    checkpoint_directory = os.path.join(output_directory, 'checkpoints')
    # One loop instead of five copy-pasted exists/makedirs stanzas.
    required = [
        image_directory,
        os.path.join(image_directory, 'originals'),
        os.path.join(image_directory, 'labels'),
        os.path.join(image_directory, 'predictions'),
        checkpoint_directory,
    ]
    for directory in required:
        if not os.path.exists(directory):
            print("Creating directory: {}".format(directory))
            os.makedirs(directory)
    return checkpoint_directory, image_directory
def write_one_row_html(html_file, iterations, img_filename, all_size):
    """Append one image row (heading + linked image) to an open HTML file."""
    basename = img_filename.split('/')[-1]
    html_file.write("<h3>iteration [%d] (%s)</h3>" % (iterations, basename))
    html_file.write("""
    <p><a href="%s">
    <img src="%s" style="width:%dpx">
    </a><br>
    <p>
    """ % (img_filename, img_filename, all_size))
def write_html(filename, iterations, image_save_iterations, image_directory, all_size=1536):
    """Write an auto-refreshing HTML index of the saved sample images.

    Shows the 'current' train grids first, then every saved snapshot
    (multiples of image_save_iterations) from newest to oldest.
    """
    html_file = open(filename, "w")
    html_file.write('''
    <!DOCTYPE html>
    <html>
    <head>
      <title>Experiment name = %s</title>
      <meta http-equiv="refresh" content="30">
    </head>
    <body>
    ''' % os.path.basename(filename))
    html_file.write("<h3>current</h3>")
    write_one_row_html(html_file, iterations, '%s/gen_a2b_train_current.jpg' % (image_directory), all_size)
    write_one_row_html(html_file, iterations, '%s/gen_b2a_train_current.jpg' % (image_directory), all_size)
    # Iterate backwards so the most recent snapshots appear at the top.
    for j in range(iterations, image_save_iterations-1, -1):
        if j % image_save_iterations == 0:
            write_one_row_html(html_file, j, '%s/gen_a2b_test_%08d.jpg' % (image_directory, j), all_size)
            write_one_row_html(html_file, j, '%s/gen_b2a_test_%08d.jpg' % (image_directory, j), all_size)
            write_one_row_html(html_file, j, '%s/gen_a2b_train_%08d.jpg' % (image_directory, j), all_size)
            write_one_row_html(html_file, j, '%s/gen_b2a_train_%08d.jpg' % (image_directory, j), all_size)
    html_file.write("</body></html>")
    html_file.close()
def write_loss(iterations, trainer, train_writer):
    """Log every non-callable, non-dunder attribute of `trainer` whose name
    contains 'loss', 'grad' or 'nwd' as a scalar at step iterations + 1."""
    for attr in dir(trainer):
        if attr.startswith("__") or callable(getattr(trainer, attr)):
            continue
        if 'loss' in attr or 'grad' in attr or 'nwd' in attr:
            train_writer.add_scalar(attr, getattr(trainer, attr), iterations + 1)
def slerp(val, low, high):
    """Spherical linear interpolation between vectors `low` and `high`.

    original: Animating Rotation with Quaternion Curves, Ken Shoemake
    https://arxiv.org/abs/1609.04468
    Code: https://github.com/soumith/dcgan.torch/issues/14, Tom White
    """
    cos_angle = np.dot(low / np.linalg.norm(low), high / np.linalg.norm(high))
    omega = np.arccos(cos_angle)
    sin_omega = np.sin(omega)
    return (np.sin((1.0 - val) * omega) / sin_omega * low
            + np.sin(val * omega) / sin_omega * high)
def get_slerp_interp(nb_latents, nb_interp, z_dim):
    """
    modified from: PyTorch inference for "Progressive Growing of GANs" with CelebA snapshot
    https://github.com/ptrblck/prog_gans_pytorch_inference

    Returns nb_latents * nb_interp slerp-interpolated latent vectors with
    trailing singleton spatial axes, shape (N, z_dim, 1, 1).
    """
    latent_interps = np.empty(shape=(0, z_dim), dtype=np.float32)
    for _ in range(nb_latents):
        low = np.random.randn(z_dim)
        high = np.random.randn(z_dim)  # endpoints of one interpolation segment
        fractions = np.linspace(0, 1, num=nb_interp)
        segment = np.array([slerp(v, low, high) for v in fractions],
                           dtype=np.float32)
        latent_interps = np.vstack((latent_interps, segment))
    return latent_interps[:, :, np.newaxis, np.newaxis]
# Get model list for resume.
def get_model_list(dirname, key):
    """Return the newest (lexicographically last) checkpoint in `dirname`
    whose filename contains both `key` and ".pt".

    Returns None when the directory does not exist or contains no match.
    """
    if os.path.exists(dirname) is False:
        return None
    gen_models = [os.path.join(dirname, f) for f in os.listdir(dirname) if
                  os.path.isfile(os.path.join(dirname, f)) and key in f and ".pt" in f]
    if not gen_models:
        # A list comprehension never yields None; the old `is None` check let
        # an empty match list fall through to an IndexError below.
        return None
    gen_models.sort()
    last_model_name = gen_models[-1]
    return last_model_name
def load_vgg16(model_dir):
    """ Use the model from https://github.com/abhiskk/fast-neural-style/blob/master/neural_style/utils.py """
    # Downloads the Lua-Torch VGG16 snapshot once, converts it to a PyTorch
    # state dict cached as vgg16.weight, and returns a loaded Vgg16 module.
    if not os.path.exists(model_dir):
        os.mkdir(model_dir)
    if not os.path.exists(os.path.join(model_dir, 'vgg16.weight')):
        if not os.path.exists(os.path.join(model_dir, 'vgg16.t7')):
            # Fetch the Torch7-serialized weights via wget (network required).
            os.system('wget https://www.dropbox.com/s/76l3rt4kyi3s8x7/vgg16.t7?dl=1 -O ' + os.path.join(model_dir, 'vgg16.t7'))
        vgglua = load_lua(os.path.join(model_dir, 'vgg16.t7'))
        vgg = Vgg16()
        # Copy the Torch7 parameters into the PyTorch module, in order.
        for (src, dst) in zip(vgglua.parameters()[0], vgg.parameters()):
            dst.data[:] = src
        torch.save(vgg.state_dict(), os.path.join(model_dir, 'vgg16.weight'))
    vgg = Vgg16()
    vgg.load_state_dict(torch.load(os.path.join(model_dir, 'vgg16.weight')))
    return vgg
def vgg_preprocess(batch):
    """Convert a [-1, 1] RGB batch into the BGR, mean-subtracted form that
    the (Caffe-style) VGG16 network expects."""
    tensortype = type(batch.data)
    (r, g, b) = torch.chunk(batch, 3, dim = 1)
    batch = torch.cat((b, g, r), dim = 1) # Convert RGB to BGR.
    batch = (batch + 1) * 255 * 0.5 # [-1, 1] -> [0, 255].
    # Allocate a tensor the size of the batch and fill every channel with the
    # per-channel mean; all three channels are assigned, so the initially
    # uninitialized memory is fully overwritten.
    mean = tensortype(batch.data.size())
    mean[:, 0, :, :] = 103.939
    mean[:, 1, :, :] = 116.779
    mean[:, 2, :, :] = 123.680
    batch = batch.sub(Variable(mean)) # Subtract mean.
    return batch
def get_scheduler(optimizer, hyperparameters, iterations=-1):
    """Build a learning-rate scheduler from the config dict.

    Returns None for a constant learning rate (no 'lr_policy' key, or
    'constant'), a StepLR scheduler for the 'step' policy, and raises
    NotImplementedError for any other policy.
    """
    if 'lr_policy' not in hyperparameters or hyperparameters['lr_policy'] == 'constant':
        return None  # Constant scheduler.
    if hyperparameters['lr_policy'] == 'step':
        return lr_scheduler.StepLR(optimizer, step_size=hyperparameters['step_size'],
                                   gamma=hyperparameters['gamma'], last_epoch=iterations)
    # The original `return NotImplementedError(...)` handed callers an
    # exception *instance* instead of signalling the error — raise it.
    raise NotImplementedError('learning rate policy [%s] is not implemented'
                              % hyperparameters['lr_policy'])
# Weight initialization.
def weights_init(init_type='gaussian'):
    """Return an initializer closure for use with nn.Module.apply().

    Supported init_type values: 'gaussian', 'xavier', 'kaiming',
    'orthogonal' and 'default' (leave weights untouched). Anything else
    trips the assert below.
    """
    def init_fun(m):
        classname = m.__class__.__name__
        # Only Conv*/Linear layers with a weight attribute are initialized.
        if (classname.find('Conv') == 0 or classname.find('Linear') == 0) and hasattr(m, 'weight'):
            # NOTE(review): init.normal/xavier_normal/kaiming_normal/
            # orthogonal/constant are the legacy (pre-underscore) spellings;
            # confirm they still exist in the pinned torch version.
            if init_type == 'gaussian':
                init.normal(m.weight.data, 0.0, 0.02)
            elif init_type == 'xavier':
                init.xavier_normal(m.weight.data, gain=math.sqrt(2))
            elif init_type == 'kaiming':
                init.kaiming_normal(m.weight.data, a=0, mode='fan_in')
            elif init_type == 'orthogonal':
                init.orthogonal(m.weight.data, gain=math.sqrt(2))
            elif init_type == 'default':
                pass
            else:
                assert 0, "Unsupported initialization: {}".format(init_type)
            if hasattr(m, 'bias') and m.bias is not None:
                init.constant(m.bias.data, 0.0)
    return init_fun
# Image normalization.
def norm(arr, multichannel = False):
    """Min-max normalize an image array to [0, 1].

    With multichannel=True the input is treated as (C, H, W); each channel
    is scaled independently and the result is returned as (H, W, C) float32.
    """
    # np.float was removed in NumPy 1.24; np.float64 is the same dtype.
    arr = arr.astype(np.float64)
    if not multichannel:
        mn = arr.min()
        mx = arr.max()
        arr = (arr - mn) / (mx - mn)
    else:
        tmp = np.zeros((arr.shape[1], arr.shape[2], arr.shape[0]), dtype=np.float32)
        for i in range(arr.shape[0]):
            mn = arr[i, :, :].min()
            mx = arr[i, :, :].max()
            tmp[:, :, i] = (arr[i, :, :] - mn) / (mx - mn)
        arr = tmp
    return arr
# Computing jaccard metric for two inputs.
def jaccard(input1, input2):
    """Jaccard index (intersection over union) of two binary masks.

    Inputs are coerced to boolean arrays; returns 0.0 when the union is
    empty (both masks all-False).
    """
    # np.bool was removed in NumPy 1.24; the builtin bool is equivalent here.
    input1 = input1.astype(bool)
    input2 = input2.astype(bool)
    smpInt = input1 & input2
    smpUni = input1 | input2
    cntInt = np.count_nonzero(smpInt)
    cntUni = np.count_nonzero(smpUni)
    if cntUni == 0:
        return 0.0
    else:
        return (float(cntInt) / float(cntUni))
class CrossEntropyLoss2d(nn.Module):
    """Pixel-wise cross-entropy: log-softmax over the channel axis followed
    by a 2D negative-log-likelihood loss."""
    def __init__(self, weight=None, size_average=True, ignore_index=-1):
        super(CrossEntropyLoss2d, self).__init__()
        # NOTE(review): nn.NLLLoss2d is a legacy alias (newer PyTorch folds
        # it into nn.NLLLoss) — confirm against the pinned torch version.
        self.nll_loss = nn.NLLLoss2d(weight, size_average, ignore_index)
    def forward(self, inputs, targets):
        # inputs: (N, C, H, W) raw logits; targets: (N, H, W) class indices.
        return self.nll_loss(F.log_softmax(inputs, dim = 1), targets)
| 38.981538 | 225 | 0.66272 |
acf3e74b0bb737d08f38a789510f83d8470dd2b0 | 4,373 | py | Python | tests/st/ops/gpu/test_reshape_op.py | PowerOlive/mindspore | bda20724a94113cedd12c3ed9083141012da1f15 | [
"Apache-2.0"
] | 3,200 | 2020-02-17T12:45:41.000Z | 2022-03-31T20:21:16.000Z | tests/st/ops/gpu/test_reshape_op.py | zimo-geek/mindspore | 665ec683d4af85c71b2a1f0d6829356f2bc0e1ff | [
"Apache-2.0"
] | 176 | 2020-02-12T02:52:11.000Z | 2022-03-28T22:15:55.000Z | tests/st/ops/gpu/test_reshape_op.py | zimo-geek/mindspore | 665ec683d4af85c71b2a1f0d6829356f2bc0e1ff | [
"Apache-2.0"
] | 621 | 2020-03-09T01:31:41.000Z | 2022-03-30T03:43:19.000Z | # Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
from mindspore import Tensor
from mindspore.ops import operations as P
def reshape(nptype):
    """Check P.Reshape on GPU for dtype `nptype` across several target shapes.

    For every target shape the output tensor must report that shape while
    its flattened data stays identical to the 12-element input.
    """
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    reshape_op = P.Reshape()
    data = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]).astype(nptype)
    input_tensor = Tensor(np.array(data))
    # One loop over all factorizations of 12 instead of six copy-pasted
    # assert blocks — behavior and coverage are identical.
    for new_shape in ((2, 6), (6, 2), (3, 4), (4, 3), (1, 12), (12, 1)):
        output_tensor = reshape_op(input_tensor, new_shape)
        assert new_shape == output_tensor.shape
        np.testing.assert_array_equal(output_tensor.asnumpy().flatten(), data)
def reshape_bool():
    """Check P.Reshape on GPU for boolean tensors across several shapes."""
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    reshape_op = P.Reshape()
    data = np.array([True, True, False, True, False, False, True, False, False, False, False, False])
    input_tensor = Tensor(np.array(data))
    # One loop replaces six copy-pasted assert blocks; coverage is identical.
    for new_shape in ((2, 6), (6, 2), (3, 4), (4, 3), (1, 12), (12, 1)):
        output_tensor = reshape_op(input_tensor, new_shape)
        assert new_shape == output_tensor.shape
        np.testing.assert_array_equal(output_tensor.asnumpy().flatten(), data)
# GPU Reshape smoke tests, one per dtype; the pytest marks select CI tier.
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_reshape_float():
    reshape(np.float32)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_reshape_float16():
    reshape(np.float16)
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_reshape_int32():
    reshape(np.int32)
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_reshape_uint8():
    reshape(np.uint8)
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_reshape_bool():
    reshape_bool()
| 34.433071 | 101 | 0.732449 |
acf3e76166e99b7378b708b3ae4cfecdffc3c7f3 | 417 | py | Python | example/sensor.py | OlaWod/SmartBin | 1b3db59a118e9526f137abed6594bf12c720c34b | [
"MIT"
] | null | null | null | example/sensor.py | OlaWod/SmartBin | 1b3db59a118e9526f137abed6594bf12c720c34b | [
"MIT"
] | null | null | null | example/sensor.py | OlaWod/SmartBin | 1b3db59a118e9526f137abed6594bf12c720c34b | [
"MIT"
] | null | null | null | import RPi.GPIO as GPIO
class Sensor(object):
    """Wrapper around a GPIO input pin wired to a sensor (presumably a
    sound sensor, given hear_sound — confirm the attached hardware)."""
    def __init__(self, port):
        # BCM pin numbering; the internal pull-up keeps the line high until
        # the sensor pulls it low.
        self.port = port
        GPIO.setmode(GPIO.BCM)
        GPIO.setup(self.port, GPIO.IN, pull_up_down=GPIO.PUD_UP)
    def hear_sound(self):
        # Active-low input: reading 0 means the sensor triggered.
        return GPIO.input(self.port)==0
if __name__=='__main__':
    # Simple self-test: poll GPIO 17 until a trigger is detected.
    sensor = Sensor(17)
    while True:
        if sensor.hear_sound():
            print('sound detected')
break | 23.166667 | 64 | 0.606715 |
acf3e81c8bfd50a24b2f663e7c307f649ec203d8 | 4,128 | py | Python | Browse.py | stripti/HumanIden | 25d1c7121cc58c647c010063530e28dbd363c15c | [
"MIT"
] | 1 | 2018-06-26T08:04:10.000Z | 2018-06-26T08:04:10.000Z | Browse.py | stripti/HumanIden | 25d1c7121cc58c647c010063530e28dbd363c15c | [
"MIT"
] | null | null | null | Browse.py | stripti/HumanIden | 25d1c7121cc58c647c010063530e28dbd363c15c | [
"MIT"
] | null | null | null | from tkinter import *
from tkinter.filedialog import askopenfilename
import os
import cv2
import tkinter as tk
import csv
def storage(nme, a):
    """Record a recognized person's name in "<a>.csv", where `a` is the
    path of the video being analysed.

    Note: mode 'w' truncates, so only the most recently written name is
    kept — this mirrors the original behavior.
    """
    # Context manager guarantees the file is closed even if a write fails;
    # the original left the handle open on any exception.
    with open(str(a) + ".csv", 'w') as myfile:
        newdata = nme + ","
        myfile.write("\n")
        myfile.write(str(newdata))
class Brow(tk.Toplevel):
    """Top-level dialog: pick a video file, then run LBPH face recognition
    over it, overlaying recognized names and logging them via storage()."""
    def __init__(self, parent):
        tk.Toplevel.__init__(self, parent)
        # Tk font descriptor strings (family / size / weight / slant).
        # font12 is defined but not referenced by any widget below.
        font10 = "-family {Viner Hand ITC} -size 15 -weight bold " \
                 "-slant italic -underline 0 -overstrike 0"
        font12 = "-family Terminal -size 12 -weight normal -slant " \
                 "roman -underline 0 -overstrike 0"
        font13 = "-family {Lucida Handwriting} -size 14 -weight normal" \
                 " -slant roman -underline 0 -overstrike 0"
        self.geometry("600x450+439+128")
        self.title("Browse file")
        self.configure(background="#fed2b1")
        self.Label1 = Label(self)
        self.Label1.place(relx=0.13, rely=0.09, height=51, width=444)
        self.Label1.configure(background="#fed2b1")
        self.Label1.configure(disabledforeground="#a3a3a3")
        self.Label1.configure(font=font10)
        self.Label1.configure(foreground="#6c0000")
        self.Label1.configure(text='''Choose the video file for monitoring..''')
        self.Label1.configure(width=444)
        self.Button2 = Button(self,command=self.track)
        self.Button2.place(relx=0.18, rely=0.47, height=44, width=357)
        self.Button2.configure(activebackground="#d9d9d9")
        self.Button2.configure(activeforeground="#000000")
        self.Button2.configure(background="#620702")
        self.Button2.configure(disabledforeground="#a3a3a3")
        self.Button2.configure(font=font13)
        self.Button2.configure(foreground="#ffffff")
        self.Button2.configure(highlightbackground="#d9d9d9")
        self.Button2.configure(highlightcolor="black")
        self.Button2.configure(pady="0")
        self.Button2.configure(relief=GROOVE)
        self.Button2.configure(text='''**Browse and track**''')
        self.Button2.configure(width=357)
        # NOTE(review): this `global a` at class-body level has no effect on
        # the methods below (each binds its own local `a`).
        global a
    def browse(self):
        """Open a file dialog and return the selected video path."""
        root = Tk()
        root.withdraw()
        currdir = os.getcwd()
        a = askopenfilename(parent=root, initialdir=currdir, filetypes=(("Video File", "*.mp4,*.avi"), ("All Files", "*.*")),
                            title='Please select a directory')
        return a
    def track(self):
        """Run face detection + LBPH recognition over the chosen video,
        labelling frames until the video ends or 'q' is pressed."""
        a = self.browse()
        faceDetect = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
        cam = cv2.VideoCapture(a)
        rec = cv2.face.LBPHFaceRecognizer_create()
        rec.read("recognizer/trainingData.yaml")
        id1 = 0
        font = cv2.FONT_HERSHEY_SCRIPT_SIMPLEX
        while (True):
            (grabbed, img) = cam.read()
            if grabbed:  # frame read OK; the classifier needs a grayscale image
                gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
                faces = faceDetect.detectMultiScale(gray, 1.3,
                                                    5)  # detects all faces in current frame and return coordinates of face
                for (x, y, w, h) in faces:
                    cv2.rectangle(img, (x, y), (x + w, y + h), (0, 255, 0), 2)
                    id1, conf = rec.predict(gray[y:y + h, x:x + w])
                    # Map the predicted label id to a name via datasetinfo.csv.
                    fields = ['id', 'name']
                    with open('datasetinfo.csv', 'r') as f:
                        d_reader = csv.DictReader(f, fieldnames=fields)
                        for line in d_reader:
                            if line['id'] == str(id1):
                                nme = line['name']
                    storage(nme,a)
                    cv2.putText(img, nme, (x, y + h), font, 2, (0, 0, 255), 2)
            cv2.imshow("face", img)
            if (cv2.waitKey(50) == ord('q')):
                break
        cam.release()
        cv2.destroyAllWindows()
class App(tk.Tk):
    """Hidden root window that immediately opens the browse dialog."""
    def __init__(self):
        super().__init__()
        self.withdraw()  # keep the root invisible; Brow is the visible UI
        Brow(self)
if __name__ == "__main__":
    # Launch the (hidden) root window and enter the Tk event loop.
    app = App()
    app.mainloop()
| 36.530973 | 125 | 0.565165 |
acf3e81fc6c1aeca4b1008ada60a4da7cd42c8cb | 1,549 | py | Python | scenedetect.py | twostarxx/PySceneDetect | 92c836cfbef0cf475cf75b8be45dab8d5bd07dd8 | [
"BSD-3-Clause"
] | 1 | 2021-07-22T14:27:39.000Z | 2021-07-22T14:27:39.000Z | scenedetect.py | twostarxx/PySceneDetect | 92c836cfbef0cf475cf75b8be45dab8d5bd07dd8 | [
"BSD-3-Clause"
] | null | null | null | scenedetect.py | twostarxx/PySceneDetect | 92c836cfbef0cf475cf75b8be45dab8d5bd07dd8 | [
"BSD-3-Clause"
] | 1 | 2021-05-06T03:58:07.000Z | 2021-05-06T03:58:07.000Z |
#
# PySceneDetect: Python-Based Video Scene Detector
# ---------------------------------------------------------------
# [ Site: http://www.bcastell.com/projects/PySceneDetect/ ]
# [ Github: https://github.com/Breakthrough/PySceneDetect/ ]
# [ Documentation: http://pyscenedetect.readthedocs.org/ ]
#
# This is a convenience/backwards-compatibility script, and simply provides an
# alternative to running PySceneDetect from source (in addition to the standard
# python -m scenedetect).
#
# Copyright (C) 2014-2019 Brandon Castellano <http://www.bcastell.com>.
#
# PySceneDetect is licensed under the BSD 3-Clause License; see the
# included LICENSE file or visit one of the following pages for details:
# - http://www.bcastell.com/projects/PySceneDetect/
# - https://github.com/Breakthrough/PySceneDetect/
#
# This software uses Numpy and OpenCV; see the LICENSE-NUMPY and
# LICENSE-OPENCV files or visit one of above URLs for details.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
if __name__ == "__main__":
    # Delegate to the scenedetect package's CLI entry point.
    # pylint: disable=no-name-in-module
    from scenedetect.__main__ import main
    main()
| 43.027778 | 79 | 0.711427 |
acf3e823f86321271e6aad5255476881b0eb7d58 | 1,740 | py | Python | src/02-sensors/dht_sensor.py | davidalexisnyt/micropython-workshop | 8ea69a57080ddefca4ba7bf169a99e930e2bd846 | [
"MIT"
] | 3 | 2020-08-03T16:54:01.000Z | 2020-08-26T02:38:39.000Z | src/02-sensors/dht_sensor.py | davidalexisnyt/micropython-workshop | 8ea69a57080ddefca4ba7bf169a99e930e2bd846 | [
"MIT"
] | null | null | null | src/02-sensors/dht_sensor.py | davidalexisnyt/micropython-workshop | 8ea69a57080ddefca4ba7bf169a99e930e2bd846 | [
"MIT"
] | 2 | 2020-07-31T20:23:30.000Z | 2020-08-03T22:16:35.000Z | """
--------------------------------------------------------------------------------------
dhtSensor.py
--------------------------------------------------------------------------------------
This script shows hot to use the DHT11/22 digital humidity and temperature sensors.
These sensors cost around $3 or $4.
The sensor provides temperature values in Celcius, which can then be easily converted
to Fahrenheit if needed. The sensors can only read measurements every 2 seconds, so
a delay needs to be inserted between readings.
Author: David Alexis (2019)
--------------------------------------------------------------------------------------
"""
from machine import Pin
from time import sleep
import dht
SENSOR_PIN = 5  # GPIO pin the DHT data line is wired to
def main():
    """Poll the DHT sensor forever, printing temperature (F) and humidity."""
    sensor = dht.DHT22(Pin(SENSOR_PIN))
    # sensor = dht.DHT11(Pin(SENSOR_PIN)) # <-- Use this line if you have a DHT11 sensor.
    while True:
        # Get sensor readings.
        # The measure() method actually samples the temperature and humidity,
        # storing the values internally; they are then read back via
        # temperature() and humidity().
        sensor.measure()
        temperature = sensor.temperature()
        humidity = sensor.humidity()
        # Temperature is returned in Celsius. Convert to Fahrenheit.
        temperatureF = (temperature * 1.8 + 32)
        reading = {
            "temperature_F": int(temperatureF),
            "humidity": int(humidity)
        }
        print(reading)
        # Wait between readings: the DHT sensor's measure() method can only
        # be called once every 2 seconds.
        sleep(5)
# ----- Program starts here -----
main()
| 31.636364 | 94 | 0.55 |
acf3e8c253c1a8f9db68302a657841b91999f7a6 | 2,238 | py | Python | tests/test_apis_hvac.py | zxdavb/evohome | 2d8ed28cf746170ab03678b66114326a5622a5e9 | [
"Unlicense"
] | 22 | 2018-07-16T12:46:15.000Z | 2019-06-23T22:36:33.000Z | tests/test_apis_hvac.py | zxdavb/evohome | 2d8ed28cf746170ab03678b66114326a5622a5e9 | [
"Unlicense"
] | 41 | 2018-05-11T14:05:32.000Z | 2019-10-07T20:19:22.000Z | tests/test_apis_hvac.py | zxdavb/evohome | 2d8ed28cf746170ab03678b66114326a5622a5e9 | [
"Unlicense"
] | 4 | 2018-07-16T13:08:24.000Z | 2019-02-25T23:36:37.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
"""RAMSES RF - a RAMSES-II protocol decoder & analyser.
Test the Command.put_*, Command.set_* APIs.
"""
from datetime import datetime as dt
from ramses_rf.protocol.command import Command
from ramses_rf.protocol.message import Message
from ramses_rf.protocol.packet import Packet
from tests.common import gwy # noqa: F401
def _test_api_line(gwy, api, pkt_line):  # noqa: F811
    """Parse one packet line, rebuild it via *api*, and return all three objects."""
    packet = Packet.from_port(gwy, dt.now(), pkt_line)
    # The first 4 characters of the fixture line are a counter prefix.
    assert str(packet) == pkt_line[4:]

    message = Message(gwy, packet)
    # Drop private keys (leading underscore) before feeding the payload back
    # into the command constructor.
    public_payload = {
        key: val for key, val in message.payload.items() if not key.startswith("_")
    }
    command = api(message.dst.id, src_id=message.src.id, **public_payload)

    return packet, message, command
def _test_api(gwy, api, packets):  # noqa: F811  # NOTE: incl. addr_set check
    """Round-trip each packet line through *api* and compare with the original."""
    for line in packets:
        parsed_pkt, _msg, rebuilt_cmd = _test_api_line(gwy, api, line)
        # Equality also covers the address set of the frame.
        assert rebuilt_cmd == parsed_pkt
def test_set_22f7_invert(gwy):  # noqa: F811
    # Round-trip the known-good 22F7 (bypass position) W-frames through
    # Command.set_bypass_position.
    _test_api(gwy, Command.set_bypass_position, SET_22F7_GOOD)
def test_set_22f7_kwargs():
    """Verify set_bypass_position renders the expected frame for each kwarg set."""
    for expected_frame, call_kwargs in SET_22F7_KWARGS.items():
        command = Command.set_bypass_position(
            "32:155617", src_id="37:171871", **call_kwargs
        )
        # Strip the leading "NNN " counter before comparing; the comparison
        # also confirms the address set is identical.
        assert str(command) == expected_frame[4:]
# Known-good 22F7 (bypass position) W-frames used for API round-trip tests.
SET_22F7_GOOD = (
    "... W --- 37:171871 32:155617 --:------ 22F7 003 0000EF",  # bypass off
    "... W --- 37:171871 32:155617 --:------ 22F7 003 00C8EF",  # bypass on
    "... W --- 37:171871 32:155617 --:------ 22F7 003 00FFEF",  # bypass auto
)
# Mapping of expected 22F7 frame -> kwargs that should produce it.
# Payload byte 2: 00 = 0%, 64 = 50%, C8 = 100%, FF = auto/none.
SET_22F7_KWARGS = {
    "000 W --- 37:171871 32:155617 --:------ 22F7 003 00FFEF": {"bypass_mode": "auto"},
    "000 W --- 37:171871 32:155617 --:------ 22F7 003 0000EF": {"bypass_mode": "off"},
    "000 W --- 37:171871 32:155617 --:------ 22F7 003 00C8EF": {"bypass_mode": "on"},
    "001 W --- 37:171871 32:155617 --:------ 22F7 003 00FFEF": {
        "bypass_position": None
    },
    "001 W --- 37:171871 32:155617 --:------ 22F7 003 0000EF": {
        "bypass_position": 0.0
    },
    "001 W --- 37:171871 32:155617 --:------ 22F7 003 0064EF": {
        "bypass_position": 0.5
    },
    "001 W --- 37:171871 32:155617 --:------ 22F7 003 00C8EF": {
        "bypass_position": 1.0
    },
}
| 30.243243 | 88 | 0.597855 |
acf3ea475a83a6cfffb39fa4fb80d04dc4231f98 | 124,011 | py | Python | tests/hwsim/test_sigma_dut.py | b1tninja/hostap | 31ee2992c796c760573ebbfbfc311bb9ab22eb96 | [
"Unlicense"
] | null | null | null | tests/hwsim/test_sigma_dut.py | b1tninja/hostap | 31ee2992c796c760573ebbfbfc311bb9ab22eb96 | [
"Unlicense"
] | null | null | null | tests/hwsim/test_sigma_dut.py | b1tninja/hostap | 31ee2992c796c760573ebbfbfc311bb9ab22eb96 | [
"Unlicense"
] | 1 | 2022-03-25T08:21:36.000Z | 2022-03-25T08:21:36.000Z | # Test cases for sigma_dut
# Copyright (c) 2017, Qualcomm Atheros, Inc.
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
import binascii
import logging
logger = logging.getLogger()
import os
import socket
import struct
import subprocess
import threading
import time
import hostapd
from utils import HwsimSkip
from hwsim import HWSimRadio
import hwsim_utils
from test_dpp import check_dpp_capab, update_hapd_config
from test_suite_b import check_suite_b_192_capa, suite_b_as_params, suite_b_192_rsa_ap_params
from test_ap_eap import check_eap_capa
from test_ap_hs20 import hs20_ap_params
def check_sigma_dut():
    """Skip the current test unless the sigma_dut binary is present."""
    binary_present = os.path.exists("./sigma_dut")
    if binary_present:
        return
    raise HwsimSkip("sigma_dut not available")
def to_hex(s):
    """Return the ASCII hex encoding of the text string *s*."""
    raw = s.encode()
    return binascii.hexlify(raw).decode()
def from_hex(s):
    """Decode an ASCII hex string back into text."""
    raw = binascii.unhexlify(s)
    return raw.decode()
def sigma_dut_cmd(cmd, port=9000, timeout=2):
    """Send one control command to sigma_dut over its local TCP socket.

    Returns the stripped response text, or an empty string on any socket
    error or timeout (best effort).
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
                         socket.IPPROTO_TCP)
    sock.settimeout(timeout)
    addr = ('127.0.0.1', port)
    sock.connect(addr)
    sock.send(cmd.encode() + b"\r\n")
    try:
        res = sock.recv(1000).decode()
        running = False
        done = False
        # sigma_dut may first report "status,RUNNING" and only then deliver
        # the final status; detect whether a final status was already seen.
        for line in res.splitlines():
            if line.startswith("status,RUNNING"):
                running = True
            elif line.startswith("status,INVALID"):
                done = True
            elif line.startswith("status,ERROR"):
                done = True
            elif line.startswith("status,COMPLETE"):
                done = True
        if running and not done:
            # Read the actual response
            res = sock.recv(1000).decode()
    except:
        # Deliberate best-effort: any receive error yields an empty response
        # rather than failing the whole test here.
        res = ''
        pass
    sock.close()
    res = res.rstrip()
    logger.debug("sigma_dut: '%s' --> '%s'" % (cmd, res))
    return res
def sigma_dut_cmd_check(cmd, port=9000, timeout=2):
    """Run a sigma_dut command and require a COMPLETE status in the reply."""
    response = sigma_dut_cmd(cmd, port=port, timeout=timeout)
    if "COMPLETE" in response:
        return response
    raise Exception("sigma_dut command failed: " + cmd)
def start_sigma_dut(ifname, debug=False, hostapd_logdir=None, cert_path=None,
                    bridge=None):
    """Launch a sigma_dut process bound to *ifname* and wait for it to answer.

    Optional arguments map to sigma_dut command line flags: debug (-d),
    hostapd_logdir (-H), cert_path (-C), bridge (-b).  Returns the Popen
    handle; callers must stop it with stop_sigma_dut().
    """
    check_sigma_dut()
    cmd = [ './sigma_dut',
            '-M', ifname,
            '-S', ifname,
            '-F', '../../hostapd/hostapd',
            '-G',
            '-w', '/var/run/wpa_supplicant/',
            '-j', ifname ]
    if debug:
        cmd += [ '-d' ]
    if hostapd_logdir:
        cmd += [ '-H', hostapd_logdir ]
    if cert_path:
        cmd += [ '-C', cert_path ]
    if bridge:
        cmd += [ '-b', bridge ]
    sigma = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
    # Poll (up to ~1 s) until the control socket answers; connection errors
    # during startup are deliberately swallowed and retried.
    for i in range(20):
        try:
            res = sigma_dut_cmd("HELLO")
            break
        except:
            time.sleep(0.05)
    return sigma
def stop_sigma_dut(sigma):
    """Terminate the sigma_dut process and log whatever it printed."""
    sigma.terminate()
    sigma.wait()
    stdout_data, stderr_data = sigma.communicate()
    logger.debug("sigma_dut stdout: " + str(stdout_data.decode()))
    logger.debug("sigma_dut stderr: " + str(stderr_data.decode()))
def sigma_dut_wait_connected(ifname):
    """Poll sigma_dut until the station on *ifname* reports an association.

    Polls up to 50 times at 0.2 s intervals (~10 s total) and raises if the
    connection never completes.

    Bug fix: the previous "if i == 49: raise" also fired when the connection
    was detected on the very last poll (break leaves i == 49), producing a
    false failure; the for/else form only raises when the loop exhausts
    without a break.
    """
    for _ in range(50):
        res = sigma_dut_cmd("sta_is_connected,interface," + ifname)
        if "connected,1" in res:
            break
        time.sleep(0.2)
    else:
        raise Exception("Connection did not complete")
def test_sigma_dut_basic(dev, apdev):
    """sigma_dut basic functionality"""
    sigma = start_sigma_dut(dev[0].ifname)
    # Fix: the body previously ran without try/finally, so any failed check
    # leaked the sigma_dut process; the sibling tests all stop it in finally.
    try:
        res = sigma_dut_cmd("UNKNOWN")
        if "status,INVALID,errorCode,Unknown command" not in res:
            raise Exception("Unexpected sigma_dut response to unknown command")
        # (command, required-response-substring) pairs
        tests = [ ("ca_get_version", "status,COMPLETE,version,1.0"),
                  ("device_get_info", "status,COMPLETE,vendor"),
                  ("device_list_interfaces,interfaceType,foo", "status,ERROR"),
                  ("device_list_interfaces,interfaceType,802.11",
                   "status,COMPLETE,interfaceType,802.11,interfaceID," + dev[0].ifname) ]
        for cmd, response in tests:
            res = sigma_dut_cmd(cmd)
            if response not in res:
                raise Exception("Unexpected %s response: %s" % (cmd, res))
    finally:
        stop_sigma_dut(sigma)
def test_sigma_dut_open(dev, apdev):
"""sigma_dut controlled open network association"""
try:
run_sigma_dut_open(dev, apdev)
finally:
dev[0].set("ignore_old_scan_res", "0")
def run_sigma_dut_open(dev, apdev):
ifname = dev[0].ifname
sigma = start_sigma_dut(ifname)
hapd = hostapd.add_ap(apdev[0], { "ssid": "open" })
sigma_dut_cmd_check("sta_set_ip_config,interface,%s,dhcp,0,ip,127.0.0.11,mask,255.255.255.0" % ifname)
sigma_dut_cmd_check("sta_set_encryption,interface,%s,ssid,%s,encpType,none" % (ifname, "open"))
sigma_dut_cmd_check("sta_associate,interface,%s,ssid,%s" % (ifname, "open"))
sigma_dut_wait_connected(ifname)
sigma_dut_cmd_check("sta_get_ip_config,interface," + ifname)
sigma_dut_cmd_check("sta_disconnect,interface," + ifname)
sigma_dut_cmd_check("sta_reset_default,interface," + ifname)
stop_sigma_dut(sigma)
def test_sigma_dut_psk_pmf(dev, apdev):
"""sigma_dut controlled PSK+PMF association"""
try:
run_sigma_dut_psk_pmf(dev, apdev)
finally:
dev[0].set("ignore_old_scan_res", "0")
def run_sigma_dut_psk_pmf(dev, apdev):
ifname = dev[0].ifname
sigma = start_sigma_dut(ifname)
ssid = "test-pmf-required"
params = hostapd.wpa2_params(ssid=ssid, passphrase="12345678")
params["wpa_key_mgmt"] = "WPA-PSK-SHA256"
params["ieee80211w"] = "2"
hapd = hostapd.add_ap(apdev[0], params)
sigma_dut_cmd_check("sta_reset_default,interface,%s,prog,PMF" % ifname)
sigma_dut_cmd_check("sta_set_ip_config,interface,%s,dhcp,0,ip,127.0.0.11,mask,255.255.255.0" % ifname)
sigma_dut_cmd_check("sta_set_psk,interface,%s,ssid,%s,passphrase,%s,encpType,aes-ccmp,keymgmttype,wpa2,PMF,Required" % (ifname, "test-pmf-required", "12345678"))
sigma_dut_cmd_check("sta_associate,interface,%s,ssid,%s,channel,1" % (ifname, "test-pmf-required"))
sigma_dut_wait_connected(ifname)
sigma_dut_cmd_check("sta_get_ip_config,interface," + ifname)
sigma_dut_cmd_check("sta_disconnect,interface," + ifname)
sigma_dut_cmd_check("sta_reset_default,interface," + ifname)
stop_sigma_dut(sigma)
def test_sigma_dut_psk_pmf_bip_cmac_128(dev, apdev):
"""sigma_dut controlled PSK+PMF association with BIP-CMAC-128"""
try:
run_sigma_dut_psk_pmf_cipher(dev, apdev, "BIP-CMAC-128", "AES-128-CMAC")
finally:
dev[0].set("ignore_old_scan_res", "0")
def test_sigma_dut_psk_pmf_bip_cmac_256(dev, apdev):
"""sigma_dut controlled PSK+PMF association with BIP-CMAC-256"""
try:
run_sigma_dut_psk_pmf_cipher(dev, apdev, "BIP-CMAC-256", "BIP-CMAC-256")
finally:
dev[0].set("ignore_old_scan_res", "0")
def test_sigma_dut_psk_pmf_bip_gmac_128(dev, apdev):
"""sigma_dut controlled PSK+PMF association with BIP-GMAC-128"""
try:
run_sigma_dut_psk_pmf_cipher(dev, apdev, "BIP-GMAC-128", "BIP-GMAC-128")
finally:
dev[0].set("ignore_old_scan_res", "0")
def test_sigma_dut_psk_pmf_bip_gmac_256(dev, apdev):
"""sigma_dut controlled PSK+PMF association with BIP-GMAC-256"""
try:
run_sigma_dut_psk_pmf_cipher(dev, apdev, "BIP-GMAC-256", "BIP-GMAC-256")
finally:
dev[0].set("ignore_old_scan_res", "0")
def test_sigma_dut_psk_pmf_bip_gmac_256_mismatch(dev, apdev):
"""sigma_dut controlled PSK+PMF association with BIP-GMAC-256 mismatch"""
try:
run_sigma_dut_psk_pmf_cipher(dev, apdev, "BIP-GMAC-256", "AES-128-CMAC",
failure=True)
finally:
dev[0].set("ignore_old_scan_res", "0")
def run_sigma_dut_psk_pmf_cipher(dev, apdev, sigma_cipher, hostapd_cipher,
failure=False):
ifname = dev[0].ifname
sigma = start_sigma_dut(ifname)
ssid = "test-pmf-required"
params = hostapd.wpa2_params(ssid=ssid, passphrase="12345678")
params["wpa_key_mgmt"] = "WPA-PSK-SHA256"
params["ieee80211w"] = "2"
params["group_mgmt_cipher"] = hostapd_cipher
hapd = hostapd.add_ap(apdev[0], params)
sigma_dut_cmd_check("sta_reset_default,interface,%s,prog,PMF" % ifname)
sigma_dut_cmd_check("sta_set_ip_config,interface,%s,dhcp,0,ip,127.0.0.11,mask,255.255.255.0" % ifname)
sigma_dut_cmd_check("sta_set_psk,interface,%s,ssid,%s,passphrase,%s,encpType,aes-ccmp,keymgmttype,wpa2,PMF,Required,GroupMgntCipher,%s" % (ifname, "test-pmf-required", "12345678", sigma_cipher))
sigma_dut_cmd_check("sta_associate,interface,%s,ssid,%s,channel,1" % (ifname, "test-pmf-required"))
if failure:
ev = dev[0].wait_event(["CTRL-EVENT-NETWORK-NOT-FOUND",
"CTRL-EVENT-CONNECTED"], timeout=10)
if ev is None:
raise Exception("Network selection result not indicated")
if "CTRL-EVENT-CONNECTED" in ev:
raise Exception("Unexpected connection")
res = sigma_dut_cmd("sta_is_connected,interface," + ifname)
if "connected,1" in res:
raise Exception("Connection reported")
else:
sigma_dut_wait_connected(ifname)
sigma_dut_cmd_check("sta_get_ip_config,interface," + ifname)
sigma_dut_cmd_check("sta_disconnect,interface," + ifname)
sigma_dut_cmd_check("sta_reset_default,interface," + ifname)
stop_sigma_dut(sigma)
def test_sigma_dut_sae(dev, apdev):
"""sigma_dut controlled SAE association"""
if "SAE" not in dev[0].get_capability("auth_alg"):
raise HwsimSkip("SAE not supported")
ifname = dev[0].ifname
sigma = start_sigma_dut(ifname)
ssid = "test-sae"
params = hostapd.wpa2_params(ssid=ssid, passphrase="12345678")
params['wpa_key_mgmt'] = 'SAE'
params["ieee80211w"] = "2"
hapd = hostapd.add_ap(apdev[0], params)
sigma_dut_cmd_check("sta_reset_default,interface,%s" % ifname)
sigma_dut_cmd_check("sta_set_ip_config,interface,%s,dhcp,0,ip,127.0.0.11,mask,255.255.255.0" % ifname)
sigma_dut_cmd_check("sta_set_security,interface,%s,ssid,%s,passphrase,%s,type,SAE,encpType,aes-ccmp,keymgmttype,wpa2" % (ifname, "test-sae", "12345678"))
sigma_dut_cmd_check("sta_associate,interface,%s,ssid,%s,channel,1" % (ifname, "test-sae"))
sigma_dut_wait_connected(ifname)
sigma_dut_cmd_check("sta_get_ip_config,interface," + ifname)
if dev[0].get_status_field('sae_group') != '19':
raise Exception("Expected default SAE group not used")
sigma_dut_cmd_check("sta_disconnect,interface," + ifname)
sigma_dut_cmd_check("sta_reset_default,interface," + ifname)
sigma_dut_cmd_check("sta_set_ip_config,interface,%s,dhcp,0,ip,127.0.0.11,mask,255.255.255.0" % ifname)
sigma_dut_cmd_check("sta_set_security,interface,%s,ssid,%s,passphrase,%s,type,SAE,encpType,aes-ccmp,keymgmttype,wpa2,ECGroupID,20" % (ifname, "test-sae", "12345678"))
sigma_dut_cmd_check("sta_associate,interface,%s,ssid,%s,channel,1" % (ifname, "test-sae"))
sigma_dut_wait_connected(ifname)
sigma_dut_cmd_check("sta_get_ip_config,interface," + ifname)
if dev[0].get_status_field('sae_group') != '20':
raise Exception("Expected SAE group not used")
sigma_dut_cmd_check("sta_disconnect,interface," + ifname)
sigma_dut_cmd_check("sta_reset_default,interface," + ifname)
stop_sigma_dut(sigma)
def test_sigma_dut_sae_password(dev, apdev):
"""sigma_dut controlled SAE association and long password"""
if "SAE" not in dev[0].get_capability("auth_alg"):
raise HwsimSkip("SAE not supported")
ifname = dev[0].ifname
sigma = start_sigma_dut(ifname)
try:
ssid = "test-sae"
params = hostapd.wpa2_params(ssid=ssid)
params['sae_password'] = 100*'B'
params['wpa_key_mgmt'] = 'SAE'
params["ieee80211w"] = "2"
hapd = hostapd.add_ap(apdev[0], params)
sigma_dut_cmd_check("sta_reset_default,interface,%s" % ifname)
sigma_dut_cmd_check("sta_set_ip_config,interface,%s,dhcp,0,ip,127.0.0.11,mask,255.255.255.0" % ifname)
sigma_dut_cmd_check("sta_set_security,interface,%s,ssid,%s,passphrase,%s,type,SAE,encpType,aes-ccmp,keymgmttype,wpa2" % (ifname, "test-sae", 100*'B'))
sigma_dut_cmd_check("sta_associate,interface,%s,ssid,%s,channel,1" % (ifname, "test-sae"))
sigma_dut_wait_connected(ifname)
sigma_dut_cmd_check("sta_get_ip_config,interface," + ifname)
sigma_dut_cmd_check("sta_disconnect,interface," + ifname)
sigma_dut_cmd_check("sta_reset_default,interface," + ifname)
finally:
stop_sigma_dut(sigma)
def test_sigma_dut_sta_override_rsne(dev, apdev):
"""sigma_dut and RSNE override on STA"""
try:
run_sigma_dut_sta_override_rsne(dev, apdev)
finally:
dev[0].set("ignore_old_scan_res", "0")
def run_sigma_dut_sta_override_rsne(dev, apdev):
ifname = dev[0].ifname
sigma = start_sigma_dut(ifname)
ssid = "test-psk"
params = hostapd.wpa2_params(ssid=ssid, passphrase="12345678")
hapd = hostapd.add_ap(apdev[0], params)
sigma_dut_cmd_check("sta_set_ip_config,interface,%s,dhcp,0,ip,127.0.0.11,mask,255.255.255.0" % ifname)
tests = [ "30120100000fac040100000fac040100000fac02",
"30140100000fac040100000fac040100000fac02ffff" ]
for test in tests:
sigma_dut_cmd_check("sta_set_security,interface,%s,ssid,%s,type,PSK,passphrase,%s,EncpType,aes-ccmp,KeyMgmtType,wpa2" % (ifname, "test-psk", "12345678"))
sigma_dut_cmd_check("dev_configure_ie,interface,%s,IE_Name,RSNE,Contents,%s" % (ifname, test))
sigma_dut_cmd_check("sta_associate,interface,%s,ssid,%s,channel,1" % (ifname, "test-psk"))
sigma_dut_wait_connected(ifname)
sigma_dut_cmd_check("sta_disconnect,interface," + ifname)
dev[0].dump_monitor()
sigma_dut_cmd_check("sta_set_security,interface,%s,ssid,%s,type,PSK,passphrase,%s,EncpType,aes-ccmp,KeyMgmtType,wpa2" % (ifname, "test-psk", "12345678"))
sigma_dut_cmd_check("dev_configure_ie,interface,%s,IE_Name,RSNE,Contents,300101" % ifname)
sigma_dut_cmd_check("sta_associate,interface,%s,ssid,%s,channel,1" % (ifname, "test-psk"))
ev = dev[0].wait_event(["CTRL-EVENT-ASSOC-REJECT"])
if ev is None:
raise Exception("Association rejection not reported")
if "status_code=40" not in ev:
raise Exception("Unexpected status code: " + ev)
sigma_dut_cmd_check("sta_reset_default,interface," + ifname)
stop_sigma_dut(sigma)
def test_sigma_dut_ap_psk(dev, apdev):
"""sigma_dut controlled AP"""
with HWSimRadio() as (radio, iface):
sigma = start_sigma_dut(iface)
try:
sigma_dut_cmd_check("ap_reset_default")
sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,test-psk,MODE,11ng")
sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,WPA2-PSK,PSK,12345678")
sigma_dut_cmd_check("ap_config_commit,NAME,AP")
dev[0].connect("test-psk", psk="12345678", scan_freq="2412")
sigma_dut_cmd_check("ap_reset_default")
finally:
stop_sigma_dut(sigma)
def test_sigma_dut_ap_pskhex(dev, apdev, params):
"""sigma_dut controlled AP and PSKHEX"""
logdir = os.path.join(params['logdir'],
"sigma_dut_ap_pskhex.sigma-hostapd")
with HWSimRadio() as (radio, iface):
sigma = start_sigma_dut(iface, hostapd_logdir=logdir)
try:
psk = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
sigma_dut_cmd_check("ap_reset_default")
sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,test-psk,MODE,11ng")
sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,WPA2-PSK,PSKHEX," + psk)
sigma_dut_cmd_check("ap_config_commit,NAME,AP")
dev[0].connect("test-psk", raw_psk=psk, scan_freq="2412")
sigma_dut_cmd_check("ap_reset_default")
finally:
stop_sigma_dut(sigma)
def test_sigma_dut_ap_psk_sha256(dev, apdev, params):
    """sigma_dut controlled AP PSK SHA256"""
    logdir = os.path.join(params['logdir'],
                          "sigma_dut_ap_psk_sha256.sigma-hostapd")
    with HWSimRadio() as (radio, iface):
        # Fix: logdir was computed but never passed to start_sigma_dut(), so
        # the hostapd logs were silently dropped (unlike the sibling tests,
        # e.g. test_sigma_dut_ap_pskhex, which pass hostapd_logdir).
        sigma = start_sigma_dut(iface, hostapd_logdir=logdir)
        try:
            sigma_dut_cmd_check("ap_reset_default")
            sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,test-psk,MODE,11ng")
            sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,WPA2-PSK-256,PSK,12345678")
            sigma_dut_cmd_check("ap_config_commit,NAME,AP")
            dev[0].connect("test-psk", key_mgmt="WPA-PSK-SHA256",
                           psk="12345678", scan_freq="2412")
            sigma_dut_cmd_check("ap_reset_default")
        finally:
            stop_sigma_dut(sigma)
def test_sigma_dut_suite_b(dev, apdev, params):
"""sigma_dut controlled STA Suite B"""
check_suite_b_192_capa(dev)
logdir = params['logdir']
with open("auth_serv/ec2-ca.pem", "r") as f:
with open(os.path.join(logdir, "suite_b_ca.pem"), "w") as f2:
f2.write(f.read())
with open("auth_serv/ec2-user.pem", "r") as f:
with open("auth_serv/ec2-user.key", "r") as f2:
with open(os.path.join(logdir, "suite_b.pem"), "w") as f3:
f3.write(f.read())
f3.write(f2.read())
dev[0].flush_scan_cache()
params = suite_b_as_params()
params['ca_cert'] = 'auth_serv/ec2-ca.pem'
params['server_cert'] = 'auth_serv/ec2-server.pem'
params['private_key'] = 'auth_serv/ec2-server.key'
params['openssl_ciphers'] = 'SUITEB192'
hostapd.add_ap(apdev[1], params)
params = { "ssid": "test-suite-b",
"wpa": "2",
"wpa_key_mgmt": "WPA-EAP-SUITE-B-192",
"rsn_pairwise": "GCMP-256",
"group_mgmt_cipher": "BIP-GMAC-256",
"ieee80211w": "2",
"ieee8021x": "1",
'auth_server_addr': "127.0.0.1",
'auth_server_port': "18129",
'auth_server_shared_secret': "radius",
'nas_identifier': "nas.w1.fi" }
hapd = hostapd.add_ap(apdev[0], params)
ifname = dev[0].ifname
sigma = start_sigma_dut(ifname, cert_path=logdir)
sigma_dut_cmd_check("sta_reset_default,interface,%s,prog,PMF" % ifname)
sigma_dut_cmd_check("sta_set_ip_config,interface,%s,dhcp,0,ip,127.0.0.11,mask,255.255.255.0" % ifname)
sigma_dut_cmd_check("sta_set_security,type,eaptls,interface,%s,ssid,%s,PairwiseCipher,AES-GCMP-256,GroupCipher,AES-GCMP-256,GroupMgntCipher,BIP-GMAC-256,keymgmttype,SuiteB,clientCertificate,suite_b.pem,trustedRootCA,suite_b_ca.pem,CertType,ECC" % (ifname, "test-suite-b"))
sigma_dut_cmd_check("sta_associate,interface,%s,ssid,%s,channel,1" % (ifname, "test-suite-b"))
sigma_dut_wait_connected(ifname)
sigma_dut_cmd_check("sta_get_ip_config,interface," + ifname)
sigma_dut_cmd_check("sta_disconnect,interface," + ifname)
sigma_dut_cmd_check("sta_reset_default,interface," + ifname)
stop_sigma_dut(sigma)
def test_sigma_dut_suite_b_rsa(dev, apdev, params):
"""sigma_dut controlled STA Suite B (RSA)"""
check_suite_b_192_capa(dev)
logdir = params['logdir']
with open("auth_serv/rsa3072-ca.pem", "r") as f:
with open(os.path.join(logdir, "suite_b_ca_rsa.pem"), "w") as f2:
f2.write(f.read())
with open("auth_serv/rsa3072-user.pem", "r") as f:
with open("auth_serv/rsa3072-user.key", "r") as f2:
with open(os.path.join(logdir, "suite_b_rsa.pem"), "w") as f3:
f3.write(f.read())
f3.write(f2.read())
dev[0].flush_scan_cache()
params = suite_b_192_rsa_ap_params()
hapd = hostapd.add_ap(apdev[0], params)
ifname = dev[0].ifname
sigma = start_sigma_dut(ifname, cert_path=logdir)
cmd = "sta_set_security,type,eaptls,interface,%s,ssid,%s,PairwiseCipher,AES-GCMP-256,GroupCipher,AES-GCMP-256,GroupMgntCipher,BIP-GMAC-256,keymgmttype,SuiteB,clientCertificate,suite_b_rsa.pem,trustedRootCA,suite_b_ca_rsa.pem,CertType,RSA" % (ifname, "test-suite-b")
tests = [ "",
",TLSCipher,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
",TLSCipher,TLS_DHE_RSA_WITH_AES_256_GCM_SHA384" ]
for extra in tests:
sigma_dut_cmd_check("sta_reset_default,interface,%s,prog,PMF" % ifname)
sigma_dut_cmd_check("sta_set_ip_config,interface,%s,dhcp,0,ip,127.0.0.11,mask,255.255.255.0" % ifname)
sigma_dut_cmd_check(cmd + extra)
sigma_dut_cmd_check("sta_associate,interface,%s,ssid,%s,channel,1" % (ifname, "test-suite-b"))
sigma_dut_wait_connected(ifname)
sigma_dut_cmd_check("sta_get_ip_config,interface," + ifname)
sigma_dut_cmd_check("sta_disconnect,interface," + ifname)
sigma_dut_cmd_check("sta_reset_default,interface," + ifname)
stop_sigma_dut(sigma)
def test_sigma_dut_ap_suite_b(dev, apdev, params):
"""sigma_dut controlled AP Suite B"""
check_suite_b_192_capa(dev)
logdir = os.path.join(params['logdir'],
"sigma_dut_ap_suite_b.sigma-hostapd")
params = suite_b_as_params()
params['ca_cert'] = 'auth_serv/ec2-ca.pem'
params['server_cert'] = 'auth_serv/ec2-server.pem'
params['private_key'] = 'auth_serv/ec2-server.key'
params['openssl_ciphers'] = 'SUITEB192'
hostapd.add_ap(apdev[1], params)
with HWSimRadio() as (radio, iface):
sigma = start_sigma_dut(iface, hostapd_logdir=logdir)
try:
sigma_dut_cmd_check("ap_reset_default")
sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,test-suite-b,MODE,11ng")
sigma_dut_cmd_check("ap_set_radius,NAME,AP,IPADDR,127.0.0.1,PORT,18129,PASSWORD,radius")
sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,SuiteB")
sigma_dut_cmd_check("ap_config_commit,NAME,AP")
dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B-192",
ieee80211w="2",
openssl_ciphers="SUITEB192",
eap="TLS", identity="tls user",
ca_cert="auth_serv/ec2-ca.pem",
client_cert="auth_serv/ec2-user.pem",
private_key="auth_serv/ec2-user.key",
pairwise="GCMP-256", group="GCMP-256",
scan_freq="2412")
sigma_dut_cmd_check("ap_reset_default")
finally:
stop_sigma_dut(sigma)
def test_sigma_dut_ap_cipher_gcmp_128(dev, apdev, params):
"""sigma_dut controlled AP with GCMP-128/BIP-GMAC-128 cipher"""
run_sigma_dut_ap_cipher(dev, apdev, params, "AES-GCMP-128", "BIP-GMAC-128",
"GCMP")
def test_sigma_dut_ap_cipher_gcmp_256(dev, apdev, params):
"""sigma_dut controlled AP with GCMP-256/BIP-GMAC-256 cipher"""
run_sigma_dut_ap_cipher(dev, apdev, params, "AES-GCMP-256", "BIP-GMAC-256",
"GCMP-256")
def test_sigma_dut_ap_cipher_ccmp_128(dev, apdev, params):
"""sigma_dut controlled AP with CCMP-128/BIP-CMAC-128 cipher"""
run_sigma_dut_ap_cipher(dev, apdev, params, "AES-CCMP-128", "BIP-CMAC-128",
"CCMP")
def test_sigma_dut_ap_cipher_ccmp_256(dev, apdev, params):
"""sigma_dut controlled AP with CCMP-256/BIP-CMAC-256 cipher"""
run_sigma_dut_ap_cipher(dev, apdev, params, "AES-CCMP-256", "BIP-CMAC-256",
"CCMP-256")
def test_sigma_dut_ap_cipher_ccmp_gcmp_1(dev, apdev, params):
"""sigma_dut controlled AP with CCMP-128+GCMP-256 ciphers (1)"""
run_sigma_dut_ap_cipher(dev, apdev, params, "AES-CCMP-128 AES-GCMP-256",
"BIP-GMAC-256", "CCMP")
def test_sigma_dut_ap_cipher_ccmp_gcmp_2(dev, apdev, params):
"""sigma_dut controlled AP with CCMP-128+GCMP-256 ciphers (2)"""
run_sigma_dut_ap_cipher(dev, apdev, params, "AES-CCMP-128 AES-GCMP-256",
"BIP-GMAC-256", "GCMP-256", "CCMP")
def test_sigma_dut_ap_cipher_gcmp_256_group_ccmp(dev, apdev, params):
"""sigma_dut controlled AP with GCMP-256/CCMP/BIP-GMAC-256 cipher"""
run_sigma_dut_ap_cipher(dev, apdev, params, "AES-GCMP-256", "BIP-GMAC-256",
"GCMP-256", "CCMP", "AES-CCMP-128")
def run_sigma_dut_ap_cipher(dev, apdev, params, ap_pairwise, ap_group_mgmt,
sta_cipher, sta_cipher_group=None, ap_group=None):
check_suite_b_192_capa(dev)
logdir = os.path.join(params['logdir'],
"sigma_dut_ap_cipher.sigma-hostapd")
params = suite_b_as_params()
params['ca_cert'] = 'auth_serv/ec2-ca.pem'
params['server_cert'] = 'auth_serv/ec2-server.pem'
params['private_key'] = 'auth_serv/ec2-server.key'
params['openssl_ciphers'] = 'SUITEB192'
hostapd.add_ap(apdev[1], params)
with HWSimRadio() as (radio, iface):
sigma = start_sigma_dut(iface, hostapd_logdir=logdir)
try:
sigma_dut_cmd_check("ap_reset_default")
sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,test-suite-b,MODE,11ng")
sigma_dut_cmd_check("ap_set_radius,NAME,AP,IPADDR,127.0.0.1,PORT,18129,PASSWORD,radius")
cmd = "ap_set_security,NAME,AP,KEYMGNT,SuiteB,PMF,Required,PairwiseCipher,%s,GroupMgntCipher,%s" % (ap_pairwise, ap_group_mgmt)
if ap_group:
cmd += ",GroupCipher,%s" % ap_group
sigma_dut_cmd_check(cmd)
sigma_dut_cmd_check("ap_config_commit,NAME,AP")
if sta_cipher_group is None:
sta_cipher_group = sta_cipher
dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B-192",
ieee80211w="2",
openssl_ciphers="SUITEB192",
eap="TLS", identity="tls user",
ca_cert="auth_serv/ec2-ca.pem",
client_cert="auth_serv/ec2-user.pem",
private_key="auth_serv/ec2-user.key",
pairwise=sta_cipher, group=sta_cipher_group,
scan_freq="2412")
sigma_dut_cmd_check("ap_reset_default")
finally:
stop_sigma_dut(sigma)
def test_sigma_dut_ap_override_rsne(dev, apdev):
"""sigma_dut controlled AP overriding RSNE"""
with HWSimRadio() as (radio, iface):
sigma = start_sigma_dut(iface)
try:
sigma_dut_cmd_check("ap_reset_default")
sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,test-psk,MODE,11ng")
sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,WPA2-PSK,PSK,12345678")
sigma_dut_cmd_check("dev_configure_ie,NAME,AP,interface,%s,IE_Name,RSNE,Contents,30180100000fac040200ffffffff000fac040100000fac020c00" % iface)
sigma_dut_cmd_check("ap_config_commit,NAME,AP")
dev[0].connect("test-psk", psk="12345678", scan_freq="2412")
sigma_dut_cmd_check("ap_reset_default")
finally:
stop_sigma_dut(sigma)
def test_sigma_dut_ap_sae(dev, apdev, params):
"""sigma_dut controlled AP with SAE"""
logdir = os.path.join(params['logdir'],
"sigma_dut_ap_sae.sigma-hostapd")
if "SAE" not in dev[0].get_capability("auth_alg"):
raise HwsimSkip("SAE not supported")
with HWSimRadio() as (radio, iface):
sigma = start_sigma_dut(iface, hostapd_logdir=logdir)
try:
sigma_dut_cmd_check("ap_reset_default")
sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,test-sae,MODE,11ng")
sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,WPA2-SAE,PSK,12345678")
sigma_dut_cmd_check("ap_config_commit,NAME,AP")
dev[0].request("SET sae_groups ")
dev[0].connect("test-sae", key_mgmt="SAE", psk="12345678",
ieee80211w="2", scan_freq="2412")
if dev[0].get_status_field('sae_group') != '19':
raise Exception("Expected default SAE group not used")
sigma_dut_cmd_check("ap_reset_default")
finally:
stop_sigma_dut(sigma)
def test_sigma_dut_ap_sae_password(dev, apdev, params):
"""sigma_dut controlled AP with SAE and long password"""
logdir = os.path.join(params['logdir'],
"sigma_dut_ap_sae_password.sigma-hostapd")
if "SAE" not in dev[0].get_capability("auth_alg"):
raise HwsimSkip("SAE not supported")
with HWSimRadio() as (radio, iface):
sigma = start_sigma_dut(iface, hostapd_logdir=logdir)
try:
sigma_dut_cmd_check("ap_reset_default")
sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,test-sae,MODE,11ng")
sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,WPA2-SAE,PSK," + 100*'C')
sigma_dut_cmd_check("ap_config_commit,NAME,AP")
dev[0].request("SET sae_groups ")
dev[0].connect("test-sae", key_mgmt="SAE", sae_password=100*'C',
ieee80211w="2", scan_freq="2412")
if dev[0].get_status_field('sae_group') != '19':
raise Exception("Expected default SAE group not used")
sigma_dut_cmd_check("ap_reset_default")
finally:
stop_sigma_dut(sigma)
def test_sigma_dut_ap_sae_group(dev, apdev, params):
"""sigma_dut controlled AP with SAE and specific group"""
logdir = os.path.join(params['logdir'],
"sigma_dut_ap_sae_group.sigma-hostapd")
if "SAE" not in dev[0].get_capability("auth_alg"):
raise HwsimSkip("SAE not supported")
with HWSimRadio() as (radio, iface):
sigma = start_sigma_dut(iface, hostapd_logdir=logdir)
try:
sigma_dut_cmd_check("ap_reset_default")
sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,test-sae,MODE,11ng")
sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,WPA2-SAE,PSK,12345678,ECGroupID,20")
sigma_dut_cmd_check("ap_config_commit,NAME,AP")
dev[0].request("SET sae_groups ")
dev[0].connect("test-sae", key_mgmt="SAE", psk="12345678",
ieee80211w="2", scan_freq="2412")
if dev[0].get_status_field('sae_group') != '20':
raise Exception("Expected SAE group not used")
sigma_dut_cmd_check("ap_reset_default")
finally:
stop_sigma_dut(sigma)
def test_sigma_dut_ap_psk_sae(dev, apdev, params):
"""sigma_dut controlled AP with PSK+SAE"""
if "SAE" not in dev[0].get_capability("auth_alg"):
raise HwsimSkip("SAE not supported")
logdir = os.path.join(params['logdir'],
"sigma_dut_ap_psk_sae.sigma-hostapd")
with HWSimRadio() as (radio, iface):
sigma = start_sigma_dut(iface, hostapd_logdir=logdir)
try:
sigma_dut_cmd_check("ap_reset_default")
sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,test-sae,MODE,11ng")
sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,WPA2-PSK-SAE,PSK,12345678")
sigma_dut_cmd_check("ap_config_commit,NAME,AP")
dev[2].request("SET sae_groups ")
dev[2].connect("test-sae", key_mgmt="SAE", psk="12345678",
scan_freq="2412", ieee80211w="0", wait_connect=False)
dev[0].request("SET sae_groups ")
dev[0].connect("test-sae", key_mgmt="SAE", psk="12345678",
scan_freq="2412", ieee80211w="2")
dev[1].connect("test-sae", psk="12345678", scan_freq="2412")
ev = dev[2].wait_event(["CTRL-EVENT-CONNECTED"], timeout=0.1)
dev[2].request("DISCONNECT")
if ev is not None:
raise Exception("Unexpected connection without PMF")
sigma_dut_cmd_check("ap_reset_default")
finally:
stop_sigma_dut(sigma)
def test_sigma_dut_owe(dev, apdev):
"""sigma_dut controlled OWE station"""
try:
run_sigma_dut_owe(dev, apdev)
finally:
dev[0].set("ignore_old_scan_res", "0")
def run_sigma_dut_owe(dev, apdev):
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
ifname = dev[0].ifname
sigma = start_sigma_dut(ifname)
try:
params = { "ssid": "owe",
"wpa": "2",
"wpa_key_mgmt": "OWE",
"ieee80211w": "2",
"rsn_pairwise": "CCMP" }
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
sigma_dut_cmd_check("sta_reset_default,interface,%s,prog,WPA3" % ifname)
sigma_dut_cmd_check("sta_set_ip_config,interface,%s,dhcp,0,ip,127.0.0.11,mask,255.255.255.0" % ifname)
sigma_dut_cmd_check("sta_set_security,interface,%s,ssid,owe,Type,OWE" % ifname)
sigma_dut_cmd_check("sta_associate,interface,%s,ssid,owe,channel,1" % ifname)
sigma_dut_wait_connected(ifname)
sigma_dut_cmd_check("sta_get_ip_config,interface," + ifname)
dev[0].dump_monitor()
sigma_dut_cmd("sta_reassoc,interface,%s,Channel,1,bssid,%s" % (ifname, bssid))
dev[0].wait_connected()
sigma_dut_cmd_check("sta_disconnect,interface," + ifname)
dev[0].wait_disconnected()
dev[0].dump_monitor()
sigma_dut_cmd_check("sta_reset_default,interface,%s,prog,WPA3" % ifname)
sigma_dut_cmd_check("sta_set_ip_config,interface,%s,dhcp,0,ip,127.0.0.11,mask,255.255.255.0" % ifname)
sigma_dut_cmd_check("sta_set_security,interface,%s,ssid,owe,Type,OWE,ECGroupID,20" % ifname)
sigma_dut_cmd_check("sta_associate,interface,%s,ssid,owe,channel,1" % ifname)
sigma_dut_wait_connected(ifname)
sigma_dut_cmd_check("sta_get_ip_config,interface," + ifname)
sigma_dut_cmd_check("sta_disconnect,interface," + ifname)
dev[0].wait_disconnected()
dev[0].dump_monitor()
sigma_dut_cmd_check("sta_reset_default,interface,%s,prog,WPA3" % ifname)
sigma_dut_cmd_check("sta_set_ip_config,interface,%s,dhcp,0,ip,127.0.0.11,mask,255.255.255.0" % ifname)
sigma_dut_cmd_check("sta_set_security,interface,%s,ssid,owe,Type,OWE,ECGroupID,0" % ifname)
sigma_dut_cmd_check("sta_associate,interface,%s,ssid,owe,channel,1" % ifname)
ev = dev[0].wait_event(["CTRL-EVENT-ASSOC-REJECT"], timeout=10)
sigma_dut_cmd_check("sta_disconnect,interface," + ifname)
if ev is None:
raise Exception("Association not rejected")
if "status_code=77" not in ev:
raise Exception("Unexpected rejection reason: " + ev)
sigma_dut_cmd_check("sta_reset_default,interface," + ifname)
finally:
stop_sigma_dut(sigma)
def test_sigma_dut_ap_owe(dev, apdev, params):
    """sigma_dut controlled AP with OWE"""
    # hostapd logs from the sigma_dut-managed AP go under the test logdir
    logdir = os.path.join(params['logdir'],
                          "sigma_dut_ap_owe.sigma-hostapd")
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    with HWSimRadio() as (radio, iface):
        sigma = start_sigma_dut(iface, hostapd_logdir=logdir)
        try:
            # Configure and start an OWE AP through the sigma_dut control
            # interface
            sigma_dut_cmd_check("ap_reset_default,NAME,AP,Program,WPA3")
            sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,owe,MODE,11ng")
            sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,OWE")
            sigma_dut_cmd_check("ap_config_commit,NAME,AP")
            # Verify a station can associate using OWE with PMF required
            dev[0].connect("owe", key_mgmt="OWE", ieee80211w="2",
                           scan_freq="2412")
            sigma_dut_cmd_check("ap_reset_default")
        finally:
            stop_sigma_dut(sigma)
def test_sigma_dut_ap_owe_ecgroupid(dev, apdev):
    """sigma_dut controlled AP with OWE and ECGroupID"""
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    with HWSimRadio() as (radio, iface):
        sigma = start_sigma_dut(iface)
        try:
            sigma_dut_cmd_check("ap_reset_default,NAME,AP,Program,WPA3")
            sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,owe,MODE,11ng")
            # Restrict the AP to DH groups 20 and 21, so the default
            # group 19 must be rejected
            sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,OWE,ECGroupID,20 21,PMF,Required")
            sigma_dut_cmd_check("ap_config_commit,NAME,AP")
            # Group 20: association must succeed
            dev[0].connect("owe", key_mgmt="OWE", ieee80211w="2",
                           owe_group="20", scan_freq="2412")
            dev[0].request("REMOVE_NETWORK all")
            dev[0].wait_disconnected()
            # Group 21: association must succeed
            dev[0].connect("owe", key_mgmt="OWE", ieee80211w="2",
                           owe_group="21", scan_freq="2412")
            dev[0].request("REMOVE_NETWORK all")
            dev[0].wait_disconnected()
            # Group 19: association must be rejected with status_code=77
            # (finite cyclic group not supported)
            dev[0].connect("owe", key_mgmt="OWE", ieee80211w="2",
                           owe_group="19", scan_freq="2412", wait_connect=False)
            ev = dev[0].wait_event(["CTRL-EVENT-ASSOC-REJECT"], timeout=10)
            dev[0].request("DISCONNECT")
            if ev is None:
                raise Exception("Association not rejected")
            if "status_code=77" not in ev:
                raise Exception("Unexpected rejection reason: " + ev)
            dev[0].dump_monitor()
            sigma_dut_cmd_check("ap_reset_default")
        finally:
            stop_sigma_dut(sigma)
def test_sigma_dut_ap_owe_transition_mode(dev, apdev, params):
    """sigma_dut controlled AP with OWE and transition mode"""
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    logdir = os.path.join(params['logdir'],
                          "sigma_dut_ap_owe_transition_mode.sigma-hostapd")
    with HWSimRadio() as (radio, iface):
        sigma = start_sigma_dut(iface, hostapd_logdir=logdir)
        try:
            # OWE transition mode: WLAN_TAG 1 is the OWE BSS and
            # WLAN_TAG 2 the open BSS with the same SSID
            sigma_dut_cmd_check("ap_reset_default,NAME,AP,Program,WPA3")
            sigma_dut_cmd_check("ap_set_wireless,NAME,AP,WLAN_TAG,1,CHANNEL,1,SSID,owe,MODE,11ng")
            sigma_dut_cmd_check("ap_set_security,NAME,AP,WLAN_TAG,1,KEYMGNT,OWE")
            sigma_dut_cmd_check("ap_set_wireless,NAME,AP,WLAN_TAG,2,CHANNEL,1,SSID,owe,MODE,11ng")
            sigma_dut_cmd_check("ap_set_security,NAME,AP,WLAN_TAG,2,KEYMGNT,NONE")
            sigma_dut_cmd_check("ap_config_commit,NAME,AP")
            res1 = sigma_dut_cmd_check("ap_get_mac_address,NAME,AP,WLAN_TAG,1,Interface,24G")
            res2 = sigma_dut_cmd_check("ap_get_mac_address,NAME,AP,WLAN_TAG,2,Interface,24G")
            # OWE-capable station must end up on the OWE BSS (tag 1) and
            # the open station on the open BSS (tag 2)
            dev[0].connect("owe", key_mgmt="OWE", ieee80211w="2",
                           scan_freq="2412")
            dev[1].connect("owe", key_mgmt="NONE", scan_freq="2412")
            if dev[0].get_status_field('bssid') not in res1:
                raise Exception("Unexpected ap_get_mac_address WLAN_TAG,1: " + res1)
            if dev[1].get_status_field('bssid') not in res2:
                raise Exception("Unexpected ap_get_mac_address WLAN_TAG,2: " + res2)
            sigma_dut_cmd_check("ap_reset_default")
        finally:
            stop_sigma_dut(sigma)
def test_sigma_dut_ap_owe_transition_mode_2(dev, apdev, params):
    """sigma_dut controlled AP with OWE and transition mode (2)"""
    if "OWE" not in dev[0].get_capability("key_mgmt"):
        raise HwsimSkip("OWE not supported")
    logdir = os.path.join(params['logdir'],
                          "sigma_dut_ap_owe_transition_mode_2.sigma-hostapd")
    with HWSimRadio() as (radio, iface):
        sigma = start_sigma_dut(iface, hostapd_logdir=logdir)
        try:
            # Variant of the transition mode test with the tags reversed:
            # WLAN_TAG 1 is the open BSS and WLAN_TAG 2 the OWE BSS
            sigma_dut_cmd_check("ap_reset_default,NAME,AP,Program,WPA3")
            sigma_dut_cmd_check("ap_set_wireless,NAME,AP,WLAN_TAG,1,CHANNEL,1,SSID,owe,MODE,11ng")
            sigma_dut_cmd_check("ap_set_security,NAME,AP,WLAN_TAG,1,KEYMGNT,NONE")
            sigma_dut_cmd_check("ap_set_wireless,NAME,AP,WLAN_TAG,2,CHANNEL,1,MODE,11ng")
            sigma_dut_cmd_check("ap_set_security,NAME,AP,WLAN_TAG,2,KEYMGNT,OWE")
            sigma_dut_cmd_check("ap_config_commit,NAME,AP")
            res1 = sigma_dut_cmd_check("ap_get_mac_address,NAME,AP,WLAN_TAG,1,Interface,24G")
            res2 = sigma_dut_cmd_check("ap_get_mac_address,NAME,AP,WLAN_TAG,2,Interface,24G")
            # OWE station must be on the OWE BSS (tag 2), open station on
            # the open BSS (tag 1)
            dev[0].connect("owe", key_mgmt="OWE", ieee80211w="2",
                           scan_freq="2412")
            dev[1].connect("owe", key_mgmt="NONE", scan_freq="2412")
            # Fix: report the response that was actually checked
            # (previously res1/res2 were swapped in these error messages,
            # so a failure printed the response that had passed)
            if dev[0].get_status_field('bssid') not in res2:
                raise Exception("Unexpected ap_get_mac_address WLAN_TAG,2: " + res2)
            if dev[1].get_status_field('bssid') not in res1:
                raise Exception("Unexpected ap_get_mac_address WLAN_TAG,1: " + res1)
            sigma_dut_cmd_check("ap_reset_default")
        finally:
            stop_sigma_dut(sigma)
def dpp_init_enrollee(dev, id1):
    """Thread worker: initiate DPP authentication as an Enrollee.

    id1 is the peer bootstrapping identifier obtained from DPP_QR_CODE.
    Raises on initiation failure or missing configuration event.
    """
    logger.info("Starting DPP initiator/enrollee in a thread")
    # Give the responder a moment to reach its listen state first
    time.sleep(1)
    cmd = "DPP_AUTH_INIT peer=%d role=enrollee" % id1
    res = dev.request(cmd)
    if "OK" not in res:
        raise Exception("Failed to initiate DPP Authentication")
    if dev.wait_event(["DPP-CONF-RECEIVED"], timeout=5) is None:
        raise Exception("DPP configuration not completed (Enrollee)")
    logger.info("DPP initiator/enrollee done")
# Thin wrappers running the DPP/QR responder flow once per DPPConfIndex value
def test_sigma_dut_dpp_qr_resp_1(dev, apdev):
    """sigma_dut DPP/QR responder (conf index 1)"""
    run_sigma_dut_dpp_qr_resp(dev, apdev, 1)
def test_sigma_dut_dpp_qr_resp_2(dev, apdev):
    """sigma_dut DPP/QR responder (conf index 2)"""
    run_sigma_dut_dpp_qr_resp(dev, apdev, 2)
def test_sigma_dut_dpp_qr_resp_3(dev, apdev):
    """sigma_dut DPP/QR responder (conf index 3)"""
    run_sigma_dut_dpp_qr_resp(dev, apdev, 3)
def test_sigma_dut_dpp_qr_resp_4(dev, apdev):
    """sigma_dut DPP/QR responder (conf index 4)"""
    run_sigma_dut_dpp_qr_resp(dev, apdev, 4)
def test_sigma_dut_dpp_qr_resp_5(dev, apdev):
    """sigma_dut DPP/QR responder (conf index 5)"""
    run_sigma_dut_dpp_qr_resp(dev, apdev, 5)
def test_sigma_dut_dpp_qr_resp_6(dev, apdev):
    """sigma_dut DPP/QR responder (conf index 6)"""
    run_sigma_dut_dpp_qr_resp(dev, apdev, 6)
def test_sigma_dut_dpp_qr_resp_7(dev, apdev):
    """sigma_dut DPP/QR responder (conf index 7)"""
    run_sigma_dut_dpp_qr_resp(dev, apdev, 7)
def test_sigma_dut_dpp_qr_resp_chan_list(dev, apdev):
    """sigma_dut DPP/QR responder (channel list override)"""
    run_sigma_dut_dpp_qr_resp(dev, apdev, 1, chan_list='81/2 81/6 81/1',
                              listen_chan=2)
def run_sigma_dut_dpp_qr_resp(dev, apdev, conf_idx, chan_list=None,
                              listen_chan=None):
    """Run sigma_dut as a DPP/QR responder/Configurator.

    conf_idx selects DPPConfIndex (which configuration object sigma_dut
    provisions); chan_list optionally overrides the bootstrapping URI
    channel list and listen_chan the listen channel for the exchange.
    """
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    sigma = start_sigma_dut(dev[0].ifname)
    try:
        # Have sigma_dut generate its local bootstrapping info (QR Code URI)
        cmd = "dev_exec_action,program,DPP,DPPActionType,GetLocalBootstrap,DPPCryptoIdentifier,P-256,DPPBS,QR"
        if chan_list:
            cmd += ",DPPChannelList," + chan_list
        res = sigma_dut_cmd(cmd)
        if "status,COMPLETE" not in res:
            raise Exception("dev_exec_action did not succeed: " + res)
        # URI is returned hex-encoded as the fourth comma-separated field
        hex = res.split(',')[3]
        uri = from_hex(hex)
        logger.info("URI from sigma_dut: " + uri)
        # Provision the URI to the peer device that will act as initiator
        res = dev[1].request("DPP_QR_CODE " + uri)
        if "FAIL" in res:
            raise Exception("Failed to parse QR Code URI")
        id1 = int(res)
        # Peer initiates as Enrollee in a thread while sigma_dut responds
        # as Configurator below
        t = threading.Thread(target=dpp_init_enrollee, args=(dev[1], id1))
        t.start()
        cmd = "dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Responder,DPPConfIndex,%d,DPPAuthDirection,Single,DPPProvisioningRole,Configurator,DPPConfEnrolleeRole,STA,DPPSigningKeyECC,P-256,DPPBS,QR,DPPTimeout,6" % conf_idx
        if listen_chan:
            cmd += ",DPPListenChannel," + str(listen_chan)
        res = sigma_dut_cmd(cmd, timeout=10)
        t.join()
        if "BootstrapResult,OK,AuthResult,OK,ConfResult,OK" not in res:
            raise Exception("Unexpected result: " + res)
    finally:
        stop_sigma_dut(sigma)
def test_sigma_dut_dpp_qr_init_enrollee(dev, apdev):
    """sigma_dut DPP/QR initiator as Enrollee"""
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    # Pre-generated Configurator signing key, its public part, and a
    # matching AP Connector/netAccessKey so the AP accepts DPP network
    # introduction from the newly provisioned station
    csign = "30770201010420768240a3fc89d6662d9782f120527fe7fb9edc6366ab0b9c7dde96125cfd250fa00a06082a8648ce3d030107a144034200042908e1baf7bf413cc66f9e878a03e8bb1835ba94b033dbe3d6969fc8575d5eb5dfda1cb81c95cee21d0cd7d92ba30541ffa05cb6296f5dd808b0c1c2a83c0708"
    csign_pub = "3059301306072a8648ce3d020106082a8648ce3d030107034200042908e1baf7bf413cc66f9e878a03e8bb1835ba94b033dbe3d6969fc8575d5eb5dfda1cb81c95cee21d0cd7d92ba30541ffa05cb6296f5dd808b0c1c2a83c0708"
    ap_connector = "eyJ0eXAiOiJkcHBDb24iLCJraWQiOiJwYWtZbXVzd1dCdWpSYTl5OEsweDViaTVrT3VNT3dzZHRlaml2UG55ZHZzIiwiYWxnIjoiRVMyNTYifQ.eyJncm91cHMiOlt7Imdyb3VwSWQiOiIqIiwibmV0Um9sZSI6ImFwIn1dLCJuZXRBY2Nlc3NLZXkiOnsia3R5IjoiRUMiLCJjcnYiOiJQLTI1NiIsIngiOiIybU5vNXZuRkI5bEw3d1VWb1hJbGVPYzBNSEE1QXZKbnpwZXZULVVTYzVNIiwieSI6IlhzS3dqVHJlLTg5WWdpU3pKaG9CN1haeUttTU05OTl3V2ZaSVl0bi01Q3MifX0.XhjFpZgcSa7G2lHy0OCYTvaZFRo5Hyx6b7g7oYyusLC7C_73AJ4_BxEZQVYJXAtDuGvb3dXSkHEKxREP9Q6Qeg"
    ap_netaccesskey = "30770201010420ceba752db2ad5200fa7bc565b9c05c69b7eb006751b0b329b0279de1c19ca67ca00a06082a8648ce3d030107a14403420004da6368e6f9c507d94bef0515a1722578e73430703902f267ce97af4fe51273935ec2b08d3adefbcf588224b3261a01ed76722a630cf7df7059f64862d9fee42b"
    params = { "ssid": "DPPNET01",
               "wpa": "2",
               "ieee80211w": "2",
               "wpa_key_mgmt": "DPP",
               "rsn_pairwise": "CCMP",
               "dpp_connector": ap_connector,
               "dpp_csign": csign_pub,
               "dpp_netaccesskey": ap_netaccesskey }
    try:
        hapd = hostapd.add_ap(apdev[0], params)
    except:
        raise HwsimSkip("DPP not supported")
    sigma = start_sigma_dut(dev[0].ifname)
    try:
        # dpp_config_processing=2: apply the received config and connect
        dev[0].set("dpp_config_processing", "2")
        cmd = "DPP_CONFIGURATOR_ADD key=" + csign
        res = dev[1].request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to add configurator")
        conf_id = int(res)
        # Peer device acts as the Configurator/responder on channel 6
        addr = dev[1].own_addr().replace(':', '')
        cmd = "DPP_BOOTSTRAP_GEN type=qrcode chan=81/6 mac=" + addr
        res = dev[1].request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to generate bootstrapping info")
        id0 = int(res)
        uri0 = dev[1].request("DPP_BOOTSTRAP_GET_URI %d" % id0)
        dev[1].set("dpp_configurator_params",
                   " conf=sta-dpp ssid=%s configurator=%d" % (to_hex("DPPNET01"), conf_id))
        cmd = "DPP_LISTEN 2437 role=configurator"
        if "OK" not in dev[1].request(cmd):
            raise Exception("Failed to start listen operation")
        # sigma_dut initiates as Enrollee using the peer's URI and is then
        # expected to connect to the DPP AP with the received config
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,SetPeerBootstrap,DPPBootstrappingdata,%s,DPPBS,QR" % to_hex(uri0))
        if "status,COMPLETE" not in res:
            raise Exception("dev_exec_action did not succeed: " + res)
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Initiator,DPPAuthDirection,Single,DPPProvisioningRole,Enrollee,DPPBS,QR,DPPTimeout,6,DPPWaitForConnect,Yes", timeout=10)
        if "BootstrapResult,OK,AuthResult,OK,ConfResult,OK,NetworkIntroResult,OK,NetworkConnectResult,OK" not in res:
            raise Exception("Unexpected result: " + res)
    finally:
        dev[0].set("dpp_config_processing", "0")
        stop_sigma_dut(sigma)
def test_sigma_dut_dpp_qr_mutual_init_enrollee(dev, apdev):
    """sigma_dut DPP/QR (mutual) initiator as Enrollee"""
    # NOTE(review): a function with this exact name is defined again later
    # in this file, so this definition is shadowed at import time and never
    # discovered by the test framework -- one of the two should be renamed
    # or removed.
    run_sigma_dut_dpp_qr_mutual_init_enrollee_check(dev, apdev)
def test_sigma_dut_dpp_qr_mutual_init_enrollee_check(dev, apdev):
    """sigma_dut DPP/QR (mutual) initiator as Enrollee (extra check)"""
    run_sigma_dut_dpp_qr_mutual_init_enrollee_check(dev, apdev,
                                                    extra="DPPAuthDirection,Mutual,")
def run_sigma_dut_dpp_qr_mutual_init_enrollee_check(dev, apdev, extra=''):
    """sigma_dut initiates DPP/QR with mutual bootstrapping as Enrollee.

    extra, if set, injects additional comma-terminated parameters (e.g.
    "DPPAuthDirection,Mutual,") into the AutomaticDPP command.
    """
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    # Pre-generated Configurator signing key and matching AP credentials
    # (Connector/netAccessKey) for network introduction
    csign = "30770201010420768240a3fc89d6662d9782f120527fe7fb9edc6366ab0b9c7dde96125cfd250fa00a06082a8648ce3d030107a144034200042908e1baf7bf413cc66f9e878a03e8bb1835ba94b033dbe3d6969fc8575d5eb5dfda1cb81c95cee21d0cd7d92ba30541ffa05cb6296f5dd808b0c1c2a83c0708"
    csign_pub = "3059301306072a8648ce3d020106082a8648ce3d030107034200042908e1baf7bf413cc66f9e878a03e8bb1835ba94b033dbe3d6969fc8575d5eb5dfda1cb81c95cee21d0cd7d92ba30541ffa05cb6296f5dd808b0c1c2a83c0708"
    ap_connector = "eyJ0eXAiOiJkcHBDb24iLCJraWQiOiJwYWtZbXVzd1dCdWpSYTl5OEsweDViaTVrT3VNT3dzZHRlaml2UG55ZHZzIiwiYWxnIjoiRVMyNTYifQ.eyJncm91cHMiOlt7Imdyb3VwSWQiOiIqIiwibmV0Um9sZSI6ImFwIn1dLCJuZXRBY2Nlc3NLZXkiOnsia3R5IjoiRUMiLCJjcnYiOiJQLTI1NiIsIngiOiIybU5vNXZuRkI5bEw3d1VWb1hJbGVPYzBNSEE1QXZKbnpwZXZULVVTYzVNIiwieSI6IlhzS3dqVHJlLTg5WWdpU3pKaG9CN1haeUttTU05OTl3V2ZaSVl0bi01Q3MifX0.XhjFpZgcSa7G2lHy0OCYTvaZFRo5Hyx6b7g7oYyusLC7C_73AJ4_BxEZQVYJXAtDuGvb3dXSkHEKxREP9Q6Qeg"
    ap_netaccesskey = "30770201010420ceba752db2ad5200fa7bc565b9c05c69b7eb006751b0b329b0279de1c19ca67ca00a06082a8648ce3d030107a14403420004da6368e6f9c507d94bef0515a1722578e73430703902f267ce97af4fe51273935ec2b08d3adefbcf588224b3261a01ed76722a630cf7df7059f64862d9fee42b"
    params = { "ssid": "DPPNET01",
               "wpa": "2",
               "ieee80211w": "2",
               "wpa_key_mgmt": "DPP",
               "rsn_pairwise": "CCMP",
               "dpp_connector": ap_connector,
               "dpp_csign": csign_pub,
               "dpp_netaccesskey": ap_netaccesskey }
    try:
        hapd = hostapd.add_ap(apdev[0], params)
    except:
        raise HwsimSkip("DPP not supported")
    sigma = start_sigma_dut(dev[0].ifname)
    try:
        # dpp_config_processing=2: apply the received config and connect
        dev[0].set("dpp_config_processing", "2")
        cmd = "DPP_CONFIGURATOR_ADD key=" + csign
        res = dev[1].request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to add configurator")
        conf_id = int(res)
        addr = dev[1].own_addr().replace(':', '')
        cmd = "DPP_BOOTSTRAP_GEN type=qrcode chan=81/6 mac=" + addr
        res = dev[1].request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to generate bootstrapping info")
        id0 = int(res)
        uri0 = dev[1].request("DPP_BOOTSTRAP_GET_URI %d" % id0)
        dev[1].set("dpp_configurator_params",
                   " conf=sta-dpp ssid=%s configurator=%d" % (to_hex("DPPNET01"), conf_id))
        # qr=mutual: responder requires the initiator's QR Code as well
        cmd = "DPP_LISTEN 2437 role=configurator qr=mutual"
        if "OK" not in dev[1].request(cmd):
            raise Exception("Failed to start listen operation")
        # Exchange bootstrapping info in both directions before initiating
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,GetLocalBootstrap,DPPCryptoIdentifier,P-256,DPPBS,QR")
        if "status,COMPLETE" not in res:
            raise Exception("dev_exec_action did not succeed: " + res)
        hex = res.split(',')[3]
        uri = from_hex(hex)
        logger.info("URI from sigma_dut: " + uri)
        res = dev[1].request("DPP_QR_CODE " + uri)
        if "FAIL" in res:
            raise Exception("Failed to parse QR Code URI")
        id1 = int(res)
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,SetPeerBootstrap,DPPBootstrappingdata,%s,DPPBS,QR" % to_hex(uri0))
        if "status,COMPLETE" not in res:
            raise Exception("dev_exec_action did not succeed: " + res)
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Initiator,%sDPPProvisioningRole,Enrollee,DPPBS,QR,DPPTimeout,6,DPPWaitForConnect,Yes" % extra, timeout=10)
        if "BootstrapResult,OK,AuthResult,OK,ConfResult,OK,NetworkIntroResult,OK,NetworkConnectResult,OK" not in res:
            raise Exception("Unexpected result: " + res)
    finally:
        dev[0].set("dpp_config_processing", "0")
        stop_sigma_dut(sigma)
def dpp_init_conf_mutual(dev, id1, conf_id, own_id=None):
    """Thread worker: initiate DPP authentication as a Configurator.

    id1 is the peer bootstrapping id, conf_id the configurator id to use
    for provisioning; own_id, when given, supplies the local bootstrapping
    info for mutual authentication.
    """
    # Give the responder a moment to reach its listen state first
    time.sleep(1)
    logger.info("Starting DPP initiator/configurator in a thread")
    cmd = "DPP_AUTH_INIT peer=%d conf=sta-dpp ssid=%s configurator=%d" % (id1, to_hex("DPPNET01"), conf_id)
    if own_id is not None:
        cmd += " own=%d" % own_id
    res = dev.request(cmd)
    if "OK" not in res:
        raise Exception("Failed to initiate DPP Authentication")
    if dev.wait_event(["DPP-CONF-SENT"], timeout=10) is None:
        raise Exception("DPP configuration not completed (Configurator)")
    logger.info("DPP initiator/configurator done")
def test_sigma_dut_dpp_qr_mutual_resp_enrollee(dev, apdev):
    """sigma_dut DPP/QR (mutual) responder as Enrollee"""
    run_sigma_dut_dpp_qr_mutual_resp_enrollee(dev, apdev)
def test_sigma_dut_dpp_qr_mutual_resp_enrollee_pending(dev, apdev):
    """sigma_dut DPP/QR (mutual) responder as Enrollee (response pending)"""
    # DPPDelayQRResponse,1 makes sigma_dut delay its QR Code response
    run_sigma_dut_dpp_qr_mutual_resp_enrollee(dev, apdev, ',DPPDelayQRResponse,1')
def run_sigma_dut_dpp_qr_mutual_resp_enrollee(dev, apdev, extra=None):
    """sigma_dut responds to mutual DPP/QR authentication as Enrollee.

    extra, if set, must be a comma-prefixed string of additional
    parameters appended verbatim to the AutomaticDPP command.
    """
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    # Pre-generated Configurator signing key and matching AP credentials
    # (Connector/netAccessKey) for network introduction
    csign = "30770201010420768240a3fc89d6662d9782f120527fe7fb9edc6366ab0b9c7dde96125cfd250fa00a06082a8648ce3d030107a144034200042908e1baf7bf413cc66f9e878a03e8bb1835ba94b033dbe3d6969fc8575d5eb5dfda1cb81c95cee21d0cd7d92ba30541ffa05cb6296f5dd808b0c1c2a83c0708"
    csign_pub = "3059301306072a8648ce3d020106082a8648ce3d030107034200042908e1baf7bf413cc66f9e878a03e8bb1835ba94b033dbe3d6969fc8575d5eb5dfda1cb81c95cee21d0cd7d92ba30541ffa05cb6296f5dd808b0c1c2a83c0708"
    ap_connector = "eyJ0eXAiOiJkcHBDb24iLCJraWQiOiJwYWtZbXVzd1dCdWpSYTl5OEsweDViaTVrT3VNT3dzZHRlaml2UG55ZHZzIiwiYWxnIjoiRVMyNTYifQ.eyJncm91cHMiOlt7Imdyb3VwSWQiOiIqIiwibmV0Um9sZSI6ImFwIn1dLCJuZXRBY2Nlc3NLZXkiOnsia3R5IjoiRUMiLCJjcnYiOiJQLTI1NiIsIngiOiIybU5vNXZuRkI5bEw3d1VWb1hJbGVPYzBNSEE1QXZKbnpwZXZULVVTYzVNIiwieSI6IlhzS3dqVHJlLTg5WWdpU3pKaG9CN1haeUttTU05OTl3V2ZaSVl0bi01Q3MifX0.XhjFpZgcSa7G2lHy0OCYTvaZFRo5Hyx6b7g7oYyusLC7C_73AJ4_BxEZQVYJXAtDuGvb3dXSkHEKxREP9Q6Qeg"
    ap_netaccesskey = "30770201010420ceba752db2ad5200fa7bc565b9c05c69b7eb006751b0b329b0279de1c19ca67ca00a06082a8648ce3d030107a14403420004da6368e6f9c507d94bef0515a1722578e73430703902f267ce97af4fe51273935ec2b08d3adefbcf588224b3261a01ed76722a630cf7df7059f64862d9fee42b"
    params = { "ssid": "DPPNET01",
               "wpa": "2",
               "ieee80211w": "2",
               "wpa_key_mgmt": "DPP",
               "rsn_pairwise": "CCMP",
               "dpp_connector": ap_connector,
               "dpp_csign": csign_pub,
               "dpp_netaccesskey": ap_netaccesskey }
    try:
        hapd = hostapd.add_ap(apdev[0], params)
    except:
        raise HwsimSkip("DPP not supported")
    sigma = start_sigma_dut(dev[0].ifname)
    try:
        # dpp_config_processing=2: apply the received config and connect
        dev[0].set("dpp_config_processing", "2")
        cmd = "DPP_CONFIGURATOR_ADD key=" + csign
        res = dev[1].request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to add configurator")
        conf_id = int(res)
        addr = dev[1].own_addr().replace(':', '')
        cmd = "DPP_BOOTSTRAP_GEN type=qrcode chan=81/6 mac=" + addr
        res = dev[1].request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to generate bootstrapping info")
        id0 = int(res)
        uri0 = dev[1].request("DPP_BOOTSTRAP_GET_URI %d" % id0)
        # Exchange QR Codes in both directions for mutual authentication
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,GetLocalBootstrap,DPPCryptoIdentifier,P-256,DPPBS,QR")
        if "status,COMPLETE" not in res:
            raise Exception("dev_exec_action did not succeed: " + res)
        hex = res.split(',')[3]
        uri = from_hex(hex)
        logger.info("URI from sigma_dut: " + uri)
        res = dev[1].request("DPP_QR_CODE " + uri)
        if "FAIL" in res:
            raise Exception("Failed to parse QR Code URI")
        id1 = int(res)
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,SetPeerBootstrap,DPPBootstrappingdata,%s,DPPBS,QR" % to_hex(uri0))
        if "status,COMPLETE" not in res:
            raise Exception("dev_exec_action did not succeed: " + res)
        # Peer initiates as Configurator (mutual) in a thread while
        # sigma_dut responds as Enrollee and then connects to the AP
        t = threading.Thread(target=dpp_init_conf_mutual,
                             args=(dev[1], id1, conf_id, id0))
        t.start()
        cmd = "dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Responder,DPPAuthDirection,Mutual,DPPProvisioningRole,Enrollee,DPPBS,QR,DPPTimeout,20,DPPWaitForConnect,Yes"
        if extra:
            cmd += extra
        res = sigma_dut_cmd(cmd, timeout=25)
        t.join()
        if "BootstrapResult,OK,AuthResult,OK,ConfResult,OK,NetworkIntroResult,OK,NetworkConnectResult,OK" not in res:
            raise Exception("Unexpected result: " + res)
    finally:
        dev[0].set("dpp_config_processing", "0")
        stop_sigma_dut(sigma)
def dpp_resp_conf_mutual(dev, conf_id, uri):
    """Thread worker: respond to DPP authentication as a Configurator.

    conf_id selects the configurator to provision with. If uri is set,
    wait for the peer's scan request and provide the QR Code URI for
    mutual authentication; if None, the URI was already provisioned.
    """
    logger.info("Starting DPP responder/configurator in a thread")
    dev.set("dpp_configurator_params",
            " conf=sta-dpp ssid=%s configurator=%d" % (to_hex("DPPNET01"),
                                                       conf_id))
    cmd = "DPP_LISTEN 2437 role=configurator qr=mutual"
    if "OK" not in dev.request(cmd):
        raise Exception("Failed to initiate DPP listen")
    if uri:
        # Peer has not seen our QR Code yet; wait for it to request one
        ev = dev.wait_event(["DPP-SCAN-PEER-QR-CODE"], timeout=10)
        if ev is None:
            raise Exception("QR Code scan for mutual authentication not requested")
        res = dev.request("DPP_QR_CODE " + uri)
        if "FAIL" in res:
            raise Exception("Failed to parse QR Code URI")
    ev = dev.wait_event(["DPP-CONF-SENT"], timeout=10)
    if ev is None:
        raise Exception("DPP configuration not completed (Configurator)")
    logger.info("DPP responder/configurator done")
def test_sigma_dut_dpp_qr_mutual_init_enrollee(dev, apdev):
    """sigma_dut DPP/QR (mutual) initiator as Enrollee"""
    # NOTE(review): this redefines a function of the same name earlier in
    # this file; this later definition is the one the test framework sees.
    run_sigma_dut_dpp_qr_mutual_init_enrollee(dev, apdev, False)
def test_sigma_dut_dpp_qr_mutual_init_enrollee_pending(dev, apdev):
    """sigma_dut DPP/QR (mutual) initiator as Enrollee (response pending)"""
    run_sigma_dut_dpp_qr_mutual_init_enrollee(dev, apdev, True)
def run_sigma_dut_dpp_qr_mutual_init_enrollee(dev, apdev, resp_pending):
    """sigma_dut initiates mutual DPP/QR authentication as Enrollee.

    When resp_pending is True, the peer Configurator does not scan
    sigma_dut's QR Code up front; it is provided only after the peer
    requests it during the exchange (response-pending case).
    """
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    # Pre-generated Configurator signing key and matching AP credentials
    # (Connector/netAccessKey) for network introduction
    csign = "30770201010420768240a3fc89d6662d9782f120527fe7fb9edc6366ab0b9c7dde96125cfd250fa00a06082a8648ce3d030107a144034200042908e1baf7bf413cc66f9e878a03e8bb1835ba94b033dbe3d6969fc8575d5eb5dfda1cb81c95cee21d0cd7d92ba30541ffa05cb6296f5dd808b0c1c2a83c0708"
    csign_pub = "3059301306072a8648ce3d020106082a8648ce3d030107034200042908e1baf7bf413cc66f9e878a03e8bb1835ba94b033dbe3d6969fc8575d5eb5dfda1cb81c95cee21d0cd7d92ba30541ffa05cb6296f5dd808b0c1c2a83c0708"
    ap_connector = "eyJ0eXAiOiJkcHBDb24iLCJraWQiOiJwYWtZbXVzd1dCdWpSYTl5OEsweDViaTVrT3VNT3dzZHRlaml2UG55ZHZzIiwiYWxnIjoiRVMyNTYifQ.eyJncm91cHMiOlt7Imdyb3VwSWQiOiIqIiwibmV0Um9sZSI6ImFwIn1dLCJuZXRBY2Nlc3NLZXkiOnsia3R5IjoiRUMiLCJjcnYiOiJQLTI1NiIsIngiOiIybU5vNXZuRkI5bEw3d1VWb1hJbGVPYzBNSEE1QXZKbnpwZXZULVVTYzVNIiwieSI6IlhzS3dqVHJlLTg5WWdpU3pKaG9CN1haeUttTU05OTl3V2ZaSVl0bi01Q3MifX0.XhjFpZgcSa7G2lHy0OCYTvaZFRo5Hyx6b7g7oYyusLC7C_73AJ4_BxEZQVYJXAtDuGvb3dXSkHEKxREP9Q6Qeg"
    ap_netaccesskey = "30770201010420ceba752db2ad5200fa7bc565b9c05c69b7eb006751b0b329b0279de1c19ca67ca00a06082a8648ce3d030107a14403420004da6368e6f9c507d94bef0515a1722578e73430703902f267ce97af4fe51273935ec2b08d3adefbcf588224b3261a01ed76722a630cf7df7059f64862d9fee42b"
    params = { "ssid": "DPPNET01",
               "wpa": "2",
               "ieee80211w": "2",
               "wpa_key_mgmt": "DPP",
               "rsn_pairwise": "CCMP",
               "dpp_connector": ap_connector,
               "dpp_csign": csign_pub,
               "dpp_netaccesskey": ap_netaccesskey }
    try:
        hapd = hostapd.add_ap(apdev[0], params)
    except:
        raise HwsimSkip("DPP not supported")
    sigma = start_sigma_dut(dev[0].ifname)
    try:
        # dpp_config_processing=2: apply the received config and connect
        dev[0].set("dpp_config_processing", "2")
        cmd = "DPP_CONFIGURATOR_ADD key=" + csign
        res = dev[1].request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to add configurator")
        conf_id = int(res)
        addr = dev[1].own_addr().replace(':', '')
        cmd = "DPP_BOOTSTRAP_GEN type=qrcode chan=81/6 mac=" + addr
        res = dev[1].request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to generate bootstrapping info")
        id0 = int(res)
        uri0 = dev[1].request("DPP_BOOTSTRAP_GET_URI %d" % id0)
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,GetLocalBootstrap,DPPCryptoIdentifier,P-256,DPPBS,QR")
        if "status,COMPLETE" not in res:
            raise Exception("dev_exec_action did not succeed: " + res)
        hex = res.split(',')[3]
        uri = from_hex(hex)
        logger.info("URI from sigma_dut: " + uri)
        if not resp_pending:
            # Provide sigma_dut's QR Code to the peer up front; the
            # responder thread then needs no URI of its own
            res = dev[1].request("DPP_QR_CODE " + uri)
            if "FAIL" in res:
                raise Exception("Failed to parse QR Code URI")
            uri = None
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,SetPeerBootstrap,DPPBootstrappingdata,%s,DPPBS,QR" % to_hex(uri0))
        if "status,COMPLETE" not in res:
            raise Exception("dev_exec_action did not succeed: " + res)
        # Peer responds as Configurator in a thread while sigma_dut
        # initiates as Enrollee and then connects to the AP
        t = threading.Thread(target=dpp_resp_conf_mutual,
                             args=(dev[1], conf_id, uri))
        t.start()
        time.sleep(1)
        cmd = "dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Initiator,DPPProvisioningRole,Enrollee,DPPBS,QR,DPPTimeout,10,DPPWaitForConnect,Yes"
        res = sigma_dut_cmd(cmd, timeout=15)
        t.join()
        if "BootstrapResult,OK,AuthResult,OK,ConfResult,OK,NetworkIntroResult,OK,NetworkConnectResult,OK" not in res:
            raise Exception("Unexpected result: " + res)
    finally:
        dev[0].set("dpp_config_processing", "0")
        stop_sigma_dut(sigma)
def test_sigma_dut_dpp_qr_init_enrollee_psk(dev, apdev):
    """sigma_dut DPP/QR initiator as Enrollee (PSK)"""
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    # Legacy WPA2-PSK AP; the Configurator provisions a psk credential
    params = hostapd.wpa2_params(ssid="DPPNET01",
                                 passphrase="ThisIsDppPassphrase")
    hapd = hostapd.add_ap(apdev[0], params)
    sigma = start_sigma_dut(dev[0].ifname)
    try:
        # dpp_config_processing=2: apply the received config and connect
        dev[0].set("dpp_config_processing", "2")
        cmd = "DPP_CONFIGURATOR_ADD"
        res = dev[1].request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to add configurator")
        conf_id = int(res)
        addr = dev[1].own_addr().replace(':', '')
        cmd = "DPP_BOOTSTRAP_GEN type=qrcode chan=81/6 mac=" + addr
        res = dev[1].request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to generate bootstrapping info")
        id0 = int(res)
        uri0 = dev[1].request("DPP_BOOTSTRAP_GET_URI %d" % id0)
        # conf=sta-psk: provision legacy PSK credentials for DPPNET01
        dev[1].set("dpp_configurator_params",
                   " conf=sta-psk ssid=%s pass=%s configurator=%d" % (to_hex("DPPNET01"), to_hex("ThisIsDppPassphrase"), conf_id))
        cmd = "DPP_LISTEN 2437 role=configurator"
        if "OK" not in dev[1].request(cmd):
            raise Exception("Failed to start listen operation")
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,SetPeerBootstrap,DPPBootstrappingdata,%s,DPPBS,QR" % to_hex(uri0))
        if "status,COMPLETE" not in res:
            raise Exception("dev_exec_action did not succeed: " + res)
        # No NetworkIntroResult expected for legacy (non-DPP AKM) config
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Initiator,DPPAuthDirection,Single,DPPProvisioningRole,Enrollee,DPPBS,QR,DPPTimeout,6,DPPWaitForConnect,Yes", timeout=10)
        if "BootstrapResult,OK,AuthResult,OK,ConfResult,OK,NetworkConnectResult,OK" not in res:
            raise Exception("Unexpected result: " + res)
    finally:
        dev[0].set("dpp_config_processing", "0")
        stop_sigma_dut(sigma)
def test_sigma_dut_dpp_qr_init_enrollee_sae(dev, apdev):
    """sigma_dut DPP/QR initiator as Enrollee (SAE)"""
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    # SAE AP; the Configurator provisions an sae credential
    params = hostapd.wpa2_params(ssid="DPPNET01",
                                 passphrase="ThisIsDppPassphrase")
    params['wpa_key_mgmt'] = 'SAE'
    params["ieee80211w"] = "2"
    hapd = hostapd.add_ap(apdev[0], params)
    sigma = start_sigma_dut(dev[0].ifname)
    try:
        # dpp_config_processing=2: apply the received config and connect
        dev[0].set("dpp_config_processing", "2")
        cmd = "DPP_CONFIGURATOR_ADD"
        res = dev[1].request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to add configurator")
        conf_id = int(res)
        addr = dev[1].own_addr().replace(':', '')
        cmd = "DPP_BOOTSTRAP_GEN type=qrcode chan=81/6 mac=" + addr
        res = dev[1].request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to generate bootstrapping info")
        id0 = int(res)
        uri0 = dev[1].request("DPP_BOOTSTRAP_GET_URI %d" % id0)
        # conf=sta-sae: provision SAE credentials for DPPNET01
        dev[1].set("dpp_configurator_params",
                   " conf=sta-sae ssid=%s pass=%s configurator=%d" % (to_hex("DPPNET01"), to_hex("ThisIsDppPassphrase"), conf_id))
        cmd = "DPP_LISTEN 2437 role=configurator"
        if "OK" not in dev[1].request(cmd):
            raise Exception("Failed to start listen operation")
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,SetPeerBootstrap,DPPBootstrappingdata,%s,DPPBS,QR" % to_hex(uri0))
        if "status,COMPLETE" not in res:
            raise Exception("dev_exec_action did not succeed: " + res)
        # No NetworkIntroResult expected for legacy (non-DPP AKM) config
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Initiator,DPPAuthDirection,Single,DPPProvisioningRole,Enrollee,DPPBS,QR,DPPTimeout,6,DPPWaitForConnect,Yes", timeout=10)
        if "BootstrapResult,OK,AuthResult,OK,ConfResult,OK,NetworkConnectResult,OK" not in res:
            raise Exception("Unexpected result: " + res)
    finally:
        dev[0].set("dpp_config_processing", "0")
        stop_sigma_dut(sigma)
# Thin wrappers running the DPP/QR initiator-as-Configurator flow once per
# DPPConfIndex value and for a couple of parameter variations
def test_sigma_dut_dpp_qr_init_configurator_1(dev, apdev):
    """sigma_dut DPP/QR initiator as Configurator (conf index 1)"""
    run_sigma_dut_dpp_qr_init_configurator(dev, apdev, 1)
def test_sigma_dut_dpp_qr_init_configurator_2(dev, apdev):
    """sigma_dut DPP/QR initiator as Configurator (conf index 2)"""
    run_sigma_dut_dpp_qr_init_configurator(dev, apdev, 2)
def test_sigma_dut_dpp_qr_init_configurator_3(dev, apdev):
    """sigma_dut DPP/QR initiator as Configurator (conf index 3)"""
    run_sigma_dut_dpp_qr_init_configurator(dev, apdev, 3)
def test_sigma_dut_dpp_qr_init_configurator_4(dev, apdev):
    """sigma_dut DPP/QR initiator as Configurator (conf index 4)"""
    run_sigma_dut_dpp_qr_init_configurator(dev, apdev, 4)
def test_sigma_dut_dpp_qr_init_configurator_5(dev, apdev):
    """sigma_dut DPP/QR initiator as Configurator (conf index 5)"""
    run_sigma_dut_dpp_qr_init_configurator(dev, apdev, 5)
def test_sigma_dut_dpp_qr_init_configurator_6(dev, apdev):
    """sigma_dut DPP/QR initiator as Configurator (conf index 6)"""
    run_sigma_dut_dpp_qr_init_configurator(dev, apdev, 6)
def test_sigma_dut_dpp_qr_init_configurator_7(dev, apdev):
    """sigma_dut DPP/QR initiator as Configurator (conf index 7)"""
    run_sigma_dut_dpp_qr_init_configurator(dev, apdev, 7)
def test_sigma_dut_dpp_qr_init_configurator_both(dev, apdev):
    """sigma_dut DPP/QR initiator as Configurator or Enrollee (conf index 1)"""
    run_sigma_dut_dpp_qr_init_configurator(dev, apdev, 1, "Both")
def test_sigma_dut_dpp_qr_init_configurator_neg_freq(dev, apdev):
    """sigma_dut DPP/QR initiator as Configurator (neg_freq)"""
    run_sigma_dut_dpp_qr_init_configurator(dev, apdev, 1, extra='DPPSubsequentChannel,81/11')
def run_sigma_dut_dpp_qr_init_configurator(dev, apdev, conf_idx,
                                           prov_role="Configurator",
                                           extra=None):
    """Run sigma_dut as DPP/QR initiator in the given provisioning role.

    conf_idx selects DPPConfIndex; prov_role is the DPPProvisioningRole
    value (e.g. "Configurator" or "Both"); extra, if set, is appended as
    an additional ",<param>,<value>" to the AutomaticDPP command.
    """
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    sigma = start_sigma_dut(dev[0].ifname)
    try:
        # Peer generates bootstrapping info and listens as Enrollee
        addr = dev[1].own_addr().replace(':', '')
        cmd = "DPP_BOOTSTRAP_GEN type=qrcode chan=81/6 mac=" + addr
        res = dev[1].request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to generate bootstrapping info")
        id0 = int(res)
        uri0 = dev[1].request("DPP_BOOTSTRAP_GET_URI %d" % id0)
        cmd = "DPP_LISTEN 2437 role=enrollee"
        if "OK" not in dev[1].request(cmd):
            raise Exception("Failed to start listen operation")
        # sigma_dut initiates using the peer's URI
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,SetPeerBootstrap,DPPBootstrappingdata,%s,DPPBS,QR" % to_hex(uri0))
        if "status,COMPLETE" not in res:
            raise Exception("dev_exec_action did not succeed: " + res)
        cmd = "dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Initiator,DPPAuthDirection,Single,DPPProvisioningRole,%s,DPPConfIndex,%d,DPPSigningKeyECC,P-256,DPPConfEnrolleeRole,STA,DPPBS,QR,DPPTimeout,6" % (prov_role, conf_idx)
        if extra:
            cmd += "," + extra
        res = sigma_dut_cmd(cmd)
        if "BootstrapResult,OK,AuthResult,OK,ConfResult,OK" not in res:
            raise Exception("Unexpected result: " + res)
    finally:
        stop_sigma_dut(sigma)
def test_sigma_dut_dpp_incompatible_roles_init(dev, apdev):
    """sigma_dut DPP roles incompatible (Initiator)"""
    # Both ends request the Enrollee role, so authentication must fail
    # with ROLES_NOT_COMPATIBLE
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    sigma = start_sigma_dut(dev[0].ifname)
    try:
        # Exchange QR Codes in both directions (mutual authentication)
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,GetLocalBootstrap,DPPCryptoIdentifier,P-256,DPPBS,QR")
        if "status,COMPLETE" not in res:
            raise Exception("dev_exec_action did not succeed: " + res)
        hex = res.split(',')[3]
        uri = from_hex(hex)
        logger.info("URI from sigma_dut: " + uri)
        res = dev[1].request("DPP_QR_CODE " + uri)
        if "FAIL" in res:
            raise Exception("Failed to parse QR Code URI")
        id1 = int(res)
        addr = dev[1].own_addr().replace(':', '')
        cmd = "DPP_BOOTSTRAP_GEN type=qrcode chan=81/6 mac=" + addr
        res = dev[1].request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to generate bootstrapping info")
        id0 = int(res)
        uri0 = dev[1].request("DPP_BOOTSTRAP_GET_URI %d" % id0)
        # Peer also takes the enrollee role -> incompatible with sigma_dut
        cmd = "DPP_LISTEN 2437 role=enrollee"
        if "OK" not in dev[1].request(cmd):
            raise Exception("Failed to start listen operation")
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,SetPeerBootstrap,DPPBootstrappingdata,%s,DPPBS,QR" % to_hex(uri0))
        if "status,COMPLETE" not in res:
            raise Exception("dev_exec_action did not succeed: " + res)
        cmd = "dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Initiator,DPPAuthDirection,Mutual,DPPProvisioningRole,Enrollee,DPPBS,QR,DPPTimeout,6"
        res = sigma_dut_cmd(cmd)
        if "BootstrapResult,OK,AuthResult,ROLES_NOT_COMPATIBLE" not in res:
            raise Exception("Unexpected result: " + res)
    finally:
        stop_sigma_dut(sigma)
def dpp_init_enrollee_mutual(dev, id1, own_id):
    """Thread worker: initiate mutual DPP authentication as an Enrollee.

    id1 is the peer bootstrapping id and own_id the local one. Accepts
    either a received configuration or an incompatible-roles indication.
    """
    logger.info("Starting DPP initiator/enrollee in a thread")
    # Give the responder a moment to reach its listen state first
    time.sleep(1)
    cmd = "DPP_AUTH_INIT peer=%d own=%d role=enrollee" % (id1, own_id)
    result = dev.request(cmd)
    if "OK" not in result:
        raise Exception("Failed to initiate DPP Authentication")
    event = dev.wait_event(["DPP-CONF-RECEIVED",
                            "DPP-NOT-COMPATIBLE"], timeout=5)
    if event is None:
        raise Exception("DPP configuration not completed (Enrollee)")
    logger.info("DPP initiator/enrollee done")
def test_sigma_dut_dpp_incompatible_roles_resp(dev, apdev):
    """sigma_dut DPP roles incompatible (Responder)"""
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    sigma = start_sigma_dut(dev[0].ifname)
    try:
        # Fetch sigma_dut's own bootstrapping URI (returned hex encoded)
        cmd = "dev_exec_action,program,DPP,DPPActionType,GetLocalBootstrap,DPPCryptoIdentifier,P-256,DPPBS,QR"
        res = sigma_dut_cmd(cmd)
        if "status,COMPLETE" not in res:
            raise Exception("dev_exec_action did not succeed: " + res)
        hex = res.split(',')[3]
        uri = from_hex(hex)
        logger.info("URI from sigma_dut: " + uri)
        res = dev[1].request("DPP_QR_CODE " + uri)
        if "FAIL" in res:
            raise Exception("Failed to parse QR Code URI")
        id1 = int(res)
        # Generate the peer's bootstrapping info and hand it to sigma_dut
        addr = dev[1].own_addr().replace(':', '')
        cmd = "DPP_BOOTSTRAP_GEN type=qrcode chan=81/6 mac=" + addr
        res = dev[1].request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to generate bootstrapping info")
        id0 = int(res)
        uri0 = dev[1].request("DPP_BOOTSTRAP_GET_URI %d" % id0)
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,SetPeerBootstrap,DPPBootstrappingdata,%s,DPPBS,QR" % to_hex(uri0))
        if "status,COMPLETE" not in res:
            raise Exception("dev_exec_action did not succeed: " + res)
        # Peer initiates as Enrollee from a thread while sigma_dut responds,
        # also as Enrollee --> expect ROLES_NOT_COMPATIBLE
        t = threading.Thread(target=dpp_init_enrollee_mutual, args=(dev[1], id1, id0))
        t.start()
        cmd = "dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Responder,DPPAuthDirection,Mutual,DPPProvisioningRole,Enrollee,DPPBS,QR,DPPTimeout,6"
        res = sigma_dut_cmd(cmd, timeout=10)
        t.join()
        if "BootstrapResult,OK,AuthResult,ROLES_NOT_COMPATIBLE" not in res:
            raise Exception("Unexpected result: " + res)
    finally:
        stop_sigma_dut(sigma)
def test_sigma_dut_dpp_pkex_init_configurator(dev, apdev):
    """sigma_dut DPP/PKEX initiator as Configurator"""
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    sigma = start_sigma_dut(dev[0].ifname)
    try:
        # Set up the peer as a PKEX responder/enrollee with a matching
        # identifier and code
        cmd = "DPP_BOOTSTRAP_GEN type=pkex"
        res = dev[1].request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to generate bootstrapping info")
        id1 = int(res)
        cmd = "DPP_PKEX_ADD own=%d identifier=test code=secret" % (id1)
        res = dev[1].request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to set PKEX data (responder)")
        cmd = "DPP_LISTEN 2437 role=enrollee"
        if "OK" not in dev[1].request(cmd):
            raise Exception("Failed to start listen operation")
        # sigma_dut initiates PKEX and provisions the peer as Configurator
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Initiator,DPPProvisioningRole,Configurator,DPPConfIndex,1,DPPSigningKeyECC,P-256,DPPConfEnrolleeRole,STA,DPPBS,PKEX,DPPPKEXCodeIdentifier,test,DPPPKEXCode,secret,DPPTimeout,6")
        if "BootstrapResult,OK,AuthResult,OK,ConfResult,OK" not in res:
            raise Exception("Unexpected result: " + res)
    finally:
        stop_sigma_dut(sigma)
def dpp_init_conf(dev, id1, conf, conf_id, extra):
    """Run DPP authentication as initiator/configurator (meant for a helper thread)."""
    logger.info("Starting DPP initiator/configurator in a thread")
    auth_cmd = "DPP_AUTH_INIT peer=%d conf=%s %s configurator=%d" % (id1, conf, extra, conf_id)
    res = dev.request(auth_cmd)
    if "OK" not in res:
        raise Exception("Failed to initiate DPP Authentication")
    if dev.wait_event(["DPP-CONF-SENT"], timeout=5) is None:
        raise Exception("DPP configuration not completed (Configurator)")
    logger.info("DPP initiator/configurator done")
def test_sigma_dut_ap_dpp_qr(dev, apdev, params):
    """sigma_dut controlled AP (DPP)"""
    # DPP AKM for both the AP and the station configuration objects
    run_sigma_dut_ap_dpp_qr(dev, apdev, params, "ap-dpp", "sta-dpp")
def test_sigma_dut_ap_dpp_qr_legacy(dev, apdev, params):
    """sigma_dut controlled AP (legacy)"""
    # Legacy (PSK) configuration objects using a passphrase
    run_sigma_dut_ap_dpp_qr(dev, apdev, params, "ap-psk", "sta-psk",
                            extra="pass=%s" % to_hex("qwertyuiop"))
def test_sigma_dut_ap_dpp_qr_legacy_psk(dev, apdev, params):
    """sigma_dut controlled AP (legacy)"""
    # Legacy (PSK) configuration objects using a raw 32-octet PSK
    run_sigma_dut_ap_dpp_qr(dev, apdev, params, "ap-psk", "sta-psk",
                            extra="psk=%s" % (32*"12"))
def run_sigma_dut_ap_dpp_qr(dev, apdev, params, ap_conf, sta_conf, extra=""):
    # Provision a sigma_dut controlled AP over DPP (QR Code bootstrapping)
    # using dev[0] as the Configurator and then provision dev[1] as a station
    # so it can connect to that AP.
    check_dpp_capab(dev[0])
    logdir = os.path.join(params['logdir'], "sigma_dut_ap_dpp_qr.sigma-hostapd")
    with HWSimRadio() as (radio, iface):
        sigma = start_sigma_dut(iface, hostapd_logdir=logdir)
        try:
            sigma_dut_cmd_check("ap_reset_default,program,DPP")
            # Fetch the AP's bootstrapping URI (returned hex encoded)
            res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,GetLocalBootstrap,DPPCryptoIdentifier,P-256,DPPBS,QR")
            if "status,COMPLETE" not in res:
                raise Exception("dev_exec_action did not succeed: " + res)
            hex = res.split(',')[3]
            uri = from_hex(hex)
            logger.info("URI from sigma_dut: " + uri)
            cmd = "DPP_CONFIGURATOR_ADD"
            res = dev[0].request(cmd)
            if "FAIL" in res:
                raise Exception("Failed to add configurator")
            conf_id = int(res)
            res = dev[0].request("DPP_QR_CODE " + uri)
            if "FAIL" in res:
                raise Exception("Failed to parse QR Code URI")
            id1 = int(res)
            # Configurator initiates from a thread while the AP responds
            t = threading.Thread(target=dpp_init_conf,
                                 args=(dev[0], id1, ap_conf, conf_id, extra))
            t.start()
            res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Responder,DPPAuthDirection,Single,DPPProvisioningRole,Enrollee,DPPBS,QR,DPPTimeout,6")
            t.join()
            if "ConfResult,OK" not in res:
                raise Exception("Unexpected result: " + res)
            # Provision dev[1] as a station with the matching configuration
            addr = dev[1].own_addr().replace(':', '')
            cmd = "DPP_BOOTSTRAP_GEN type=qrcode chan=81/1 mac=" + addr
            res = dev[1].request(cmd)
            if "FAIL" in res:
                raise Exception("Failed to generate bootstrapping info")
            id1 = int(res)
            uri1 = dev[1].request("DPP_BOOTSTRAP_GET_URI %d" % id1)
            res = dev[0].request("DPP_QR_CODE " + uri1)
            if "FAIL" in res:
                raise Exception("Failed to parse QR Code URI")
            id0b = int(res)
            # dpp_config_processing=2 --> apply the received config and connect
            dev[1].set("dpp_config_processing", "2")
            cmd = "DPP_LISTEN 2412"
            if "OK" not in dev[1].request(cmd):
                raise Exception("Failed to start listen operation")
            cmd = "DPP_AUTH_INIT peer=%d conf=%s %s configurator=%d" % (id0b, sta_conf, extra, conf_id)
            if "OK" not in dev[0].request(cmd):
                raise Exception("Failed to initiate DPP Authentication")
            dev[1].wait_connected()
            sigma_dut_cmd_check("ap_reset_default")
        finally:
            dev[1].set("dpp_config_processing", "0")
            stop_sigma_dut(sigma)
def test_sigma_dut_ap_dpp_pkex_responder(dev, apdev, params):
    """sigma_dut controlled AP as DPP PKEX responder"""
    check_dpp_capab(dev[0])
    logdir = os.path.join(params['logdir'],
                          "sigma_dut_ap_dpp_pkex_responder.sigma-hostapd")
    with HWSimRadio() as (radio, iface):
        sigma = start_sigma_dut(iface, hostapd_logdir=logdir)
        try:
            run_sigma_dut_ap_dpp_pkex_responder(dev, apdev)
        finally:
            stop_sigma_dut(sigma)
def dpp_init_conf_pkex(dev, conf_id, check_config=True):
    """Run DPP PKEX as initiator/configurator (meant for a helper thread)."""
    logger.info("Starting DPP PKEX initiator/configurator in a thread")
    # Give the responder time to get ready before initiating PKEX
    time.sleep(1.5)
    res = dev.request("DPP_BOOTSTRAP_GEN type=pkex")
    if "FAIL" in res:
        raise Exception("Failed to generate bootstrapping info")
    own_id = int(res)
    pkex_cmd = "DPP_PKEX_ADD own=%d init=1 conf=ap-dpp configurator=%d code=password" % (own_id, conf_id)
    res = dev.request(pkex_cmd)
    if "FAIL" in res:
        raise Exception("Failed to initiate DPP PKEX")
    if check_config:
        if dev.wait_event(["DPP-CONF-SENT"], timeout=5) is None:
            raise Exception("DPP configuration not completed (Configurator)")
        logger.info("DPP initiator/configurator done")
def run_sigma_dut_ap_dpp_pkex_responder(dev, apdev):
    # sigma_dut controlled AP responds to PKEX initiated by dev[0]
    sigma_dut_cmd_check("ap_reset_default,program,DPP")
    cmd = "DPP_CONFIGURATOR_ADD"
    res = dev[0].request(cmd)
    if "FAIL" in res:
        raise Exception("Failed to add configurator")
    conf_id = int(res)
    # Configurator initiates PKEX from a thread while the AP responds
    t = threading.Thread(target=dpp_init_conf_pkex, args=(dev[0], conf_id))
    t.start()
    res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Responder,DPPAuthDirection,Mutual,DPPProvisioningRole,Enrollee,DPPBS,PKEX,DPPPKEXCode,password,DPPTimeout,6,DPPWaitForConnect,No", timeout=10)
    t.join()
    if "BootstrapResult,OK,AuthResult,OK,ConfResult,OK" not in res:
        raise Exception("Unexpected result: " + res)
    sigma_dut_cmd_check("ap_reset_default")
def test_sigma_dut_dpp_pkex_responder_proto(dev, apdev):
    """sigma_dut controlled STA as DPP PKEX responder and error case"""
    check_dpp_capab(dev[0])
    sigma = start_sigma_dut(dev[0].ifname)
    try:
        run_sigma_dut_dpp_pkex_responder_proto(dev, apdev)
    finally:
        stop_sigma_dut(sigma)
def run_sigma_dut_dpp_pkex_responder_proto(dev, apdev):
    # dpp_test=44 makes the peer stop at Commit-Reveal Request, so the
    # sigma_dut responder is expected to time out during bootstrapping
    cmd = "DPP_CONFIGURATOR_ADD"
    res = dev[1].request(cmd)
    if "FAIL" in res:
        raise Exception("Failed to add configurator")
    conf_id = int(res)
    dev[1].set("dpp_test", "44")
    t = threading.Thread(target=dpp_init_conf_pkex, args=(dev[1], conf_id,
                                                          False))
    t.start()
    res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Responder,DPPProvisioningRole,Enrollee,DPPBS,PKEX,DPPPKEXCode,password,DPPTimeout,6", timeout=10)
    t.join()
    if "BootstrapResult,Timeout" not in res:
        raise Exception("Unexpected result: " + res)
def dpp_proto_init(dev, id1):
    """Start DPP authentication as initiator/configurator (meant for a helper thread)."""
    # Give the responder a moment to get ready before initiating
    time.sleep(1)
    logger.info("Starting DPP initiator/configurator in a thread")
    res = dev.request("DPP_CONFIGURATOR_ADD")
    if "FAIL" in res:
        raise Exception("Failed to add configurator")
    conf_id = int(res)
    auth_cmd = "DPP_AUTH_INIT peer=%d conf=sta-dpp configurator=%d" % (id1, conf_id)
    if "OK" not in dev.request(auth_cmd):
        raise Exception("Failed to initiate DPP Authentication")
def test_sigma_dut_dpp_proto_initiator(dev, apdev):
    """sigma_dut DPP protocol testing - Initiator"""
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    # Each tuple: (DPPStep, DPPFrameType, DPPIEAttribute,
    #              expected sigma_dut result, expected peer failure or None)
    tests = [ ("InvalidValue", "AuthenticationRequest", "WrappedData",
               "BootstrapResult,OK,AuthResult,Errorsent",
               None),
              ("InvalidValue", "AuthenticationConfirm", "WrappedData",
               "BootstrapResult,OK,AuthResult,Errorsent",
               None),
              ("MissingAttribute", "AuthenticationRequest", "InitCapabilities",
               "BootstrapResult,OK,AuthResult,Errorsent",
               "Missing or invalid I-capabilities"),
              ("InvalidValue", "AuthenticationConfirm", "InitAuthTag",
               "BootstrapResult,OK,AuthResult,Errorsent",
               "Mismatching Initiator Authenticating Tag"),
              ("MissingAttribute", "ConfigurationResponse", "EnrolleeNonce",
               "BootstrapResult,OK,AuthResult,OK,ConfResult,Errorsent",
               "Missing or invalid Enrollee Nonce attribute") ]
    for step, frame, attr, result, fail in tests:
        # Start each case from a clean state with a fresh sigma_dut instance
        dev[0].request("FLUSH")
        dev[1].request("FLUSH")
        sigma = start_sigma_dut(dev[0].ifname)
        try:
            run_sigma_dut_dpp_proto_initiator(dev, step, frame, attr, result,
                                              fail)
        finally:
            stop_sigma_dut(sigma)
def run_sigma_dut_dpp_proto_initiator(dev, step, frame, attr, result, fail):
    # Run one DPP protocol test case with sigma_dut as the Initiator and
    # dev[1] as the responding Enrollee. fail, if not None, is the failure
    # string the peer is expected to report.
    addr = dev[1].own_addr().replace(':', '')
    cmd = "DPP_BOOTSTRAP_GEN type=qrcode chan=81/6 mac=" + addr
    res = dev[1].request(cmd)
    if "FAIL" in res:
        raise Exception("Failed to generate bootstrapping info")
    id0 = int(res)
    uri0 = dev[1].request("DPP_BOOTSTRAP_GET_URI %d" % id0)
    cmd = "DPP_LISTEN 2437 role=enrollee"
    if "OK" not in dev[1].request(cmd):
        raise Exception("Failed to start listen operation")
    res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,SetPeerBootstrap,DPPBootstrappingdata,%s,DPPBS,QR" % to_hex(uri0))
    if "status,COMPLETE" not in res:
        raise Exception("dev_exec_action did not succeed: " + res)
    # DPPStep/DPPFrameType/DPPIEAttribute select the injected protocol error
    res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Initiator,DPPAuthDirection,Single,DPPProvisioningRole,Configurator,DPPConfIndex,1,DPPSigningKeyECC,P-256,DPPConfEnrolleeRole,STA,DPPBS,QR,DPPTimeout,6,DPPStep,%s,DPPFrameType,%s,DPPIEAttribute,%s" % (step, frame, attr),
                        timeout=10)
    if result not in res:
        raise Exception("Unexpected result: " + res)
    if fail:
        ev = dev[1].wait_event(["DPP-FAIL"], timeout=5)
        if ev is None or fail not in ev:
            raise Exception("Failure not reported correctly: " + str(ev))
    dev[1].request("DPP_STOP_LISTEN")
    dev[0].dump_monitor()
    dev[1].dump_monitor()
def test_sigma_dut_dpp_proto_responder(dev, apdev):
    """sigma_dut DPP protocol testing - Responder"""
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    # Each tuple: (DPPStep, DPPFrameType, DPPIEAttribute,
    #              expected sigma_dut result, expected peer failure or None)
    tests = [ ("MissingAttribute", "AuthenticationResponse", "DPPStatus",
               "BootstrapResult,OK,AuthResult,Errorsent",
               "Missing or invalid required DPP Status attribute"),
              ("MissingAttribute", "ConfigurationRequest", "EnrolleeNonce",
               "BootstrapResult,OK,AuthResult,OK,ConfResult,Errorsent",
               "Missing or invalid Enrollee Nonce attribute") ]
    for step, frame, attr, result, fail in tests:
        # Start each case from a clean state with a fresh sigma_dut instance
        dev[0].request("FLUSH")
        dev[1].request("FLUSH")
        sigma = start_sigma_dut(dev[0].ifname)
        try:
            run_sigma_dut_dpp_proto_responder(dev, step, frame, attr, result,
                                              fail)
        finally:
            stop_sigma_dut(sigma)
def run_sigma_dut_dpp_proto_responder(dev, step, frame, attr, result, fail):
    # Run one DPP protocol test case with sigma_dut as the Responder and
    # dev[1] initiating as the Configurator. fail, if not None, is the
    # failure string the peer is expected to report.
    res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,GetLocalBootstrap,DPPCryptoIdentifier,P-256,DPPBS,QR")
    if "status,COMPLETE" not in res:
        raise Exception("dev_exec_action did not succeed: " + res)
    # The URI is returned hex encoded; renamed to avoid shadowing builtin hex()
    hex_uri = res.split(',')[3]
    uri = from_hex(hex_uri)
    logger.info("URI from sigma_dut: " + uri)
    res = dev[1].request("DPP_QR_CODE " + uri)
    if "FAIL" in res:
        raise Exception("Failed to parse QR Code URI")
    id1 = int(res)
    # Peer initiates from a thread while sigma_dut responds
    t = threading.Thread(target=dpp_proto_init, args=(dev[1], id1))
    t.start()
    res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Responder,DPPAuthDirection,Single,DPPProvisioningRole,Enrollee,DPPConfIndex,1,DPPSigningKeyECC,P-256,DPPConfEnrolleeRole,STA,DPPBS,QR,DPPTimeout,6,DPPStep,%s,DPPFrameType,%s,DPPIEAttribute,%s" % (step, frame, attr), timeout=10)
    t.join()
    if result not in res:
        raise Exception("Unexpected result: " + res)
    if fail:
        ev = dev[1].wait_event(["DPP-FAIL"], timeout=5)
        if ev is None or fail not in ev:
            # Space after the colon for consistency with the other helpers
            raise Exception("Failure not reported correctly: " + str(ev))
    dev[1].request("DPP_STOP_LISTEN")
    dev[0].dump_monitor()
    dev[1].dump_monitor()
def test_sigma_dut_dpp_proto_stop_at_initiator(dev, apdev):
    """sigma_dut DPP protocol testing - Stop at RX on Initiator"""
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    # Each tuple: (DPPFrameType to stop at, expected sigma_dut result,
    #              expected peer failure or None)
    tests = [ ("AuthenticationResponse",
               "BootstrapResult,OK,AuthResult,Errorsent",
               None),
              ("ConfigurationRequest",
               "BootstrapResult,OK,AuthResult,OK,ConfResult,Errorsent",
               None)]
    for frame, result, fail in tests:
        # Start each case from a clean state with a fresh sigma_dut instance
        dev[0].request("FLUSH")
        dev[1].request("FLUSH")
        sigma = start_sigma_dut(dev[0].ifname)
        try:
            run_sigma_dut_dpp_proto_stop_at_initiator(dev, frame, result, fail)
        finally:
            stop_sigma_dut(sigma)
def run_sigma_dut_dpp_proto_stop_at_initiator(dev, frame, result, fail):
    # Run one DPP protocol test case where sigma_dut (Initiator) stops
    # processing at the indicated received frame (DPPStep,Timeout)
    addr = dev[1].own_addr().replace(':', '')
    cmd = "DPP_BOOTSTRAP_GEN type=qrcode chan=81/6 mac=" + addr
    res = dev[1].request(cmd)
    if "FAIL" in res:
        raise Exception("Failed to generate bootstrapping info")
    id0 = int(res)
    uri0 = dev[1].request("DPP_BOOTSTRAP_GET_URI %d" % id0)
    cmd = "DPP_LISTEN 2437 role=enrollee"
    if "OK" not in dev[1].request(cmd):
        raise Exception("Failed to start listen operation")
    res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,SetPeerBootstrap,DPPBootstrappingdata,%s,DPPBS,QR" % to_hex(uri0))
    if "status,COMPLETE" not in res:
        raise Exception("dev_exec_action did not succeed: " + res)
    res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Initiator,DPPAuthDirection,Single,DPPProvisioningRole,Configurator,DPPConfIndex,1,DPPSigningKeyECC,P-256,DPPConfEnrolleeRole,STA,DPPBS,QR,DPPTimeout,6,DPPStep,Timeout,DPPFrameType,%s" % (frame))
    if result not in res:
        raise Exception("Unexpected result: " + res)
    if fail:
        ev = dev[1].wait_event(["DPP-FAIL"], timeout=5)
        if ev is None or fail not in ev:
            raise Exception("Failure not reported correctly: " + str(ev))
    dev[1].request("DPP_STOP_LISTEN")
    dev[0].dump_monitor()
    dev[1].dump_monitor()
def test_sigma_dut_dpp_proto_stop_at_initiator_enrollee(dev, apdev):
    """sigma_dut DPP protocol testing - Stop at TX on Initiator/Enrollee"""
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    # Each tuple: (DPPFrameType to stop at, expected sigma_dut result,
    #              expected peer failure or None)
    tests = [ ("AuthenticationConfirm",
               "BootstrapResult,OK,AuthResult,Errorsent,LastFrameReceived,AuthenticationResponse",
               None) ]
    for frame, result, fail in tests:
        # Start each case from a clean state with a fresh sigma_dut instance
        dev[0].request("FLUSH")
        dev[1].request("FLUSH")
        sigma = start_sigma_dut(dev[0].ifname, debug=True)
        try:
            run_sigma_dut_dpp_proto_stop_at_initiator_enrollee(dev, frame,
                                                               result, fail)
        finally:
            stop_sigma_dut(sigma)
def run_sigma_dut_dpp_proto_stop_at_initiator_enrollee(dev, frame, result,
                                                       fail):
    # Run one DPP protocol test case where sigma_dut (Initiator/Enrollee)
    # stops at the indicated frame (DPPStep,Timeout); dev[1] is the
    # responding Configurator
    addr = dev[1].own_addr().replace(':', '')
    cmd = "DPP_BOOTSTRAP_GEN type=qrcode chan=81/6 mac=" + addr
    res = dev[1].request(cmd)
    if "FAIL" in res:
        raise Exception("Failed to generate bootstrapping info")
    id0 = int(res)
    uri0 = dev[1].request("DPP_BOOTSTRAP_GET_URI %d" % id0)
    cmd = "DPP_LISTEN 2437 role=configurator"
    if "OK" not in dev[1].request(cmd):
        raise Exception("Failed to start listen operation")
    res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,SetPeerBootstrap,DPPBootstrappingdata,%s,DPPBS,QR" % to_hex(uri0))
    if "status,COMPLETE" not in res:
        raise Exception("dev_exec_action did not succeed: " + res)
    res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Initiator,DPPAuthDirection,Single,DPPProvisioningRole,Enrollee,DPPBS,QR,DPPTimeout,6,DPPStep,Timeout,DPPFrameType,%s" % (frame), timeout=10)
    if result not in res:
        raise Exception("Unexpected result: " + res)
    if fail:
        ev = dev[1].wait_event(["DPP-FAIL"], timeout=5)
        if ev is None or fail not in ev:
            raise Exception("Failure not reported correctly: " + str(ev))
    dev[1].request("DPP_STOP_LISTEN")
    dev[0].dump_monitor()
    dev[1].dump_monitor()
def test_sigma_dut_dpp_proto_stop_at_responder(dev, apdev):
    """sigma_dut DPP protocol testing - Stop at RX on Responder"""
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    # Each tuple: (DPPFrameType to stop at, expected sigma_dut result,
    #              expected peer failure or None)
    tests = [ ("AuthenticationRequest",
               "BootstrapResult,OK,AuthResult,Errorsent",
               None),
              ("AuthenticationConfirm",
               "BootstrapResult,OK,AuthResult,Errorsent",
               None) ]
    for frame, result, fail in tests:
        # Start each case from a clean state with a fresh sigma_dut instance
        dev[0].request("FLUSH")
        dev[1].request("FLUSH")
        sigma = start_sigma_dut(dev[0].ifname)
        try:
            run_sigma_dut_dpp_proto_stop_at_responder(dev, frame, result, fail)
        finally:
            stop_sigma_dut(sigma)
def run_sigma_dut_dpp_proto_stop_at_responder(dev, frame, result, fail):
    # Run one DPP protocol test case where sigma_dut (Responder) stops
    # processing at the indicated received frame (DPPStep,Timeout)
    res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,GetLocalBootstrap,DPPCryptoIdentifier,P-256,DPPBS,QR")
    if "status,COMPLETE" not in res:
        raise Exception("dev_exec_action did not succeed: " + res)
    # The URI is returned hex encoded; renamed to avoid shadowing builtin hex()
    hex_uri = res.split(',')[3]
    uri = from_hex(hex_uri)
    logger.info("URI from sigma_dut: " + uri)
    res = dev[1].request("DPP_QR_CODE " + uri)
    if "FAIL" in res:
        raise Exception("Failed to parse QR Code URI")
    id1 = int(res)
    # Peer initiates from a thread while sigma_dut responds
    t = threading.Thread(target=dpp_proto_init, args=(dev[1], id1))
    t.start()
    res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Responder,DPPAuthDirection,Single,DPPProvisioningRole,Enrollee,DPPConfIndex,1,DPPSigningKeyECC,P-256,DPPConfEnrolleeRole,STA,DPPBS,QR,DPPTimeout,6,DPPStep,Timeout,DPPFrameType,%s" % (frame), timeout=10)
    t.join()
    if result not in res:
        raise Exception("Unexpected result: " + res)
    if fail:
        ev = dev[1].wait_event(["DPP-FAIL"], timeout=5)
        if ev is None or fail not in ev:
            # Space after the colon for consistency with the other helpers
            raise Exception("Failure not reported correctly: " + str(ev))
    dev[1].request("DPP_STOP_LISTEN")
    dev[0].dump_monitor()
    dev[1].dump_monitor()
def dpp_proto_init_pkex(dev):
    """Start DPP PKEX as initiator/configurator (meant for a helper thread)."""
    # Give the responder a moment to get ready before initiating
    time.sleep(1)
    logger.info("Starting DPP PKEX initiator/configurator in a thread")
    res = dev.request("DPP_CONFIGURATOR_ADD")
    if "FAIL" in res:
        raise Exception("Failed to add configurator")
    conf_id = int(res)
    res = dev.request("DPP_BOOTSTRAP_GEN type=pkex")
    if "FAIL" in res:
        raise Exception("Failed to generate bootstrapping info")
    own_id = int(res)
    pkex_cmd = "DPP_PKEX_ADD own=%d init=1 conf=sta-dpp configurator=%d code=secret" % (own_id, conf_id)
    if "FAIL" in dev.request(pkex_cmd):
        raise Exception("Failed to initiate DPP PKEX")
def test_sigma_dut_dpp_proto_initiator_pkex(dev, apdev):
    """sigma_dut DPP protocol testing - Initiator (PKEX)"""
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    # Each tuple: (DPPStep, DPPFrameType, DPPIEAttribute,
    #              expected sigma_dut result, expected peer failure or None)
    tests = [ ("InvalidValue", "PKEXCRRequest", "WrappedData",
               "BootstrapResult,Errorsent",
               None),
              ("MissingAttribute", "PKEXExchangeRequest", "FiniteCyclicGroup",
               "BootstrapResult,Errorsent",
               "Missing or invalid Finite Cyclic Group attribute"),
              ("MissingAttribute", "PKEXCRRequest", "BSKey",
               "BootstrapResult,Errorsent",
               "No valid peer bootstrapping key found") ]
    for step, frame, attr, result, fail in tests:
        # Start each case from a clean state with a fresh sigma_dut instance
        dev[0].request("FLUSH")
        dev[1].request("FLUSH")
        sigma = start_sigma_dut(dev[0].ifname)
        try:
            run_sigma_dut_dpp_proto_initiator_pkex(dev, step, frame, attr,
                                                   result, fail)
        finally:
            stop_sigma_dut(sigma)
def run_sigma_dut_dpp_proto_initiator_pkex(dev, step, frame, attr, result, fail):
    # Run one DPP PKEX protocol test case with sigma_dut as the Initiator
    # and dev[1] as the PKEX responder/enrollee
    cmd = "DPP_BOOTSTRAP_GEN type=pkex"
    res = dev[1].request(cmd)
    if "FAIL" in res:
        raise Exception("Failed to generate bootstrapping info")
    id1 = int(res)
    cmd = "DPP_PKEX_ADD own=%d code=secret" % (id1)
    res = dev[1].request(cmd)
    if "FAIL" in res:
        raise Exception("Failed to set PKEX data (responder)")
    cmd = "DPP_LISTEN 2437 role=enrollee"
    if "OK" not in dev[1].request(cmd):
        raise Exception("Failed to start listen operation")
    # DPPStep/DPPFrameType/DPPIEAttribute select the injected protocol error
    res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Initiator,DPPAuthDirection,Single,DPPProvisioningRole,Configurator,DPPConfIndex,1,DPPSigningKeyECC,P-256,DPPConfEnrolleeRole,STA,DPPBS,PKEX,DPPPKEXCode,secret,DPPTimeout,6,DPPStep,%s,DPPFrameType,%s,DPPIEAttribute,%s" % (step, frame, attr))
    if result not in res:
        raise Exception("Unexpected result: " + res)
    if fail:
        ev = dev[1].wait_event(["DPP-FAIL"], timeout=5)
        if ev is None or fail not in ev:
            raise Exception("Failure not reported correctly: " + str(ev))
    dev[1].request("DPP_STOP_LISTEN")
    dev[0].dump_monitor()
    dev[1].dump_monitor()
def test_sigma_dut_dpp_proto_responder_pkex(dev, apdev):
    """sigma_dut DPP protocol testing - Responder (PKEX)"""
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    # Each tuple: (DPPStep, DPPFrameType, DPPIEAttribute,
    #              expected sigma_dut result, expected peer failure or None)
    tests = [ ("InvalidValue", "PKEXCRResponse", "WrappedData",
               "BootstrapResult,Errorsent",
               None),
              ("MissingAttribute", "PKEXExchangeResponse", "DPPStatus",
               "BootstrapResult,Errorsent",
               "No DPP Status attribute"),
              ("MissingAttribute", "PKEXCRResponse", "BSKey",
               "BootstrapResult,Errorsent",
               "No valid peer bootstrapping key found") ]
    for step, frame, attr, result, fail in tests:
        # Start each case from a clean state with a fresh sigma_dut instance
        dev[0].request("FLUSH")
        dev[1].request("FLUSH")
        sigma = start_sigma_dut(dev[0].ifname)
        try:
            run_sigma_dut_dpp_proto_responder_pkex(dev, step, frame, attr,
                                                   result, fail)
        finally:
            stop_sigma_dut(sigma)
def run_sigma_dut_dpp_proto_responder_pkex(dev, step, frame, attr, result, fail):
    # Run one DPP PKEX protocol test case with sigma_dut as the Responder;
    # dev[1] initiates PKEX as the Configurator from a thread
    t = threading.Thread(target=dpp_proto_init_pkex, args=(dev[1],))
    t.start()
    res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Responder,DPPAuthDirection,Single,DPPProvisioningRole,Enrollee,DPPConfIndex,1,DPPSigningKeyECC,P-256,DPPConfEnrolleeRole,STA,DPPBS,PKEX,DPPPKEXCode,secret,DPPTimeout,6,DPPStep,%s,DPPFrameType,%s,DPPIEAttribute,%s" % (step, frame, attr), timeout=10)
    t.join()
    if result not in res:
        raise Exception("Unexpected result: " + res)
    if fail:
        ev = dev[1].wait_event(["DPP-FAIL"], timeout=5)
        if ev is None or fail not in ev:
            # Space after the colon for consistency with the other helpers
            raise Exception("Failure not reported correctly: " + str(ev))
    dev[1].request("DPP_STOP_LISTEN")
    dev[0].dump_monitor()
    dev[1].dump_monitor()
def init_sigma_dut_dpp_proto_peer_disc_req(dev, apdev):
    # Set up a DPP AKM AP (pre-provisioned Connector/C-sign key) and a
    # dev[1] Configurator that will provision the sigma_dut station with a
    # matching configuration; leaves sigma_dut with the peer URI set so the
    # caller can run the AutomaticDPP exchange.
    check_dpp_capab(dev[0])
    check_dpp_capab(dev[1])
    # Pre-generated keys/Connector so the AP and Configurator share the
    # same C-sign key
    csign = "30770201010420768240a3fc89d6662d9782f120527fe7fb9edc6366ab0b9c7dde96125cfd250fa00a06082a8648ce3d030107a144034200042908e1baf7bf413cc66f9e878a03e8bb1835ba94b033dbe3d6969fc8575d5eb5dfda1cb81c95cee21d0cd7d92ba30541ffa05cb6296f5dd808b0c1c2a83c0708"
    csign_pub = "3059301306072a8648ce3d020106082a8648ce3d030107034200042908e1baf7bf413cc66f9e878a03e8bb1835ba94b033dbe3d6969fc8575d5eb5dfda1cb81c95cee21d0cd7d92ba30541ffa05cb6296f5dd808b0c1c2a83c0708"
    ap_connector = "eyJ0eXAiOiJkcHBDb24iLCJraWQiOiJwYWtZbXVzd1dCdWpSYTl5OEsweDViaTVrT3VNT3dzZHRlaml2UG55ZHZzIiwiYWxnIjoiRVMyNTYifQ.eyJncm91cHMiOlt7Imdyb3VwSWQiOiIqIiwibmV0Um9sZSI6ImFwIn1dLCJuZXRBY2Nlc3NLZXkiOnsia3R5IjoiRUMiLCJjcnYiOiJQLTI1NiIsIngiOiIybU5vNXZuRkI5bEw3d1VWb1hJbGVPYzBNSEE1QXZKbnpwZXZULVVTYzVNIiwieSI6IlhzS3dqVHJlLTg5WWdpU3pKaG9CN1haeUttTU05OTl3V2ZaSVl0bi01Q3MifX0.XhjFpZgcSa7G2lHy0OCYTvaZFRo5Hyx6b7g7oYyusLC7C_73AJ4_BxEZQVYJXAtDuGvb3dXSkHEKxREP9Q6Qeg"
    ap_netaccesskey = "30770201010420ceba752db2ad5200fa7bc565b9c05c69b7eb006751b0b329b0279de1c19ca67ca00a06082a8648ce3d030107a14403420004da6368e6f9c507d94bef0515a1722578e73430703902f267ce97af4fe51273935ec2b08d3adefbcf588224b3261a01ed76722a630cf7df7059f64862d9fee42b"
    params = { "ssid": "DPPNET01",
               "wpa": "2",
               "ieee80211w": "2",
               "wpa_key_mgmt": "DPP",
               "rsn_pairwise": "CCMP",
               "dpp_connector": ap_connector,
               "dpp_csign": csign_pub,
               "dpp_netaccesskey": ap_netaccesskey }
    try:
        hapd = hostapd.add_ap(apdev[0], params)
    except:
        raise HwsimSkip("DPP not supported")
    # dpp_config_processing=2 --> apply the received config and connect
    dev[0].set("dpp_config_processing", "2")
    cmd = "DPP_CONFIGURATOR_ADD key=" + csign
    res = dev[1].request(cmd)
    if "FAIL" in res:
        raise Exception("Failed to add configurator")
    conf_id = int(res)
    addr = dev[1].own_addr().replace(':', '')
    cmd = "DPP_BOOTSTRAP_GEN type=qrcode chan=81/6 mac=" + addr
    res = dev[1].request(cmd)
    if "FAIL" in res:
        raise Exception("Failed to generate bootstrapping info")
    id0 = int(res)
    uri0 = dev[1].request("DPP_BOOTSTRAP_GET_URI %d" % id0)
    # Auto-respond with a sta-dpp configuration when acting as Configurator
    dev[1].set("dpp_configurator_params",
               " conf=sta-dpp ssid=%s configurator=%d" % (to_hex("DPPNET01"),
                                                          conf_id))
    cmd = "DPP_LISTEN 2437 role=configurator"
    if "OK" not in dev[1].request(cmd):
        raise Exception("Failed to start listen operation")
    res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,SetPeerBootstrap,DPPBootstrappingdata,%s,DPPBS,QR" % to_hex(uri0))
    if "status,COMPLETE" not in res:
        raise Exception("dev_exec_action did not succeed: " + res)
def test_sigma_dut_dpp_proto_peer_disc_req(dev, apdev):
    """sigma_dut DPP protocol testing - Peer Discovery Request"""
    sigma = start_sigma_dut(dev[0].ifname)
    try:
        init_sigma_dut_dpp_proto_peer_disc_req(dev, apdev)
        # Omit the Transaction ID attribute from the Peer Discovery Request
        # and expect the network introduction to report Errorsent
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Initiator,DPPAuthDirection,Single,DPPProvisioningRole,Enrollee,DPPBS,QR,DPPTimeout,6,DPPWaitForConnect,Yes,DPPStep,MissingAttribute,DPPFrameType,PeerDiscoveryRequest,DPPIEAttribute,TransactionID", timeout=10)
        if "BootstrapResult,OK,AuthResult,OK,ConfResult,OK,NetworkIntroResult,Errorsent" not in res:
            raise Exception("Unexpected result: " + res)
    finally:
        dev[0].set("dpp_config_processing", "0")
        stop_sigma_dut(sigma)
def test_sigma_dut_dpp_self_config(dev, apdev):
    """sigma_dut DPP Configurator enrolling an AP and using self-configuration"""
    check_dpp_capab(dev[0])
    hapd = hostapd.add_ap(apdev[0], { "ssid": "unconfigured" })
    check_dpp_capab(hapd)
    sigma = start_sigma_dut(dev[0].ifname)
    try:
        # dpp_config_processing=2 --> apply the received config and connect
        dev[0].set("dpp_config_processing", "2")
        addr = hapd.own_addr().replace(':', '')
        cmd = "DPP_BOOTSTRAP_GEN type=qrcode chan=81/1 mac=" + addr
        res = hapd.request(cmd)
        if "FAIL" in res:
            raise Exception("Failed to generate bootstrapping info")
        id = int(res)
        uri = hapd.request("DPP_BOOTSTRAP_GET_URI %d" % id)
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,SetPeerBootstrap,DPPBootstrappingdata,%s,DPPBS,QR" % to_hex(uri))
        if "status,COMPLETE" not in res:
            raise Exception("dev_exec_action did not succeed: " + res)
        # sigma_dut as Configurator provisions the AP over DPP
        res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Initiator,DPPAuthDirection,Single,DPPProvisioningRole,Configurator,DPPConfIndex,1,DPPSigningKeyECC,P-256,DPPConfEnrolleeRole,AP,DPPBS,QR,DPPTimeout,6")
        if "BootstrapResult,OK,AuthResult,OK,ConfResult,OK" not in res:
            raise Exception("Unexpected result: " + res)
        update_hapd_config(hapd)
        # Then self-configure the station side and verify it can connect
        cmd = "dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPCryptoIdentifier,P-256,DPPBS,QR,DPPAuthRole,Initiator,DPPProvisioningRole,Configurator,DPPAuthDirection,Single,DPPConfIndex,1,DPPTimeout,6,DPPWaitForConnect,Yes,DPPSelfConfigure,Yes"
        res = sigma_dut_cmd(cmd, timeout=10)
        if "BootstrapResult,OK,AuthResult,OK,ConfResult,OK,NetworkIntroResult,OK,NetworkConnectResult,OK" not in res:
            raise Exception("Unexpected result: " + res)
    finally:
        stop_sigma_dut(sigma)
        dev[0].set("dpp_config_processing", "0")
def test_sigma_dut_ap_dpp_self_config(dev, apdev, params):
    """sigma_dut DPP AP Configurator using self-configuration"""
    logdir = os.path.join(params['logdir'],
                          "sigma_dut_ap_dpp_self_config.sigma-hostapd")
    with HWSimRadio() as (radio, iface):
        sigma = start_sigma_dut(iface, hostapd_logdir=logdir)
        try:
            run_sigma_dut_ap_dpp_self_config(dev, apdev)
        finally:
            stop_sigma_dut(sigma)
            dev[0].set("dpp_config_processing", "0")
def run_sigma_dut_ap_dpp_self_config(dev, apdev):
    # sigma_dut controlled AP self-configures as a DPP Configurator and then
    # provisions dev[0] as a station that connects to it
    check_dpp_capab(dev[0])
    sigma_dut_cmd_check("ap_reset_default,program,DPP")
    res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Initiator,DPPAuthDirection,Single,DPPProvisioningRole,Configurator,DPPConfEnrolleeRole,AP,DPPBS,QR,DPPConfIndex,1,DPPSelfConfigure,Yes,DPPTimeout,6", timeout=10)
    if "BootstrapResult,OK,AuthResult,OK,ConfResult,OK" not in res:
        raise Exception("Unexpected result: " + res)
    # dpp_config_processing=2 --> apply the received config and connect
    dev[0].set("dpp_config_processing", "2")
    addr = dev[0].own_addr().replace(':', '')
    cmd = "DPP_BOOTSTRAP_GEN type=qrcode chan=81/11 mac=" + addr
    res = dev[0].request(cmd)
    if "FAIL" in res:
        raise Exception("Failed to generate bootstrapping info")
    # Renamed from "id" to avoid shadowing the builtin id()
    id0 = int(res)
    uri = dev[0].request("DPP_BOOTSTRAP_GET_URI %d" % id0)
    cmd = "DPP_LISTEN 2462 role=enrollee"
    if "OK" not in dev[0].request(cmd):
        raise Exception("Failed to start listen operation")
    res = sigma_dut_cmd("dev_exec_action,program,DPP,DPPActionType,SetPeerBootstrap,DPPBootstrappingdata,%s,DPPBS,QR" % to_hex(uri))
    if "status,COMPLETE" not in res:
        raise Exception("dev_exec_action did not succeed: " + res)
    cmd = "dev_exec_action,program,DPP,DPPActionType,AutomaticDPP,DPPAuthRole,Initiator,DPPAuthDirection,Single,DPPProvisioningRole,Configurator,DPPConfIndex,1,DPPSigningKeyECC,P-256,DPPConfEnrolleeRole,STA,DPPBS,QR,DPPTimeout,6"
    res = sigma_dut_cmd(cmd)
    if "BootstrapResult,OK,AuthResult,OK,ConfResult,OK" not in res:
        raise Exception("Unexpected result: " + res)
    dev[0].wait_connected()
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected()
    sigma_dut_cmd_check("ap_reset_default")
def test_sigma_dut_preconfigured_profile(dev, apdev):
    """sigma_dut controlled connection using preconfigured profile"""
    try:
        run_sigma_dut_preconfigured_profile(dev, apdev)
    finally:
        # Restore the setting changed by start_sigma_dut()
        dev[0].set("ignore_old_scan_res", "0")
def run_sigma_dut_preconfigured_profile(dev, apdev):
    # Associate through sigma_dut using a network profile that was added
    # directly to wpa_supplicant beforehand (not provisioned by sigma_dut)
    ifname = dev[0].ifname
    sigma = start_sigma_dut(ifname)
    try:
        params = hostapd.wpa2_params(ssid="test-psk", passphrase="12345678")
        hapd = hostapd.add_ap(apdev[0], params)
        # Add the profile without associating yet; sigma_dut triggers the
        # actual association below
        dev[0].connect("test-psk", psk="12345678", scan_freq="2412",
                       only_add_network=True)
        sigma_dut_cmd_check("sta_set_ip_config,interface,%s,dhcp,0,ip,127.0.0.11,mask,255.255.255.0" % ifname)
        sigma_dut_cmd_check("sta_associate,interface,%s,ssid,%s" % (ifname, "test-psk"))
        sigma_dut_wait_connected(ifname)
        sigma_dut_cmd_check("sta_get_ip_config,interface," + ifname)
        sigma_dut_cmd_check("sta_disconnect,interface," + ifname)
        sigma_dut_cmd_check("sta_reset_default,interface," + ifname)
    finally:
        # Terminate sigma_dut even if one of the steps above fails
        # (previously it was left running on error)
        stop_sigma_dut(sigma)
def test_sigma_dut_wps_pbc(dev, apdev):
    """sigma_dut and WPS PBC Enrollee"""
    try:
        run_sigma_dut_wps_pbc(dev, apdev)
    finally:
        # Restore the setting changed by start_sigma_dut()
        dev[0].set("ignore_old_scan_res", "0")
def run_sigma_dut_wps_pbc(dev, apdev):
    # Provision the sigma_dut controlled station over WPS push button
    # against a WPS-configured AP
    hapd = hostapd.add_ap(apdev[0],
                          { "ssid": "wps", "eap_server": "1", "wps_state": "2",
                            "wpa_passphrase": "12345678", "wpa": "2",
                            "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP" })
    hapd.request("WPS_PBC")
    ifname = dev[0].ifname
    sigma = start_sigma_dut(ifname)
    try:
        cmd = "start_wps_registration,interface,%s" % ifname
        cmd += ",WpsRole,Enrollee"
        cmd += ",WpsConfigMethod,PBC"
        sigma_dut_cmd_check(cmd, timeout=15)
        sigma_dut_cmd_check("sta_disconnect,interface," + ifname)
        hapd.disable()
        sigma_dut_cmd_check("sta_reset_default,interface," + ifname)
    finally:
        # Terminate sigma_dut even if one of the steps above fails
        # (previously it was left running on error)
        stop_sigma_dut(sigma)
    dev[0].flush_scan_cache()
def test_sigma_dut_sta_scan_bss(dev, apdev):
    """sigma_dut sta_scan_bss"""
    hapd = hostapd.add_ap(apdev[0], { "ssid": "test" })
    sigma = start_sigma_dut(dev[0].ifname)
    try:
        # Scan for the specific BSSID and verify the reported SSID/channel
        cmd = "sta_scan_bss,Interface,%s,BSSID,%s" % (dev[0].ifname, \
                                                      hapd.own_addr())
        res = sigma_dut_cmd(cmd, timeout=10)
        if "ssid,test,bsschannel,1" not in res:
            raise Exception("Unexpected result: " + res)
    finally:
        stop_sigma_dut(sigma)
def test_sigma_dut_ap_osen(dev, apdev, params):
    """sigma_dut controlled AP with OSEN"""
    logdir = os.path.join(params['logdir'],
                          "sigma_dut_ap_osen.sigma-hostapd")
    with HWSimRadio() as (radio, iface):
        sigma = start_sigma_dut(iface, hostapd_logdir=logdir)
        try:
            sigma_dut_cmd_check("ap_reset_default")
            sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,test-hs20,MODE,11ng")
            sigma_dut_cmd_check("ap_set_radius,NAME,AP,IPADDR,127.0.0.1,PORT,1812,PASSWORD,radius")
            sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,OSEN,PMF,Optional")
            sigma_dut_cmd_check("ap_config_commit,NAME,AP")
            # RSN-OSEN (for OSU)
            # Verify a station can complete the OSEN association against
            # the sigma_dut-configured AP.
            dev[0].connect("test-hs20", proto="OSEN", key_mgmt="OSEN",
                           pairwise="CCMP", group="GTK_NOT_USED",
                           eap="WFA-UNAUTH-TLS", identity="osen@example.com",
                           ca_cert="auth_serv/ca.pem", scan_freq="2412")
            sigma_dut_cmd_check("ap_reset_default")
        finally:
            stop_sigma_dut(sigma)
def test_sigma_dut_ap_eap_osen(dev, apdev, params):
    """sigma_dut controlled AP with EAP+OSEN"""
    logdir = os.path.join(params['logdir'],
                          "sigma_dut_ap_eap_osen.sigma-hostapd")
    with HWSimRadio() as (radio, iface):
        sigma = start_sigma_dut(iface, bridge="ap-br0", hostapd_logdir=logdir)
        try:
            sigma_dut_cmd_check("ap_reset_default")
            sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,test-hs20,MODE,11ng")
            sigma_dut_cmd_check("ap_set_radius,NAME,AP,IPADDR,127.0.0.1,PORT,1812,PASSWORD,radius")
            sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,WPA2-ENT-OSEN,PMF,Optional")
            sigma_dut_cmd_check("ap_config_commit,NAME,AP")
            # Bring up the bridge sigma_dut attached the AP interface to.
            subprocess.call(['brctl', 'setfd', 'ap-br0', '0'])
            subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'up'])
            # RSN-OSEN (for OSU)
            dev[0].connect("test-hs20", proto="OSEN", key_mgmt="OSEN",
                           pairwise="CCMP",
                           eap="WFA-UNAUTH-TLS", identity="osen@example.com",
                           ca_cert="auth_serv/ca.pem", ieee80211w='2',
                           scan_freq="2412")
            # RSN-EAP (for data connection)
            dev[1].connect("test-hs20", key_mgmt="WPA-EAP", eap="TTLS",
                           identity="hs20-test", password="password",
                           ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                           ieee80211w='2', scan_freq="2412")
            # The OSU (OSEN) station must be isolated from the data station.
            hwsim_utils.test_connectivity(dev[0], dev[1], broadcast=False,
                                          success_expected=False, timeout=1)
            sigma_dut_cmd_check("ap_reset_default")
        finally:
            stop_sigma_dut(sigma)
            # Tear down the bridge regardless of the test outcome.
            subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'down'],
                            stderr=open('/dev/null', 'w'))
            subprocess.call(['brctl', 'delbr', 'ap-br0'],
                            stderr=open('/dev/null', 'w'))
def test_sigma_dut_ap_eap(dev, apdev, params):
    """sigma_dut controlled AP WPA2-Enterprise"""
    logdir = os.path.join(params['logdir'], "sigma_dut_ap_eap.sigma-hostapd")
    with HWSimRadio() as (radio, iface):
        sigma = start_sigma_dut(iface, hostapd_logdir=logdir, debug=True)
        try:
            sigma_dut_cmd_check("ap_reset_default")
            sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,test-eap,MODE,11ng")
            sigma_dut_cmd_check("ap_set_radius,NAME,AP,IPADDR,127.0.0.1,PORT,1812,PASSWORD,radius")
            sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,WPA2-ENT")
            sigma_dut_cmd_check("ap_config_commit,NAME,AP")
            # Confirm a station can complete EAP authentication.
            dev[0].connect("test-eap", key_mgmt="WPA-EAP", eap="GPSK",
                           identity="gpsk user",
                           password="abcdefghijklmnop0123456789abcdef",
                           scan_freq="2412")
            sigma_dut_cmd_check("ap_reset_default")
        finally:
            stop_sigma_dut(sigma)
def test_sigma_dut_ap_eap_sha256(dev, apdev, params):
    """sigma_dut controlled AP WPA2-Enterprise SHA256"""
    logdir = os.path.join(params['logdir'],
                          "sigma_dut_ap_eap_sha256.sigma-hostapd")
    with HWSimRadio() as (radio, iface):
        sigma = start_sigma_dut(iface, hostapd_logdir=logdir, debug=True)
        try:
            sigma_dut_cmd_check("ap_reset_default")
            sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,test-eap,MODE,11ng")
            sigma_dut_cmd_check("ap_set_radius,NAME,AP,IPADDR,127.0.0.1,PORT,1812,PASSWORD,radius")
            sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,WPA2-ENT-256")
            sigma_dut_cmd_check("ap_config_commit,NAME,AP")
            # SHA256-based AKM must be used by the station.
            dev[0].connect("test-eap", key_mgmt="WPA-EAP-SHA256", eap="GPSK",
                           identity="gpsk user",
                           password="abcdefghijklmnop0123456789abcdef",
                           scan_freq="2412")
            sigma_dut_cmd_check("ap_reset_default")
        finally:
            stop_sigma_dut(sigma)
def test_sigma_dut_ap_ft_eap(dev, apdev, params):
    """sigma_dut controlled AP FT-EAP"""
    logdir = os.path.join(params['logdir'], "sigma_dut_ap_ft_eap.sigma-hostapd")
    with HWSimRadio() as (radio, iface):
        sigma = start_sigma_dut(iface, hostapd_logdir=logdir, debug=True)
        try:
            sigma_dut_cmd_check("ap_reset_default")
            sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,test-ft-eap,MODE,11ng,DOMAIN,0101,FT_OA,Enable")
            sigma_dut_cmd_check("ap_set_radius,NAME,AP,IPADDR,127.0.0.1,PORT,1812,PASSWORD,radius")
            sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,FT-EAP")
            sigma_dut_cmd_check("ap_config_commit,NAME,AP")
            # Fast BSS Transition AKM with EAP authentication.
            dev[0].connect("test-ft-eap", key_mgmt="FT-EAP", eap="GPSK",
                           identity="gpsk user",
                           password="abcdefghijklmnop0123456789abcdef",
                           scan_freq="2412")
            sigma_dut_cmd_check("ap_reset_default")
        finally:
            stop_sigma_dut(sigma)
def test_sigma_dut_ap_ft_psk(dev, apdev, params):
    """sigma_dut controlled AP FT-PSK"""
    logdir = os.path.join(params['logdir'], "sigma_dut_ap_ft_psk.sigma-hostapd")
    with HWSimRadio() as (radio, iface):
        sigma = start_sigma_dut(iface, hostapd_logdir=logdir, debug=True)
        try:
            sigma_dut_cmd_check("ap_reset_default")
            sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,test-ft-psk,MODE,11ng,DOMAIN,0101,FT_OA,Enable")
            sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,FT-PSK,PSK,12345678")
            sigma_dut_cmd_check("ap_config_commit,NAME,AP")
            # Fast BSS Transition AKM with a pre-shared key.
            dev[0].connect("test-ft-psk", key_mgmt="FT-PSK", psk="12345678",
                           scan_freq="2412")
            sigma_dut_cmd_check("ap_reset_default")
        finally:
            stop_sigma_dut(sigma)
def test_sigma_dut_ap_ent_ft_eap(dev, apdev, params):
    """sigma_dut controlled AP WPA-EAP and FT-EAP"""
    logdir = os.path.join(params['logdir'],
                          "sigma_dut_ap_ent_ft_eap.sigma-hostapd")
    with HWSimRadio() as (radio, iface):
        sigma = start_sigma_dut(iface, hostapd_logdir=logdir, debug=True)
        try:
            sigma_dut_cmd_check("ap_reset_default")
            sigma_dut_cmd_check("ap_set_wireless,NAME,AP,CHANNEL,1,SSID,test-ent-ft-eap,MODE,11ng,DOMAIN,0101,FT_OA,Enable")
            sigma_dut_cmd_check("ap_set_radius,NAME,AP,IPADDR,127.0.0.1,PORT,1812,PASSWORD,radius")
            sigma_dut_cmd_check("ap_set_security,NAME,AP,KEYMGNT,WPA2-ENT-FT-EAP")
            sigma_dut_cmd_check("ap_config_commit,NAME,AP")
            # The mixed AKM configuration must accept both FT-EAP ...
            dev[0].connect("test-ent-ft-eap", key_mgmt="FT-EAP", eap="GPSK",
                           identity="gpsk user",
                           password="abcdefghijklmnop0123456789abcdef",
                           scan_freq="2412")
            # ... and plain WPA-EAP stations.
            dev[1].connect("test-ent-ft-eap", key_mgmt="WPA-EAP", eap="GPSK",
                           identity="gpsk user",
                           password="abcdefghijklmnop0123456789abcdef",
                           scan_freq="2412")
            sigma_dut_cmd_check("ap_reset_default")
        finally:
            stop_sigma_dut(sigma)
def test_sigma_dut_venue_url(dev, apdev):
    """sigma_dut controlled Venue URL fetch"""
    try:
        run_sigma_dut_venue_url(dev, apdev)
    finally:
        # Always restore the default scan-result handling, even on failure.
        dev[0].set("ignore_old_scan_res", "0")
def run_sigma_dut_venue_url(dev, apdev):
    """Fetch and display Venue URL ANQP information through sigma_dut."""
    ifname = dev[0].ifname
    sigma = start_sigma_dut(ifname, debug=True)
    ssid = "venue"
    params = hostapd.wpa2_params(ssid=ssid, passphrase="12345678")
    params["wpa_key_mgmt"] = "WPA-PSK-SHA256"
    params["ieee80211w"] = "2"
    venue_group = 1
    venue_type = 13
    venue_info = struct.pack('BB', venue_group, venue_type)
    lang1 = "eng"
    name1 = "Example venue"
    lang2 = "fin"
    name2 = "Esimerkkipaikka"
    # Venue Name duples: one length byte, then language code + name.
    venue1 = struct.pack('B', len(lang1 + name1)) + lang1.encode() + name1.encode()
    venue2 = struct.pack('B', len(lang2 + name2)) + lang2.encode() + name2.encode()
    # NOTE(review): this hex blob is built but not referenced below; the
    # hostapd parameters use the list form instead.
    venue_name = binascii.hexlify(venue_info + venue1 + venue2)
    url1 = "http://example.com/venue"
    url2 = "https://example.org/venue-info/"
    params["venue_group"] = str(venue_group)
    params["venue_type"] = str(venue_type)
    params["venue_name"] = [ lang1 + ":" + name1, lang2 + ":" + name2 ]
    params["venue_url"] = [ "1:" + url1, "2:" + url2 ]
    hapd = hostapd.add_ap(apdev[0], params)
    sigma_dut_cmd_check("sta_reset_default,interface,%s,prog,PMF" % ifname)
    sigma_dut_cmd_check("sta_set_ip_config,interface,%s,dhcp,0,ip,127.0.0.11,mask,255.255.255.0" % ifname)
    sigma_dut_cmd_check("sta_set_psk,interface,%s,ssid,%s,passphrase,%s,encpType,aes-ccmp,keymgmttype,wpa2,PMF,Required" % (ifname, "venue", "12345678"))
    sigma_dut_cmd_check("sta_associate,interface,%s,ssid,%s,channel,1" % (ifname, "venue"))
    sigma_dut_wait_connected(ifname)
    sigma_dut_cmd_check("sta_get_ip_config,interface," + ifname)
    sigma_dut_cmd_check("sta_hs2_venue_info,interface," + ifname + ",Display,Yes")
    sigma_dut_cmd_check("sta_disconnect,interface," + ifname)
    sigma_dut_cmd_check("sta_reset_default,interface," + ifname)
    stop_sigma_dut(sigma)
def test_sigma_dut_hs20_assoc_24(dev, apdev):
    """sigma_dut controlled Hotspot 2.0 connection (2.4 GHz)"""
    # band24=True selects the 2.4 GHz AP as the expected BSS.
    run_sigma_dut_hs20_assoc(dev, apdev, True)
def test_sigma_dut_hs20_assoc_5(dev, apdev):
    """sigma_dut controlled Hotspot 2.0 connection (5 GHz)"""
    # band24=False selects the 5 GHz AP as the expected BSS.
    run_sigma_dut_hs20_assoc(dev, apdev, False)
def run_sigma_dut_hs20_assoc(dev, apdev, band24):
    """Set up two HS 2.0 APs (2.4 and 5 GHz) and run the band-selection test.

    band24 selects which AP the station is expected to associate with.
    """
    hapd0 = None
    hapd1 = None
    try:
        bssid0 = apdev[0]['bssid']
        params = hs20_ap_params()
        params['hessid'] = bssid0
        hapd0 = hostapd.add_ap(apdev[0], params)
        bssid1 = apdev[1]['bssid']
        params = hs20_ap_params()
        # Both APs advertise the same HESSID (bssid0).
        params['hessid'] = bssid0
        params["hw_mode"] = "a"
        params["channel"] = "36"
        params["country_code"] = "US"
        hapd1 = hostapd.add_ap(apdev[1], params)
        band = "2.4" if band24 else "5"
        exp_bssid = bssid0 if band24 else bssid1
        run_sigma_dut_hs20_assoc_2(dev, apdev, band, exp_bssid)
    finally:
        # Clean up APs and regulatory state regardless of the outcome.
        dev[0].request("DISCONNECT")
        if hapd0:
            hapd0.request("DISABLE")
        if hapd1:
            hapd1.request("DISABLE")
        subprocess.call(['iw', 'reg', 'set', '00'])
        dev[0].flush_scan_cache()
def run_sigma_dut_hs20_assoc_2(dev, apdev, band, expect_bssid):
    """Perform the HS 2.0 association and verify the selected BSSID."""
    check_eap_capa(dev[0], "MSCHAPV2")
    dev[0].flush_scan_cache()
    ifname = dev[0].ifname
    sigma = start_sigma_dut(ifname, debug=True)
    sigma_dut_cmd_check("sta_reset_default,interface,%s,prog,HS2-R3" % ifname)
    sigma_dut_cmd_check("sta_set_ip_config,interface,%s,dhcp,0,ip,127.0.0.11,mask,255.255.255.0" % ifname)
    sigma_dut_cmd_check("sta_add_credential,interface,%s,type,uname_pwd,realm,example.com,username,hs20-test,password,password" % ifname)
    res = sigma_dut_cmd_check("sta_hs2_associate,interface,%s,band,%s" % (ifname, band),
                              timeout=15)
    sigma_dut_wait_connected(ifname)
    sigma_dut_cmd_check("sta_get_ip_config,interface," + ifname)
    sigma_dut_cmd_check("sta_disconnect,interface," + ifname)
    sigma_dut_cmd_check("sta_reset_default,interface," + ifname)
    stop_sigma_dut(sigma)
    # The association result must report the BSSID of the requested band.
    if "BSSID," + expect_bssid not in res:
        raise Exception("Unexpected BSSID: " + res)
def test_sigma_dut_ap_hs20(dev, apdev, params):
    """sigma_dut controlled AP with Hotspot 2.0 parameters"""
    logdir = os.path.join(params['logdir'],
                          "sigma_dut_ap_hs20.sigma-hostapd")
    conffile = os.path.join(params['logdir'],
                            "sigma_dut_ap_hs20.sigma-conf")
    with HWSimRadio() as (radio, iface):
        sigma = start_sigma_dut(iface, hostapd_logdir=logdir, debug=True)
        try:
            sigma_dut_cmd_check("ap_reset_default,NAME,AP,program,HS2-R3")
            # WLAN_TAG 1: the production Hotspot 2.0 BSS.
            sigma_dut_cmd_check("ap_set_wireless,NAME,AP,WLAN_TAG,1,CHANNEL,1,SSID,test-hs20,MODE,11ng")
            sigma_dut_cmd_check("ap_set_radius,NAME,AP,WLAN_TAG,1,IPADDR,127.0.0.1,PORT,1812,PASSWORD,radius")
            sigma_dut_cmd_check("ap_set_security,NAME,AP,WLAN_TAG,1,KEYMGNT,WPA2-ENT")
            sigma_dut_cmd_check("ap_set_hs2,NAME,AP,WLAN_TAG,1,HESSID,02:12:34:56:78:9a,NAI_REALM_LIST,1,OPER_NAME,1")
            sigma_dut_cmd_check("ap_set_hs2,NAME,AP,WLAN_TAG,1,OSU_SERVER_URI,https://example.com/ https://example.org/,OSU_SSID,test-osu,OSU_METHOD,SOAP SOAP,OSU_PROVIDER_LIST,10,OSU_PROVIDER_NAI_LIST,4")
            sigma_dut_cmd_check("ap_set_hs2,NAME,AP,WLAN_TAG,1,NET_AUTH_TYPE,2")
            sigma_dut_cmd_check("ap_set_hs2,NAME,AP,WLAN_TAG,1,VENUE_NAME,1")
            sigma_dut_cmd_check("ap_set_hs2,NAME,AP,WLAN_TAG,1,DOMAIN_LIST,example.com")
            sigma_dut_cmd_check("ap_set_hs2,NAME,AP,WLAN_TAG,1,OPERATOR_ICON_METADATA,1")
            # WLAN_TAG 2: the open OSU BSS.
            sigma_dut_cmd_check("ap_set_wireless,NAME,AP,WLAN_TAG,2,CHANNEL,1,SSID,test-osu,MODE,11ng")
            sigma_dut_cmd_check("ap_set_security,NAME,AP,WLAN_TAG,2,KEYMGNT,NONE")
            sigma_dut_cmd_check("ap_set_hs2,NAME,AP,WLAN_TAG,2,OSU,1")
            sigma_dut_cmd_check("ap_config_commit,NAME,AP")
            # Preserve the generated hostapd configuration as a test artifact.
            with open("/tmp/sigma_dut-ap.conf", "rb") as f:
                with open(conffile, "wb") as f2:
                    f2.write(f.read())
            sigma_dut_cmd_check("ap_reset_default")
        finally:
            stop_sigma_dut(sigma)
| 46.831949 | 466 | 0.665723 |
acf3ea4c0730abcc29808dbf0463976cce1df14a | 1,426 | py | Python | test/test_somatic_variant_with_0_supporting_rna_reads.py | openvax/isovar | 98a8bb33df5664973ff6ded5944a3024258217bd | [
"Apache-2.0"
] | 12 | 2018-05-19T23:43:32.000Z | 2021-11-16T04:10:17.000Z | test/test_somatic_variant_with_0_supporting_rna_reads.py | hammerlab/isovar | 98a8bb33df5664973ff6ded5944a3024258217bd | [
"Apache-2.0"
] | 86 | 2016-03-30T05:06:39.000Z | 2017-11-27T21:40:58.000Z | test/test_somatic_variant_with_0_supporting_rna_reads.py | openvax/isovar | 98a8bb33df5664973ff6ded5944a3024258217bd | [
"Apache-2.0"
] | 9 | 2018-06-07T09:57:33.000Z | 2021-06-01T03:42:05.000Z | from __future__ import print_function, division, absolute_import
from varcode import Variant
from testing_helpers import load_bam
from nose.tools import eq_
from isovar.read_collector import ReadCollector
from genomes_for_testing import grch38
def test_somatic_variant_with_0_supporting_rna_reads():
    """Check allele read counts for a somatic variant that is absent in RNA.

    The variant chr6:90411765 G>A is expected to have no supporting reads
    in the normal sample, five in the tumor DNA sample, and none in the
    RNA sample.
    """
    variant = Variant("6", 90411765, "G", "A", grch38)
    base_dir = "data/somatic-variant-with-0-supporting-rna-reads/"
    normal_reads = load_bam(base_dir + "normal.6.90411765.G.A.sorted.bam")
    tumor_reads = load_bam(base_dir + "tumor.6.90411765.G.A.sorted.bam")
    rna_reads = load_bam(base_dir + "rna.6.90411765.G.A.sorted.bam")
    read_creator = ReadCollector()
    normal_sample_variant_reads = read_creator.allele_reads_supporting_variant(
        variant=variant,
        alignment_file=normal_reads)
    eq_(len(normal_sample_variant_reads), 0)
    print(normal_sample_variant_reads)
    tumor_sample_variant_reads = read_creator.allele_reads_supporting_variant(
        variant=variant,
        alignment_file=tumor_reads)
    print(tumor_sample_variant_reads)
    eq_(len(tumor_sample_variant_reads), 5)
    rna_sample_variant_reads = read_creator.allele_reads_supporting_variant(
        variant=variant,
        alignment_file=rna_reads)
    print(rna_sample_variant_reads)
    eq_(len(rna_sample_variant_reads), 0)
# Allow running this test directly as a script.
if __name__ == "__main__":
    test_somatic_variant_with_0_supporting_rna_reads()
| 36.564103 | 79 | 0.772791 |
acf3eab66c83a311629e71f4ce18f499ad1ba3d4 | 7,117 | py | Python | render_osm_data.py | njanakiev/openstreetmap-heatmap | 986a0a6a2db1447fc4d278c3f551a1d4b291f629 | [
"Apache-2.0"
] | 309 | 2018-02-24T18:55:58.000Z | 2022-03-30T21:03:17.000Z | render_osm_data.py | aehrath-gretel/openstreetmap-heatmap | 33ec4de2f372ad5417900c6b2b19839474934e55 | [
"Apache-2.0"
] | 5 | 2018-07-10T09:40:37.000Z | 2021-09-28T13:36:05.000Z | render_osm_data.py | njanakiev/openstreetmap-heatmap | 986a0a6a2db1447fc4d278c3f551a1d4b291f629 | [
"Apache-2.0"
] | 53 | 2018-05-30T15:52:17.000Z | 2021-09-28T22:00:46.000Z | import bpy
import bmesh
import os
import sys
import numpy as np
from pyproj import Proj
from mathutils import Matrix, Vector
from matplotlib import cm
from bpy.app.handlers import persistent
import utils
import utils_osm
from math import sin, cos, pi
TAU = 2*pi
def normalize_points(points):
    """Scale 2D points into the unit square while preserving aspect ratio.

    Both axes are divided by the larger of the two coordinate ranges, so
    the data keeps its shape: the longer axis spans [0, 1] and the result
    is centred on (0.5, 0.5).
    """
    pts = np.array(points)
    lo_x, lo_y = pts.min(axis=0)
    hi_x, hi_y = pts.max(axis=0)
    span_x, span_y = hi_x - lo_x, hi_y - lo_y
    # Use the larger span as the common divisor for both axes.
    divisor = span_x if span_x > span_y else span_y
    pts[:, 0] = (pts[:, 0] - lo_x - 0.5 * span_x) / divisor + 0.5
    pts[:, 1] = (pts[:, 1] - lo_y - 0.5 * span_y) / divisor + 0.5
    return pts
def heatmap_grid(data, sigma_sq=0.0001, n=20, m=2):
    """Create an n-by-n heatmap grid from normalized point data.

    Each point contributes a Gaussian kernel (variance sigma_sq) to the
    grid.  Points are first bucketed by grid cell so that every output
    cell only sums contributions from points at most m bins away instead
    of scanning the whole data set.

    Parameters:
        data: sequence of (x, y) pairs, expected to lie in the unit square.
        sigma_sq: variance of the Gaussian kernel.
        n: grid resolution (the grid is n x n).
        m: neighborhood radius, in grid cells, used when summing.

    Returns:
        numpy array of shape (n, n) with accumulated density values.
    """
    # Bucket the points by their containing grid cell.
    X = np.ndarray((n, n), dtype=object)
    # Fix: iterate over the `data` parameter; the previous code walked the
    # module-level `points` variable, silently ignoring the argument.
    for x, y in data:
        i, j = int(x * (n - 1)), int(y * (n - 1))
        if X[i, j] is None:
            X[i, j] = [(x, y)]
        else:
            X[i, j].append((x, y))
    grid = np.zeros((n, n))
    for i0 in range(n):
        for j0 in range(n):
            x0, y0 = i0 / (n - 1), j0 / (n - 1)
            # Sum all available neighboring elements
            for i in range(max(0, i0 - m), min(i0 + m, n)):
                for j in range(max(0, j0 - m), min(j0 + m, n)):
                    if X[i, j] is not None:
                        for x, y in X[i, j]:
                            grid[i0][j0] += np.exp(- ((x0 - x)**2)/
                                (2*sigma_sq) - ((y0 - y)**2)/(2*sigma_sq))
    return grid
def heatmap_barplot(grid, h=4, width=10, bar_scale=0.9, num_colors=10, colormap=cm.summer, bevel_width=0.015, logarithmic=False):
    """Create 3D barplot from heatmap grid.

    Builds one Blender mesh per color bucket (num_colors buckets total) so
    that bars sharing a color also share a material.  Bar height scales
    with the grid value relative to the maximum; bar color is chosen from
    `colormap` via a saturating exponential of the normalized value.
    """
    # Logarithmic scale
    if logarithmic:
        grid = np.log(grid + 1)
    # Find maximum value
    z_max = np.max(grid)
    n, m = grid.shape
    bar_width = bar_scale * width / max(n, m)
    # List of bmesh elements for each color group
    bmList = [bmesh.new() for i in range(num_colors)]
    # Iterate over grid
    for i in range(n):
        for j in range(m):
            x, y, z = i / (n - 1), j / (m - 1), grid[i][j]
            # Skip (near-)empty cells so no degenerate bars are created.
            if z > 0.001:
                # Minimum height of bar_width keeps bars at least cube-shaped.
                bar_height = ((h - bar_width) * z / z_max) + bar_width
                # Saturating ramp maps normalized value to a color bucket.
                t = 1 - np.exp(-(z / z_max)/0.2)
                k = min(int(num_colors*t), num_colors - 1)
                bm = bmList[k]
                T = Matrix.Translation(Vector((
                    width*(x - 0.5),
                    width*(y - 0.5),
                    bar_height / 2)))
                S = Matrix.Scale(bar_height / bar_width, 4, Vector((0, 0, 1)))
                # Blender < 2.80 uses * for matrix multiplication, newer uses @.
                if bpy.app.version < (2, 80, 0):
                    bmesh.ops.create_cube(bm, size=bar_width, matrix=T*S)
                else:
                    bmesh.ops.create_cube(bm, size=bar_width, matrix=T@S)
    objList = []
    for i, bm in enumerate(bmList):
        # Create object
        obj = utils.bmesh_to_object(bm)
        # Create material with colormap
        color = colormap(i / num_colors)
        mat = utils.simple_material(color[:3])
        obj.data.materials.append(mat)
        objList.append(obj)
        # Add bevel modifier
        bevel = obj.modifiers.new('Bevel', 'BEVEL')
        bevel.width = bevel_width
if __name__ == '__main__':
    # Settings
    # Country (ISO alpha-2) and OSM tag selecting which points to plot.
    iso_a2, tag_key, tag_value = 'GB', 'amenity', 'pub'
    #res_x, res_y = 768, 432
    #res_x, res_y = 600, 600
    #res_x, res_y = 640, 480
    #res_x, res_y = 640, 360
    res_x, res_y = 1280, 720
    animation = False
    # r: radius of the circular camera path, camera_z: its height.
    #r, camera_z = 10, 7
    r, camera_z = 12, 10
    num_frames = 40
    #camera_position, target_position = (3, -10, 8), (0.3, -1.8, 0.5) # DE
    #camera_position, target_position = (3, -10, 8), (0.3, 0.0, 0.5) # AT
    #camera_position, target_position = (3, -10, 8), (-0.1, -0.4, 1.0) # CH
    camera_position, target_position = (-2, -10, 8), (0.0, -2.6, 1.0) # GB
    #camera_type, ortho_scale = 'ORTHO', 15
    camera_type, ortho_scale = 'PERSP', 18
    render_idx = 0
    # Remove all elements in scene
    # Blender < 2.80 and >= 2.80 use different selection operators.
    if bpy.app.version < (2, 80, 0):
        bpy.ops.object.select_by_layer()
    else:
        bpy.ops.object.select_all(action="SELECT")
    bpy.ops.object.delete(use_global=False)
    # Create scene
    target = utils.create_target(target_position)
    camera = utils.create_camera(camera_position, target=target,
                                 camera_type=camera_type, ortho_scale=ortho_scale, lens=28)
    #type='ORTHO', ortho_scale=12)
    sun = utils.create_lamp((-5, 5, 10), 'SUN', target=target)
    # Set background color
    if bpy.app.version < (2, 80, 0):
        bpy.context.scene.world.horizon_color = (0.7, 0.7, 0.7)
    else:
        bpy.context.scene.world.color = (0.7, 0.7, 0.7)
    # Ambient occlusion
    bpy.context.scene.world.light_settings.use_ambient_occlusion = True
    if bpy.app.version < (2, 80, 0):
        bpy.context.scene.world.light_settings.samples = 8
    # Load points from existing geojson file or load them with Overpass API
    filepath = 'data/points_{}_{}_{}.json'.format(iso_a2, tag_key, tag_value)
    if os.path.exists(filepath):
        points, names = utils_osm.load_points(filepath)
    else:
        # Cache the downloaded points so subsequent runs skip the API call.
        points, names = utils_osm.overpass_load_points(
            iso_a2, tag_key, tag_value)
        if not os.path.exists('data'): os.mkdir('data')
        utils_osm.save_points(filepath, points, names)
    print("Number of points : {}".format(len(points)))
    # Project points into Mercator projection
    p = Proj(init="epsg:3785") # Popular Visualisation CRS / Mercator
    points = np.apply_along_axis(lambda x : p(*x), 1, points)
    # Create heatmap barplot
    data = normalize_points(points)
    #hist = heatmap_grid(data, sigma_sq=0.00005, n=80)
    hist = heatmap_grid(data, sigma_sq=0.00002, n=100)
    #heatmap_barplot(hist, colormap=cm.Wistia)
    heatmap_barplot(hist, colormap=cm.viridis)
    #heatmap_barplot(hist, colormap=cm.YlGn_r)
    #heatmap_barplot(hist, colormap=cm.summer_r)
    # Animate rotation of camera
    if animation:
        # Move the camera along a circle of radius r at height camera_z.
        for frame in range(1, num_frames):
            t = frame / num_frames
            x, y = r*cos(TAU*t), r*sin(TAU*t)
            camera.location = (x, y, camera_z)
            camera.keyframe_insert(data_path="location", index=-1, frame=frame)
        render_folder = '{}_{}_{}_{}_{}_{}_{:0>3}'.format(
            iso_a2, tag_key, tag_value, camera_type, res_x, res_y, render_idx)
        render_name = 'render'
    else:
        render_folder = 'render'
        render_name = '{}_{}_{}_{}_{}_{}_{:0>3}'.format(
            iso_a2, tag_key, tag_value, camera_type, res_x, res_y, render_idx)
    # Render result
    utils.render_to_folder(render_folder, render_name, res_x=res_x, res_y=res_y, animation=animation, frame_end=num_frames, render_opengl=False)
| 34.052632 | 144 | 0.577491 |
acf3ead18d3a00f7ef6bf5548a30c475cd53499b | 7,731 | py | Python | tepsimulation/packages/graph/graphing/base.py | hsaafan/TEPSimulation | 86743fab499d8ffb690df63308e2a835fa59c6fd | [
"MIT"
] | null | null | null | tepsimulation/packages/graph/graphing/base.py | hsaafan/TEPSimulation | 86743fab499d8ffb690df63308e2a835fa59c6fd | [
"MIT"
] | null | null | null | tepsimulation/packages/graph/graphing/base.py | hsaafan/TEPSimulation | 86743fab499d8ffb690df63308e2a835fa59c6fd | [
"MIT"
] | null | null | null | """ Base classes for flowsheet graph
The flowsheet is implemented as a graph where the streams are the edges
and the unit operations are the nodes
Classes
-------
FlowSheetObject
Base class for streams and unit operations
Stream
Generic Stream class, acts as 'edges' of flowsheet 'graph'
UnitOperation
Generic Unit Operation class, acts as 'nodes' of the flowsheet 'graph'
Inlet
A special unit operation for adding materials and energy to the flowsheet
Outlet
A special unit operation for removing materials and energy from the
flowsheet
"""
import pint
import warnings
from ... import utils
class FlowSheetObject:
""" Generic flowsheet object class
This class is used as a base for all flowsheet objects and handles common
tasks.
Attributes
----------
id: str
User set label for identifying object
"""
def __init__(self, id: str) -> None:
self.id = id
self._temperature = None
self._pressure = None
def temperature() -> dict:
doc = """Current temperature"""
def fget(self) -> pint.Quantity:
return(self._temperature)
def fset(self, value: pint.Quantity) -> None:
if utils.pint_check(value, '[temperature]'):
self._temperature = value
return({'fget': fget, 'fset': fset, 'doc': doc})
temperature = property(**temperature())
def pressure() -> dict:
doc = """Current pressure"""
def fget(self) -> pint.Quantity:
return(self._pressure)
def fset(self, value: pint.Quantity) -> None:
if utils.pint_check(value, '[pressure]'):
self._pressure = value
return({'fget': fget, 'fset': fset, 'doc': doc})
pressure = property(**pressure())
class Stream(FlowSheetObject):
""" Stream class
The streams are though of as the edges of a graph. These objects contain
information about how to direct flow of material and energy. As edges, they
only have two connections, an input and an output. For mixing and splitting
operations, see the transport module classes. Inputs to the flowsheet and
outputs out of the flowsheet are treated as nodes and the Inlet or Outlet
class should be used instead.
Attributes
----------
source: UnitOperation
The inlet of the stream
sink: UnitOperation
The outlet of the stream
"""
def __init__(self, id: str) -> None:
super().__init__(id)
def __str__(self) -> str:
try:
str_repr = f"{self.id}: {self._source.id} ----> {self._sink.id}"
except AttributeError:
str_repr = f"{self.id}: Broken connection"
return(str_repr)
def pressure() -> dict:
doc = """Pressure difference of source and sink"""
def fget(self) -> pint.Quantity:
pressure_diff = self.sink.pressure - self.source.pressure
return(pressure_diff)
def fset(self, value) -> None:
raise AttributeError(f"Attempted to set pressure of stream "
f"{self.id}")
return({'fget': fget, 'fset': fset, 'doc': doc})
pressure = property(**pressure())
def source() -> dict:
doc = """Where the stream starts"""
def fget(self) -> FlowSheetObject:
return(self._source)
def fset(self, node: FlowSheetObject) -> None:
self._source = node
return({'fget': fget, 'fset': fset, 'doc': doc})
source = property(**source())
def sink() -> dict:
doc = """Where the stream ends"""
def fget(self) -> FlowSheetObject:
return(self._sink)
def fset(self, node: FlowSheetObject) -> None:
self._sink = node
return({'fget': fget, 'fset': fset, 'doc': doc})
sink = property(**sink())
class UnitOperation(FlowSheetObject):
""" Generic Unit Operation class
Acts as a template for other unit operation classes but should not be used
directly in the flowsheet.
Attributes
-----------
outlets: dict
The outlets of the unit operation
inlets: dict
The inlets of the unit operation
Methods
--------
add_inlet
Adds a Stream object to the unit operation inlets dictionary
add_outlet
Adds a Stream object to the unit operation outlets dictionary
step
Moves the unit operation forward by a timestep; this should not be
overriden in child classes, override step_preprocess, step_events, or
step_postprocess instead
step_preprocess
Error checking that happens before a time step occurs
step_events
All the events that constitute a time step. For example, a reactor
would include reactions, temperature changes, pressure changes, etc.
here. This should be overriden by any child classes, as it will throw
an error otherwise.
step_postprocess
Cleanup operations after a step has occurred, these could be included
in step_events instead but this is provided as a way of keeping the
code clean
"""
def __init__(self, id: str) -> None:
super().__init__(id)
self.outlets = dict()
self.inlets = dict()
self._temperature = None
self._pressure = None
def __str__(self) -> str:
incoming = []
outgoing = []
for stream in self.inlets:
incoming.append(stream.source.id)
for stream in self.outlets:
outgoing.append(stream.sink.id)
return(f'{incoming} ---> {self.id} ---> {outgoing}')
def add_inlet(self, stream: Stream, inlet_id: str = "inlet") -> None:
if inlet_id in self.inlets.keys():
warnings.warn(f"{stream.id} is overwriting an inlet of {self.id}")
self.inlets[inlet_id] = stream
stream.sink = self
def add_outlet(self, stream: Stream, outlet_id: str = "outlet") -> None:
if outlet_id in self.inlets.keys():
warnings.warn(f"{stream.id} is overwriting an outlet of {self.id}")
self.outlets[outlet_id] = stream
stream.source = self
def step_preprocess(self, time_step: pint.Quantity) -> None:
utils.pint_check(time_step, '[time]')
def step_events(self, time_step: pint.Quantity) -> None:
raise NotImplementedError
def step_postprocess(self) -> None:
pass
def step(self, time_step: pint.Quantity) -> None:
self.step_preprocess(time_step)
self.step_events(time_step)
self.step_postprocess()
class Inlet(UnitOperation):
""" Flowsheet inlets class
Any input streams to the flowsheet (e.g. feed streams) should be here
Methods
-------
add_inlet
Overrides parent method and raises an error
"""
def __str__(self) -> str:
outgoing = []
for stream in self.outlets:
outgoing.append(stream.sink.id)
return(f'{self.id} ---> {outgoing}')
def add_inlet(self, stream: Stream, inlet_id: str = "") -> None:
raise RuntimeError("Cannot add inlet streams to flowsheet inlets")
class Outlet(UnitOperation):
""" Flowsheet outlets class
Any outlet streams from the flowsheet (e.g. product streams) should be here
Methods
-------
add_outlet
Overrides parent method and raises an error
"""
def __str__(self) -> str:
incoming = []
for stream in self.inlets:
incoming.append(stream.source.id)
return(f'{incoming} ---> {self.id}')
def add_outlet(self, stream: Stream, outlet_id: str = "") -> None:
raise RuntimeError("Cannot add outlet streams to flowsheet outlets")
| 29.965116 | 79 | 0.622817 |
acf3ed24de0169c9eeaf1d9c724d92b1925e62f6 | 458 | py | Python | tower_of_hanoi/tower_of_hanoi.py | Isonzo/Practice-Projects | 02a82c3c7cf3cee1f363f800d5ea3c725c91ac86 | [
"MIT"
] | null | null | null | tower_of_hanoi/tower_of_hanoi.py | Isonzo/Practice-Projects | 02a82c3c7cf3cee1f363f800d5ea3c725c91ac86 | [
"MIT"
] | null | null | null | tower_of_hanoi/tower_of_hanoi.py | Isonzo/Practice-Projects | 02a82c3c7cf3cee1f363f800d5ea3c725c91ac86 | [
"MIT"
] | null | null | null |
def pm(start, end):
    """Print a single move: source rod --> destination rod."""
    print(f"{start} --> {end}")
def hanoi(n, start, end):
    """Print the moves that transfer n rings from rod `start` to rod `end`.

    Rods are numbered 1-3; the spare rod is derived from the fact that
    the three rod numbers always sum to 6.
    """
    if n == 1:
        # Base case: a single ring moves directly.
        pm(start, end)
        return
    spare = 6 - start - end
    hanoi(n - 1, start, spare)   # clear the n-1 smaller rings onto the spare rod
    pm(start, end)               # move the largest ring
    hanoi(n - 1, spare, end)     # stack the smaller rings back on top
# Interactive driver: read the puzzle size and the two rods, then print
# the full sequence of moves.
rings = int(input("Number of rings: "))
start_rod = int(input("Starting rod (1 - 3)"))
end_rod = int(input("Destination rod (1 - 3)"))
hanoi(rings, start_rod, end_rod)
input("\nPress anything to exit ")
| 19.913043 | 47 | 0.556769 |
acf3ed5c1a22573494b6035f90d20b1238b69d64 | 13,320 | py | Python | profilerender.py | JimKnowler/profile-visualiser | 2398b17c68ea748fc82e7cc15e43ccbfb64f8e2c | [
"MIT"
] | 3 | 2018-06-19T16:23:35.000Z | 2021-07-15T05:35:21.000Z | profilerender.py | JimKnowler/profile-visualiser | 2398b17c68ea748fc82e7cc15e43ccbfb64f8e2c | [
"MIT"
] | null | null | null | profilerender.py | JimKnowler/profile-visualiser | 2398b17c68ea748fc82e7cc15e43ccbfb64f8e2c | [
"MIT"
] | null | null | null | import cairo
import colorsys
TITLE_HEIGHT = 25
EVENT_LABEL_HEIGHT = 20
SAMPLE_HEIGHT = 40
COLOUR_BLACK = (0.1,0.1,0.1)
COLOUR_WHITE = (1,1,1)
LABEL_X_OFFSET = 4
LABEL_OFFSET_Y = 4
TEXT_SIZE_LABEL = 13
TEXT_SIZE_TITLE = 17
TEXT_SIZE_DURATION = 10
TEXT_LABEL_DURATION_OFFSET_Y = 20
TEXT_SIZE_EVENT_LABEL = 10
EVENT_LABEL_OFFSET_X = 1
EVENT_LABEL_OFFSET_Y = 1
COUNTER_ROW_HEIGHT = 100
class RenderContext:
    """Viewport state for one render pass.

    Maps profile timestamps onto horizontal pixel positions and answers
    visibility queries for samples and events against the currently
    visible time window [start_time, finish_time].
    """

    def __init__(self, cr, width, height, start_time, finish_time, offset_x, offset_y):
        self.cr = cr                       # cairo drawing context
        self.width = float(width)          # viewport width in pixels
        self.height = float(height)        # viewport height in pixels
        self.start_time = start_time       # left edge of the visible window
        self.finish_time = finish_time     # right edge of the visible window
        self.offset_x = offset_x
        self.offset_y = offset_y
        # Floor at 0.001 to avoid a divide-by-zero for empty windows.
        self._duration = max(0.001, float(finish_time - start_time))

    def get_x_for_time(self, time):
        """Return the x pixel position for a timestamp, clamped to the viewport."""
        if time <= self.start_time:
            return 0
        if time >= self.finish_time:
            return self.width
        return (time - self.start_time) * self.width / self._duration

    def is_sample_visible(self, sample):
        """True if any part of the sample overlaps the visible window."""
        ends_before_view = sample.get_finish_time() < self.start_time
        starts_after_view = sample.get_start_time() > self.finish_time
        return not (ends_before_view or starts_after_view)

    def is_sample_off_right_of_screen(self, sample):
        """True if the sample begins after the right edge of the window."""
        return sample.get_start_time() > self.finish_time

    def is_event_visible(self, event_sample):
        """True if the event timestamp lies strictly inside the window."""
        when = event_sample.get_time()
        return self.start_time < when < self.finish_time

    def is_event_off_right_of_screen(self, event_sample):
        """True if the event occurs after the right edge of the window."""
        return event_sample.get_time() > self.finish_time
def render_text(cr, label, font_size, x, y, width = None):
    """Draw `label` with (x, y) as its top-left corner.

    Returns the (width, height) of the rendered text.  If `width` is
    given and the label would not fit inside it, nothing is drawn and
    None is returned implicitly — callers that unpack the result must
    only pass `width` when they handle that case.
    """
    # render label using x,y as top-left co-ords
    cr.select_font_face("Arial", cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL)
    cr.set_font_size(font_size)
    (label_x, label_y, label_width, label_height, label_dx, label_dy) = cr.text_extents(label)
    if width and (label_width > (width-LABEL_X_OFFSET)):
        return
    cr.move_to(x + LABEL_X_OFFSET,y + LABEL_OFFSET_Y + label_height)
    cr.show_text(label)
    return (label_width, label_height)
def render_sample(render_context, sample, y):
    """Render one profile sample (and recursively its children) at depth y.

    Returns False once a sample past the right edge of the viewport is
    reached, which lets callers stop iterating over later samples.
    """
    if not render_context.is_sample_visible(sample):
        # Off-screen left: keep going; off-screen right: stop iteration.
        return not render_context.is_sample_off_right_of_screen(sample)
    cr = render_context.cr
    start_time = sample.get_start_time()
    finish_time = sample.get_finish_time()
    start_x = render_context.get_x_for_time(start_time)
    finish_x = render_context.get_x_for_time(finish_time)
    width = finish_x - start_x
    # make sure we always render at least something for a sample
    width = max(width,0.5)
    if width < 4:
        # filled rectangle for this sample + all its' children
        call_stack_depth = sample.get_child_call_stack_depth() + 1
        cr.set_source_rgb(*render_context.sample_colour)
        cr.rectangle(start_x,y, width, SAMPLE_HEIGHT * call_stack_depth)
        cr.fill()
    else:
        # filled rectangle
        cr.set_source_rgb(*render_context.sample_colour)
        cr.rectangle(start_x,y, width, SAMPLE_HEIGHT)
        cr.fill()
        # black outline
        cr.set_source_rgb(*COLOUR_BLACK)
        cr.rectangle(start_x,y, width, SAMPLE_HEIGHT)
        cr.stroke()
        if width > 10:
            # function name
            label = sample.get_function().get_label()
            render_text(cr, label, TEXT_SIZE_LABEL, start_x, y, width)
            duration = sample.get_duration()
            duration_label = '%.3fms' % ( sample.get_duration() / 1000.0 )
            render_text(cr, duration_label, TEXT_SIZE_DURATION, start_x, y + TEXT_LABEL_DURATION_OFFSET_Y, width)
        # recursive calls
        children = sample.get_children()
        for child in children:
            if not render_sample( render_context, child, y+SAMPLE_HEIGHT):
                return False
    return True
def render_event(render_context, event_sample, y, height):
    # Draw a vertical event-marker line of the given height, plus the event's
    # label to its right.  Returns False once the event lies beyond the
    # right-hand edge of the screen so a time-ordered caller can stop early.
    if not render_context.is_event_visible(event_sample):
        return not render_context.is_event_off_right_of_screen(event_sample)
    cr = render_context.cr
    time = event_sample.get_time()
    x = render_context.get_x_for_time(time)
    # Wider white "halo" line first, then a narrower black line on top, so
    # the marker stays visible over any row background.
    cr.set_source_rgb(*COLOUR_WHITE)
    cr.move_to(x,y)
    cr.line_to(x,y+height)
    cr.set_line_width(3)
    cr.stroke()
    cr.set_source_rgb(*COLOUR_BLACK)
    cr.move_to(x,y)
    cr.line_to(x,y+height)
    cr.set_line_width(2)
    cr.stroke()
    event = event_sample.get_event()
    label = event.get_label()
    (label_width, label_height) = render_text(cr, label, TEXT_SIZE_EVENT_LABEL, x + EVENT_LABEL_OFFSET_X, y + EVENT_LABEL_OFFSET_Y)
    # Horizontal tick from the marker out to the end of the label.
    cr.move_to(x,y)
    cr.line_to(x + EVENT_LABEL_OFFSET_X + label_width, y)
    cr.stroke()
    # Bug fix: the function previously fell off the end returning None, which
    # callers (`if not render_event(...): break`) treated as "off-screen",
    # stopping after the first visible event.  Mirror render_sample() and
    # report success explicitly.
    return True
class ProfileRenderCounter:
    """Renders one counter row: a title line plus a bar chart of its samples."""

    def __init__(self, counter_data, colour, background_colour):
        self._counter_data = counter_data
        self._colour = colour
        self._background_colour = background_colour
        # Fixed row height: title area plus the chart area.
        self._height = TITLE_HEIGHT + COUNTER_ROW_HEIGHT

    def render(self, render_context):
        """Draw the counter row at the current cairo origin."""
        cr = render_context.cr
        counter_data = self._counter_data
        # render background colour
        cr.set_source_rgb(*self._background_colour)
        cr.rectangle(0, 0, render_context.width, self._height)
        cr.fill()
        # render title
        title = "Counter: " + counter_data.get_label()
        cr.set_source_rgb(*COLOUR_BLACK)
        render_text(cr, title, TEXT_SIZE_TITLE, 0, 0)
        # render values
        samples = counter_data.get_samples()
        if len(samples) > 0:
            cr.set_source_rgb(*self._colour)
            max_value = counter_data.get_max_value()
            min_value = counter_data.get_min_value()
            value_range = max_value - min_value
            # Bug fix: a flat counter (max == min) used to raise
            # ZeroDivisionError here; render it as a flat line instead.
            y_scale = float(COUNTER_ROW_HEIGHT) / value_range if value_range else 0.0
            min_value_height = -min_value * y_scale
            cr.translate(0, TITLE_HEIGHT)

            # Renamed from `render_sample` so it no longer shadows the
            # module-level render_sample() helper.
            def draw_value_bar(cr, last_x, x, value):
                # Bars grow up from the x-axis for positive values and down
                # for negatives; `1 + x - last_x` keeps every bar >= 1px wide.
                value_height = float(value) * y_scale
                if value >= 0:
                    cr.rectangle(last_x, COUNTER_ROW_HEIGHT - min_value_height - value_height, 1 + x - last_x, value_height)
                else:
                    value_height = -value_height
                    cr.rectangle(last_x, COUNTER_ROW_HEIGHT - min_value_height, 1 + x - last_x, value_height)
                cr.fill()

            # Each sample's value holds from its own timestamp until the next
            # sample's timestamp (step chart).
            last_sample = samples[0]
            for sample in samples:
                last_x = render_context.get_x_for_time(last_sample.get_time())
                x = render_context.get_x_for_time(sample.get_time())
                value = last_sample.get_value()
                draw_value_bar(cr, last_x, x, value)
                last_sample = sample
            end_x = render_context.get_x_for_time(render_context.finish_time)
            # render the last sample out to the right-hand edge
            last_x = render_context.get_x_for_time(last_sample.get_time())
            value = last_sample.get_value()
            draw_value_bar(cr, last_x, end_x, value)
            # render the x-axis
            line_y = COUNTER_ROW_HEIGHT - min_value_height
            cr.set_line_width(1)
            cr.move_to(0, line_y)
            cr.line_to(end_x, line_y)
            cr.stroke()

    def get_height(self):
        """Height in pixels of this row on screen."""
        return self._height
class ProfileRenderThread:
    """Renders one thread row: title, call-stack samples and event markers."""

    def __init__(self, thread_data, colour, background_colour):
        self._thread_data = thread_data
        self._colour = colour
        self._background_colour = background_colour
        # Row height: title + event-label strip + one band per stack level.
        self._height = TITLE_HEIGHT + EVENT_LABEL_HEIGHT + (self._thread_data.get_max_stack_depth() * SAMPLE_HEIGHT)

    def render(self, render_context):
        """Draw this thread's row at the current cairo origin."""
        cr = render_context.cr
        # Background fill for the whole row.
        cr.set_source_rgb(*self._background_colour)
        cr.rectangle(0, 0, render_context.width, self._height)
        cr.fill()
        # Row title.
        title = "Thread: " + self._thread_data.get_label()
        cr.set_source_rgb(*COLOUR_BLACK)
        render_text(cr, title, TEXT_SIZE_TITLE, 0, 0)
        # Call-stack samples; render_sample() returns False once a sample is
        # off the right of the screen, at which point we stop.
        render_context.sample_colour = self._colour
        for call_sample in self._thread_data.get_samples():
            if not render_sample(render_context, call_sample, TITLE_HEIGHT + EVENT_LABEL_HEIGHT):
                break
        # Event markers drawn over the full height of the row.
        row_height = self.get_height()
        for marker in self._thread_data.get_event_samples():
            if not render_event(render_context, marker, TITLE_HEIGHT, row_height):
                break

    def get_height(self):
        """Return the height of this thread on screen, in pixels."""
        return self._height
class ProfileRenderObjects:
    """Builds and draws the per-counter / per-thread render rows of a profile,
    stacked vertically (counters first, then threads)."""

    def __init__(self, profile_data):
        self._counters = []
        self._threads = []
        num_counters = profile_data.get_num_counters()
        num_threads = profile_data.get_num_threads()
        total_rows = num_counters + num_threads
        # Mutable cell so the closure below can advance the row index
        # (py2-compatible alternative to `nonlocal`).
        row_cell = [0]

        def next_row_colours():
            row = row_cell[0]
            row_cell[0] += 1
            # Alternate white / pale-green backgrounds; spread foreground hues
            # evenly across all rows.
            if row % 2:
                background = (1.0, 1.0, 1.0)
            else:
                background = (243.0 / 255.0, 245.0 / 255.0, 220.0 / 255.0)
            foreground = colorsys.hls_to_rgb(float(row + 1) / float(total_rows), 0.5, 0.5)
            return (background, foreground)

        for index in range(num_counters):
            (background, foreground) = next_row_colours()
            self._counters.append(
                ProfileRenderCounter(profile_data.get_counter(index), foreground, background))
        for index in range(num_threads):
            (background, foreground) = next_row_colours()
            self._threads.append(
                ProfileRenderThread(profile_data.get_thread(index), foreground, background))
        self._render_height = self._calculate_render_height()

    def _render_background(self, render_context):
        # Flood the whole viewport with white before drawing any rows.
        cr = render_context.cr
        cr.set_source_rgb(1.0, 1.0, 1.0)
        cr.rectangle(0, 0, render_context.width, render_context.height)
        cr.fill()

    def render(self, render_context):
        """Draw background, then all counter rows, then all thread rows."""
        self._render_background(render_context)
        self._render_counters(render_context)
        self._render_threads(render_context)

    def _render_counters(self, render_context):
        cr = render_context.cr
        offset_x = render_context.offset_x
        offset_y = render_context.offset_y
        for row in self._counters:
            if offset_y > render_context.height:
                break  # remaining rows are below the bottom of the window
            if (offset_y + row.get_height()) > 0:
                cr.save()
                cr.translate(offset_x, offset_y)
                row.render(render_context)
                cr.restore()
            offset_y += row.get_height()
        # Hand the running offsets back so threads continue below the counters.
        render_context.offset_x = offset_x
        render_context.offset_y = offset_y

    def _render_threads(self, render_context):
        cr = render_context.cr
        offset_x = render_context.offset_x
        offset_y = render_context.offset_y
        for row in self._threads:
            if offset_y > render_context.height:
                break  # remaining rows are below the bottom of the window
            if (offset_y + row.get_height()) > 0:
                cr.save()
                cr.translate(offset_x, offset_y)
                row.render(render_context)
                cr.restore()
            offset_y += row.get_height()
        render_context.offset_x = offset_x
        render_context.offset_y = offset_y

    def _calculate_render_height(self):
        # Combined pixel height of every counter and thread row.
        total = 0
        for row in self._counters:
            total += row.get_height()
        for row in self._threads:
            total += row.get_height()
        return total

    def get_render_height(self):
        """Total pixel height of all rows combined."""
        return self._render_height
class ProfileRender:
    """ Render the data for a profiling session """
    def __init__(self, profile_data):
        # Viewport size in pixels; set properly on the first resize().
        self._width = 0.0
        self._height = 0.0
        self._profile_data = profile_data
        self._profile_data_objects = ProfileRenderObjects(profile_data)
        # Vertical scroll offset in pixels (kept <= 0 by _validate_viewport).
        self._offset_y = 0
        # initialise times at the left + right edges of the window
        self._start_time = profile_data.get_start_time()
        self._finish_time = profile_data.get_finish_time()

    def render(self, cr):
        # Draw everything for the current viewport (zoom + scroll state).
        offset_y = self._offset_y
        offset_x = 0
        render_context = RenderContext( cr, self._width, self._height, self._start_time, self._finish_time, offset_x, offset_y)
        self._profile_data_objects.render(render_context )

    def render_pointer(self, cr, pointer):
        # Draw a full-height vertical cursor line at the pointer's x position.
        (x,y) = pointer
        # NOTE(review): t is computed but never used — presumably intended for
        # a timestamp readout next to the cursor; confirm before removing.
        t = self._get_time_at_x(x)
        cr.set_source_rgb(0.0, 0.0, 0.0)
        cr.move_to(x,0)
        cr.line_to(x, self._height)
        cr.stroke()

    def resize(self, width, height):
        # Window size changed; store it and re-clamp the viewport.
        self._width = float(width)
        self._height = float(height)
        self._validate_viewport()

    def pan_by(self, dx, dy):
        # Pan horizontally by dx pixels (converted to time) and vertically by dy.
        dt = self._get_dt_for_dx( dx )
        # Clamp the time shift so we never pan past the recorded session.
        if dt > 0:
            dt = min(dt, self._start_time - self._profile_data.get_start_time())
        else:
            dt = max(dt, self._finish_time - self._profile_data.get_finish_time())
        self._start_time -= dt
        self._finish_time -= dt
        self._offset_y += dy
        self._validate_viewport()

    def scale_at(self, scale_factor, x, y):
        # Zoom in/out around the time under pixel column x, keeping that time
        # stationary on screen.
        x = float(x)
        x_time = self._get_time_at_x(x)
        self._start_time = x_time - ((x_time - self._start_time) / scale_factor)
        self._finish_time = x_time + ((self._finish_time - x_time) / scale_factor)
        self._validate_viewport()

    def _get_time_at_x(self, x):
        # Map a pixel column to a time, clamped to the visible time range.
        if x <= 0:
            return self._start_time
        elif x >= self._width:
            return self._finish_time
        else:
            duration = self._finish_time - self._start_time
            return ((x/self._width) * duration) + self._start_time

    def _get_dt_for_dx(self, dx):
        # Convert a horizontal pixel delta to a time delta at the current zoom.
        time_per_pixel = (self._finish_time - self._start_time) / self._width
        dt = dx * time_per_pixel
        return dt

    def _validate_viewport(self):
        # validate start / finish time: never show time outside the session
        profile_start_time = self._profile_data.get_start_time()
        profile_finish_time = self._profile_data.get_finish_time()
        if self._start_time < profile_start_time:
            self._start_time = profile_start_time
        if self._finish_time > profile_finish_time:
            self._finish_time = profile_finish_time
        # validate offset_y: if a gap opened below the last row, pull the
        # content back down; never scroll above the top (offset_y <= 0)
        profile_render_height = self._profile_data_objects.get_render_height()
        offset_y = self._offset_y
        bottom = self._offset_y + profile_render_height
        if bottom < self._height:
            offset_bottom = self._height - bottom
            offset_y += offset_bottom
        offset_y = min(0, offset_y)
        self._offset_y = offset_y
| 28.583691 | 128 | 0.743093 |
acf3ed5c7f8e553dac90ab1dd1bdef28ca689443 | 9,478 | py | Python | Competitors/Baseline_MultipleADWINs_withIterations.py | Lucciola111/stream_autoencoder_windowing | 5456b07bd20220c987598db2cdb832d8195e1575 | [
"MIT"
] | 4 | 2021-09-16T05:50:25.000Z | 2021-12-31T07:04:55.000Z | Competitors/Baseline_MultipleADWINs_withIterations.py | Lucciola111/stream_autoencoder_windowing | 5456b07bd20220c987598db2cdb832d8195e1575 | [
"MIT"
] | null | null | null | Competitors/Baseline_MultipleADWINs_withIterations.py | Lucciola111/stream_autoencoder_windowing | 5456b07bd20220c987598db2cdb832d8195e1575 | [
"MIT"
] | 1 | 2021-12-16T06:53:08.000Z | 2021-12-16T06:53:08.000Z | import sys
import numpy as np
from sklearn.ensemble import RandomForestClassifier
from time import process_time as timer
from skmultiflow.drift_detection.adwin import ADWIN
from TrainingFunctions.LoadDataStream import load_data_stream
from TrainingFunctions.PreprocessingWithLabels import preprocessing_with_labels
from TrainingFunctions.PreprocessingWithoutLabels import preprocessing_without_labels
from TrainingFunctions.DetectDriftMultipleADWINSOnline import detect_drift_multiple_adwins_online
from EvaluationFunctions.GeneratePerformanceMetrics import generate_performance_metrics
from EvaluationFunctions.GenerateResultsTableIterations import generate_results_table_iterations
from Utility_Functions.CreateResultsFileName import create_results_file_name
from Utility_Functions.GPU import setup_machine
# Use GPU
setup_machine(cuda_device=2)

# 0. Set all parameters
n_iterations = 10
# Set agreement rate for drift detection (fraction of dimensions that must
# agree before a drift is declared)
agreement_rate = 0.1
# Set option for initializing ADWIN after a detected drift
reinitialize_adwin = False

# 1. Load Data
# Differentiate whether data is provided in separate train and test file
separate_train_test_file = False
image_data = False
drift_labels_known = True
proxy_evaluation = False
# At least one evaluation mode must be enabled, otherwise nothing is measured.
if not drift_labels_known and not proxy_evaluation:
    print("Error: Change detection evaluation and/or proxy evaluation missing!")
    sys.exit()
# Initialize parameters
if not proxy_evaluation:
    acc_vector = False
if not drift_labels_known:
    drift_labels = False

# Load path and name of dataset
if separate_train_test_file:
    path = "IBDD_Datasets/benchmark_real/"
    # dataset = "Yoga"
    # dataset = "StarLightCurves"
    # dataset = "Heartbeats"
elif image_data:
    path = "Generated_Streams/Image_Data_Drift_And_Classifier_Labels/"
    # dataset = "RandomMNIST_and_FashionMNIST_SortAllNumbers19DR_2021-08-06_11.07.pickle"
else:
    if drift_labels_known:
        if proxy_evaluation:
            path = "Generated_Streams/Drift_And_Classifier_Labels/"
            # dataset = "RandomRandomRBF_50DR_100Dims_50Centroids_1MinDriftCentroids_300MinL_2000MaxL_2021-08-06_10.57.pickle"
        else:
            path = "Generated_Streams/Drift_Labels/"
            # Experiments Evaluation
            # dataset = "RandomNumpyRandomNormalUniform_onlyMeanDrift_var0.01_50DR_100Dims_1MinDimBroken_300MinL_2000MaxL_2021-08-10_10.32.pickle"
            # dataset = "RandomNumpyRandomNormalUniform_onlyMeanDrift_var0.05_50DR_100Dims_1MinDimBroken_300MinL_2000MaxL_2021-08-06_10.42.pickle"
            # dataset = "RandomNumpyRandomNormalUniform_onlyMeanDrift_var0.25_50DR_100Dims_1MinDimBroken_300MinL_2000MaxL_2021-08-06_10.45.pickle"
            # dataset = "RandomNumpyRandomNormalUniform_onlyVarianceDrift_50DR_100Dims_1MinDimBroken_300MinL_2000MaxL_2021-08-06_11.15.pickle"
            # dataset = "RandomNumpyRandomNormalUniform_50DR_100Dims_100MinDimBroken_300MinL_2000MaxL_2021-08-06_10.54.pickle"
            # dataset = "RandomNumpyRandomNormalUniform_50DR_100Dims_1MinDimBroken_300MinL_2000MaxL_2021-08-06_10.53.pickle"
            # dataset = "Mixed_300MinDistance_DATASET_A_RandomNumpyRandomNormalUniform_50DR_100Dims_1MinDimBroken_DATASET_B_RandomRandomRBF_50DR_100Dims_50Centroids_1MinDriftCentroids.pickle"
            # Experiments Time Complexity
            # "Time_RandomNumpyRandomNormalUniform_10DR_10Dims_1MinDimBroken_300MinL_2000MaxL_2021-09-06_22.24.pickle",
            # "Time_RandomNumpyRandomNormalUniform_10DR_50Dims_5MinDimBroken_300MinL_2000MaxL_2021-09-06_22.25.pickle",
            # "Time_RandomNumpyRandomNormalUniform_10DR_100Dims_10MinDimBroken_300MinL_2000MaxL_2021-09-06_22.26.pickle",
            # "Time_RandomNumpyRandomNormalUniform_10DR_500Dims_50MinDimBroken_300MinL_2000MaxL_2021-09-06_22.26.pickle",
            # "Time_RandomNumpyRandomNormalUniform_10DR_1000Dims_100MinDimBroken_300MinL_2000MaxL_2021-09-06_22.27.pickle"
    else:
        path = "Generated_Streams/Classifier_Labels/"
# NOTE(review): dataset is left empty here — uncomment/choose one of the
# dataset names above before running; confirm intended placement.
dataset = ""

# Load data stream
data_stream = load_data_stream(dataset=dataset, path=path, separate_train_test_file=separate_train_test_file,
                               image_data=image_data, drift_labels_known=drift_labels_known,
                               proxy_evaluation=proxy_evaluation)
# Set number of instances
n_instances = data_stream.shape[0]
# Set number of train data, validation data, and test data
# (dataset-specific training sizes for the real-world benchmarks)
if dataset == "Yoga":
    n_train_data = 300
elif dataset == "Heartbeats":
    n_train_data = 500
else:
    n_train_data = 1000
n_val_data = 0
n_test_data = int(n_instances - n_val_data - n_train_data)

# 2. Pre-processing
# Separate data stream and drift labels (last column holds the drift label)
if drift_labels_known:
    data_stream, drift_labels = data_stream[:, :-1], data_stream[:, -1]
    drift_labels = drift_labels[(len(drift_labels) - n_test_data):]
# Preprocess data stream
if proxy_evaluation:
    train_X, train_y, val_X, val_y, test_X, test_y = preprocessing_with_labels(
        data_stream, n_instances, n_train_data, n_val_data, n_test_data, image_data)
else:
    train_X, val_X, test_X = preprocessing_without_labels(
        data_stream, n_instances, n_train_data, n_val_data, n_test_data)
# Set number of dimensions
n_dimensions = train_X.shape[1]

# Start global iterations: accumulators for per-iteration results
all_performance_metrics = []
all_accuracies = []
all_times_per_example = []
# Repeat the whole experiment n_iterations times to average out run-to-run noise.
for iteration in range(n_iterations):
    print("Global Iteration:")
    print(iteration)
    # 3. Train classifier for Evaluation
    if proxy_evaluation:
        # Fresh random forest per iteration, fit on train + validation data.
        model_classifier = RandomForestClassifier(n_estimators=100, max_depth=5, random_state=0)
        model_classifier.fit(np.concatenate((train_X, val_X), axis=0), np.concatenate((train_y, val_y), axis=0))
        acc_vector = np.zeros(len(test_y), dtype=int)
    start = timer()
    # 4. Drift Detection:
    # Test: Apply n ADWINs on n dimensions
    # Train: Retrain classifier if >=agreement_threshold dimensions detect a drift
    agreement_threshold = int(agreement_rate * n_dimensions)
    adwins = {}
    widths_multiple_adwins = []
    drift_decisions_fusion = []
    drift_points_multiple_adwins = []
    for dim in range(n_dimensions):
        # Initialize ADWINs (one detector per feature dimension)
        adwins[dim] = ADWIN(delta=0.002)
    for idx in range(n_test_data):
        # If we have validation classifier: Set parameters for retraining classifier after drift
        model_classifier_parameter = model_classifier if proxy_evaluation else False
        test_y_parameter = test_y if proxy_evaluation else False
        # Test: Make prediction for element with classifier
        if proxy_evaluation:
            y_pred = model_classifier.predict(test_X[idx].reshape(1, -1))
            if y_pred == test_y[idx]:
                acc_vector[idx] = 1
        # Detect Drift with n ADWINs for n dimensions
        adwins, adwins_width, drift_decision_fusion, drift_point_dims = detect_drift_multiple_adwins_online(
            test_X=test_X, idx=idx, adwins=adwins, agreement_threshold=agreement_threshold,
            reinitialize_adwin=reinitialize_adwin, model=model_classifier_parameter, test_y=test_y_parameter)
        # Accumulate per-dimension window widths column-wise over time.
        widths_multiple_adwins = adwins_width if len(widths_multiple_adwins) == 0 else np.column_stack(
            (widths_multiple_adwins, adwins_width))
        drift_decisions_fusion.append(drift_decision_fusion)
        drift_points_multiple_adwins.append(drift_point_dims)
        if idx % 8000 == 0:
            # Coarse progress indicator for long streams.
            print(f"Iteration of Loop: {idx}/{len(test_X)}")
    # 5. Evaluation
    # 5.1 Drift Detection Evaluation
    if drift_labels_known:
        # Define acceptance levels (max distance, in instances, between a true
        # drift and a detection for it to count as found)
        acceptance_levels = [60, 120, 180, 300]
        # Generate performance metrics
        simple_metrics = False
        complex_metrics = True
        performance_metrics = generate_performance_metrics(
            drift_decisions=drift_decisions_fusion, drift_labels=drift_labels, acceptance_levels=acceptance_levels,
            simple_metrics=simple_metrics, complex_metrics=complex_metrics, index_name='ADWIN-' + str(int(agreement_rate*100)))
        all_performance_metrics.append(performance_metrics)
    # 5.2 Proxy Evaluation
    if proxy_evaluation:
        # Calculate mean accuracy of classifier
        mean_acc = np.mean(acc_vector) * 100
        print('Average classification accuracy: {}%'.format(np.round(mean_acc, 2)))
        all_accuracies.append(mean_acc)
    # Measure the elapsed time
    end = timer()
    execution_time = end - start
    print('Time per example: {} sec'.format(np.round(execution_time / len(test_X), 4)))
    print('Total time: {} sec'.format(np.round(execution_time, 2)))
    all_times_per_example.append(execution_time / len(test_X))
# Generate evaluation results table (aggregates the per-iteration results)
evaluation_results = generate_results_table_iterations(
    drift_labels_known=drift_labels_known, proxy_evaluation=proxy_evaluation,
    all_performance_metrics=all_performance_metrics, all_accuracies=all_accuracies,
    all_times_per_example=all_times_per_example)

# 7. Save results in file from iterations
# Create file name
file_name = create_results_file_name(
    dataset=dataset, algorithm_name='Baseline_MultipleADWINs', drift_labels_known=drift_labels_known,
    proxy_evaluation=proxy_evaluation, image_data=image_data)
# Save file (CSV name encodes the experiment configuration)
evaluation_results.to_csv(
    str(file_name)
    + '_' + str(n_iterations) + 'ITERATIONS_'
    + '_agreementRate' + str(agreement_rate)
    + '_reinitializeADWIN' + str(reinitialize_adwin)
    + '.csv')
| 44.497653 | 191 | 0.761448 |
acf3edf2940f890b1d2d682a3e806bbd40be9959 | 39,836 | py | Python | tests/unit/core/test_fileoperations.py | myungseokang/aws-elastic-beanstalk-cli | 339ff2660058ad76d0aef2e86ebe97d68f5f2789 | [
"Apache-2.0"
] | 110 | 2020-01-15T22:58:46.000Z | 2022-03-27T20:47:33.000Z | tests/unit/core/test_fileoperations.py | QPC-database/aws-elastic-beanstalk-cli | 87ad9d8bbe5e4e7cb01b1bd4392eda33cb1943f7 | [
"Apache-2.0"
] | 89 | 2020-01-15T23:18:34.000Z | 2022-03-31T21:56:05.000Z | tests/unit/core/test_fileoperations.py | QPC-database/aws-elastic-beanstalk-cli | 87ad9d8bbe5e4e7cb01b1bd4392eda33cb1943f7 | [
"Apache-2.0"
] | 50 | 2020-01-15T22:58:53.000Z | 2022-02-11T17:39:28.000Z | # -*- coding: UTF-8 -*-
# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import shutil
import stat
import sys
import yaml
import zipfile
import pytest
import unittest
from mock import call, patch, Mock
from ebcli.core import fileoperations
from ebcli.objects.buildconfiguration import BuildConfiguration
from ebcli.objects.exceptions import NotInitializedError, NotFoundError
class TestFileOperations(unittest.TestCase):
    def create_config_file(self):
        # Convenience helper: write a standard test config file.
        app_name = 'ebcli-test'
        region = 'us-east-1'
        solution = 'my-solution-stack'
        fileoperations.create_config_file(app_name, region, solution)

    def setUp(self):
        # Run each test inside a scratch 'testDir' containing an empty
        # .elasticbeanstalk directory and a fake AWS home under ./home.
        self.test_root = os.getcwd()
        if not os.path.exists('testDir'):
            os.makedirs('testDir')
        os.chdir('testDir')
        if not os.path.exists(fileoperations.beanstalk_directory):
            os.makedirs(fileoperations.beanstalk_directory)
        if not os.path.exists('home'):
            os.makedirs('home')
        # Redirect the module's AWS config paths into the sandbox.
        fileoperations.aws_config_folder = os.path.join('home', '.aws')
        fileoperations.aws_config_location = os.path.join('home', '.aws', 'config')

    def tearDown(self):
        # Leave the sandbox and delete it.
        os.chdir(self.test_root)
        if os.path.exists('testDir'):
            shutil.rmtree('testDir', ignore_errors=True)
    def test_get_aws_home(self):
        # Smoke test: resolving the AWS home directory must not raise.
        fileoperations.get_aws_home()
def test_get_ssh_folder(self):
try:
fileoperations.get_ssh_folder()
except OSError as ex:
# If access is denied assume we are running on a limited environment
if ex.errno == 13:
pass
    def test_create_config_file_no_file(self):
        # Creating the config when none exists writes the file and persists
        # the application name under [global].
        if os.path.exists(fileoperations.local_config_file):
            os.remove(fileoperations.local_config_file)
        self.assertFalse(os.path.exists(fileoperations.local_config_file))

        app_name = 'ebcli-test'
        region = 'us-east-1'
        solution = 'my-solution-stack'
        fileoperations.create_config_file(app_name, region, solution)

        self.assertTrue(os.path.exists(fileoperations.local_config_file))
        rslt = fileoperations.get_config_setting('global', 'application_name')
        self.assertEqual(app_name, rslt)

    def test_create_config_file_no_dir(self):
        # Creating the config also creates the .elasticbeanstalk directory.
        if os.path.exists(fileoperations.beanstalk_directory):
            shutil.rmtree(fileoperations.beanstalk_directory, ignore_errors=True)
        self.assertFalse(os.path.exists(fileoperations.beanstalk_directory))

        app_name = 'ebcli-test'
        region = 'us-east-1'
        solution = 'my-solution-stack'
        fileoperations.create_config_file(app_name, region, solution)

        self.assertTrue(os.path.exists(fileoperations.beanstalk_directory))
        self.assertTrue(os.path.exists(fileoperations.local_config_file))
        rslt = fileoperations.get_config_setting('global', 'application_name')
        self.assertEqual(app_name, rslt)

    def test_create_config_file_file_exists(self):
        # Re-creating the config must not wipe unrelated sections or keys.
        fileoperations.write_config_setting('global', 'randomKey', 'val')
        fileoperations.write_config_setting('test', 'application_name', 'app1')

        app_name = 'ebcli-test'
        region = 'us-east-1'
        solution = 'my-solution-stack'
        fileoperations.create_config_file(app_name, region, solution)

        key = fileoperations.get_config_setting('global', 'randomKey')
        app = fileoperations.get_config_setting('global', 'application_name')
        test = fileoperations.get_config_setting('test', 'application_name')

        self.assertEqual(key, 'val')
        self.assertEqual(app, app_name)
        self.assertEqual(test, 'app1')

    def test_project_root__traverse_at_root(self):
        # Already at the project root: traverse() must not change directory.
        if not os.path.exists(fileoperations.beanstalk_directory):
            os.makedirs(fileoperations.beanstalk_directory)
        cwd = os.getcwd()
        fileoperations.ProjectRoot.traverse()
        nwd = os.getcwd()
        self.assertEqual(cwd, nwd)

    def test_project_root__traverse_deep(self):
        # traverse() climbs from a nested directory back up to the root.
        cwd = os.getcwd()
        dir = 'fol1' + os.path.sep + 'fol2' + os.path.sep + 'fol3'
        os.makedirs(dir)
        os.chdir(dir)
        fileoperations.ProjectRoot.traverse()
        nwd = os.getcwd()
        self.assertEqual(cwd, nwd)

    def test_project_root__traverse_no_root(self):
        # With no .elasticbeanstalk anywhere above, traverse() must raise.
        cwd = os.getcwd()
        try:
            os.chdir(os.path.pardir)
            os.chdir(os.path.pardir)
            try:
                fileoperations.ProjectRoot.traverse()
                raise Exception('Should have thrown NotInitializedException')
            except fileoperations.NotInitializedError:
                pass
        finally:
            os.chdir(cwd)

    @patch('ebcli.core.fileoperations.LOG.debug')
    def test_project_root__root_is_cached_once_found(
            self,
            debug_mock
    ):
        # The directory walk (observed via LOG.debug) happens only on the
        # first traverse(); subsequent calls use the cached root.
        cwd = os.getcwd()
        fol1 = os.path.abspath('fol1')
        fol2 = os.path.abspath(os.path.join(fol1, 'fol2'))
        fol3 = os.path.abspath(os.path.join(fol2, 'fol3'))
        os.makedirs(fol3)
        os.chdir(fol3)

        def traverse_to_root_and_assert():
            fileoperations.ProjectRoot.traverse()
            nwd = os.getcwd()
            self.assertEqual(cwd, nwd)

        traverse_to_root_and_assert()
        traverse_to_root_and_assert()
        traverse_to_root_and_assert()

        # Exactly one walk's worth of debug messages despite three traversals.
        debug_mock.assert_has_calls(
            [
                call('beanstalk directory not found in {} -Going up a level'.format(fol3)),
                call('beanstalk directory not found in {} -Going up a level'.format(fol2)),
                call('beanstalk directory not found in {} -Going up a level'.format(fol1)),
                call('Project root found at: {}'.format(os.path.abspath(cwd)))
            ]
        )

    def test_project_root__traverse__file_system_root_reached(self):
        # If chdir(..) never changes the directory (file-system root),
        # traverse() must give up with NotInitializedError instead of looping.
        if os.path.isdir('.elasticbeanstalk'):
            shutil.rmtree('.elasticbeanstalk', ignore_errors=True)
        cwd = os.getcwd()
        with patch('os.getcwd') as getcwd_mock:
            getcwd_mock.return_value = cwd
            with self.assertRaises(fileoperations.NotInitializedError) as context_manager:
                fileoperations.ProjectRoot.traverse()
            self.assertEqual(
                'EB is not yet initialized',
                str(context_manager.exception)
            )
    def test_write_config_setting_no_section(self):
        # Writing to a missing section creates that section.
        fileoperations.create_config_file('ebcli-test', 'us-east-1',
                                          'my-solution-stack')
        dict = fileoperations._get_yaml_dict(fileoperations.local_config_file)
        self.assertFalse('mytestsection' in dict)
        fileoperations.write_config_setting('mytestsection',
                                            'testkey', 'value')
        dict = fileoperations._get_yaml_dict(fileoperations.local_config_file)
        self.assertTrue('mytestsection' in dict)

    def test_write_config_setting_no_option(self):
        # Writing a new key into an existing section adds the key.
        fileoperations.create_config_file('ebcli-test', 'us-east-1',
                                          'my-solution-stack')
        fileoperations.write_config_setting('mytestsection', 'notmykey', 'val')
        dict = fileoperations._get_yaml_dict(fileoperations.local_config_file)
        self.assertTrue('mytestsection' in dict)
        self.assertFalse('testkey' in dict['mytestsection'])
        fileoperations.write_config_setting('mytestsection',
                                            'testkey', 'value')
        dict = fileoperations._get_yaml_dict(fileoperations.local_config_file)
        self.assertTrue('mytestsection' in dict)
        self.assertTrue('testkey' in dict['mytestsection'])
        self.assertEqual(dict['mytestsection']['testkey'], 'value')

    def test_write_config_setting_override(self):
        # Writing an existing key overwrites its value.
        fileoperations.create_config_file('ebcli-test', 'us-east-1',
                                          'my-solution-stack')
        dict = fileoperations._get_yaml_dict(fileoperations.local_config_file)
        self.assertTrue('global' in dict)
        self.assertTrue('application_name' in dict['global'])
        self.assertTrue('application_name' in dict['global'])
        self.assertEqual(dict['global']['application_name'], 'ebcli-test')
        fileoperations.write_config_setting('global',
                                            'application_name', 'new_name')
        dict = fileoperations._get_yaml_dict(fileoperations.local_config_file)
        self.assertEqual(dict['global']['application_name'], 'new_name')

    def test_write_config_setting_no_file(self):
        # Writing when no config file exists creates the file.
        if os.path.exists(fileoperations.local_config_file):
            os.remove(fileoperations.local_config_file)
        self.assertFalse(os.path.exists(fileoperations.local_config_file))
        fileoperations.write_config_setting('mytestsection',
                                            'testkey', 'value')
        self.assertTrue(os.path.exists(fileoperations.local_config_file))
        dict = fileoperations._get_yaml_dict(fileoperations.local_config_file)
        self.assertTrue('mytestsection' in dict)

    def test_write_config_setting_standard(self):
        # Round trip: a written value is read back unchanged.
        fileoperations.write_config_setting('global', 'mysetting', 'value')
        result = fileoperations.get_config_setting('global', 'mysetting')
        self.assertEqual(result, 'value')

    def test_get_config_setting_no_global(self):
        # Only a local config present: its value is returned.
        if os.path.exists(fileoperations.global_config_file):
            os.remove(fileoperations.global_config_file)
        self.assertFalse(os.path.exists(fileoperations.global_config_file))
        fileoperations.create_config_file('ebcli-test', 'us-east-1',
                                          'my-solution-stack')
        result = fileoperations.get_config_setting('global',
                                                   'application_name')
        self.assertEqual(result, 'ebcli-test')

    def test_get_config_setting_no_local(self):
        # Only a global config present: its value is returned.
        config = {'global': {'application_name': 'myApp'}}
        with open(fileoperations.global_config_file, 'w') as f:
            f.write(yaml.dump(config, default_flow_style=False))
        self.assertTrue(os.path.exists(fileoperations.global_config_file))
        if os.path.exists(fileoperations.local_config_file):
            os.remove(fileoperations.local_config_file)
        self.assertFalse(os.path.exists(fileoperations.local_config_file))
        result = fileoperations.get_config_setting('global',
                                                   'application_name')
        self.assertEqual(result, 'myApp')

    def test_get_config_setting_no_files(self):
        # Neither config file present: None is returned (no exception).
        if os.path.exists(fileoperations.local_config_file):
            os.remove(fileoperations.local_config_file)
        self.assertFalse(os.path.exists(fileoperations.local_config_file))
        if os.path.exists(fileoperations.global_config_file):
            os.remove(fileoperations.global_config_file)
        self.assertFalse(os.path.exists(fileoperations.global_config_file))
        result = fileoperations.get_config_setting('global',
                                                   'application_name')
        self.assertEqual(result, None)

    def test_get_config_setting_merge(self):
        # Both files present: the local config wins over the global one.
        config = {'global': {'application_name': 'myApp'}}
        with open(fileoperations.global_config_file, 'w') as f:
            f.write(yaml.dump(config, default_flow_style=False))
        self.assertTrue(os.path.exists(fileoperations.global_config_file))
        fileoperations.create_config_file('ebcli-test', 'us-east-1',
                                          'my-solution-stack')
        result = fileoperations.get_config_setting('global',
                                                   'application_name')
        self.assertEqual(result, 'ebcli-test')

    def test_get_project_root_at_root(self):
        # At the root already: result is the cwd and cwd is not changed.
        cwd = os.getcwd()
        self.assertEqual(cwd, fileoperations.get_project_root())
        self.assertEqual(cwd, os.getcwd())

    def test_project_root__traverse_deep2(self):
        # get_project_root() also works from a nested directory.
        cwd = os.getcwd()
        dir = 'fol1' + os.path.sep + 'fol2' + os.path.sep + 'fol3'
        os.makedirs(dir)
        os.chdir(dir)
        self.assertEqual(cwd, fileoperations.get_project_root())

    def test_traverse_to_project_no_root(self):
        # No project root above: get_project_root() raises.
        os.chdir(os.path.pardir)
        os.chdir(os.path.pardir)
        self.assertRaises(NotInitializedError, fileoperations.get_project_root)

    def test_inside_ebcli_project(self):
        # setUp created .elasticbeanstalk, so we are inside a project.
        self.assertTrue(fileoperations.inside_ebcli_project())

    def test_inside_ebcli_project__false(self):
        # Removing .elasticbeanstalk means we are no longer inside a project.
        shutil.rmtree(fileoperations.beanstalk_directory, ignore_errors=True)
        self.assertFalse(fileoperations.inside_ebcli_project())

    @patch('ebcli.core.fileoperations.json.loads')
    @patch('ebcli.core.fileoperations.read_from_text_file')
    def test_get_json_dict(self, read_from_data_file, loads):
        # get_json_dict reads the file as text, then parses it via json.loads.
        read_from_data_file.return_value = '{}'
        loads.return_value = {}
        mock_path = 'a{0}b{0}c{0}file.json'.format(os.path.sep)
        self.assertEqual(fileoperations.get_json_dict(mock_path), {})
        read_from_data_file.assert_called_once_with(mock_path)
        loads.assert_called_once_with('{}')

    @patch('ebcli.core.fileoperations.get_project_root')
    def test_project_file_path(self, get_project_root):
        # project_file_path joins the filename onto the project root.
        get_project_root.side_effect = [os.path.sep]
        expected_file_path = '{}foo'.format(os.path.sep)
        self.assertEqual(fileoperations.project_file_path('foo'),
                         expected_file_path)

    @patch('ebcli.core.fileoperations.file_exists')
    @patch('ebcli.core.fileoperations.project_file_path')
    def test_project_file_exists(self, project_file_path,
                                 file_exists):
        # project_file_exists delegates to project_file_path + file_exists.
        project_file_path.side_effect = ['{}foo'.format(os.path.sep)]
        file_exists.return_value = True
        self.assertTrue(fileoperations.project_file_exists('foo'))
        project_file_path.assert_called_once_with('foo')
        file_exists.assert_called_once_with('{}foo'.format(os.path.sep))
    @patch('ebcli.core.fileoperations.codecs')
    @patch('ebcli.core.fileoperations.safe_load')
    def test_get_build_spec_info(self, mock_yaml_load, mock_codecs):
        # A well-formed buildspec header yields a fully-populated
        # BuildConfiguration (compared via its string representation).
        image = 'aws/codebuild/eb-java-8-amazonlinux-64:2.1.3'
        compute_type = 'BUILD_GENERAL1_SMALL'
        service_role = 'eb-test'
        timeout = 60
        mock_yaml_load.return_value = {fileoperations.buildspec_config_header:
                                           {'ComputeType': compute_type,
                                            'CodeBuildServiceRole': service_role,
                                            'Image': image,
                                            'Timeout': timeout}}
        expected_build_config = BuildConfiguration(image=image, compute_type=compute_type,
                                                   service_role=service_role, timeout=timeout)
        actual_build_config = fileoperations.get_build_configuration()
        self.assertEqual(expected_build_config.__str__(), actual_build_config.__str__(),
                         "Expected '{0}' but got: {1}".format(expected_build_config.__str__(),
                                                              actual_build_config.__str__()))

    @patch('ebcli.core.fileoperations.codecs')
    @patch('ebcli.core.fileoperations.safe_load')
    def test_get_build_spec_info_with_bad_header(self, mock_yaml_load, mock_codecs):
        # An unrecognized top-level header means no build configuration.
        image = 'aws/codebuild/eb-java-8-amazonlinux-64:2.1.3'
        compute_type = 'BUILD_GENERAL1_SMALL'
        service_role = 'eb-test'
        timeout = 60
        mock_yaml_load.return_value = {'BadHeader':
                                           {'ComputeType': compute_type,
                                            'CodeBuildServiceRole': service_role,
                                            'Image': image,
                                            'Timeout': timeout}}
        actual_build_config = fileoperations.get_build_configuration()
        self.assertIsNone(actual_build_config)

    @patch('ebcli.core.fileoperations.codecs')
    @patch('ebcli.core.fileoperations.safe_load')
    def test_get_build_spec_info_with_no_values(self, mock_yaml_load, mock_codecs):
        # A present-but-empty header yields a BuildConfiguration whose fields
        # are all None.
        mock_yaml_load.return_value = {fileoperations.buildspec_config_header: None}
        actual_build_config = fileoperations.get_build_configuration()
        self.assertIsNone(actual_build_config.compute_type)
        self.assertIsNone(actual_build_config.image)
        self.assertIsNone(actual_build_config.service_role)
        self.assertIsNone(actual_build_config.timeout)
def test_build_spec_file_exists_yaml(self):
    """`build_spec_exists` recognizes only 'buildspec.yml', not '.yaml'."""
    file = 'buildspec.yaml'
    open(file, 'a').close()
    # Bug fix: the failure message previously read "Expected to find ...",
    # which contradicts the assertFalse it annotates (compare the .yml
    # sibling test, which asserts True with the same message).
    self.assertFalse(
        fileoperations.build_spec_exists(),
        "Expected not to find build spec file with filename: {0}".format(file)
    )
    os.remove(file)
def test_build_spec_file_exists_yml(self):
file = 'buildspec.yml'
open(file, 'a').close()
self.assertTrue(fileoperations.build_spec_exists(),
"Expected to find build spec file with filename: {0}".format(file))
os.remove(file)
def test_get_filename_without_extension_with_path(self):
filepath = '{1}tmp{1}dir{1}test{1}{0}'.format('foo.txt', os.path.sep)
actual_file = fileoperations.get_filename_without_extension(filepath)
self.assertEqual('foo', actual_file, "Expected {0} but got: {1}"
.format('foo', actual_file))
def test_get_filename_without_extension_with_file(self):
actual_file = fileoperations.get_filename_without_extension('foo.txt')
self.assertEqual('foo', actual_file, "Expected {0} but got: {1}"
.format('foo', actual_file))
def test_zip_append_archive(self):
os.chdir(self.test_root)
os.chdir('testDir')
open('source_file.txt', 'w+').close()
open('target_file.txt', 'w+').close()
os.system('python -m zipfile -c source_file.zip source_file.txt')
os.system('python -m zipfile -c target_file.zip target_file.txt')
fileoperations.zip_append_archive('target_file.zip', 'source_file.zip')
target_file_zip = zipfile.ZipFile('target_file.zip', 'r', allowZip64=True)
self.assertEqual(['source_file.txt', 'target_file.txt'], sorted(target_file_zip.namelist()))
@patch('ebcli.core.fileoperations.get_editor')
@patch('ebcli.core.fileoperations.os.system')
def test_open_file_for_editing(self, system_mock, get_editor_mock):
file_to_open = 'config.yml'
get_editor_mock.return_value = 'vim'
system_mock.side_effect = None
fileoperations.open_file_for_editing(file_to_open)
@patch('ebcli.core.fileoperations.get_editor')
@patch('os.system')
@patch('ebcli.core.fileoperations.io.log_error')
def test_open_file_for_editing__editor_could_not_open_file(
self,
log_error_mock,
system_mock,
get_editor_mock
):
file_to_open = 'config.yml'
get_editor_mock.return_value = 'vim'
system_mock.side_effect = OSError
fileoperations.open_file_for_editing(file_to_open)
log_error_mock.assert_called_with(
'EB CLI cannot open the file using the editor vim.'
)
def test_get_platform_from_env_yaml(self):
with open('env.yaml', 'w') as yaml_file:
yaml_file.write(
'SolutionStack: 64bit Amazon Linux 2015.09 v2.0.6 running Multi-container Docker 1.7.1 (Generic)'
)
self.assertEqual(
'64bit Amazon Linux 2015.09 v2.0.6 running Multi-container Docker 1.7.1 (Generic)',
fileoperations.get_platform_from_env_yaml()
)
def test_env_yaml_exists(self):
self.assertFalse(fileoperations.env_yaml_exists())
os.mkdir('src')
open(os.path.join('src', 'env.yaml'), 'w').close()
self.assertFalse(fileoperations.env_yaml_exists())
open(os.path.join('env.yaml'), 'w').close()
self.assertTrue(fileoperations.env_yaml_exists())
def test_get_filename_without_extension(self):
self.assertEqual('', fileoperations.get_filename_without_extension(''))
self.assertEqual('file', fileoperations.get_filename_without_extension('file.zip'))
self.assertEqual('file', fileoperations.get_filename_without_extension('src/file.zip'))
self.assertEqual('file', fileoperations.get_filename_without_extension('src/file.zip.app'))
def test_get_eb_file_full_location(self):
self.assertEqual(
os.path.join(os.path.abspath('.'), '.elasticbeanstalk'),
fileoperations.get_eb_file_full_location('')
)
self.assertEqual(
os.path.join(os.path.abspath('.'), '.elasticbeanstalk', 'app'),
fileoperations.get_eb_file_full_location('app')
)
def test_get_ebignore_location(self):
self.assertEqual(
os.path.join(os.path.abspath('.'), '.ebignore'),
fileoperations.get_ebignore_location()
)
def test_readlines_from_text_file(self):
fileoperations.write_to_text_file(
"""aaa
bbb
ccc""",
location='file'
)
self.assertEqual(
['aaa\n', 'bbb\n', 'ccc'],
fileoperations.readlines_from_text_file('file')
)
fileoperations.append_to_text_file(
location='file',
data="""dddd"""
)
self.assertEqual(
'aaa{linesep}bbb{linesep}cccdddd'.format(linesep=os.linesep).encode('utf-8'),
fileoperations.read_from_data_file('file')
)
def test_write_json_dict(self):
fileoperations.write_json_dict('{"EnvironmentName": "my-environment"}', 'file')
self.assertEqual(
'{"EnvironmentName": "my-environment"}',
fileoperations.get_json_dict('file')
)
def test_get_application_from_file__filename_provided(self):
    # NOTE(review): despite its name, this test exercises
    # get_environment_from_file, and it duplicates
    # test_get_environment_from_file__file_does_not_exist below --
    # confirm whether it should call get_application_from_file instead.
    with self.assertRaises(NotFoundError) as context_manager:
        fileoperations.get_environment_from_file('my-environment', path='foo path')
    self.assertEqual(
        'Can not find configuration file in following path: foo path',
        str(context_manager.exception)
    )
def test_get_application_from_file__app_yml_file_does_not_exist(self):
self.assertIsNone(fileoperations.get_application_from_file('my-application'))
@patch('ebcli.core.fileoperations.codecs.open')
def test_get_application_from_file__yaml_parse_errors(self, codecs_mock):
open('.elasticbeanstalk/my-application.app.yml', 'w').close()
codecs_mock.side_effect = fileoperations.ScannerError
with self.assertRaises(fileoperations.InvalidSyntaxError):
fileoperations.get_application_from_file('my-application')
codecs_mock.side_effect = fileoperations.ParserError
with self.assertRaises(fileoperations.InvalidSyntaxError):
fileoperations.get_application_from_file('my-application')
def test_get_application_from_file__gets_app_name(self):
with open('.elasticbeanstalk/my-application.app.yml', 'w') as file:
file.write('{"EnvironmentName": "my-environment"}')
self.assertEqual(
{
'EnvironmentName': 'my-environment'
},
fileoperations.get_application_from_file('my-application')
)
def test_get_environment_from_file__filename_provided(self):
with open('.elasticbeanstalk/user-modification.json', 'w') as file:
file.write('{"OptionSettings": {"namespace":{"option":"value"}}}')
self.assertEqual(
{
'OptionSettings': {
'namespace': {'option': 'value'}
}
},
fileoperations.get_environment_from_file('my-environment', path='.elasticbeanstalk/user-modification.json')
)
def test_get_environment_from_file__file_does_not_exist(self):
with self.assertRaises(NotFoundError) as context_manager:
fileoperations.get_environment_from_file('my-environment', path='foo path')
self.assertEqual(
'Can not find configuration file in following path: foo path',
str(context_manager.exception)
)
@patch('ebcli.core.fileoperations.safe_load')
@patch('ebcli.core.fileoperations.load')
def test_get_environment_from_file__yaml_parse_errors(self, load_mock, safe_load_mock):
open('.elasticbeanstalk/my-environment.env.yml', 'w').close()
safe_load_mock.side_effect = fileoperations.ScannerError
load_mock.side_effect = fileoperations.JSONDecodeError("foo", "", 0)
with self.assertRaises(fileoperations.InvalidSyntaxError):
fileoperations.get_environment_from_file('my-environment')
def test_get_environment_from_file__gets_environment(self):
with open('.elasticbeanstalk/my-environment.env.yml', 'w') as file:
file.write('{"EnvironmentName": "my-environment"}')
self.assertEqual(
{
'EnvironmentName': 'my-environment'
},
fileoperations.get_environment_from_file('my-environment')
)
def test_save_env_file(self):
env = {
'EnvironmentName': 'my-environment∂'
}
fileoperations.save_env_file(env)
self.assertEqual(
'EnvironmentName: "my-environment\\u2202"',
open(os.path.join('.elasticbeanstalk', 'my-environment∂.env.yml')).read().strip()
)
def test_save_app_file(self):
env = {
'ApplicationName': 'my-application∂'
}
fileoperations.save_app_file(env)
self.assertEqual(
'ApplicationName: "my-application\\u2202"',
open(os.path.join('.elasticbeanstalk', 'my-application∂.app.yml')).read().strip()
)
def test_get_editor__set_as_global(self):
os.environ['EDITOR'] = ''
fileoperations.write_config_setting('global', 'editor', 'vim')
self.assertEqual(
'vim',
fileoperations.get_editor()
)
def test_get_editor__set_as_environment_variable(self):
os.environ['EDITOR'] = 'vim'
fileoperations.write_config_setting('global', 'editor', None)
self.assertEqual(
'vim',
fileoperations.get_editor()
)
os.environ['EDITOR'] = ''
@pytest.mark.skipif(not sys.platform.startswith('win'), reason='Behaviour is exclusive to Windows')
def test_get_editor__cant_determine_editor__picks_default__windows(self):
os.environ['EDITOR'] = ''
fileoperations.write_config_setting('global', 'editor', None)
self.assertEqual('notepad.exe', fileoperations.get_editor())
@pytest.mark.skipif(sys.platform.startswith('win'), reason='Behaviour is exclusive to Linux')
def test_get_editor__cant_determine_editor__picks_default__non_windows(self):
os.environ['EDITOR'] = ''
fileoperations.write_config_setting('global', 'editor', None)
self.assertEqual('nano', fileoperations.get_editor())
def test_delete_env_file(self):
open(os.path.join('.elasticbeanstalk', 'my-environment.env.yml'), 'w').close()
open(os.path.join('.elasticbeanstalk', 'my-environment.ebe.yml'), 'w').close()
fileoperations.delete_env_file('my-environment')
def test_delete_app_file(self):
open(os.path.join('.elasticbeanstalk', 'my-application.env.yml'), 'w').close()
fileoperations.delete_app_file('my-application')
@unittest.skipIf(not hasattr(os, 'symlink'), reason='"symlink" appears to not have been defined on "os"')
@patch('ebcli.core.fileoperations._validate_file_for_archive')
def test_zip_up_project(self, _validate_file_for_archive_mock):
_validate_file_for_archive_mock.side_effect = lambda f: not f.endswith('.sock')
shutil.rmtree('home', ignore_errors=True)
os.mkdir('src')
os.mkdir(os.path.join('src', 'lib'))
open(os.path.join('src', 'lib', 'app.py'), 'w').write('import os')
open(os.path.join('src', 'lib', 'app.py~'), 'w').write('import os')
open(os.path.join('src', 'lib', 'ignore-this-file.py'), 'w').write('import os')
open(os.path.join('src', 'lib', 'test.sock'), 'w').write('mock socket file')
os.symlink(
os.path.join('src', 'lib', 'app.py'),
os.path.join('src', 'lib', 'app.py-copy')
)
os.mkdir(os.path.join('src', 'lib', 'api'))
if sys.version_info > (3, 0):
os.symlink(
os.path.join('src', 'lib', 'api'),
os.path.join('src', 'lib', 'api-copy'),
target_is_directory=True
)
else:
os.symlink(
os.path.join('src', 'lib', 'api'),
os.path.join('src', 'lib', 'api-copy')
)
open(os.path.join('src', 'lib', 'api', 'api.py'), 'w').write('import unittest')
fileoperations.zip_up_project(
'app.zip',
ignore_list=[os.path.join('src', 'lib', 'ignore-this-file.py')]
)
os.mkdir('tmp')
fileoperations.unzip_folder('app.zip', 'tmp')
self.assertTrue(os.path.exists(os.path.join('tmp', 'src', 'lib', 'app.py')))
self.assertTrue(os.path.exists(os.path.join('tmp', 'src', 'lib', 'api')))
self.assertTrue(os.path.exists(os.path.join('tmp', 'src', 'lib', 'app.py-copy')))
self.assertTrue(os.path.exists(os.path.join('tmp', 'src', 'lib', 'api-copy')))
self.assertFalse(os.path.exists(os.path.join('tmp', 'src', 'lib', 'app.py~')))
self.assertFalse(os.path.exists(os.path.join('tmp', 'src', 'lib', 'ignore-this-file.py')))
self.assertFalse(os.path.exists(os.path.join('tmp', 'src', 'lib', 'test.sock')))
def test_delete_app_versions(self):
os.mkdir(os.path.join('.elasticbeanstalk', 'app_versions'))
fileoperations.delete_app_versions()
self.assertFalse(
os.path.exists(os.path.join('.elasticbeanstalk', 'app_versions'))
)
@patch('ebcli.core.fileoperations.os_which')
def test_program_is_installed(self, os_which_mock):
os_which_mock.return_value = '/Users/name/ebcli-virtaulenv/bin/eb'
self.assertTrue(fileoperations.program_is_installed('eb'))
def test_get_logs_location(self):
self.assertEqual(
os.path.join(os.path.abspath('.'), '.elasticbeanstalk', 'logs', 'some-folder'),
fileoperations.get_logs_location('some-folder')
)
def test_get_zip_location(self):
self.assertEqual(
os.path.join(os.path.abspath('.'), '.elasticbeanstalk', 'app_versions', 'some-file'),
fileoperations.get_zip_location('some-file')
)
def test_get_project_root(self):
root = os.getcwd()
os.mkdir('src')
os.mkdir(os.path.join('src', 'app'))
os.mkdir(os.path.join('src', 'app', 'dir'))
os.chdir(os.path.join('src', 'app', 'dir'))
self.assertEqual(
root,
fileoperations.get_project_root()
)
def test_save_to_aws_config__config_file_abosent(self):
fileoperations.save_to_aws_config('my-access-key', 'my-secret-key')
self.assertEqual(
"""[profile eb-cli]
aws_access_key_id = my-access-key
aws_secret_access_key = my-secret-key""",
open(os.path.join('home', '.aws', 'config')).read().strip()
)
def test_save_to_aws_config__config_file_present__eb_cli_profile_absent(self):
os.mkdir(os.path.join('home', '.aws'))
open(os.path.join('home', '.aws', 'config'), 'w').write(
"""[profile aws-cli]
aws_access_key_id = my-access-key
aws_secret_access_key = my-secret-key"""
)
fileoperations.save_to_aws_config('my-access-key', 'my-secret-key')
self.assertEqual(
"""[profile aws-cli]
aws_access_key_id = my-access-key
aws_secret_access_key = my-secret-key
[profile eb-cli]
aws_access_key_id = my-access-key
aws_secret_access_key = my-secret-key""",
open(os.path.join('home', '.aws', 'config')).read().strip()
)
def test_get_war_file_location(self):
os.mkdir('build')
os.mkdir(os.path.join('build', 'libs'))
open(os.path.join('build', 'libs', 'war1.war'), 'w').write('')
open(os.path.join('build', 'libs', 'war2.war'), 'w').write('')
self.assertTrue(
fileoperations.get_war_file_location() in [
os.path.abspath(os.path.join('build', 'libs', 'war1.war')),
os.path.abspath(os.path.join('build', 'libs', 'war2.war'))
]
)
def test_get_war_file_location__war_file_absent(self):
with self.assertRaises(fileoperations.NotFoundError) as context_manager:
fileoperations.get_war_file_location()
self.assertEqual(
'Can not find .war artifact in {}'.format(os.path.join('build', 'libs') + os.path.sep),
str(context_manager.exception)
)
def test_make_eb_dir__directory_already_exists(self):
self.create_config_file()
os.mkdir(os.path.join('.elasticbeanstalk', 'saved_configs'))
with patch('os.makedirs') as makedirs_mock:
fileoperations.make_eb_dir('saved_configs')
makedirs_mock.assert_not_called()
def test_make_eb_dir(self):
self.create_config_file()
with patch('os.makedirs') as makedirs_mock:
fileoperations.make_eb_dir('saved_configs')
makedirs_mock.assert_called_once_with(
os.path.join('.elasticbeanstalk', 'saved_configs')
)
def test_clean_up(self):
self.create_config_file()
fileoperations.clean_up()
self.assertFalse(os.path.exists('elasticbeanstalk'))
@unittest.skipIf(
condition=sys.platform.startswith('win'),
reason='file permissions work differently on Windows'
)
def test_set_user_only_permissions(self):
dir_1 = 'dir_1'
dir_2 = os.path.join('dir_1', 'dir_2')
dir_3 = os.path.join('dir_1', 'dir_2', 'dir_3')
file_1 = os.path.join('dir_1', 'dir_2', 'dir_3', 'file_1')
os.mkdir(dir_1)
os.mkdir(dir_2)
os.mkdir(dir_3)
open(file_1, 'w').close()
fileoperations.set_user_only_permissions('dir_1')
if sys.version_info < (3, 0):
self.assertEqual('040755', oct(os.stat(dir_1)[stat.ST_MODE]))
self.assertEqual('040700', oct(os.stat(dir_2)[stat.ST_MODE]))
self.assertEqual('040700', oct(os.stat(dir_3)[stat.ST_MODE]))
self.assertEqual('0100600', oct(os.stat(file_1)[stat.ST_MODE]))
else:
self.assertEqual('0o40755', oct(os.stat(dir_1)[stat.ST_MODE]))
self.assertEqual('0o40700', oct(os.stat(dir_2)[stat.ST_MODE]))
self.assertEqual('0o40700', oct(os.stat(dir_3)[stat.ST_MODE]))
self.assertEqual('0o100600', oct(os.stat(file_1)[stat.ST_MODE]))
def test_get_platform_version(self):
self.create_config_file()
fileoperations.write_config_setting('global', 'platform_version', 'my-platform-version')
self.assertEqual(
'my-platform-version',
fileoperations.get_platform_version()
)
def test_get_platform_version__directory_not_initialized(self):
self.assertIsNone(fileoperations.get_platform_version())
def test_get_instance_profile(self):
self.create_config_file()
fileoperations.write_config_setting('global', 'instance_profile', 'my-instance-profile')
self.assertEqual(
'my-instance-profile',
fileoperations.get_instance_profile()
)
def test_get_instance_profile__directory_not_initialized(self):
self.assertEqual(
'default',
fileoperations.get_instance_profile(default='default')
)
def test_get_application_name(self):
self.create_config_file()
fileoperations.write_config_setting('global', 'application_name', 'my-application-name')
self.assertEqual(
'my-application-name',
fileoperations.get_application_name()
)
def test_get_platform_name(self):
self.create_config_file()
fileoperations.write_config_setting('global', 'platform_name', 'my-platform-name')
self.assertEqual(
'my-platform-name',
fileoperations.get_platform_name()
)
def test_get_workspace_type(self):
self.create_config_file()
fileoperations.write_config_setting('global', 'workspace_type', 'application')
self.assertEqual(
'application',
fileoperations.get_workspace_type()
)
def test_get_workspace_type__application_not_inited(self):
with self.assertRaises(fileoperations.NotInitializedError):
fileoperations.get_workspace_type()
def test_get_workspace_type__application_not_inited__default_provided(self):
self.assertEqual(
'platform',
fileoperations.get_workspace_type(default='platform')
)
def test_update_platform_version(self):
self.create_config_file()
fileoperations.update_platform_version('my-platform-version')
self.assertEqual(
'my-platform-version',
fileoperations.get_platform_version()
)
def test_write_keyname(self):
self.create_config_file()
fileoperations.write_keyname('my-keyname')
self.assertEqual(
'my-keyname',
fileoperations.get_keyname()
)
def test_directory_empty(self):
os.mkdir('temp')
self.assertTrue(fileoperations.directory_empty('temp'))
def test_write_buildspec_config_header(self):
fileoperations.write_buildspec_config_header('Image', 'image-name')
with open(fileoperations.buildspec_name) as buildspec:
self.assertEqual(
"""eb_codebuild_settings:
Image: image-name
""",
buildspec.read()
)
@patch('ebcli.core.fileoperations.os.stat')
def test___validate_file_for_archive__regular_files_are_valid(
self,
stat_mock,
):
filepath = '/Users/dina/eb_applications/my-app/'
stat_mock.return_value = Mock(st_mode=33188)
actual = fileoperations._validate_file_for_archive(filepath)
self.assertTrue(actual)
@patch('ebcli.core.fileoperations.os.stat')
def test___validate_file_for_archive__ignores_socket_files(
self,
stat_mock,
):
filepath = '/Users/dina/eb_applications/my-app/'
stat_mock.return_value = Mock(st_mode=49645)
actual = fileoperations._validate_file_for_archive(filepath)
self.assertFalse(actual)
| 38.600775 | 119 | 0.64143 |
acf3ee0be21c9f165c8cc5e46efd00ab968dcfd5 | 18,897 | py | Python | lyrics_search/apis/spotify.py | tchamberlin/lyrics_search | 9c4e3fe7ebc13c89f61e3e2012af048fc1733608 | [
"MIT"
] | null | null | null | lyrics_search/apis/spotify.py | tchamberlin/lyrics_search | 9c4e3fe7ebc13c89f61e3e2012af048fc1733608 | [
"MIT"
] | 1 | 2021-01-27T23:44:26.000Z | 2021-05-14T00:21:42.000Z | lyrics_search/apis/spotify.py | tchamberlin/lyrics_search | 9c4e3fe7ebc13c89f61e3e2012af048fc1733608 | [
"MIT"
] | null | null | null | import logging
import math
import os
import re
import string
from collections import OrderedDict
from datetime import datetime
import spotipy
from fuzzywuzzy import fuzz
from spotipy.oauth2 import SpotifyClientCredentials
from tqdm import tqdm
from unidecode import unidecode
from lyrics_search.filters import contains_banned_word
from lyrics_search.normalize import PARENTHETICAL_REGEX
from lyrics_search.utils import (
choices_prompt,
chunks,
load_json,
normalize_query,
order_by_key,
save_json,
)
LOGGER = logging.getLogger(__name__)
SPOTIFY_USER_ID = os.getenv("SPOTIFY_USER_ID")
SPOTIPY = spotipy.Spotify(client_credentials_manager=SpotifyClientCredentials())
_token = spotipy.util.prompt_for_user_token(
SPOTIFY_USER_ID, scope="playlist-modify-private playlist-read-private"
)
if not _token:
raise ValueError(f"Failed to get token for {SPOTIFY_USER_ID}")
USER_SPOTIFY = spotipy.Spotify(auth=_token)
USER_SPOTIFY.trace = False
# Spotify won't allow us to add more than 100 tracks at a time
SPOTIFY_MAX_CHUNK_SIZE = 100
# Spotify search will return a max of 50 items at a time
SPOTIFY_API_SEARCH_LIMIT = 50
SPOTIFY_API_RESULTS_LIMIT = 1000
def spotify_add_tracks_to_playlist(playlist, track_ids, replace_existing=True):
    """Add `track_ids` to the given Spotify `playlist`.

    If `replace_existing` is True, the playlist's current contents are
    cleared first. Tracks are added in chunks because the Spotify API
    accepts at most `SPOTIFY_MAX_CHUNK_SIZE` (100) tracks per request.

    Returns the last snapshot returned by the Spotify API, or None if no
    API call was made (empty `track_ids` with `replace_existing=False`).
    """
    playlist_id = playlist["id"]
    # Bug fix: initialize snapshot so the final `return` cannot raise
    # UnboundLocalError when replace_existing is False and track_ids is empty
    snapshot = None
    if replace_existing:
        snapshot = USER_SPOTIFY.user_playlist_replace_tracks(
            user=SPOTIFY_USER_ID, playlist_id=playlist_id, tracks=[]
        )
        LOGGER.debug(
            f"Deleted all tracks in playlist {playlist['name']!r} ({playlist_id!r})"
        )
    for track_chunk in chunks(track_ids, SPOTIFY_MAX_CHUNK_SIZE):
        snapshot = USER_SPOTIFY.user_playlist_add_tracks(
            user=SPOTIFY_USER_ID, playlist_id=playlist_id, tracks=track_chunk
        )
        LOGGER.debug(
            f"Added {len(track_chunk)} tracks to playlist {playlist['name']!r}"
        )
    return snapshot
def delete_playlists(playlist_infos):
    """Unfollow (i.e. remove from the user's library) every playlist given."""
    for info in playlist_infos:
        USER_SPOTIFY.user_playlist_unfollow(SPOTIFY_USER_ID, info["id"])
        LOGGER.debug(f"Deleted playlist {info['name']} ({info['id']})")
def spotify_get_existing_playlist(playlist_name, no_input=False):
    """Return the current user's playlist named `playlist_name`, or None.

    Spotify allows multiple playlists with the same name; when several
    match and `no_input` is False, the user is prompted to pick one.

    NOTE(review): when `no_input` is True and several playlists match,
    this falls through and returns None -- confirm that is intended.
    """
    playlists = get_spotify_user_playlists()
    existing_playlists = [
        playlist for playlist in playlists if playlist_name == playlist["name"]
    ]
    num_existing_playlists = len(existing_playlists)
    if num_existing_playlists > 1 and not no_input:
        print("The following playlists already exist with the same name:")
        choices = list(enumerate(existing_playlists, 1))
        for num, playlist in choices:
            print(
                f"  {num}) {playlist['name']} ({playlist['id']}): "
                f"{playlist['tracks']['total']} tracks"
            )
        choice = choices_prompt(
            "Which playlist would you like to use (will replace its entire "
            "contents with the new tracks)?",
            # Bug fix: previously `[c for c in choices[0]]`, which iterated
            # the first (number, playlist) pair instead of listing the valid
            # choice numbers
            choices=[num for num, _ in choices],
        )
        # Choice is 1-indexed so we must subtract 1
        playlist = existing_playlists[choice - 1]
    elif num_existing_playlists == 1:
        LOGGER.debug(f"Exactly 1 existing playlist named {playlist_name}")
        playlist = existing_playlists[0]
    else:
        LOGGER.debug(f"No existing playlist(s) named {playlist_name}")
        playlist = None
    return playlist
def create_spotify_playlist(
    query, playlist_name, track_ids, replace_existing=True, description=None
):
    """Create (or refresh) a private Spotify playlist containing `track_ids`.

    Reuses an existing playlist of the same name when one exists,
    updating its description. Raises ValueError rather than creating an
    empty playlist. Returns the playlist ID.
    """
    if not track_ids:
        raise ValueError(
            f"Refusing to create empty Spotify playlist '{playlist_name}'. "
            "No changes have been made."
        )
    description = (
        create_playlist_description(query) if description is None else description
    )
    playlist = spotify_get_existing_playlist(playlist_name)
    if playlist is not None:
        tqdm.write(f"Replacing existing playlist {playlist_name!r}")
        USER_SPOTIFY.user_playlist_change_details(
            SPOTIFY_USER_ID, playlist["id"], description=description
        )
    else:
        playlist = USER_SPOTIFY.user_playlist_create(
            SPOTIFY_USER_ID, name=playlist_name, public=False, description=description
        )
        tqdm.write(f"Creating playlist {playlist_name!r}")
    spotify_add_tracks_to_playlist(
        playlist, track_ids, replace_existing=replace_existing
    )
    return playlist["id"]
def get_spotify_user_playlists(limit=50):
    """Get all playlists from current Spotify user."""
    playlists = []
    offset = 0
    while True:
        page = USER_SPOTIFY.current_user_playlists(limit=limit, offset=offset)
        playlists.extend(page["items"])
        # A falsy "next" URL means this was the last page
        if not page["next"]:
            break
        offset += limit
    return playlists
def search_spotify_for_track(artist, track):
    """Search Spotify for a single track by `artist` and `track` name.

    Raw results are narrowed via `filter_results`; if several survive,
    the most popular is chosen. Returns the Spotify track item (dict),
    or None when nothing survives filtering.
    """
    query = f"{artist} {track}"
    LOGGER.debug(f"Querying for {query}")
    results = SPOTIPY.search(q=query, limit=50)
    num_results = results["tracks"]["total"]
    LOGGER.debug(f"Found {num_results} results")
    filtered_results = filter_results(track, results["tracks"]["items"])
    if not filtered_results:
        LOGGER.debug(f"After filtering, no results! Bailing out of query {query}")
        return None
    if len(filtered_results) == 1:
        # Bug fix: previously this branch took the first *unfiltered* item
        # (results["tracks"]["items"][0]), which could be a track that
        # filter_results had just rejected
        item = filtered_results[0]
        stub = "only track"
    else:
        item = sorted(filtered_results, key=lambda x: x["popularity"])[-1]
        stub = "most popular track"
    artists = [artist["name"] for artist in item["artists"]]
    LOGGER.debug(f"Added {stub} track '{item['name']}' by '{artists}' for {query=}")
    return item
def sort_playlist(playlist, key):
    """Sort the given Spotify `playlist` by `key`"""
    # NOTE(review): unimplemented stub -- the body is only the docstring,
    # so calling this is a no-op returning None. TODO: implement or remove.
def query_spotify_from_track_infos(track_infos, order_by=None):
    """Look up each (cleaned_artist, cleaned_track) pair on Spotify.

    Returns a tuple `(found_items, not_found_track_infos)` where
    `found_items` is a list of Spotify track items and
    `not_found_track_infos` are the input dicts with no match.
    """
    to_add = OrderedDict()
    not_found = []
    # Create a set of each unique artist/track name pair. Use the "cleaned" track name (this
    # strips out things in parentheses, for example). This avoids unnecessary duplicate queries
    # to spotify. NOTE: This obviously assumes that a given track title is unique per artist, which
    # is not true. However, it is preferable doing it this way vs. getting a bunch of duplicate
    # results for the much more common case of the same song existing on multiple albums per artist
    # TODO: De-dup elsewhere, with better explanation of what's going on in logs
    to_query = {
        (track_info["cleaned_artist"], track_info["cleaned_track"]): track_info
        for track_info in track_infos
    }
    # Query highest-scoring tracks first
    to_query = sorted(to_query.items(), key=lambda x: x[1]["score"], reverse=True)
    for (artist, track), track_info in tqdm(to_query, unit="track"):
        # TODO: Filter for song names that closely match our query!!!
        item = search_spotify_for_track(artist, track)
        if item:
            # Keyed by Spotify track ID, so duplicate matches collapse
            to_add[item["id"]] = item
        else:
            not_found.append(track_info)
    if order_by:
        # NOTE(review): order_by_key() is expected to return a mapping here,
        # since `.values()` is called on `ret` below -- if it returns a
        # sequence this branch raises AttributeError; verify against utils.
        ret = order_by_key(to_add.values(), order_by)
    else:
        ret = to_add
    return list(ret.values()), not_found
def create_playlist_description(query):
    """Build the canned description attached to generated playlists.

    The repository URL is taken from the LR_REPO_URL environment
    variable, defaulting to "<none>".
    """
    repo_url = os.getenv("LR_REPO_URL", "<none>")
    parts = (
        f"{query} playlist! Created via an automated script; author does not endorse contents. ",
        f"Sorted in rough order of {query}-ness. ",
        f"See {repo_url} for more details.",
    )
    return "".join(parts)
def spotify_deep_search(query):
    """Search Spotify for `query`, working around the 1000-result API cap.

    When the total result count exceeds the cap, the search is
    partitioned by release year and leading artist character and the
    partitions are concatenated. Returns a list of track items.
    """
    # TODO: Inefficient!
    initial_results = SPOTIPY.search(q=f"track:{query}", type="track", limit=1)
    all_results = []
    if initial_results["tracks"]["total"] > SPOTIFY_API_RESULTS_LIMIT:
        # NOTE(review): range() excludes the current year -- confirm whether
        # that is intentional (the lazy variant shares this behavior)
        for year in tqdm(range(2010, datetime.now().year), unit="year", position=1):
            LOGGER.info(f"{year=}")
            for char in tqdm(string.ascii_lowercase, unit="char", position=2):
                LOGGER.info(f"{char=}")
                results = search_spotify(f"track:{query} year:{year} artist:{char}*")
                all_results.extend(results)
    else:
        # Bug fix: the original had no else branch and returned [] whenever
        # the result set fit under the cap -- i.e. it never searched at all
        # in the common case (compare spotify_deep_search_lazy)
        all_results = search_spotify(f"track:{query}")
    return all_results
def spotify_deep_search_lazy(query):
    """Lazily yield Spotify track results for `query`.

    The query is widened with an OR over its unidecoded form when that
    differs. Past the 1000-result API cap, results are partitioned by
    release year and leading artist character.
    """
    cleaned_query = unidecode(query)
    _query = (
        f"{query} OR {cleaned_query}"
        if query.lower() != cleaned_query.lower()
        else query
    )
    # TODO: Inefficient!
    initial_results = SPOTIPY.search(q=_query, type="track", limit=1)
    total_results = initial_results["tracks"]["total"]
    if total_results > SPOTIFY_API_RESULTS_LIMIT:
        for year in tqdm(
            reversed(range(2010, datetime.now().year)), unit="year", position=1
        ):
            LOGGER.info(f"{year=}")
            for char in tqdm(string.ascii_lowercase, unit="char", position=2):
                LOGGER.info(f"{char=}")
                results = search_spotify_lazy(
                    f"track:{_query} year:{year} artist:{char}*"
                )
                for result in results:
                    yield result
    else:
        # Bug fix: previously this branch searched `query`, silently
        # dropping the unidecoded OR-variant built above; use `_query` for
        # consistency with the deep branch
        for result in search_spotify_lazy(f"track:{_query}"):
            yield result
def spotify_shallow_search(query):
    """Run a single (non-partitioned) Spotify track search for `query`."""
    return search_spotify("track:{}".format(query))
def search_spotify(
    query,
    type_="track",
    max_results=None,
    limit=SPOTIFY_API_SEARCH_LIMIT,
    **kwargs,
):
    """Search Spotify, paging through results up to the API's hard caps.

    `max_results` optionally truncates the total; `limit` is the page
    size (Spotify max 50). Returns the collected track items.
    """
    all_items = []
    # Page 0
    results = SPOTIPY.search(q=query, type=type_, limit=limit, **kwargs)
    all_items.extend(results["tracks"]["items"])
    total = results["tracks"]["total"]
    if max_results is not None and total > max_results:
        LOGGER.debug(f"Limiting results from {total=} to {max_results=}")
        total = max_results
    num_pages = math.ceil(total / limit)
    LOGGER.debug(
        f"Total {total} results across {num_pages} pages of {limit} results each"
    )
    max_pages = math.ceil(SPOTIFY_API_RESULTS_LIMIT / limit)
    if num_pages > max_pages:
        LOGGER.debug(f"Limiting pages from {num_pages=} to {max_pages=}")
        num_pages = max_pages
    # Bug fix: fetch pages 1..num_pages-1 with offset computed *before* each
    # request. The original updated `offset = page * limit` after the fetch,
    # so page 2 re-fetched page 1's offset (duplicates) and the final page
    # was never retrieved; it also looped one page too many.
    for page in tqdm(range(1, num_pages), initial=1, unit="page", position=0):
        offset = page * limit
        if offset >= SPOTIFY_API_RESULTS_LIMIT:
            LOGGER.warning(
                f"Reach Spotify API Offset limit of {SPOTIFY_API_RESULTS_LIMIT}; exiting"
            )
            break
        LOGGER.debug(f"Fetching page {page} ({offset=})")
        results = SPOTIPY.search(
            q=query, type=type_, offset=offset, limit=limit, **kwargs
        )
        all_items.extend(results["tracks"]["items"])
    return all_items
def normalize_track_field(value):
    """Normalize a track/artist field for comparison.

    Strips parenthetical segments, lowercases, and truncates anything
    from "feat." onward. Logs when the value actually changed.
    """
    normalized = PARENTHETICAL_REGEX.sub("", value).strip().lower()
    # Drop any trailing featured-artist credit
    feat_start = normalized.find("feat.")
    if feat_start != -1:
        normalized = normalized[:feat_start].strip()
    if normalized != value:
        LOGGER.debug(f"Normalized value from {value=} to {normalized=}")
    return normalized
def filter_results(query, items, fast=True):
    """Keep only Spotify track items that genuinely match `query`.

    An item is kept iff its (normalized) track name contains `query` as a
    whole word AND no banned word appears in its artist/album/track
    fields AND the query does not merely match the artist or album name.
    With `fast=True` filter values are short-circuited via any(); with
    `fast=False` all filters are evaluated (for complete log output).
    """
    filtered = []
    query = unidecode(query.lower())
    # Whole-word match for the query
    query_word_regex = re.compile(r"\b" + re.escape(query) + r"\b")
    for item in items:
        track = unidecode(item["name"]).lower()
        album = unidecode(item["album"]["name"]).lower()
        artists = [unidecode(a["name"]).lower() for a in item["artists"]]
        clean_track = normalize_track_field(track)
        # NOTE(review): .match() anchors at the start, so combined with \b
        # this only accepts tracks *beginning* with the query -- confirm
        # whether .search() was intended.
        track_contains_query = (
            bool(query_word_regex.match(clean_track))
            # TODO: Why was this here? seems bad
            # or fuzz.partial_token_sort_ratio(query, clean_track) > 85
        )
        # (reason-name, truthy-when-item-should-be-rejected) pairs.
        # NOTE(review): "album_name_contains_query" appears twice; in the
        # fast path both entries are evaluated, in the slow path dict()
        # silently keeps only the last -- likely one was meant to be named
        # "album_name_fuzzy_matches_query".
        filters = (
            (
                "banned_word_in_artist_field",
                [artist for artist in artists if contains_banned_word(artist)],
            ),
            ("banned_word_in_album_field", contains_banned_word(album)),
            ("banned_word_in_track_field", contains_banned_word(clean_track)),
            ("track_does_not_contain_query", not track_contains_query),
            (
                "artist_name_contains_query",
                (
                    # If the track name doesn't contain the query,
                    not track_contains_query
                    # AND one of the artists does, then evaluate to True
                    and any(artist for artist in artists if query in artist)
                ),
            ),
            (
                "album_name_contains_query",
                (
                    # If the track name doesn't contain the query,
                    not track_contains_query
                    # AND the album does, then evaluate to True
                    and query in album
                ),
            ),
            (
                "artist_name_fuzzy_matches_query",
                (
                    not track_contains_query
                    # AND one of the artists fuzzy-matches, then True
                    and any(
                        artist
                        for artist in artists
                        if fuzz.partial_token_sort_ratio(query, artist) > 85
                    )
                ),
            ),
            (
                "album_name_contains_query",
                (
                    # If the track name doesn't contain the query,
                    not track_contains_query
                    # AND the album fuzzy-matches, then evaluate to True
                    and fuzz.partial_token_sort_ratio(query, album) > 85
                ),
            ),
        )
        if fast:
            # Short-circuit: reject as soon as any filter fires
            do_add = not any(v for k, v in filters)
        else:
            # Evaluate every filter so the rejection log lists all reasons
            filters = dict(filters)
            do_add = not any(filters.values())
            filters = filters.items()
        if do_add:
            filtered.append(item)
        else:
            LOGGER.info(
                f"Filtered out '{format_item(item)}' due to: "
                f"{[k for k, v in filters if v]}"
            )
    return filtered
def format_item(item):
    """Render a Spotify track item as 'artist(s) | album | track'."""
    artist_names = ", ".join(artist["name"] for artist in item["artists"])
    album_name = item["album"]["name"]
    return f"{artist_names} | {album_name} | {item['name']}"
def gen_spotify_search_results_json_path(output_path, normalized_query):
    """Return the cache-file path for raw Spotify search results."""
    filename = "{}_spotify_search_results.json".format(normalized_query)
    return output_path / filename
def search_and_filter(
    query, output_path, order_by="-popularity", fast=True, deep=False
):
    """Search Spotify for ``query`` (with on-disk caching), then filter,
    de-duplicate, and order the hits, saving the formatted playlist as JSON.

    :param query: free-text search string
    :param output_path: directory (path-like) for the cache and playlist files
    :param order_by: sort key passed to ``order_by_key`` ('-' prefix presumably
        means descending — confirm against order_by_key's implementation)
    :param fast: forwarded to ``filter_results`` (skips the slower filter path)
    :param deep: use ``spotify_deep_search`` instead of the shallow search
    :return: the filtered, de-duplicated, ordered list of track items
    """
    normalized_query = normalize_query(query)
    spotify_search_results_json_path = gen_spotify_search_results_json_path(
        output_path, normalized_query
    )
    # Only hit the Spotify API when no cached result file exists for this query.
    if not spotify_search_results_json_path.exists():
        LOGGER.debug(f"Searching Spotify for {query!r}")
        if deep:
            spotify_search_results = spotify_deep_search(query)
        else:
            spotify_search_results = spotify_shallow_search(query)
        save_json(spotify_search_results, spotify_search_results_json_path)
    else:
        LOGGER.debug(
            "Skipping Spotify search; results are cached at "
            f"'{spotify_search_results_json_path}'"
        )
        spotify_search_results = load_json(spotify_search_results_json_path)
    # Pipeline: filter -> de-duplicate -> order, then persist a human-readable list.
    filtered = filter_results(query, spotify_search_results, fast=fast)
    deduped = remove_duplicates(query, filtered)
    ordered = order_by_key(deduped, order_by)
    save_json(
        [format_item(item) for item in ordered],
        output_path / f"{normalized_query}_spotify_playlist.json",
    )
    return ordered
def remove_duplicates(query, items):
    """Collapse duplicate tracks, keeping one 'best' item per (artists, track) key.

    Preference rules (see the branch logic below): a single is only replaced by
    a more popular single; a non-single is replaced by any more popular item.

    :param query: unused here; kept for signature parity with sibling filters
    :param items: list of Spotify track dicts
    :return: de-duplicated list of track dicts
    """
    compressed = {}
    for item in items:
        track = normalize_track_field(item["name"])
        # album = PARENTHETICAL_REGEX.sub("", item["album"]["name"]).strip().lower()
        # Key on the sorted, normalized artist names plus the normalized track
        # name, so re-releases of the same recording collide.
        artists = tuple(
            sorted((normalize_track_field(a["name"]) for a in item["artists"]))
        )
        key = (artists, track)
        existing = compressed.get(key, None)
        # If there is an existing track in `compressed`...
        if existing:
            do_add = False
            # If the existing album is a single (e.g. not a compilation),
            if existing["album"]["album_type"] == "single":
                # AND the new item is also a single, AND the new item is more popular, we add it
                if (
                    item["album"]["album_type"] == "single"
                    and item["popularity"] > existing["popularity"]
                ):
                    do_add = True
            # If it is NOT a single,
            else:
                # AND it is more popular than the existing one, we add it
                if item["popularity"] > existing["popularity"]:
                    do_add = True
            if do_add:
                LOGGER.debug(
                    f"Overwriting '{format_item(existing)}' (pop. {existing['popularity']}) "
                    f"with '{format_item(item)}' (pop. {item['popularity']})"
                )
                compressed[key] = item
        else:
            # First time this (artists, track) key is seen: keep it unconditionally.
            compressed[key] = item
    LOGGER.debug(f"De-dup'd {len(items)=} to {len(compressed)=}")
    return list(compressed.values())
def search_spotify_lazy(
    query,
    type_="track",
    max_results=None,
    limit=SPOTIFY_API_SEARCH_LIMIT,
    **kwargs,
):
    """Generator over Spotify search hits, fetching result pages lazily.

    Yields each track item as it arrives.  Because this is a generator, the
    final ``return all_items`` is only visible via ``StopIteration.value``;
    normal callers should just iterate.

    :param query: search string passed to the Spotify API
    :param type_: Spotify object type to search for (default "track")
    :param max_results: optional cap on the number of results to page through
    :param limit: page size (Spotify caps this; see SPOTIFY_API_SEARCH_LIMIT)
    :raises ValueError: if the initial search call returns None

    Bug fixed: the original advanced the page offset with ``offset = page * limit``,
    which on the first loop pass (page == 1) left ``offset`` at its initial value
    of ``limit``, so the second results page was fetched twice.  The loop also
    iterated ``num_pages`` times even though the first page was already fetched
    before the loop, requesting one page past the end.
    """
    all_items = []
    # First page (offset 0) is fetched eagerly so we learn the result total.
    results = SPOTIPY.search(q=query, type=type_, limit=limit, **kwargs)
    if results is None:
        raise ValueError("uh oh")
    for track in results["tracks"]["items"]:
        yield track
    all_items.extend(results["tracks"]["items"])
    total = results["tracks"]["total"]
    if max_results is not None and total > max_results:
        LOGGER.debug(f"Limiting results from {total=} to {max_results=}")
        total = max_results
    num_pages = math.ceil(total / limit)
    LOGGER.debug(
        f"Total {total} results across {num_pages} pages of {limit} results each"
    )
    # Spotify refuses offsets beyond SPOTIFY_API_RESULTS_LIMIT, so cap the page count.
    max_pages = math.ceil(SPOTIFY_API_RESULTS_LIMIT / limit)
    if num_pages > max_pages:
        LOGGER.debug(f"Limiting pages from {num_pages=} to {max_pages=}")
        num_pages = max_pages
    offset = limit
    # Page 1 is already in hand; fetch pages 2..num_pages (num_pages - 1 requests).
    for page in tqdm(range(1, num_pages), initial=1, unit="page", position=0):
        if offset >= SPOTIFY_API_RESULTS_LIMIT:
            LOGGER.warning(
                f"Reach Spotify API Offset limit of {SPOTIFY_API_RESULTS_LIMIT}; exiting"
            )
            break
        LOGGER.debug(f"Fetching page {page} ({offset=})")
        results = SPOTIPY.search(
            q=query, type=type_, offset=offset, limit=limit, **kwargs
        )
        for track in results["tracks"]["items"]:
            yield track
        all_items.extend(results["tracks"]["items"])
        # Advance by exactly one page per request (was `offset = page * limit`,
        # which duplicated the second page).
        offset += limit
    return all_items
| 35.587571 | 99 | 0.622056 |
acf3ef2cf59b60d2d9b5ea7981d3868bcb195dc2 | 293 | py | Python | vol2/78.py | EdisonAlgorithms/ProjectEuler | 95025ede2c92dbd3ed2dccc0f8a97e9a3db95ef0 | [
"MIT"
] | null | null | null | vol2/78.py | EdisonAlgorithms/ProjectEuler | 95025ede2c92dbd3ed2dccc0f8a97e9a3db95ef0 | [
"MIT"
] | null | null | null | vol2/78.py | EdisonAlgorithms/ProjectEuler | 95025ede2c92dbd3ed2dccc0f8a97e9a3db95ef0 | [
"MIT"
] | null | null | null | if __name__ == '__main__':
k = sum([[i * (3 * i - 1) / 2, i * (3 * i - 1) / 2 + i] for i in range(1, 250)], [])
n = 0
m = 1e6
p = [1]
sgn = [1, 1, -1, -1]
while p[n] > 0:
n += 1
px = 0
i = 0
while k[i] <= n:
px += p[n - k[i]] * sgn[i % 4]
i += 1
p.append(px % m)
print n | 19.533333 | 85 | 0.392491 |
acf3f07cb4a0ab77eddd25f4fa0166c15735c233 | 280 | py | Python | Ashwini Jha/text_to_speech/online_tts.py | akhilaku/Image-Classifier-And-Recognition-Device-For-Blind-People-Using-Intel-Movidius-NCS-2 | 675620375a8568cc5bce387198e857a6c9624a81 | [
"MIT"
] | 2 | 2020-03-21T15:45:25.000Z | 2020-04-21T10:51:46.000Z | Ashwini Jha/text_to_speech/online_tts.py | akhilaku/Image-Classifier-And-Recognition-Device-For-Blind-People-Using-Intel-Movidius-NCS-2 | 675620375a8568cc5bce387198e857a6c9624a81 | [
"MIT"
] | 10 | 2020-06-27T16:48:31.000Z | 2020-08-12T09:14:07.000Z | Ashwini Jha/text_to_speech/online_tts.py | akhilaku/Image-Classifier-And-Recognition-Device-For-Blind-People-Using-Intel-Movidius-NCS-2 | 675620375a8568cc5bce387198e857a6c9624a81 | [
"MIT"
] | 4 | 2020-06-20T14:11:51.000Z | 2020-06-24T07:21:20.000Z | #Online Synthesis
import gtts
from playsound import playsound
# Request online speech synthesis from Google's TTS service (requires network access).
tts = gtts.gTTS("The person in front of you is wearing a mask")
# Save the synthesized speech to an MP3 file in the working directory.
tts.save("human_with_mask.mp3")
# Play the saved audio file through the default audio device.
playsound("human_with_mask.mp3")
| 21.538462 | 63 | 0.775 |
acf3f1ab23ec3b6255914c94a4089919d5824ff5 | 2,906 | py | Python | funowl/__init__.py | hsolbrig/funowl | 9345591e6c6cdf246fb8f4b0fcdae0b904c2a45d | [
"CC0-1.0"
] | 23 | 2019-05-24T05:27:25.000Z | 2022-02-18T16:37:17.000Z | funowl/__init__.py | hsolbrig/funowl | 9345591e6c6cdf246fb8f4b0fcdae0b904c2a45d | [
"CC0-1.0"
] | 22 | 2019-11-04T21:03:33.000Z | 2022-03-11T19:38:15.000Z | funowl/__init__.py | hsolbrig/funowl | 9345591e6c6cdf246fb8f4b0fcdae0b904c2a45d | [
"CC0-1.0"
] | 5 | 2019-10-07T13:28:14.000Z | 2021-12-20T08:28:58.000Z | import sys
from warnings import warn
from . prefix_declarations import Prefix
from . declarations import Declaration
from . literals import Datatype, StringLiteralNoLanguage, TypedLiteral, StringLiteralWithLanguage, Literal
from . dataproperty_expressions import DataProperty
from . individuals import NamedIndividual
from . annotations import AnnotationProperty, AnnotationSubject, AnnotationValue, Annotation, Annotatable, \
AnnotationAssertion, SubAnnotationPropertyOf, AnnotationPropertyDomain, AnnotationPropertyRange, AnnotationAxiom
from . assertions import SameIndividual, DifferentIndividuals, ClassAssertion, ObjectPropertyExpression, \
ObjectPropertyAssertion, NegativeObjectPropertyAssertion, DataPropertyAssertion, NegativeDataPropertyAssertion, \
Assertion
from . class_axioms import SubClassOf, EquivalentClasses, DisjointClasses, DisjointUnion, HasKey, ClassAxiom
from . class_expressions import Class, ObjectIntersectionOf, ObjectUnionOf, ObjectComplementOf, ObjectOneOf, \
ObjectSomeValuesFrom, ObjectAllValuesFrom, ObjectHasValue, ObjectHasSelf, ObjectMinCardinality, \
ObjectMaxCardinality, ObjectExactCardinality, DataSomeValuesFrom, DataAllValuesFrom, DataHasValue, \
DataMinCardinality, DataMaxCardinality, DataExactCardinality, ClassExpression
from . dataproperty_axioms import SubDataPropertyOf, EquivalentDataProperties, DisjointDataProperties, \
DataPropertyDomain, DataPropertyRange, FunctionalDataProperty, DatatypeDefinition, DataPropertyAxiom
from . dataproperty_expressions import DataProperty, DataPropertyExpression
from . dataranges import DataIntersectionOf, DataUnionOf, DataComplementOf, DataOneOf, FacetRestriction, \
DatatypeRestriction, DataRange
from .identifiers import IRI
from . individuals import NamedIndividual, AnonymousIndividual, Individual
from . objectproperty_axioms import ObjectPropertyChain, SubObjectPropertyExpression, SubObjectPropertyOf,\
EquivalentObjectProperties, DisjointObjectProperties, ObjectPropertyDomain, ObjectPropertyRange, \
InverseObjectProperties, FunctionalObjectProperty, InverseFunctionalObjectProperty, ReflexiveObjectProperty, \
IrreflexiveObjectProperty, SymmetricObjectProperty, AsymmetricObjectProperty, TransitiveObjectProperty, \
ObjectPropertyAxiom
from . objectproperty_expressions import ObjectProperty, ObjectInverseOf, ObjectPropertyExpression
from . ontology_document import OntologyDocument, Ontology, Import
from . axioms import Axiom
# Warn (but do not fail) when running on an interpreter older than 3.8,
# the minimum version this package supports.
if sys.version_info < (3, 8, 0):
    warn(f"FunOwl needs python 3.8 or later. Current version: {sys.version_info}")
# TODO: Run coverage and test or toss anything that isn't executed
# TODO: Consider removing the streaming IO feature -- it doesn't seem to do a lot for performance and makes things compicated
# TODO: Put an official unit test in -- something like rdflib
# TODO: See table 5 in OWL Spec -- builtin solution?
| 67.581395 | 125 | 0.842739 |
acf3f1eb9172799920a4cbb85e39686be3429be7 | 766 | py | Python | IVT_root_finder.py | Ilweran/ScientificProgramming | 5b7fff9340cecdd5e231f59cf2d33b1d4a345ada | [
"CC0-1.0"
] | null | null | null | IVT_root_finder.py | Ilweran/ScientificProgramming | 5b7fff9340cecdd5e231f59cf2d33b1d4a345ada | [
"CC0-1.0"
] | null | null | null | IVT_root_finder.py | Ilweran/ScientificProgramming | 5b7fff9340cecdd5e231f59cf2d33b1d4a345ada | [
"CC0-1.0"
] | null | null | null |
import math
def sign(a, b):
    """Sign comparison helper: the product a*b is positive iff a and b share a sign."""
    return a * b
def Bisection(xl, xr, tol, func=None):
    """Approximate a root of ``func`` in [xl, xr] by the bisection method.

    :param xl: left endpoint of the bracketing interval
    :param xr: right endpoint; func(xl) and func(xr) should differ in sign
    :param tol: stop once the half-interval width (error bound) falls below this
    :param func: function whose root is sought.  Defaults to the module-level
        ``f`` (resolved at call time, since ``f`` is defined after this
        function) — a backward-compatible generalization of the original,
        which called the global ``f`` directly.
    :return: the midpoint approximation of the root (also printed, as before)

    Changes from the original: the midpoint is printed once per iteration
    (it was printed in multiple branches), and the sign test is inlined as
    ``func(c) * func(xl) >= 0`` — the same arithmetic the ``sign`` helper used.
    """
    if func is None:
        func = f  # late binding keeps the historical behaviour
    n = 1
    NMAX = 50  # iteration cap guards against non-convergence / a bad bracket
    c = (xl + xr) / 2
    while n <= NMAX:  # limit iterations to prevent infinite loop
        error = abs(xl - xr) / 2  # half-interval width bounds the error
        c = (xl + xr) / 2  # midpoint
        if func(c) == 0 or error < tol:  # solution found
            print(c)
            return c
        n = n + 1  # increment step counter
        # Same sign at c and xl -> root lies in [c, xr]; otherwise in [xl, c].
        if func(c) * func(xl) >= 0:
            xl = c
        else:
            xr = c  # new interval
        print(c)
    return c
def f(x):
    """Test function f(x) = cos(pi*x/2) - x, whose root is sought by bisection."""
    half_pi = math.pi / 2
    return math.cos(half_pi * x) - x
# Bracket [0, 1]: f(0) = 1 > 0 and f(1) < 0, so a root lies inside.
xl = 0.0
xr = 1.0
tol = 0.0001  # stop when the half-interval width drops below this
Bisection(xl, xr, tol)
| 20.702703 | 85 | 0.486945 |
acf3f1ff76647ac8571fe0c592888d5f6d0abcc1 | 20,051 | py | Python | TCCILC/tcc_ilc/ilc_tcc_coordinator.py | shwethanidd/volttron-GS | f3b87c0001774251e833e4c3fe875b6734844a84 | [
"BSD-3-Clause"
] | 1 | 2021-08-05T04:01:55.000Z | 2021-08-05T04:01:55.000Z | TCCILC/tcc_ilc/ilc_tcc_coordinator.py | kevinatkinson-pnnl/volttron-GS | 479c614a6f7cd779fcc208e8e35d27d0961a16f8 | [
"BSD-3-Clause"
] | null | null | null | TCCILC/tcc_ilc/ilc_tcc_coordinator.py | kevinatkinson-pnnl/volttron-GS | 479c614a6f7cd779fcc208e8e35d27d0961a16f8 | [
"BSD-3-Clause"
] | null | null | null | """
-*- coding: utf-8 -*- {{{
vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
Copyright (c) 2019, Battelle Memorial Institute
All rights reserved.
1. Battelle Memorial Institute (hereinafter Battelle) hereby grants
permission to any person or entity lawfully obtaining a copy of this
software and associated documentation files (hereinafter "the Software")
to redistribute and use the Software in source and binary forms, with or
without modification. Such person or entity may use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software, and
may permit others to do so, subject to the following conditions:
- Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimers.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Other than as used herein, neither the name Battelle Memorial Institute
or Battelle may be used in any form whatsoever without the express
written consent of Battelle.
2. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL BATTELLE OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of the FreeBSD Project.
This material was prepared as an account of work sponsored by an agency of the
United States Government. Neither the United States Government nor the United
States Department of Energy, nor Battelle, nor any of their employees, nor any
jurisdiction or organization that has cooperated in the development of these
materials, makes any warranty, express or implied, or assumes any legal
liability or responsibility for the accuracy, completeness, or usefulness or
any information, apparatus, product, software, or process disclosed, or
represents that its use would not infringe privately owned rights.
Reference herein to any specific commercial product, process, or service by
trade name, trademark, manufacturer, or otherwise does not necessarily
constitute or imply its endorsement, recommendation, or favoring by the
United States Government or any agency thereof, or Battelle Memorial Institute.
The views and opinions of authors expressed herein do not necessarily state or
reflect those of the United States Government or any agency thereof.
PACIFIC NORTHWEST NATIONAL LABORATORY
operated by
BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
under Contract DE-AC05-76RL01830
}}}
"""
import os
import sys
import logging
from datetime import timedelta as td, datetime as dt
import uuid
from dateutil.parser import parse
from tcc_ilc.device_handler import ClusterContainer, DeviceClusters, parse_sympy, init_schedule, check_schedule
import pandas as pd
from volttron.platform.agent import utils
from volttron.platform.messaging import topics, headers as headers_mod
from volttron.platform.agent.utils import setup_logging, format_timestamp, get_aware_utc_now
from volttron.platform.agent.math_utils import mean, stdev
from volttron.platform.vip.agent import Agent, Core
from volttron.platform.agent.base_market_agent import MarketAgent
from volttron.platform.agent.base_market_agent.poly_line import PolyLine
from volttron.platform.agent.base_market_agent.point import Point
from volttron.platform.agent.base_market_agent.error_codes import NOT_FORMED, SHORT_OFFERS, BAD_STATE, NO_INTERSECT
from volttron.platform.agent.base_market_agent.buy_sell import BUYER
__version__ = "0.2"
setup_logging()
_log = logging.getLogger(__name__)
class TransactiveIlcCoordinator(MarketAgent):
    """VOLTTRON market agent coordinating transactive control with ILC.

    Aggregates device-level flexibility (from configured device clusters) into
    a building demand curve, bids it into the electric market as a BUYER, and
    publishes a demand-limit target to the ILC target topic once the market
    clears (or a fallback goal when the market errors out).
    """
    def __init__(self, config_path, **kwargs):
        # Load agent configuration and build device clusters, pub/sub topics,
        # and price-source settings before joining the electric market.
        super(TransactiveIlcCoordinator, self).__init__(**kwargs)
        config = utils.load_config(config_path)
        campus = config.get("campus", "")
        building = config.get("building", "")
        logging_topic = config.get("logging_topic", "tnc")
        # Topic the ILC agent listens on for demand-limit goals.
        self.target_topic = '/'.join(['record', 'target_agent', campus, building, 'goal'])
        self.logging_topic = '/'.join([logging_topic, campus, building, "TCILC"])
        cluster_configs = config["clusters"]
        self.clusters = ClusterContainer()
        # Each cluster entry points at its own config file describing its devices.
        for cluster_config in cluster_configs:
            device_cluster_config = cluster_config["device_cluster_file"]
            load_type = cluster_config.get("load_type", "discreet")
            if device_cluster_config[0] == "~":
                device_cluster_config = os.path.expanduser(device_cluster_config)
            cluster_config = utils.load_config(device_cluster_config)
            cluster = DeviceClusters(cluster_config, load_type)
            self.clusters.add_curtailment_cluster(cluster)
        self.device_topic_list = []
        self.device_topic_map = {}
        all_devices = self.clusters.get_device_name_list()
        occupancy_schedule = config.get("occupancy_schedule", False)
        self.occupancy_schedule = init_schedule(occupancy_schedule)
        # One "all" topic per device; the map lets new_data resolve topic -> device.
        for device_name in all_devices:
            device_topic = topics.DEVICES_VALUE(campus=campus,
                                                building=building,
                                                unit=device_name,
                                                path="",
                                                point="all")
            self.device_topic_list.append(device_topic)
            self.device_topic_map[device_topic] = device_name
        power_token = config["power_meter"]
        power_meter = power_token["device"]
        self.power_point = power_token["point"]
        self.current_time = None
        self.power_meter_topic = topics.DEVICES_VALUE(campus=campus,
                                                      building=building,
                                                      unit=power_meter,
                                                      path="",
                                                      point="all")
        # Market/state bookkeeping, populated as data arrives.
        self.demand_limit = None
        self.bldg_power = []
        self.avg_power = 0.
        self.last_demand_update = None
        self.demand_curve = None
        self.power_prices = None
        self.power_min = None
        self.power_max = None
        self.average_building_power_window = td(minutes=config.get("average_building_power_window", 15))
        self.minimum_update_time = td(minutes=config.get("minimum_update_time", 5))
        self.market_name = config.get("market", "electric")
        self.tz = None
        # self.prices = power_prices
        self.oat_predictions = []
        # Scales the price band (MA +/- STD * comfort_to_dollar) in generate_price_points.
        self.comfort_to_dollar = config.get('comfort_to_dollar', 1.0)
        self.prices_from = config.get("prices_from", 'pubsub')
        self.prices_topic = config.get("price_topic", "prices")
        self.prices_file = config.get("price_file")
        self.join_market(self.market_name, BUYER, None, self.offer_callback, None, self.price_callback, self.error_callback)
    def setup_prices(self):
        # Load price data either from a CSV file or by subscribing to a
        # price topic; in both cases a rolling 24-sample mean (MA) and
        # standard deviation (STD) are computed over the reversed series.
        _log.debug("Prices from {}".format(self.prices_from))
        if self.prices_from == "file":
            self.power_prices = pd.read_csv(self.prices_file)
            self.power_prices = self.power_prices.set_index(self.power_prices.columns[0])
            self.power_prices.index = pd.to_datetime(self.power_prices.index)
            # NOTE(review): the resample result is discarded here (not assigned) —
            # confirm whether hourly resampling was intended to take effect.
            self.power_prices.resample('H').mean()
            self.power_prices['MA'] = self.power_prices[::-1].rolling(window=24, min_periods=1).mean()[::-1]
            self.power_prices['STD'] = self.power_prices["price"][::-1].rolling(window=24, min_periods=1).std()[::-1]
            self.power_prices['month'] = self.power_prices.index.month.astype(int)
            self.power_prices['day'] = self.power_prices.index.day.astype(int)
            self.power_prices['hour'] = self.power_prices.index.hour.astype(int)
        elif self.prices_from == "pubsub":
            self.vip.pubsub.subscribe(peer="pubsub", prefix=self.prices_topic, callback=self.update_prices)
    def update_prices(self, peer, sender, bus, topic, headers, message):
        # Pub/sub callback: rebuild the price DataFrame (same MA/STD/calendar
        # columns as the file path in setup_prices) from the published series.
        self.power_prices = pd.DataFrame(message)
        self.power_prices = self.power_prices.set_index(self.power_prices.columns[0])
        self.power_prices.index = pd.to_datetime(self.power_prices.index)
        self.power_prices["price"] = self.power_prices
        # NOTE(review): the resample result is discarded here as well.
        self.power_prices.resample('H').mean()
        self.power_prices['MA'] = self.power_prices["price"][::-1].rolling(window=24, min_periods=1).mean()[::-1]
        self.power_prices['STD'] = self.power_prices["price"][::-1].rolling(window=24, min_periods=1).std()[::-1]
        self.power_prices['month'] = self.power_prices.index.month.astype(int)
        self.power_prices['day'] = self.power_prices.index.day.astype(int)
        self.power_prices['hour'] = self.power_prices.index.hour.astype(int)
    @Core.receiver("onstart")
    def starting_base(self, sender, **kwargs):
        """
        Startup method:
         - Setup subscriptions to devices.
         - Setup subscription to building power meter.
        :param sender:
        :param kwargs:
        :return:
        """
        for device_topic in self.device_topic_list:
            _log.debug("Subscribing to " + device_topic)
            self.vip.pubsub.subscribe(peer="pubsub", prefix=device_topic, callback=self.new_data)
        _log.debug("Subscribing to " + self.power_meter_topic)
        self.vip.pubsub.subscribe(peer="pubsub", prefix=self.power_meter_topic, callback=self.load_message_handler)
        self.setup_prices()
    def offer_callback(self, timestamp, market_name, buyer_seller):
        # Market hook: submit the building demand curve as this agent's offer
        # and log the curve for record keeping.  Skipped until data has arrived.
        if self.current_time is not None:
            demand_curve = self.create_demand_curve()
            if demand_curve is not None:
                self.make_offer(market_name, buyer_seller, demand_curve)
                topic_suffix = "/".join([self.logging_topic, "DemandCurve"])
                message = {"Curve": demand_curve.tuppleize(), "Commodity": "Electricity"}
                self.publish_record(topic_suffix, message)
    def create_demand_curve(self):
        # Build a two-point demand curve: minimum power at the maximum price
        # and maximum power at the minimum price (demand falls as price rises).
        # Returns None until power bounds are known.
        if self.power_min is not None and self.power_max is not None:
            demand_curve = PolyLine()
            price_min, price_max = self.generate_price_points()
            demand_curve.add(Point(price=price_max, quantity=self.power_min))
            demand_curve.add(Point(price=price_min, quantity=self.power_max))
        else:
            demand_curve = None
        self.demand_curve = demand_curve
        return demand_curve
    def price_callback(self, timestamp, market_name, buyer_seller, price, quantity):
        # Market hook: translate the cleared price into a demand goal via the
        # demand curve, clear the goal when unoccupied, and log the outcome.
        if self.bldg_power:
            _log.debug("Price is {} at {}".format(price, self.bldg_power[-1][0]))
            dt = self.bldg_power[-1][0]
            occupied = check_schedule(dt, self.occupancy_schedule)
            if self.demand_curve is not None and price is not None and occupied:
                demand_goal = self.demand_curve.x(price)
                self.publish_demand_limit(demand_goal, str(uuid.uuid1()))
            elif not occupied:
                demand_goal = None
                self.publish_demand_limit(demand_goal, str(uuid.uuid1()))
            else:
                _log.debug("Possible market problem price: {} - quantity: {}".format(price, quantity))
                demand_goal = None
            if price is None:
                price = "None"
                demand_goal = "None"
            message = {"Price": price, "Quantity": demand_goal, "Commodity": "Electricity"}
            topic_suffix = "/".join([self.logging_topic, "MarketClear"])
            self.publish_record(topic_suffix, message)
    def publish_demand_limit(self, demand_goal, task_id):
        """
        Publish the demand goal determined by clearing price.
        :param demand_goal:
        :param task_id:
        :return:
        """
        _log.debug("Updating demand limit: {}".format(demand_goal))
        self.demand_limit = demand_goal
        # Throttle: skip publishing if the last update was too recent.
        if self.last_demand_update is not None:
            if (self.current_time - self.last_demand_update) < self.minimum_update_time:
                _log.debug("Minimum demand update time has not elapsed.")
                return
        if self.current_time is None:
            _log.debug("No data received, not updating demand goal!")
            return
        self.last_demand_update = self.current_time
        start_time = format(self.current_time)
        # Target window runs from "now" through the end of the current day.
        end_time = format_timestamp(self.current_time.replace(hour=23, minute=59, second=59))
        _log.debug("Publish target: {}".format(demand_goal))
        headers = {'Date': start_time}
        target_msg = [
            {
                "value": {
                    "target": self.demand_limit,
                    "start": start_time,
                    "end": end_time,
                    "id": task_id
                }
            },
            {
                "value": {"tz": "UTC"}
            }
        ]
        self.vip.pubsub.publish('pubsub', self.target_topic, headers, target_msg).get(timeout=15)
    def new_data(self, peer, sender, bus, topic, headers, message):
        """
        Call back method for device data subscription.
        :param peer:
        :param sender:
        :param bus:
        :param topic:
        :param headers:
        :param message:
        :return:
        """
        _log.info("Data Received for {}".format(topic))
        # topic of form: devices/campus/building/device
        device_name = self.device_topic_map[topic]
        data = message[0]
        self.current_time = parse(headers["Date"])
        parsed_data = parse_sympy(data)
        self.clusters.get_device(device_name).ingest_data(parsed_data)
    def generate_price_points(self):
        # Derive the (min, max) price band for the current hour from the
        # rolling mean +/- scaled standard deviation; the floor is clamped to 0.
        # need to figure out where we are getting the pricing information and the form
        # probably via RPC
        _log.debug("DEBUG_PRICES: {}".format(self.current_time))
        df_query = self.power_prices[(self.power_prices["hour"] == self.current_time.hour) & (self.power_prices["day"] == self.current_time.day) & (self.power_prices["month"] == self.current_time.month)]
        price_min = df_query['MA'] - df_query['STD']*self.comfort_to_dollar
        price_max = df_query['MA'] + df_query['STD']*self.comfort_to_dollar
        _log.debug("DEBUG TCC price - min {} - max {}".format(float(price_min), float(price_max)))
        return max(float(price_min), 0.0), float(price_max)
    def generate_power_points(self, current_power):
        # Power flexibility band around the current meter reading, using the
        # clusters' aggregate upward/downward adjustment capability.
        positive_power, negative_power = self.clusters.get_power_bounds()
        _log.debug("DEBUG TCC - pos {} - neg {}".format(positive_power, negative_power))
        return float(current_power + sum(positive_power)), float(current_power - sum(negative_power))
    def load_message_handler(self, peer, sender, bus, topic, headers, message):
        """
        Call back method for building power meter.  Calculates the average
        building demand over a configurable time and manages the curtailment
        time and curtailment break times.
        :param peer:
        :param sender:
        :param bus:
        :param topic:
        :param headers:
        :param message:
        :return:
        """
        # Use instantaneous power or average building power.
        data = message[0]
        current_power = data[self.power_point]
        current_time = parse(headers["Date"])
        power_max, power_min = self.generate_power_points(current_power)
        _log.debug("QUANTITIES: max {} - min {} - cur {}".format(power_max, power_min, current_power))
        topic_suffix = "/".join([self.logging_topic, "BuildingFlexibility"])
        message = {"MaximumPower": power_max, "MinimumPower": power_min, "AveragePower": current_power}
        self.publish_record(topic_suffix, message)
        # Maintain a sliding window of (time, power, min, max) samples.
        if self.bldg_power:
            current_average_window = self.bldg_power[-1][0] - self.bldg_power[0][0] + td(seconds=15)
        else:
            current_average_window = td(minutes=0)
        if current_average_window >= self.average_building_power_window and current_power > 0:
            self.bldg_power.append((current_time, current_power, power_min, power_max))
            self.bldg_power.pop(0)
        elif current_power > 0:
            self.bldg_power.append((current_time, current_power, power_min, power_max))
        # Exponential-style smoothing over the samples, sorted descending so
        # larger readings receive the heaviest weights.
        smoothing_constant = 2.0 / (len(self.bldg_power) + 1.0) * 2.0 if self.bldg_power else 1.0
        smoothing_constant = smoothing_constant if smoothing_constant <= 1.0 else 1.0
        power_sort = list(self.bldg_power)
        power_sort.sort(reverse=True)
        avg_power_max = 0.
        avg_power_min = 0.
        avg_power = 0.
        # NOTE(review): `xrange` implies this module targets Python 2.
        for n in xrange(len(self.bldg_power)):
            avg_power += power_sort[n][1] * smoothing_constant * (1.0 - smoothing_constant) ** n
            avg_power_min += power_sort[n][2] * smoothing_constant * (1.0 - smoothing_constant) ** n
            avg_power_max += power_sort[n][3] * smoothing_constant * (1.0 - smoothing_constant) ** n
        self.avg_power = avg_power
        self.power_min = avg_power_min
        self.power_max = avg_power_max
    def error_callback(self, timestamp, market_name, buyer_seller, error_code, error_message, aux):
        # figure out what to send if the market is not formed or curves don't intersect.
        # Falls back to the demand curve's extremes based on the auxiliary
        # supply/demand comparison flags provided by the market service.
        _log.debug("AUX: {}".format(aux))
        if market_name == "electric":
            if self.bldg_power:
                dt = self.bldg_power[-1][0]
                occupied = check_schedule(dt, self.occupancy_schedule)
                _log.debug("AUX: {}".format(aux))
                if not occupied:
                    demand_goal = None
                    self.publish_demand_limit(demand_goal, str(uuid.uuid1()))
                else:
                    if aux.get('SQn,DQn', 0) == -1 and aux.get('SQx,DQx', 0) == -1:
                        demand_goal = self.demand_curve.min_x()
                        self.publish_demand_limit(demand_goal, str(uuid.uuid1()))
                    elif aux.get('SPn,DPn', 0) == 1 and aux.get('SPx,DPx', 0) == 1:
                        demand_goal = self.demand_curve.min_x()
                        self.publish_demand_limit(demand_goal, str(uuid.uuid1()))
                    elif aux.get('SPn,DPn', 0) == -1 and aux.get('SPx,DPx', 0) == -1:
                        demand_goal = self.demand_curve.max_x()
                        self.publish_demand_limit(demand_goal, str(uuid.uuid1()))
                    else:
                        demand_goal = None
                        self.publish_demand_limit(demand_goal, str(uuid.uuid1()))
        return
    def publish_record(self, topic, message):
        # Stamp the message with the current data time and publish it on the
        # logging topic (blocking until the platform acknowledges).
        headers = {headers_mod.DATE: format_timestamp(get_aware_utc_now())}
        message["TimeStamp"] = format_timestamp(self.current_time)
        self.vip.pubsub.publish("pubsub", topic, headers, message).get()
def main(argv=sys.argv):
    """Entry point invoked by the VOLTTRON platform wrapper (aip)."""
    try:
        utils.vip_main(TransactiveIlcCoordinator)
    except Exception as err:
        # Log the full traceback plus a compact repr before exiting.
        _log.exception("unhandled exception")
        _log.error(repr(err))
if __name__ == "__main__":
    # Entry point for script
    try:
        sys.exit(main())
    except KeyboardInterrupt:
        # Allow Ctrl-C to stop the agent without printing a traceback.
        pass
| 47.401891 | 203 | 0.656775 |
acf3f23f10f3249a354dac1992e8e23fc89431e0 | 4,517 | py | Python | hug/directives.py | QuantumGhost/hug | 167e07d2c39c6c18fc8992b838d53ccbe1a0d73c | [
"MIT"
] | 1 | 2021-10-05T03:53:43.000Z | 2021-10-05T03:53:43.000Z | hug/directives.py | QuantumGhost/hug | 167e07d2c39c6c18fc8992b838d53ccbe1a0d73c | [
"MIT"
] | null | null | null | hug/directives.py | QuantumGhost/hug | 167e07d2c39c6c18fc8992b838d53ccbe1a0d73c | [
"MIT"
] | 2 | 2021-03-21T08:07:02.000Z | 2021-10-05T03:53:47.000Z | """hug/directives.py
Defines the directives built into hug. Directives allow attaching behaviour to an API handler based simply
on an argument it takes and that arguments default value. The directive gets called with the default supplied,
ther request data, and api_version. The result of running the directive method is then set as the argument value.
Directive attributes are always prefixed with 'hug_'
Copyright (C) 2016 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import
from functools import partial
from timeit import default_timer as python_timer
from hug import introspect
def _built_in_directive(directive):
"""Marks a callable as a built-in directive"""
directive.directive = True
return directive
@_built_in_directive
class Timer(object):
"""Keeps track of time surpased since instantiation, outputed by doing float(instance)"""
__slots__ = ('start', 'round_to')
def __init__(self, round_to=None, **kwargs):
self.start = python_timer()
self.round_to = round_to
def __float__(self):
time_taken = python_timer() - self.start
return round(time_taken, self.round_to) if self.round_to else time_taken
def __int__(self):
return int(round(float(self)))
def __native_types__(self):
return self.__float__()
@_built_in_directive
def module(default=None, api=None, **kwargs):
    """Directive: the Python module backing the running hug API (or *default*)."""
    if api:
        return api.module
    return default
@_built_in_directive
def api(default=None, api=None, **kwargs):
    """Directive: the hug API instance handling the current call (or *default*)."""
    return api or default
@_built_in_directive
def api_version(default=None, api_version=None, **kwargs):
    """Directive: the API version in effect, usable in request and non-request code.

    Note that the supplied *default* is intentionally ignored; the version comes
    from the call context.
    """
    return api_version
@_built_in_directive
def documentation(default=None, api_version=None, api=None, **kwargs):
    """Directive: generated documentation for the current API version (None without an API)."""
    version = default or api_version
    if not api:
        return None
    return api.http.documentation(base_url="", api_version=version)
@_built_in_directive
def session(context_name='session', request=None, **kwargs):
    """Directive: the session stored on the request context (None when absent)."""
    if not request:
        return request
    return request.context.get(context_name, None)
@_built_in_directive
def user(default=None, request=None, **kwargs):
    """Directive: the currently logged-in user from the request context, else *default*."""
    current = request and request.context.get('user', None)
    return current or default
@_built_in_directive
class CurrentAPI(object):
    """Returns quick access to all api functions on the current version of the api"""
    __slots__ = ('api_version', 'api')
    def __init__(self, default=None, api_version=None, **kwargs):
        self.api_version = api_version
        self.api = api(**kwargs)
    def __getattr__(self, name):
        # Look the function up in the registry for this API version first.
        function = self.api.http.versioned.get(self.api_version, {}).get(name, None)
        if not function:
            # Fall back to the unversioned (None-keyed) registry.
            function = self.api.http.versioned.get(None, {}).get(name, None)
            if not function:
                raise AttributeError('API Function {0} not found'.format(name))
        # Pre-bind the directive arguments the target accepts, so the caller
        # sees a plain callable.
        accepts = function.interface.arguments
        if 'hug_api_version' in accepts:
            function = partial(function, hug_api_version=self.api_version)
        if 'hug_current_api' in accepts:
            function = partial(function, hug_current_api=self)
        return function
| 37.641667 | 113 | 0.738543 |
acf3f25e34c7215cd5a0941e3f92664a3841b057 | 362 | py | Python | keras_tpgan/test_datalist.py | MahmoodHB/LFMTP-GAN | df27689b85b94c0f97ce3dfc362b8f0cd3412dc8 | [
"Unlicense"
] | 1 | 2021-10-20T11:44:27.000Z | 2021-10-20T11:44:27.000Z | keras_tpgan/test_datalist.py | MahmoodHB/LMFTP-GAN | df27689b85b94c0f97ce3dfc362b8f0cd3412dc8 | [
"Unlicense"
] | 1 | 2022-01-29T06:22:11.000Z | 2022-01-29T06:22:11.000Z | keras_tpgan/test_datalist.py | MahmoodHB/LFMTP-GAN | df27689b85b94c0f97ce3dfc362b8f0cd3412dc8 | [
"Unlicense"
] | null | null | null | import pickle
#src_dataset_dir = 'E:/tpgan-chinese/tpgan_keras(for chinese)/crop_img(jpg)'
#src_dataset_dir = 'D:/desktop/tpgan_keras/dataset_png'
#datalist_dir = 'E:/tpgan-chinese/tpgan_keras(for chinese)/datalist_test.pkl'
datalist_dir = 'D:/keras_tpgan(FEI)/keras_tpgan/landmarks(FEI).pkl'
with open(datalist_dir, 'rb') as f:
datalist = pickle.load(f) | 40.222222 | 77 | 0.762431 |
acf3f2ef270a6bcad43bfeba2f2f2788e8a5be93 | 1,293 | py | Python | CircleciScripts/cocoapods_release.py | netvue/aws-sdk-xos | 4ca68b251d0d9cbc19043bf12325b205b83bddcb | [
"Apache-2.0"
] | null | null | null | CircleciScripts/cocoapods_release.py | netvue/aws-sdk-xos | 4ca68b251d0d9cbc19043bf12325b205b83bddcb | [
"Apache-2.0"
] | null | null | null | CircleciScripts/cocoapods_release.py | netvue/aws-sdk-xos | 4ca68b251d0d9cbc19043bf12325b205b83bddcb | [
"Apache-2.0"
] | null | null | null | import sys
from framework_list import frameworks
from functions import log, run_command
# Push each framework's podspec to the CocoaPods trunk, in the order given by
# `frameworks` (from framework_list.py); log/run_command come from functions.py.
log("Publishing CocoaPods")
for framework in frameworks:
    log(f"Publishing {framework}")
    # Most pods take a few minutes to build, and a few seconds to push to trunk. However, the
    # AWSiOSSDK podspec can take a long time to build, since it builds each dependent pod as
    # part of its linting process, so set the timeout accordingly.
    (exit_code, out, err) = run_command(
        ["pod", "trunk", "push", f"{framework}.podspec", "--allow-warnings", "--synchronous"],
        keepalive_interval=300,
        timeout=3600,
    )
    # A "duplicate entry" error means this exact version was already pushed;
    # treat it as success so the release job can be re-run safely.
    if exit_code != 0 and "Unable to accept duplicate entry for" in str(out):
        log(f"Already published {framework}")
    elif exit_code == 0:
        log(f"Published {framework}")
    else:
        # Any other failure aborts the whole release, propagating the exit code.
        log(f"Could not publish {framework}: output: {out}; error: {err}")
        sys.exit(exit_code)
    # After AWSCore is pushed, refresh the local specs repo.
    # NOTE(review): presumably so later pods can resolve the just-published
    # AWSCore version — confirm against the podspec dependency graph.
    if framework == "AWSCore":
        log(f"pod repo update after {framework}")
        (exit_code, out, err) = run_command(
            ["pod", "repo", "update"],
            keepalive_interval=300,
            timeout=3600,
        )
        if exit_code != 0:
            # A failed repo update is logged but is not fatal here.
            log(f"Failed to update CocoaPods repo'; output={out}, error={err}")
sys.exit(0)
| 33.153846 | 94 | 0.629544 |
acf3f42c38d837481c6843d46c601ebca1ffb7e0 | 73,528 | py | Python | selfdrive/car/toyota/values.py | snedelkoski/openpilot | 3e945fb1fb8562949f801ea316e66d16ffa5a4ca | [
"MIT"
] | 1 | 2021-12-17T22:48:30.000Z | 2021-12-17T22:48:30.000Z | selfdrive/car/toyota/values.py | kimjk2629/crwusiz | efcfd13c5b5a768d4ec566878a4e606ee5bee82e | [
"MIT"
] | null | null | null | selfdrive/car/toyota/values.py | kimjk2629/crwusiz | efcfd13c5b5a768d4ec566878a4e606ee5bee82e | [
"MIT"
] | null | null | null | from collections import defaultdict
from enum import IntFlag
from cereal import car
from common.conversions import Conversions as CV
from selfdrive.car import dbc_dict
# Shorthand for the ECU identifier enum from the cereal car capnp schema.
Ecu = car.CarParams.Ecu

# Speed constants in m/s, converted from mph.
# NOTE(review): presumably the lowest speed supported by stock ACC and the
# window used when transitioning to/from the pedal interceptor — confirm in
# the Toyota interface/carcontroller code.
MIN_ACC_SPEED = 19. * CV.MPH_TO_MS
PEDAL_TRANSITION = 10. * CV.MPH_TO_MS
class CarControllerParams:
  """Actuation limits for the Toyota/Lexus car controller.

  Accel limits are in m/s^2; steer values are in raw EPS torque command units.
  """
  ACCEL_MAX = 1.5  # m/s2, lower than allowed 2.0 m/s2 for tuning reasons
  ACCEL_MIN = -3.5  # m/s2

  STEER_MAX = 1500  # peak torque command magnitude
  STEER_DELTA_UP = 10  # 1.5s time to peak torque
  STEER_DELTA_DOWN = 25  # always lower than 45 otherwise the Rav4 faults (Prius seems ok with 50)
  STEER_ERROR_MAX = 350  # max delta between torque cmd and torque motor
class ToyotaFlags(IntFlag):
  """OR-able bit flags marking Toyota platform traits."""
  HYBRID = 1  # platform has a hybrid powertrain
class CAR:
  """Supported platform identifiers.

  The string values are the canonical car names; they are used as the keys
  into FW_VERSIONS below (and, presumably, elsewhere in the car port —
  confirm usage in interface/fingerprint code).
  """
  # Toyota
  ALPHARD_TSS2 = "TOYOTA ALPHARD 2020"
  AVALON = "TOYOTA AVALON 2016"
  AVALON_2019 = "TOYOTA AVALON 2019"
  AVALONH_2019 = "TOYOTA AVALON HYBRID 2019"
  AVALON_TSS2 = "TOYOTA AVALON 2022"  # TSS 2.5
  CAMRY = "TOYOTA CAMRY 2018"
  CAMRYH = "TOYOTA CAMRY HYBRID 2018"
  CAMRY_TSS2 = "TOYOTA CAMRY 2021"  # TSS 2.5
  CAMRYH_TSS2 = "TOYOTA CAMRY HYBRID 2021"
  CHR = "TOYOTA C-HR 2018"
  CHRH = "TOYOTA C-HR HYBRID 2018"
  COROLLA = "TOYOTA COROLLA 2017"
  COROLLA_TSS2 = "TOYOTA COROLLA TSS2 2019"
  # LSS2 Lexus UX Hybrid is same as a TSS2 Corolla Hybrid
  COROLLAH_TSS2 = "TOYOTA COROLLA HYBRID TSS2 2019"
  HIGHLANDER = "TOYOTA HIGHLANDER 2017"
  HIGHLANDER_TSS2 = "TOYOTA HIGHLANDER 2020"
  HIGHLANDERH = "TOYOTA HIGHLANDER HYBRID 2018"
  HIGHLANDERH_TSS2 = "TOYOTA HIGHLANDER HYBRID 2020"
  PRIUS = "TOYOTA PRIUS 2017"
  PRIUS_V = "TOYOTA PRIUS v 2017"
  PRIUS_TSS2 = "TOYOTA PRIUS TSS2 2021"
  RAV4 = "TOYOTA RAV4 2017"
  RAV4H = "TOYOTA RAV4 HYBRID 2017"
  RAV4_TSS2 = "TOYOTA RAV4 2019"
  RAV4H_TSS2 = "TOYOTA RAV4 HYBRID 2019"
  MIRAI = "TOYOTA MIRAI 2021"  # TSS 2.5
  SIENNA = "TOYOTA SIENNA 2018"

  # Lexus
  LEXUS_CTH = "LEXUS CT HYBRID 2018"
  LEXUS_ESH = "LEXUS ES HYBRID 2018"
  LEXUS_ES_TSS2 = "LEXUS ES 2019"
  LEXUS_ESH_TSS2 = "LEXUS ES HYBRID 2019"
  LEXUS_IS = "LEXUS IS 2018"
  LEXUS_NX = "LEXUS NX 2018"
  LEXUS_NXH = "LEXUS NX HYBRID 2018"
  LEXUS_NX_TSS2 = "LEXUS NX 2020"
  LEXUS_RC = "LEXUS RC 2020"
  LEXUS_RX = "LEXUS RX 2016"
  LEXUS_RXH = "LEXUS RX HYBRID 2017"
  LEXUS_RX_TSS2 = "LEXUS RX 2020"
  LEXUS_RXH_TSS2 = "LEXUS RX HYBRID 2020"
# Fixed-payload CAN messages normally emitted by the stock DSU.
# NOTE(review): presumably replayed by openpilot when the DSU is bypassed —
# confirm in the Toyota carcontroller.
# Each entry is (addr, cars, bus, 1/freq*100, vl):
#   addr        - CAN message address
#   cars        - platforms this message applies to
#   bus         - CAN bus the message is sent on
#   1/freq*100  - send period (100 Hz loop ticks between sends)
#   vl          - constant payload bytes
STATIC_DSU_MSGS = [
  (0x128, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH, CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.RAV4, CAR.COROLLA, CAR.AVALON), 1, 3, b'\xf4\x01\x90\x83\x00\x37'),
  (0x128, (CAR.HIGHLANDER, CAR.HIGHLANDERH, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH), 1, 3, b'\x03\x00\x20\x00\x00\x52'),
  (0x141, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH, CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.RAV4, CAR.COROLLA, CAR.HIGHLANDER, CAR.HIGHLANDERH, CAR.AVALON, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH, CAR.LEXUS_RX, CAR.PRIUS_V), 1, 2, b'\x00\x00\x00\x46'),
  (0x160, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH, CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.RAV4, CAR.COROLLA, CAR.HIGHLANDER, CAR.HIGHLANDERH, CAR.AVALON, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH, CAR.LEXUS_RX, CAR.PRIUS_V), 1, 7, b'\x00\x00\x08\x12\x01\x31\x9c\x51'),
  (0x161, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH, CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.RAV4, CAR.COROLLA, CAR.AVALON, CAR.LEXUS_RX, CAR.PRIUS_V), 1, 7, b'\x00\x1e\x00\x00\x00\x80\x07'),
  (0X161, (CAR.HIGHLANDERH, CAR.HIGHLANDER, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH), 1, 7, b'\x00\x1e\x00\xd4\x00\x00\x5b'),
  (0x283, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH, CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.RAV4, CAR.COROLLA, CAR.HIGHLANDER, CAR.HIGHLANDERH, CAR.AVALON, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH, CAR.LEXUS_RX, CAR.PRIUS_V), 0, 3, b'\x00\x00\x00\x00\x00\x00\x8c'),
  (0x2E6, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH), 0, 3, b'\xff\xf8\x00\x08\x7f\xe0\x00\x4e'),
  (0x2E7, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH), 0, 3, b'\xa8\x9c\x31\x9c\x00\x00\x00\x02'),
  (0x33E, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH), 0, 20, b'\x0f\xff\x26\x40\x00\x1f\x00'),
  (0x344, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH, CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.RAV4, CAR.COROLLA, CAR.HIGHLANDER, CAR.HIGHLANDERH, CAR.AVALON, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH, CAR.LEXUS_RX, CAR.PRIUS_V), 0, 5, b'\x00\x00\x01\x00\x00\x00\x00\x50'),
  (0x365, (CAR.PRIUS, CAR.LEXUS_RXH, CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.HIGHLANDERH), 0, 20, b'\x00\x00\x00\x80\x03\x00\x08'),
  (0x365, (CAR.RAV4, CAR.RAV4H, CAR.COROLLA, CAR.HIGHLANDER, CAR.AVALON, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH, CAR.LEXUS_RX, CAR.PRIUS_V), 0, 20, b'\x00\x00\x00\x80\xfc\x00\x08'),
  (0x366, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH, CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.HIGHLANDERH), 0, 20, b'\x00\x00\x4d\x82\x40\x02\x00'),
  (0x366, (CAR.RAV4, CAR.COROLLA, CAR.HIGHLANDER, CAR.AVALON, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH, CAR.LEXUS_RX, CAR.PRIUS_V), 0, 20, b'\x00\x72\x07\xff\x09\xfe\x00'),
  (0x470, (CAR.PRIUS, CAR.LEXUS_RXH), 1, 100, b'\x00\x00\x02\x7a'),
  (0x470, (CAR.HIGHLANDER, CAR.HIGHLANDERH, CAR.RAV4H, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH, CAR.PRIUS_V), 1, 100, b'\x00\x00\x01\x79'),
  (0x4CB, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH, CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.RAV4, CAR.COROLLA, CAR.HIGHLANDERH, CAR.HIGHLANDER, CAR.AVALON, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH, CAR.LEXUS_RX, CAR.PRIUS_V), 0, 100, b'\x0c\x00\x00\x00\x00\x00\x00\x00'),
]
FW_VERSIONS = {
CAR.AVALON: {
(Ecu.esp, 0x7b0, None): [
b'F152607060\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'881510701300\x00\x00\x00\x00',
b'881510705100\x00\x00\x00\x00',
b'881510705200\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B41051\x00\x00\x00\x00\x00\x00',
],
(Ecu.engine, 0x7e0, None): [
b'\x0230721100\x00\x00\x00\x00\x00\x00\x00\x00A0C01000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0230721200\x00\x00\x00\x00\x00\x00\x00\x00A0C01000\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4702000\x00\x00\x00\x00',
b'8821F4702100\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F0701100\x00\x00\x00\x00',
b'8646F0703000\x00\x00\x00\x00',
],
},
CAR.AVALON_2019: {
(Ecu.esp, 0x7b0, None): [
b'F152607140\x00\x00\x00\x00\x00\x00',
b'F152607171\x00\x00\x00\x00\x00\x00',
b'F152607110\x00\x00\x00\x00\x00\x00',
b'F152607180\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'881510703200\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B41080\x00\x00\x00\x00\x00\x00',
b'8965B07010\x00\x00\x00\x00\x00\x00',
b'8965B41090\x00\x00\x00\x00\x00\x00',
],
(Ecu.engine, 0x700, None): [
b'\x01896630725200\x00\x00\x00\x00',
b'\x01896630725300\x00\x00\x00\x00',
b'\x01896630735100\x00\x00\x00\x00',
b'\x01896630738000\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4702300\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F0702100\x00\x00\x00\x00',
],
},
CAR.AVALONH_2019: {
(Ecu.esp, 0x7b0, None): [
b'F152641040\x00\x00\x00\x00\x00\x00',
b'F152641061\x00\x00\x00\x00\x00\x00',
b'F152641050\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'881510704200\x00\x00\x00\x00',
b'881514107100\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B07010\x00\x00\x00\x00\x00\x00',
b'8965B41090\x00\x00\x00\x00\x00\x00',
b'8965B41070\x00\x00\x00\x00\x00\x00',
],
(Ecu.engine, 0x700, None): [
b'\x02896630724000\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00',
b'\x02896630737000\x00\x00\x00\x00897CF3305001\x00\x00\x00\x00',
b'\x02896630728000\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4702300\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F0702100\x00\x00\x00\x00',
],
},
CAR.AVALON_TSS2: {
(Ecu.esp, 0x7b0, None): [
b'\x01F152607280\x00\x00\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B41110\x00\x00\x00\x00\x00\x00',
],
(Ecu.engine, 0x700, None): [
b'\x01896630742000\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'\x018821F6201200\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'\x028646F4104100\x00\x00\x00\x008646G5301200\x00\x00\x00\x00',
],
},
CAR.CAMRY: {
(Ecu.engine, 0x700, None): [
b'\x018966306L3100\x00\x00\x00\x00',
b'\x018966306L4200\x00\x00\x00\x00',
b'\x018966306L5200\x00\x00\x00\x00',
b'\x018966306P8000\x00\x00\x00\x00',
b'\x018966306Q3100\x00\x00\x00\x00',
b'\x018966306Q4000\x00\x00\x00\x00',
b'\x018966306Q4100\x00\x00\x00\x00',
b'\x018966306Q4200\x00\x00\x00\x00',
b'\x018966333Q9200\x00\x00\x00\x00',
b'\x018966333P3100\x00\x00\x00\x00',
b'\x018966333P3200\x00\x00\x00\x00',
b'\x018966333P4200\x00\x00\x00\x00',
b'\x018966333P4300\x00\x00\x00\x00',
b'\x018966333P4400\x00\x00\x00\x00',
b'\x018966333P4500\x00\x00\x00\x00',
b'\x018966333P4700\x00\x00\x00\x00',
b'\x018966333P4900\x00\x00\x00\x00',
b'\x018966333Q6000\x00\x00\x00\x00',
b'\x018966333Q6200\x00\x00\x00\x00',
b'\x018966333Q6300\x00\x00\x00\x00',
b'\x018966333W6000\x00\x00\x00\x00',
],
(Ecu.engine, 0x7e0, None): [
b'\x02333P1100\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'8821F0601200 ',
b'8821F0601300 ',
b'8821F0602000 ',
b'8821F0603300 ',
b'8821F0604100 ',
b'8821F0605200 ',
b'8821F0607200 ',
b'8821F0608000 ',
b'8821F0608200 ',
b'8821F0609100 ',
],
(Ecu.esp, 0x7b0, None): [
b'F152606210\x00\x00\x00\x00\x00\x00',
b'F152606230\x00\x00\x00\x00\x00\x00',
b'F152606270\x00\x00\x00\x00\x00\x00',
b'F152606290\x00\x00\x00\x00\x00\x00',
b'F152606410\x00\x00\x00\x00\x00\x00',
b'F152633540\x00\x00\x00\x00\x00\x00',
b'F152633A10\x00\x00\x00\x00\x00\x00',
b'F152633A20\x00\x00\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B33540\x00\x00\x00\x00\x00\x00',
b'8965B33542\x00\x00\x00\x00\x00\x00',
b'8965B33580\x00\x00\x00\x00\x00\x00',
b'8965B33581\x00\x00\x00\x00\x00\x00',
b'8965B33621\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [ # Same as 0x791
b'8821F0601200 ',
b'8821F0601300 ',
b'8821F0602000 ',
b'8821F0603300 ',
b'8821F0604100 ',
b'8821F0605200 ',
b'8821F0607200 ',
b'8821F0608000 ',
b'8821F0608200 ',
b'8821F0609100 ',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F0601200 ',
b'8646F0601300 ',
b'8646F0601400 ',
b'8646F0603400 ',
b'8646F0604100 ',
b'8646F0605000 ',
b'8646F0606000 ',
b'8646F0606100 ',
b'8646F0607100 ',
],
},
CAR.CAMRYH: {
(Ecu.engine, 0x700, None): [
b'\x018966306Q6000\x00\x00\x00\x00',
b'\x018966333N1100\x00\x00\x00\x00',
b'\x018966333N4300\x00\x00\x00\x00',
b'\x018966333X0000\x00\x00\x00\x00',
b'\x018966333X4000\x00\x00\x00\x00',
b'\x01896633T16000\x00\x00\x00\x00',
b'\x028966306B2100\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00',
b'\x028966306B2300\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00',
b'\x028966306B2500\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00',
b'\x028966306N8100\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00',
b'\x028966306N8200\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00',
b'\x028966306N8300\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00',
b'\x028966306N8400\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00',
b'\x028966306R5000\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00',
b'\x028966306R5000\x00\x00\x00\x00897CF3305001\x00\x00\x00\x00',
b'\x028966306R6000\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00',
b'\x028966306R6000\x00\x00\x00\x00897CF3305001\x00\x00\x00\x00',
b'\x028966306S0000\x00\x00\x00\x00897CF3305001\x00\x00\x00\x00',
b'\x028966306S0100\x00\x00\x00\x00897CF3305001\x00\x00\x00\x00',
b'\x028966306S1100\x00\x00\x00\x00897CF3305001\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152633214\x00\x00\x00\x00\x00\x00',
b'F152633660\x00\x00\x00\x00\x00\x00',
b'F152633712\x00\x00\x00\x00\x00\x00',
b'F152633713\x00\x00\x00\x00\x00\x00',
b'F152633B51\x00\x00\x00\x00\x00\x00',
b'F152633B60\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'8821F0601200 ',
b'8821F0601300 ',
b'8821F0603400 ',
b'8821F0604000 ',
b'8821F0604100 ',
b'8821F0604200 ',
b'8821F0605200 ',
b'8821F0606200 ',
b'8821F0607200 ',
b'8821F0608000 ',
b'8821F0608200 ',
b'8821F0609000 ',
b'8821F0609100 ',
],
(Ecu.eps, 0x7a1, None): [
b'8965B33540\x00\x00\x00\x00\x00\x00',
b'8965B33542\x00\x00\x00\x00\x00\x00',
b'8965B33550\x00\x00\x00\x00\x00\x00',
b'8965B33551\x00\x00\x00\x00\x00\x00',
b'8965B33580\x00\x00\x00\x00\x00\x00',
b'8965B33581\x00\x00\x00\x00\x00\x00',
b'8965B33611\x00\x00\x00\x00\x00\x00',
b'8965B33621\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [ # Same as 0x791
b'8821F0601200 ',
b'8821F0601300 ',
b'8821F0603400 ',
b'8821F0604000 ',
b'8821F0604100 ',
b'8821F0604200 ',
b'8821F0605200 ',
b'8821F0606200 ',
b'8821F0607200 ',
b'8821F0608000 ',
b'8821F0608200 ',
b'8821F0609000 ',
b'8821F0609100 ',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F0601200 ',
b'8646F0601300 ',
b'8646F0601400 ',
b'8646F0603400 ',
b'8646F0603500 ',
b'8646F0604100 ',
b'8646F0605000 ',
b'8646F0606000 ',
b'8646F0606100 ',
b'8646F0607000 ',
b'8646F0607100 ',
],
},
CAR.CAMRY_TSS2: {
(Ecu.eps, 0x7a1, None): [
b'8965B33630\x00\x00\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'\x01F152606370\x00\x00\x00\x00\x00\x00',
b'\x01F152606390\x00\x00\x00\x00\x00\x00',
b'\x01F152606400\x00\x00\x00\x00\x00\x00',
],
(Ecu.engine, 0x700, None): [
b'\x018966306Q5000\x00\x00\x00\x00',
b'\x018966306T3100\x00\x00\x00\x00',
b'\x018966306T3200\x00\x00\x00\x00',
b'\x018966306T4000\x00\x00\x00\x00',
b'\x018966306T4100\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'\x018821F6201200\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'\x028646F0602100\x00\x00\x00\x008646G5301200\x00\x00\x00\x00',
b'\x028646F0602200\x00\x00\x00\x008646G5301200\x00\x00\x00\x00',
b'\x028646F3305200\x00\x00\x00\x008646G5301200\x00\x00\x00\x00',
b'\x028646F3305300\x00\x00\x00\x008646G5301200\x00\x00\x00\x00',
],
},
CAR.CAMRYH_TSS2: {
(Ecu.eps, 0x7a1, None): [
b'8965B33630\x00\x00\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152633D00\x00\x00\x00\x00\x00\x00',
],
(Ecu.engine, 0x700, None): [
b'\x018966306Q6000\x00\x00\x00\x00',
b'\x018966306Q7000\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 15): [
b'\x018821F6201200\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 109): [
b'\x028646F3305200\x00\x00\x00\x008646G5301200\x00\x00\x00\x00',
b'\x028646F3305300\x00\x00\x00\x008646G5301200\x00\x00\x00\x00',
b'\x028646F3305300\x00\x00\x00\x008646G3304000\x00\x00\x00\x00',
],
},
CAR.CHR: {
(Ecu.engine, 0x700, None): [
b'\x01896631021100\x00\x00\x00\x00',
b'\x01896631017100\x00\x00\x00\x00',
b'\x01896631017200\x00\x00\x00\x00',
b'\x0189663F413100\x00\x00\x00\x00',
b'\x0189663F414100\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'8821F0W01000 ',
b'8821F0W01100 ',
b'8821FF401600 ',
b'8821FF404000 ',
b'8821FF404100 ',
b'8821FF405100 ',
b'8821FF406000 ',
b'8821FF407100 ',
],
(Ecu.esp, 0x7b0, None): [
b'F152610020\x00\x00\x00\x00\x00\x00',
b'F152610153\x00\x00\x00\x00\x00\x00',
b'F152610210\x00\x00\x00\x00\x00\x00',
b'F1526F4034\x00\x00\x00\x00\x00\x00',
b'F1526F4044\x00\x00\x00\x00\x00\x00',
b'F1526F4073\x00\x00\x00\x00\x00\x00',
b'F1526F4121\x00\x00\x00\x00\x00\x00',
b'F1526F4122\x00\x00\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B10011\x00\x00\x00\x00\x00\x00',
b'8965B10040\x00\x00\x00\x00\x00\x00',
b'8965B10070\x00\x00\x00\x00\x00\x00',
],
(Ecu.engine, 0x7e0, None): [
b'\x0331024000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203202\x00\x00\x00\x00',
b'\x0331024000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203302\x00\x00\x00\x00',
b'\x0331036000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203302\x00\x00\x00\x00',
b'\x033F401100\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203102\x00\x00\x00\x00',
b'\x033F401200\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203202\x00\x00\x00\x00',
b'\x033F424000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203202\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F0W01000 ',
b'8821FF401600 ',
b'8821FF404000 ',
b'8821FF404100 ',
b'8821FF405100 ',
b'8821FF406000 ',
b'8821FF407100 ',
b'8821F0W01100 ',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646FF401700 ',
b'8646FF401800 ',
b'8646FF404000 ',
b'8646FF406000 ',
b'8646FF407000 ',
],
},
CAR.CHRH: {
(Ecu.engine, 0x700, None): [
b'\x0289663F405100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896631013200\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x0289663F405000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x0289663F418000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x0289663F423000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x0289663F431000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x0189663F438000\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152610012\x00\x00\x00\x00\x00\x00',
b'F152610013\x00\x00\x00\x00\x00\x00',
b'F152610014\x00\x00\x00\x00\x00\x00',
b'F152610040\x00\x00\x00\x00\x00\x00',
b'F152610190\x00\x00\x00\x00\x00\x00',
b'F152610200\x00\x00\x00\x00\x00\x00',
b'F152610230\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'8821F0W01000 ',
b'8821FF402300 ',
b'8821FF402400 ',
b'8821FF404000 ',
b'8821FF404100 ',
b'8821FF405000 ',
b'8821FF406000 ',
b'8821FF407100 ',
],
(Ecu.eps, 0x7a1, None): [
b'8965B10011\x00\x00\x00\x00\x00\x00',
b'8965B10020\x00\x00\x00\x00\x00\x00',
b'8965B10040\x00\x00\x00\x00\x00\x00',
b'8965B10050\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F0W01000 ',
b'8821FF402300 ',
b'8821FF402400 ',
b'8821FF404000 ',
b'8821FF404100 ',
b'8821FF405000 ',
b'8821FF406000 ',
b'8821FF407100 ',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646FF401700 ',
b'8646FF402100 ',
b'8646FF404000 ',
b'8646FF406000 ',
b'8646FF407000 ',
],
},
CAR.COROLLA: {
(Ecu.engine, 0x7e0, None): [
b'\x0230ZC2000\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0230ZC2100\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0230ZC2200\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0230ZC2300\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0230ZC3000\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0230ZC3100\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0230ZC3200\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0230ZC3300\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0330ZC1200\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00895231203202\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'881510201100\x00\x00\x00\x00',
b'881510201200\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152602190\x00\x00\x00\x00\x00\x00',
b'F152602191\x00\x00\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B02181\x00\x00\x00\x00\x00\x00',
b'8965B02191\x00\x00\x00\x00\x00\x00',
b'8965B48150\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4702100\x00\x00\x00\x00',
b'8821F4702300\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F0201101\x00\x00\x00\x00',
b'8646F0201200\x00\x00\x00\x00',
],
},
CAR.COROLLA_TSS2: {
(Ecu.engine, 0x700, None): [
b'\x01896630ZG2000\x00\x00\x00\x00',
b'\x01896630ZG5000\x00\x00\x00\x00',
b'\x01896630ZG5100\x00\x00\x00\x00',
b'\x01896630ZG5200\x00\x00\x00\x00',
b'\x01896630ZG5300\x00\x00\x00\x00',
b'\x01896630ZP1000\x00\x00\x00\x00',
b'\x01896630ZP2000\x00\x00\x00\x00',
b'\x01896630ZQ5000\x00\x00\x00\x00',
b'\x018966312L8000\x00\x00\x00\x00',
b'\x018966312M0000\x00\x00\x00\x00',
b'\x018966312M9000\x00\x00\x00\x00',
b'\x018966312P9000\x00\x00\x00\x00',
b'\x018966312P9100\x00\x00\x00\x00',
b'\x018966312P9200\x00\x00\x00\x00',
b'\x018966312P9300\x00\x00\x00\x00',
b'\x018966312Q2300\x00\x00\x00\x00',
b'\x018966312Q8000\x00\x00\x00\x00',
b'\x018966312R0000\x00\x00\x00\x00',
b'\x018966312R0100\x00\x00\x00\x00',
b'\x018966312R1000\x00\x00\x00\x00',
b'\x018966312R1100\x00\x00\x00\x00',
b'\x018966312R3100\x00\x00\x00\x00',
b'\x018966312S5000\x00\x00\x00\x00',
b'\x018966312S7000\x00\x00\x00\x00',
b'\x018966312W3000\x00\x00\x00\x00',
b'\x018966312W9000\x00\x00\x00\x00',
],
(Ecu.engine, 0x7e0, None): [
b'\x0230A10000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0230A11000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0230ZN4000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x03312K7000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203402\x00\x00\x00\x00',
b'\x03312M3000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203402\x00\x00\x00\x00',
b'\x03312N6000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203202\x00\x00\x00\x00',
b'\x03312N6000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203302\x00\x00\x00\x00',
b'\x03312N6000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203402\x00\x00\x00\x00',
b'\x03312N6100\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203302\x00\x00\x00\x00',
b'\x03312N6100\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203402\x00\x00\x00\x00',
b'\x02312K4000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'\x018965B12350\x00\x00\x00\x00\x00\x00',
b'\x018965B12470\x00\x00\x00\x00\x00\x00',
b'\x018965B12490\x00\x00\x00\x00\x00\x00',
b'\x018965B12500\x00\x00\x00\x00\x00\x00',
b'\x018965B12520\x00\x00\x00\x00\x00\x00',
b'\x018965B12530\x00\x00\x00\x00\x00\x00',
b'\x018965B1255000\x00\x00\x00\x00',
b'8965B12361\x00\x00\x00\x00\x00\x00',
b'8965B16011\x00\x00\x00\x00\x00\x00',
b'\x018965B12510\x00\x00\x00\x00\x00\x00'
],
(Ecu.esp, 0x7b0, None): [
b'\x01F152602280\x00\x00\x00\x00\x00\x00',
b'\x01F152602560\x00\x00\x00\x00\x00\x00',
b'\x01F152602590\x00\x00\x00\x00\x00\x00',
b'\x01F152602650\x00\x00\x00\x00\x00\x00',
b"\x01F15260A010\x00\x00\x00\x00\x00\x00",
b'\x01F15260A050\x00\x00\x00\x00\x00\x00',
b'\x01F152612641\x00\x00\x00\x00\x00\x00',
b'\x01F152612651\x00\x00\x00\x00\x00\x00',
b'\x01F152612B10\x00\x00\x00\x00\x00\x00',
b'\x01F152612B51\x00\x00\x00\x00\x00\x00',
b'\x01F152612B60\x00\x00\x00\x00\x00\x00',
b'\x01F152612B61\x00\x00\x00\x00\x00\x00',
b'\x01F152612B62\x00\x00\x00\x00\x00\x00',
b'\x01F152612B71\x00\x00\x00\x00\x00\x00',
b'\x01F152612B81\x00\x00\x00\x00\x00\x00',
b'\x01F152612B90\x00\x00\x00\x00\x00\x00',
b'\x01F152612C00\x00\x00\x00\x00\x00\x00',
b'F152602191\x00\x00\x00\x00\x00\x00',
b'\x01F152612862\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'\x018821F3301100\x00\x00\x00\x00',
b'\x018821F3301200\x00\x00\x00\x00',
b'\x018821F3301300\x00\x00\x00\x00',
b'\x018821F3301400\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'\x028646F12010D0\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00',
b'\x028646F1201100\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00',
b'\x028646F1201200\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00',
b'\x028646F1201300\x00\x00\x00\x008646G2601400\x00\x00\x00\x00',
b'\x028646F1201400\x00\x00\x00\x008646G2601500\x00\x00\x00\x00',
b'\x028646F1202000\x00\x00\x00\x008646G2601200\x00\x00\x00\x00',
b'\x028646F1202100\x00\x00\x00\x008646G2601400\x00\x00\x00\x00',
b'\x028646F1202200\x00\x00\x00\x008646G2601500\x00\x00\x00\x00',
b'\x028646F1601100\x00\x00\x00\x008646G2601400\x00\x00\x00\x00',
],
},
CAR.COROLLAH_TSS2: {
(Ecu.engine, 0x700, None): [
b'\x01896630ZJ1000\x00\x00\x00\x00',
b'\x01896630ZU8000\x00\x00\x00\x00',
b'\x01896637621000\x00\x00\x00\x00',
b'\x01896637624000\x00\x00\x00\x00',
b'\x01896637626000\x00\x00\x00\x00',
b'\x01896637648000\x00\x00\x00\x00',
b'\x01896637643000\x00\x00\x00\x00',
b'\x02896630ZJ5000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896630ZN8000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896630ZQ3000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896630ZR2000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896630ZT8000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896630ZT9000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x028966312K6000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x028966312L0000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x028966312Q3000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x028966312Q4000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x038966312L7000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF1205001\x00\x00\x00\x00',
b'\x038966312N1000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF1203001\x00\x00\x00\x00',
b'\x038966312T3000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF1205001\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B12361\x00\x00\x00\x00\x00\x00',
b'8965B12451\x00\x00\x00\x00\x00\x00',
b'8965B76012\x00\x00\x00\x00\x00\x00',
b'8965B76050\x00\x00\x00\x00\x00\x00',
b'\x018965B12350\x00\x00\x00\x00\x00\x00',
b'\x018965B12470\x00\x00\x00\x00\x00\x00',
b'\x018965B12490\x00\x00\x00\x00\x00\x00',
b'\x018965B12500\x00\x00\x00\x00\x00\x00',
b'\x018965B12510\x00\x00\x00\x00\x00\x00',
b'\x018965B12520\x00\x00\x00\x00\x00\x00',
b'\x018965B12530\x00\x00\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152612590\x00\x00\x00\x00\x00\x00',
b'F152612691\x00\x00\x00\x00\x00\x00',
b'F152612692\x00\x00\x00\x00\x00\x00',
b'F152612700\x00\x00\x00\x00\x00\x00',
b'F152612710\x00\x00\x00\x00\x00\x00',
b'F152612790\x00\x00\x00\x00\x00\x00',
b'F152612800\x00\x00\x00\x00\x00\x00',
b'F152612820\x00\x00\x00\x00\x00\x00',
b'F152612840\x00\x00\x00\x00\x00\x00',
b'F152612890\x00\x00\x00\x00\x00\x00',
b'F152612A00\x00\x00\x00\x00\x00\x00',
b'F152612A10\x00\x00\x00\x00\x00\x00',
b'F152642540\x00\x00\x00\x00\x00\x00',
b'F152676293\x00\x00\x00\x00\x00\x00',
b'F152676303\x00\x00\x00\x00\x00\x00',
b'F152676304\x00\x00\x00\x00\x00\x00',
b'F152612D00\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'\x018821F3301100\x00\x00\x00\x00',
b'\x018821F3301200\x00\x00\x00\x00',
b'\x018821F3301300\x00\x00\x00\x00',
b'\x018821F3301400\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'\x028646F12010D0\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00',
b'\x028646F1201100\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00',
b'\x028646F1201300\x00\x00\x00\x008646G2601400\x00\x00\x00\x00',
b'\x028646F1201400\x00\x00\x00\x008646G2601500\x00\x00\x00\x00',
b'\x028646F1202000\x00\x00\x00\x008646G2601200\x00\x00\x00\x00',
b'\x028646F1202100\x00\x00\x00\x008646G2601400\x00\x00\x00\x00',
b'\x028646F1202200\x00\x00\x00\x008646G2601500\x00\x00\x00\x00',
b"\x028646F1601300\x00\x00\x00\x008646G2601400\x00\x00\x00\x00",
b'\x028646F4203400\x00\x00\x00\x008646G2601200\x00\x00\x00\x00',
b'\x028646F76020C0\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00',
b'\x028646F7603100\x00\x00\x00\x008646G2601200\x00\x00\x00\x00',
b'\x028646F7603200\x00\x00\x00\x008646G2601400\x00\x00\x00\x00',
],
},
CAR.HIGHLANDER: {
(Ecu.engine, 0x700, None): [
b'\x01896630E09000\x00\x00\x00\x00',
b'\x01896630E43000\x00\x00\x00\x00',
b'\x01896630E43100\x00\x00\x00\x00',
b'\x01896630E43200\x00\x00\x00\x00',
b'\x01896630E44200\x00\x00\x00\x00',
b'\x01896630E45000\x00\x00\x00\x00',
b'\x01896630E45100\x00\x00\x00\x00',
b'\x01896630E45200\x00\x00\x00\x00',
b'\x01896630E46000\x00\x00\x00\x00',
b'\x01896630E46200\x00\x00\x00\x00',
b'\x01896630E74000\x00\x00\x00\x00',
b'\x01896630E75000\x00\x00\x00\x00',
b'\x01896630E76000\x00\x00\x00\x00',
b'\x01896630E77000\x00\x00\x00\x00',
b'\x01896630E83000\x00\x00\x00\x00',
b'\x01896630E84000\x00\x00\x00\x00',
b'\x01896630E85000\x00\x00\x00\x00',
b'\x01896630E86000\x00\x00\x00\x00',
b'\x01896630E88000\x00\x00\x00\x00',
b'\x01896630EA0000\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B48140\x00\x00\x00\x00\x00\x00',
b'8965B48150\x00\x00\x00\x00\x00\x00',
b'8965B48210\x00\x00\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [b'F15260E011\x00\x00\x00\x00\x00\x00'],
(Ecu.dsu, 0x791, None): [
b'881510E01100\x00\x00\x00\x00',
b'881510E01200\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4702100\x00\x00\x00\x00',
b'8821F4702300\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F0E01200\x00\x00\x00\x00',
b'8646F0E01300\x00\x00\x00\x00',
],
},
CAR.HIGHLANDERH: {
(Ecu.eps, 0x7a1, None): [
b'8965B48160\x00\x00\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152648541\x00\x00\x00\x00\x00\x00',
b'F152648542\x00\x00\x00\x00\x00\x00',
],
(Ecu.engine, 0x7e0, None): [
b'\x0230E40000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0230E40100\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0230EA2000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0230EA2100\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4702100\x00\x00\x00\x00',
b'8821F4702300\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F0E01200\x00\x00\x00\x00',
b'8646F0E01300\x00\x00\x00\x00',
],
},
CAR.HIGHLANDER_TSS2: {
(Ecu.eps, 0x7a1, None): [
b'8965B48241\x00\x00\x00\x00\x00\x00',
b'8965B48310\x00\x00\x00\x00\x00\x00',
b'8965B48320\x00\x00\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'\x01F15260E051\x00\x00\x00\x00\x00\x00',
b'\x01F15260E061\x00\x00\x00\x00\x00\x00',
b'\x01F15260E110\x00\x00\x00\x00\x00\x00',
],
(Ecu.engine, 0x700, None): [
b'\x01896630E62100\x00\x00\x00\x00',
b'\x01896630E62200\x00\x00\x00\x00',
b'\x01896630E64100\x00\x00\x00\x00',
b'\x01896630E64200\x00\x00\x00\x00',
b'\x01896630EB1000\x00\x00\x00\x00',
b'\x01896630EB1100\x00\x00\x00\x00',
b'\x01896630EB1200\x00\x00\x00\x00',
b'\x01896630EB2000\x00\x00\x00\x00',
b'\x01896630EB2100\x00\x00\x00\x00',
b'\x01896630EB2200\x00\x00\x00\x00',
b'\x01896630EC4000\x00\x00\x00\x00',
b'\x01896630ED9000\x00\x00\x00\x00',
b'\x01896630EE1000\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'\x018821F3301400\x00\x00\x00\x00',
b'\x018821F6201200\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'\x028646F0E02100\x00\x00\x00\x008646G2601200\x00\x00\x00\x00',
b'\x028646F4803000\x00\x00\x00\x008646G5301200\x00\x00\x00\x00',
],
},
CAR.HIGHLANDERH_TSS2: {
(Ecu.eps, 0x7a1, None): [
b'8965B48241\x00\x00\x00\x00\x00\x00',
b'8965B48310\x00\x00\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'\x01F15264872300\x00\x00\x00\x00',
b'\x01F15264872400\x00\x00\x00\x00',
b'\x01F15264872500\x00\x00\x00\x00',
b'\x01F15264873500\x00\x00\x00\x00',
b'\x01F152648C6300\x00\x00\x00\x00',
],
(Ecu.engine, 0x700, None): [
b'\x01896630E67000\x00\x00\x00\x00',
b'\x01896630EA1000\x00\x00\x00\x00',
b'\x01896630EE4000\x00\x00\x00\x00',
b'\x01896630EA1000\x00\x00\x00\x00897CF4801001\x00\x00\x00\x00',
b'\x02896630E66000\x00\x00\x00\x00897CF4801001\x00\x00\x00\x00',
b'\x02896630EB3000\x00\x00\x00\x00897CF4801001\x00\x00\x00\x00',
b'\x02896630EB3100\x00\x00\x00\x00897CF4801001\x00\x00\x00\x00',
b'\x02896630E66100\x00\x00\x00\x00897CF4801001\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'\x018821F3301400\x00\x00\x00\x00',
b'\x018821F6201200\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'\x028646F0E02100\x00\x00\x00\x008646G2601200\x00\x00\x00\x00',
b'\x028646F4803000\x00\x00\x00\x008646G5301200\x00\x00\x00\x00',
],
},
CAR.LEXUS_IS: {
(Ecu.engine, 0x700, None): [
b'\x018966353M7000\x00\x00\x00\x00',
b'\x018966353M7100\x00\x00\x00\x00',
b'\x018966353Q2000\x00\x00\x00\x00',
b'\x018966353Q2300\x00\x00\x00\x00',
b'\x018966353Q4000\x00\x00\x00\x00',
b'\x018966353R1100\x00\x00\x00\x00',
b'\x018966353R7100\x00\x00\x00\x00',
b'\x018966353R8100\x00\x00\x00\x00',
],
(Ecu.engine, 0x7e0, None): [
b'\x0232480000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02353P7000\x00\x00\x00\x00\x00\x00\x00\x00530J5000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02353P9000\x00\x00\x00\x00\x00\x00\x00\x00553C1000\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152653300\x00\x00\x00\x00\x00\x00',
b'F152653301\x00\x00\x00\x00\x00\x00',
b'F152653310\x00\x00\x00\x00\x00\x00',
b'F152653330\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'881515306200\x00\x00\x00\x00',
b'881515306400\x00\x00\x00\x00',
b'881515306500\x00\x00\x00\x00',
b'881515307400\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B53270\x00\x00\x00\x00\x00\x00',
b'8965B53271\x00\x00\x00\x00\x00\x00',
b'8965B53280\x00\x00\x00\x00\x00\x00',
b'8965B53281\x00\x00\x00\x00\x00\x00',
b'8965B53311\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4702300\x00\x00\x00\x00',
b'8821F4702100\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F5301101\x00\x00\x00\x00',
b'8646F5301200\x00\x00\x00\x00',
b'8646F5301300\x00\x00\x00\x00',
b'8646F5301400\x00\x00\x00\x00',
],
},
CAR.PRIUS: {
(Ecu.engine, 0x700, None): [
b'\x02896634761000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896634761100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896634761200\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896634762000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896634763000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896634763100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896634765000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896634765100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896634769000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896634769100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896634769200\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896634770000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896634774000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896634774100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896634774200\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896634782000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x02896634784000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x028966347A0000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x028966347A5000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x028966347A8000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x028966347B0000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x03896634759100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701003\x00\x00\x00\x00',
b'\x03896634759200\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701003\x00\x00\x00\x00',
b'\x03896634759200\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701004\x00\x00\x00\x00',
b'\x03896634759300\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701004\x00\x00\x00\x00',
b'\x03896634760000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701002\x00\x00\x00\x00',
b'\x03896634760000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701003\x00\x00\x00\x00',
b'\x03896634760000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701004\x00\x00\x00\x00',
b'\x03896634760100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701003\x00\x00\x00\x00',
b'\x03896634760200\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701003\x00\x00\x00\x00',
b'\x03896634760200\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701004\x00\x00\x00\x00',
b'\x03896634760300\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701004\x00\x00\x00\x00',
b'\x03896634768000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4703001\x00\x00\x00\x00',
b'\x03896634768000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4703002\x00\x00\x00\x00',
b'\x03896634768100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4703002\x00\x00\x00\x00',
b'\x03896634785000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4705001\x00\x00\x00\x00',
b'\x03896634785000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4710001\x00\x00\x00\x00',
b'\x03896634786000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4705001\x00\x00\x00\x00',
b'\x03896634786000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4710001\x00\x00\x00\x00',
b'\x03896634789000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4703002\x00\x00\x00\x00',
b'\x038966347A3000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701003\x00\x00\x00\x00',
b'\x038966347A3000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4707001\x00\x00\x00\x00',
b'\x038966347B6000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4710001\x00\x00\x00\x00',
b'\x038966347B7000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4710001\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B47021\x00\x00\x00\x00\x00\x00',
b'8965B47022\x00\x00\x00\x00\x00\x00',
b'8965B47023\x00\x00\x00\x00\x00\x00',
b'8965B47050\x00\x00\x00\x00\x00\x00',
b'8965B47060\x00\x00\x00\x00\x00\x00', # This is the EPS with good angle sensor
],
(Ecu.esp, 0x7b0, None): [
b'F152647290\x00\x00\x00\x00\x00\x00',
b'F152647300\x00\x00\x00\x00\x00\x00',
b'F152647310\x00\x00\x00\x00\x00\x00',
b'F152647414\x00\x00\x00\x00\x00\x00',
b'F152647415\x00\x00\x00\x00\x00\x00',
b'F152647416\x00\x00\x00\x00\x00\x00',
b'F152647417\x00\x00\x00\x00\x00\x00',
b'F152647470\x00\x00\x00\x00\x00\x00',
b'F152647490\x00\x00\x00\x00\x00\x00',
b'F152647682\x00\x00\x00\x00\x00\x00',
b'F152647683\x00\x00\x00\x00\x00\x00',
b'F152647684\x00\x00\x00\x00\x00\x00',
b'F152647862\x00\x00\x00\x00\x00\x00',
b'F152647863\x00\x00\x00\x00\x00\x00',
b'F152647864\x00\x00\x00\x00\x00\x00',
b'F152647865\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'881514702300\x00\x00\x00\x00',
b'881514702400\x00\x00\x00\x00',
b'881514703100\x00\x00\x00\x00',
b'881514704100\x00\x00\x00\x00',
b'881514706000\x00\x00\x00\x00',
b'881514706100\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4702000\x00\x00\x00\x00',
b'8821F4702100\x00\x00\x00\x00',
b'8821F4702300\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F4701300\x00\x00\x00\x00',
b'8646F4702001\x00\x00\x00\x00',
b'8646F4702100\x00\x00\x00\x00',
b'8646F4702200\x00\x00\x00\x00',
b'8646F4705000\x00\x00\x00\x00',
b'8646F4705200\x00\x00\x00\x00',
],
},
CAR.PRIUS_V: {
(Ecu.esp, 0x7b0, None): [
b'F152647280\x00\x00\x00\x00\x00\x00',
],
(Ecu.engine, 0x7e0, None): [
b'\x0234781000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'881514705100\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4702300\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F4703300\x00\x00\x00\x00',
],
},
CAR.RAV4: {
(Ecu.engine, 0x7e0, None): [
b'\x02342Q1000\x00\x00\x00\x00\x00\x00\x00\x0054212000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02342Q1100\x00\x00\x00\x00\x00\x00\x00\x0054212000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02342Q1200\x00\x00\x00\x00\x00\x00\x00\x0054212000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02342Q1300\x00\x00\x00\x00\x00\x00\x00\x0054212000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02342Q2000\x00\x00\x00\x00\x00\x00\x00\x0054213000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02342Q2100\x00\x00\x00\x00\x00\x00\x00\x0054213000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02342Q2200\x00\x00\x00\x00\x00\x00\x00\x0054213000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02342Q4000\x00\x00\x00\x00\x00\x00\x00\x0054215000\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B42063\x00\x00\x00\x00\x00\x00',
b'8965B42073\x00\x00\x00\x00\x00\x00',
b'8965B42082\x00\x00\x00\x00\x00\x00',
b'8965B42083\x00\x00\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F15260R102\x00\x00\x00\x00\x00\x00',
b'F15260R103\x00\x00\x00\x00\x00\x00',
b'F152642493\x00\x00\x00\x00\x00\x00',
b'F152642492\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'881514201200\x00\x00\x00\x00',
b'881514201300\x00\x00\x00\x00',
b'881514201400\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4702000\x00\x00\x00\x00',
b'8821F4702100\x00\x00\x00\x00',
b'8821F4702300\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F4201100\x00\x00\x00\x00',
b'8646F4201200\x00\x00\x00\x00',
b'8646F4202001\x00\x00\x00\x00',
b'8646F4202100\x00\x00\x00\x00',
b'8646F4204000\x00\x00\x00\x00',
],
},
CAR.RAV4H: {
(Ecu.engine, 0x7e0, None): [
b'\x02342N9000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02342N9100\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02342P0000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B42102\x00\x00\x00\x00\x00\x00',
b'8965B42103\x00\x00\x00\x00\x00\x00',
b'8965B42112\x00\x00\x00\x00\x00\x00',
b'8965B42162\x00\x00\x00\x00\x00\x00',
b'8965B42163\x00\x00\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152642090\x00\x00\x00\x00\x00\x00',
b'F152642110\x00\x00\x00\x00\x00\x00',
b'F152642120\x00\x00\x00\x00\x00\x00',
b'F152642400\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'881514202200\x00\x00\x00\x00',
b'881514202300\x00\x00\x00\x00',
b'881514202400\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4702000\x00\x00\x00\x00',
b'8821F4702100\x00\x00\x00\x00',
b'8821F4702300\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F4201100\x00\x00\x00\x00',
b'8646F4201200\x00\x00\x00\x00',
b'8646F4202001\x00\x00\x00\x00',
b'8646F4202100\x00\x00\x00\x00',
b'8646F4204000\x00\x00\x00\x00',
],
},
CAR.RAV4_TSS2: {
(Ecu.engine, 0x700, None): [
b'\x01896630R58000\x00\x00\x00\x00',
b'\x01896630R58100\x00\x00\x00\x00',
b'\x018966342E2000\x00\x00\x00\x00',
b'\x018966342M8000\x00\x00\x00\x00',
b'\x018966342S9000\x00\x00\x00\x00',
b'\x018966342T1000\x00\x00\x00\x00',
b'\x018966342T6000\x00\x00\x00\x00',
b'\x018966342T9000\x00\x00\x00\x00',
b'\x018966342U4000\x00\x00\x00\x00',
b'\x018966342U4100\x00\x00\x00\x00',
b'\x018966342U5100\x00\x00\x00\x00',
b'\x018966342V0000\x00\x00\x00\x00',
b'\x018966342V3000\x00\x00\x00\x00',
b'\x018966342V3100\x00\x00\x00\x00',
b'\x018966342V3200\x00\x00\x00\x00',
b'\x01896634A05000\x00\x00\x00\x00',
b'\x01896634A19000\x00\x00\x00\x00',
b'\x01896634A19100\x00\x00\x00\x00',
b'\x01896634A20000\x00\x00\x00\x00',
b'\x01896634A20100\x00\x00\x00\x00',
b'\x01896634A22000\x00\x00\x00\x00',
b'\x01896634A22100\x00\x00\x00\x00',
b'\x01896634A30000\x00\x00\x00\x00',
b'\x01896634A44000\x00\x00\x00\x00',
b'\x01896634A45000\x00\x00\x00\x00',
b'\x01896634A46000\x00\x00\x00\x00',
b'\x028966342M7000\x00\x00\x00\x00897CF1201001\x00\x00\x00\x00',
b'\x028966342T0000\x00\x00\x00\x00897CF1201001\x00\x00\x00\x00',
b'\x028966342V1000\x00\x00\x00\x00897CF1202001\x00\x00\x00\x00',
b'\x028966342Y8000\x00\x00\x00\x00897CF1201001\x00\x00\x00\x00',
b'\x02896634A18000\x00\x00\x00\x00897CF1201001\x00\x00\x00\x00',
b'\x02896634A18100\x00\x00\x00\x00897CF1201001\x00\x00\x00\x00',
b'\x02896634A43000\x00\x00\x00\x00897CF4201001\x00\x00\x00\x00',
b'\x02896634A47000\x00\x00\x00\x00897CF4201001\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'\x01F15260R210\x00\x00\x00\x00\x00\x00',
b'\x01F15260R220\x00\x00\x00\x00\x00\x00',
b'\x01F15260R290\x00\x00\x00\x00\x00\x00',
b'\x01F15260R300\x00\x00\x00\x00\x00\x00',
b'\x01F152642551\x00\x00\x00\x00\x00\x00',
b'\x01F152642561\x00\x00\x00\x00\x00\x00',
b'\x01F152642700\x00\x00\x00\x00\x00\x00',
b'\x01F152642701\x00\x00\x00\x00\x00\x00',
b'\x01F152642710\x00\x00\x00\x00\x00\x00',
b'\x01F152642711\x00\x00\x00\x00\x00\x00',
b'\x01F152642750\x00\x00\x00\x00\x00\x00',
b'\x01F152642751\x00\x00\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B42170\x00\x00\x00\x00\x00\x00',
b'8965B42171\x00\x00\x00\x00\x00\x00',
b'8965B42180\x00\x00\x00\x00\x00\x00',
b'8965B42181\x00\x00\x00\x00\x00\x00',
b'\x028965B0R01200\x00\x00\x00\x008965B0R02200\x00\x00\x00\x00',
b'\x028965B0R01300\x00\x00\x00\x008965B0R02300\x00\x00\x00\x00',
b'\x028965B0R01400\x00\x00\x00\x008965B0R02400\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'\x018821F3301100\x00\x00\x00\x00',
b'\x018821F3301200\x00\x00\x00\x00',
b'\x018821F3301300\x00\x00\x00\x00',
b'\x018821F3301400\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'\x028646F4203200\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00',
b'\x028646F4203300\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00',
b'\x028646F4203400\x00\x00\x00\x008646G2601200\x00\x00\x00\x00',
b'\x028646F4203500\x00\x00\x00\x008646G2601200\x00\x00\x00\x00',
b'\x028646F4203700\x00\x00\x00\x008646G2601400\x00\x00\x00\x00',
b'\x028646F4203800\x00\x00\x00\x008646G2601500\x00\x00\x00\x00',
],
},
CAR.RAV4H_TSS2: {
(Ecu.engine, 0x700, None): [
b'\x01896634A15000\x00\x00\x00\x00',
b'\x018966342M5000\x00\x00\x00\x00',
b'\x018966342W8000\x00\x00\x00\x00',
b'\x018966342X5000\x00\x00\x00\x00',
b'\x018966342X6000\x00\x00\x00\x00',
b'\x01896634A25000\x00\x00\x00\x00',
b'\x018966342W5000\x00\x00\x00\x00',
b'\x028966342W4001\x00\x00\x00\x00897CF1203001\x00\x00\x00\x00',
b'\x02896634A13000\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02896634A13001\x00\x00\x00\x00897CF4801001\x00\x00\x00\x00',
b'\x02896634A13101\x00\x00\x00\x00897CF4801001\x00\x00\x00\x00',
b'\x02896634A14001\x00\x00\x00\x00897CF1203001\x00\x00\x00\x00',
b'\x02896634A23000\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02896634A23001\x00\x00\x00\x00897CF1203001\x00\x00\x00\x00',
b'\x02896634A14001\x00\x00\x00\x00897CF4801001\x00\x00\x00\x00',
b'\x02896634A14101\x00\x00\x00\x00897CF4801001\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152642291\x00\x00\x00\x00\x00\x00',
b'F152642290\x00\x00\x00\x00\x00\x00',
b'F152642322\x00\x00\x00\x00\x00\x00',
b'F152642330\x00\x00\x00\x00\x00\x00',
b'F152642331\x00\x00\x00\x00\x00\x00',
b'F152642531\x00\x00\x00\x00\x00\x00',
b'F152642532\x00\x00\x00\x00\x00\x00',
b'F152642520\x00\x00\x00\x00\x00\x00',
b'F152642521\x00\x00\x00\x00\x00\x00',
b'F152642540\x00\x00\x00\x00\x00\x00',
b'F152642541\x00\x00\x00\x00\x00\x00',
b'F152642542\x00\x00\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B42170\x00\x00\x00\x00\x00\x00',
b'8965B42171\x00\x00\x00\x00\x00\x00',
b'8965B42180\x00\x00\x00\x00\x00\x00',
b'8965B42181\x00\x00\x00\x00\x00\x00',
b'\x028965B0R01200\x00\x00\x00\x008965B0R02200\x00\x00\x00\x00',
b'\x028965B0R01300\x00\x00\x00\x008965B0R02300\x00\x00\x00\x00',
b'\x028965B0R01400\x00\x00\x00\x008965B0R02400\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'\x018821F3301100\x00\x00\x00\x00',
b'\x018821F3301200\x00\x00\x00\x00',
b'\x018821F3301300\x00\x00\x00\x00',
b'\x018821F3301400\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'\x028646F4203200\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00',
b'\x028646F4203300\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00',
b'\x028646F4203400\x00\x00\x00\x008646G2601200\x00\x00\x00\x00',
b'\x028646F4203500\x00\x00\x00\x008646G2601200\x00\x00\x00\x00',
b'\x028646F4203700\x00\x00\x00\x008646G2601400\x00\x00\x00\x00',
b'\x028646F4203800\x00\x00\x00\x008646G2601500\x00\x00\x00\x00',
],
},
CAR.SIENNA: {
(Ecu.engine, 0x700, None): [
b'\x01896630832100\x00\x00\x00\x00',
b'\x01896630832200\x00\x00\x00\x00',
b'\x01896630838000\x00\x00\x00\x00',
b'\x01896630838100\x00\x00\x00\x00',
b'\x01896630842000\x00\x00\x00\x00',
b'\x01896630843000\x00\x00\x00\x00',
b'\x01896630851000\x00\x00\x00\x00',
b'\x01896630851100\x00\x00\x00\x00',
b'\x01896630851200\x00\x00\x00\x00',
b'\x01896630852000\x00\x00\x00\x00',
b'\x01896630852100\x00\x00\x00\x00',
b'\x01896630859000\x00\x00\x00\x00',
b'\x01896630860000\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B45070\x00\x00\x00\x00\x00\x00',
b'8965B45080\x00\x00\x00\x00\x00\x00',
b'8965B45082\x00\x00\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152608130\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'881510801100\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4702100\x00\x00\x00\x00',
b'8821F4702200\x00\x00\x00\x00',
b'8821F4702300\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F0801100\x00\x00\x00\x00',
],
},
CAR.LEXUS_CTH: {
(Ecu.dsu, 0x791, None): [
b'881517601100\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152676144\x00\x00\x00\x00\x00\x00',
],
(Ecu.engine, 0x7e0, None): [
b'\x0237635000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4702300\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F7601100\x00\x00\x00\x00',
],
},
CAR.LEXUS_ES_TSS2: {
(Ecu.engine, 0x700, None): [
b'\x01896630EC9100\x00\x00\x00\x00',
b'\x018966333T5000\x00\x00\x00\x00',
b'\x018966333T5100\x00\x00\x00\x00',
b'\x018966333X6000\x00\x00\x00\x00',
b'\x01896633T07000\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'\x01F152606281\x00\x00\x00\x00\x00\x00',
b'\x01F152606340\x00\x00\x00\x00\x00\x00',
b'\x01F152606461\x00\x00\x00\x00\x00\x00',
b'\x01F15260E031\x00\x00\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B33252\x00\x00\x00\x00\x00\x00',
b'8965B33590\x00\x00\x00\x00\x00\x00',
b'8965B33690\x00\x00\x00\x00\x00\x00',
b'8965B48271\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'\x018821F3301100\x00\x00\x00\x00',
b'\x018821F3301200\x00\x00\x00\x00',
b'\x018821F3301400\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'\x028646F33030D0\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00',
b'\x028646F3303200\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00',
b'\x028646F3304100\x00\x00\x00\x008646G2601200\x00\x00\x00\x00',
b'\x028646F3304300\x00\x00\x00\x008646G2601500\x00\x00\x00\x00',
b'\x028646F4810200\x00\x00\x00\x008646G2601400\x00\x00\x00\x00',
],
},
CAR.LEXUS_ESH_TSS2: {
(Ecu.engine, 0x700, None): [
b'\x028966333S8000\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00',
b'\x028966333S8000\x00\x00\x00\x00897CF3305001\x00\x00\x00\x00',
b'\x028966333T0100\x00\x00\x00\x00897CF3305001\x00\x00\x00\x00',
b'\x028966333V4000\x00\x00\x00\x00897CF3305001\x00\x00\x00\x00',
b'\x02896633T09000\x00\x00\x00\x00897CF3307001\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152633423\x00\x00\x00\x00\x00\x00',
b'F152633680\x00\x00\x00\x00\x00\x00',
b'F152633681\x00\x00\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B33252\x00\x00\x00\x00\x00\x00',
b'8965B33590\x00\x00\x00\x00\x00\x00',
b'8965B33690\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'\x018821F3301100\x00\x00\x00\x00',
b'\x018821F3301200\x00\x00\x00\x00',
b'\x018821F3301300\x00\x00\x00\x00',
b'\x018821F3301400\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'\x028646F33030D0\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00',
b'\x028646F3303100\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00',
b'\x028646F3303200\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00',
b'\x028646F3304100\x00\x00\x00\x008646G2601200\x00\x00\x00\x00',
b'\x028646F3304200\x00\x00\x00\x008646G2601400\x00\x00\x00\x00',
b'\x028646F3304300\x00\x00\x00\x008646G2601500\x00\x00\x00\x00',
],
},
CAR.LEXUS_ESH: {
(Ecu.engine, 0x7e0, None): [
b'\x02333M4200\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152633171\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'881513310400\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B33512\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4701100\x00\x00\x00\x00',
b'8821F4701300\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F3302001\x00\x00\x00\x00',
b'8646F3302200\x00\x00\x00\x00',
],
},
CAR.LEXUS_NX: {
(Ecu.engine, 0x700, None): [
b'\x01896637850000\x00\x00\x00\x00',
b'\x01896637851000\x00\x00\x00\x00',
b'\x01896637852000\x00\x00\x00\x00',
b'\x01896637854000\x00\x00\x00\x00',
b'\x01896637878000\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152678130\x00\x00\x00\x00\x00\x00',
b'F152678140\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'881517803100\x00\x00\x00\x00',
b'881517803300\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B78060\x00\x00\x00\x00\x00\x00',
b'8965B78080\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4702100\x00\x00\x00\x00',
b'8821F4702300\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F7801100\x00\x00\x00\x00',
b'8646F7801300\x00\x00\x00\x00',
],
},
CAR.LEXUS_NX_TSS2: {
(Ecu.engine, 0x700, None): [
b'\x018966378B2100\x00\x00\x00\x00',
b'\x018966378G3000\x00\x00\x00\x00',
b'\x018966378B3000\x00\x00\x00\x00'
],
(Ecu.esp, 0x7b0, None): [
b'\x01F152678221\x00\x00\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B78120\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b"\x018821F3301400\x00\x00\x00\x00",
b'\x018821F3301200\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'\x028646F78030A0\x00\x00\x00\x008646G2601200\x00\x00\x00\x00',
b'\x028646F7803100\x00\x00\x00\x008646G2601400\x00\x00\x00\x00',
],
},
CAR.LEXUS_NXH: {
(Ecu.engine, 0x7e0, None): [
b'\x0237841000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0237842000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0237880000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0237882000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0237886000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152678160\x00\x00\x00\x00\x00\x00',
b'F152678170\x00\x00\x00\x00\x00\x00',
b'F152678171\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'881517804300\x00\x00\x00\x00',
b'881517804100\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B78060\x00\x00\x00\x00\x00\x00',
b'8965B78080\x00\x00\x00\x00\x00\x00',
b'8965B78100\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4702300\x00\x00\x00\x00',
b'8821F4702100\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F7801300\x00\x00\x00\x00',
b'8646F7801100\x00\x00\x00\x00',
],
},
CAR.LEXUS_RC: {
(Ecu.engine, 0x7e0, None): [
b'\x0232484000\x00\x00\x00\x00\x00\x00\x00\x0052422000\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152624221\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'881512409100\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B24081\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4702300\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F2402200\x00\x00\x00\x00',
],
},
CAR.LEXUS_RX: {
(Ecu.engine, 0x700, None): [
b'\x01896630E36200\x00\x00\x00\x00',
b'\x01896630E36300\x00\x00\x00\x00',
b'\x01896630E37200\x00\x00\x00\x00',
b'\x01896630E37300\x00\x00\x00\x00',
b'\x01896630E41000\x00\x00\x00\x00',
b'\x01896630E41100\x00\x00\x00\x00',
b'\x01896630E41200\x00\x00\x00\x00',
b'\x01896630E41500\x00\x00\x00\x00',
b'\x01896630EA3100\x00\x00\x00\x00',
b'\x01896630EA3400\x00\x00\x00\x00',
b'\x01896630EA4100\x00\x00\x00\x00',
b'\x01896630EA4300\x00\x00\x00\x00',
b'\x01896630EA4400\x00\x00\x00\x00',
b'\x01896630EA6300\x00\x00\x00\x00',
b'\x018966348R1300\x00\x00\x00\x00',
b'\x018966348R8500\x00\x00\x00\x00',
b'\x018966348W1300\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152648472\x00\x00\x00\x00\x00\x00',
b'F152648473\x00\x00\x00\x00\x00\x00',
b'F152648492\x00\x00\x00\x00\x00\x00',
b'F152648493\x00\x00\x00\x00\x00\x00',
b'F152648474\x00\x00\x00\x00\x00\x00',
b'F152648630\x00\x00\x00\x00\x00\x00',
b'F152648494\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'881514810300\x00\x00\x00\x00',
b'881514810500\x00\x00\x00\x00',
b'881514810700\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B0E011\x00\x00\x00\x00\x00\x00',
b'8965B0E012\x00\x00\x00\x00\x00\x00',
b'8965B48102\x00\x00\x00\x00\x00\x00',
b'8965B48111\x00\x00\x00\x00\x00\x00',
b'8965B48112\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4701000\x00\x00\x00\x00',
b'8821F4701100\x00\x00\x00\x00',
b'8821F4701200\x00\x00\x00\x00',
b'8821F4701300\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F4801100\x00\x00\x00\x00',
b'8646F4801200\x00\x00\x00\x00',
b'8646F4802001\x00\x00\x00\x00',
b'8646F4802100\x00\x00\x00\x00',
b'8646F4802200\x00\x00\x00\x00',
b'8646F4809000\x00\x00\x00\x00',
],
},
CAR.LEXUS_RXH: {
(Ecu.engine, 0x7e0, None): [
b'\x02348J7000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02348N0000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02348Q4000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02348Q4100\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02348T1100\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02348T3000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02348V6000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x02348Z3000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152648361\x00\x00\x00\x00\x00\x00',
b'F152648501\x00\x00\x00\x00\x00\x00',
b'F152648502\x00\x00\x00\x00\x00\x00',
b'F152648504\x00\x00\x00\x00\x00\x00',
b'F152648740\x00\x00\x00\x00\x00\x00',
b'F152648A30\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
b'881514811300\x00\x00\x00\x00',
b'881514811500\x00\x00\x00\x00',
b'881514811700\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B0E011\x00\x00\x00\x00\x00\x00',
b'8965B0E012\x00\x00\x00\x00\x00\x00',
b'8965B48111\x00\x00\x00\x00\x00\x00',
b'8965B48112\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4701000\x00\x00\x00\x00',
b'8821F4701100\x00\x00\x00\x00',
b'8821F4701200\x00\x00\x00\x00',
b'8821F4701300\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'8646F4801200\x00\x00\x00\x00',
b'8646F4802001\x00\x00\x00\x00',
b'8646F4802100\x00\x00\x00\x00',
b'8646F4802200\x00\x00\x00\x00',
b'8646F4809000\x00\x00\x00\x00',
],
},
CAR.LEXUS_RX_TSS2: {
(Ecu.engine, 0x700, None): [
b'\x01896630EA9000\x00\x00\x00\x00',
b'\x01896630EB0000\x00\x00\x00\x00',
b'\x01896630EC9000\x00\x00\x00\x00',
b'\x01896630ED0000\x00\x00\x00\x00',
b'\x01896630ED6000\x00\x00\x00\x00',
b'\x018966348W5100\x00\x00\x00\x00',
b'\x018966348W9000\x00\x00\x00\x00',
b'\x01896634D12000\x00\x00\x00\x00',
b'\x01896634D12100\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'\x01F15260E031\x00\x00\x00\x00\x00\x00',
b'\x01F15260E041\x00\x00\x00\x00\x00\x00',
b'\x01F152648781\x00\x00\x00\x00\x00\x00',
b'\x01F152648801\x00\x00\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B48261\x00\x00\x00\x00\x00\x00',
b'8965B48271\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'\x018821F3301100\x00\x00\x00\x00',
b'\x018821F3301300\x00\x00\x00\x00',
b'\x018821F3301400\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'\x028646F4810100\x00\x00\x00\x008646G2601200\x00\x00\x00\x00',
b'\x028646F4810200\x00\x00\x00\x008646G2601400\x00\x00\x00\x00',
b'\x028646F4810300\x00\x00\x00\x008646G2601400\x00\x00\x00\x00',
],
},
CAR.LEXUS_RXH_TSS2: {
(Ecu.engine, 0x7e0, None): [
b'\x02348X8000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0234D14000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0234D16000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152648831\x00\x00\x00\x00\x00\x00',
b'F152648D00\x00\x00\x00\x00\x00\x00',
b'F152648D60\x00\x00\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B48271\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'\x018821F3301400\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'\x028646F4810200\x00\x00\x00\x008646G2601400\x00\x00\x00\x00',
b'\x028646F4810100\x00\x00\x00\x008646G2601200\x00\x00\x00\x00',
],
},
CAR.PRIUS_TSS2: {
(Ecu.engine, 0x700, None): [
b'\x028966347B1000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x028966347C6000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x028966347C8000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00',
b'\x038966347C0000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4710101\x00\x00\x00\x00',
b'\x038966347C1000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4710101\x00\x00\x00\x00',
b'\x038966347C5000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4707101\x00\x00\x00\x00',
b'\x038966347C5100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4707101\x00\x00\x00\x00',
],
(Ecu.esp, 0x7b0, None): [
b'F152647500\x00\x00\x00\x00\x00\x00',
b'F152647510\x00\x00\x00\x00\x00\x00',
b'F152647520\x00\x00\x00\x00\x00\x00',
b'F152647521\x00\x00\x00\x00\x00\x00',
b'F152647531\x00\x00\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B47070\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'\x018821F3301400\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'\x028646F4707000\x00\x00\x00\x008646G2601400\x00\x00\x00\x00',
b'\x028646F4710000\x00\x00\x00\x008646G2601500\x00\x00\x00\x00',
],
},
CAR.MIRAI: {
(Ecu.esp, 0x7D1, None): [b'\x01898A36203000\x00\x00\x00\x00',],
(Ecu.esp, 0x7B0, None): [b'\x01F15266203200\x00\x00\x00\x00',], # a second ESP ECU
(Ecu.eps, 0x7A1, None): [b'\x028965B6204100\x00\x00\x00\x008965B6203100\x00\x00\x00\x00',],
(Ecu.fwdRadar, 0x750, 0xf): [b'\x018821F6201200\x00\x00\x00\x00',],
(Ecu.fwdCamera, 0x750, 0x6d): [b'\x028646F6201400\x00\x00\x00\x008646G5301200\x00\x00\x00\x00',],
},
CAR.ALPHARD_TSS2: {
(Ecu.engine, 0x7e0, None): [
b'\x0235870000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x0235883000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B58040\x00\x00\x00\x00\x00\x00',
b'8965B58052\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'\x018821F3301200\x00\x00\x00\x00',
b'\x018821F3301400\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
b'\x028646F58010C0\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00',
b'\x028646F5803200\x00\x00\x00\x008646G2601400\x00\x00\x00\x00',
],
},
}
# NOTE(review): presumably the driver steering-torque magnitude (EPS sensor
# units) above which the driver is considered to be overriding lateral
# control — confirm against the carstate/interface code that consumes it.
STEER_THRESHOLD = 100
DBC = {
CAR.RAV4H: dbc_dict('toyota_tnga_k_pt_generated', 'toyota_adas'),
CAR.RAV4: dbc_dict('toyota_new_mc_pt_generated', 'toyota_adas'),
CAR.PRIUS: dbc_dict('toyota_nodsu_pt_generated', 'toyota_adas'),
CAR.PRIUS_V: dbc_dict('toyota_new_mc_pt_generated', 'toyota_adas'),
CAR.COROLLA: dbc_dict('toyota_new_mc_pt_generated', 'toyota_adas'),
CAR.LEXUS_RC: dbc_dict('toyota_tnga_k_pt_generated', 'toyota_adas'),
CAR.LEXUS_RX: dbc_dict('toyota_tnga_k_pt_generated', 'toyota_adas'),
CAR.LEXUS_RXH: dbc_dict('toyota_tnga_k_pt_generated', 'toyota_adas'),
CAR.LEXUS_RX_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.LEXUS_RXH_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.CHR: dbc_dict('toyota_nodsu_pt_generated', 'toyota_adas'),
CAR.CHRH: dbc_dict('toyota_nodsu_pt_generated', 'toyota_adas'),
CAR.CAMRY: dbc_dict('toyota_nodsu_pt_generated', 'toyota_adas'),
CAR.CAMRYH: dbc_dict('toyota_nodsu_pt_generated', 'toyota_adas'),
CAR.CAMRY_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.CAMRYH_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.HIGHLANDER: dbc_dict('toyota_tnga_k_pt_generated', 'toyota_adas'),
CAR.HIGHLANDER_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.HIGHLANDERH: dbc_dict('toyota_tnga_k_pt_generated', 'toyota_adas'),
CAR.HIGHLANDERH_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.AVALON: dbc_dict('toyota_tnga_k_pt_generated', 'toyota_adas'),
CAR.AVALON_2019: dbc_dict('toyota_nodsu_pt_generated', 'toyota_adas'),
CAR.AVALONH_2019: dbc_dict('toyota_nodsu_pt_generated', 'toyota_adas'),
CAR.AVALON_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.RAV4_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.COROLLA_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.COROLLAH_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.LEXUS_ES_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.LEXUS_ESH_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.LEXUS_ESH: dbc_dict('toyota_new_mc_pt_generated', 'toyota_adas'),
CAR.SIENNA: dbc_dict('toyota_tnga_k_pt_generated', 'toyota_adas'),
CAR.LEXUS_IS: dbc_dict('toyota_tnga_k_pt_generated', 'toyota_adas'),
CAR.LEXUS_CTH: dbc_dict('toyota_new_mc_pt_generated', 'toyota_adas'),
CAR.RAV4H_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.LEXUS_NXH: dbc_dict('toyota_tnga_k_pt_generated', 'toyota_adas'),
CAR.LEXUS_NX: dbc_dict('toyota_tnga_k_pt_generated', 'toyota_adas'),
CAR.LEXUS_NX_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.PRIUS_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.MIRAI: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.ALPHARD_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
}
# These cars have non-standard EPS torque scale factors. All others are 73
EPS_SCALE = defaultdict(lambda: 73, {CAR.PRIUS: 66, CAR.COROLLA: 88, CAR.LEXUS_IS: 77, CAR.LEXUS_RC: 77, CAR.LEXUS_CTH: 100, CAR.PRIUS_V: 100})
# Toyota/Lexus Safety Sense 2.0 and 2.5
TSS2_CAR = {CAR.RAV4_TSS2, CAR.COROLLA_TSS2, CAR.COROLLAH_TSS2, CAR.LEXUS_ES_TSS2, CAR.LEXUS_ESH_TSS2, CAR.RAV4H_TSS2,
CAR.LEXUS_RX_TSS2, CAR.LEXUS_RXH_TSS2, CAR.HIGHLANDER_TSS2, CAR.HIGHLANDERH_TSS2, CAR.PRIUS_TSS2, CAR.CAMRY_TSS2, CAR.CAMRYH_TSS2,
CAR.MIRAI, CAR.LEXUS_NX_TSS2, CAR.ALPHARD_TSS2, CAR.AVALON_TSS2}
NO_DSU_CAR = TSS2_CAR | {CAR.CHR, CAR.CHRH, CAR.CAMRY, CAR.CAMRYH}
EV_HYBRID_CAR = {CAR.AVALONH_2019, CAR.CAMRYH, CAR.CAMRYH_TSS2, CAR.CHRH, CAR.COROLLAH_TSS2, CAR.HIGHLANDERH, CAR.HIGHLANDERH_TSS2, CAR.PRIUS,
CAR.PRIUS_V, CAR.RAV4H, CAR.RAV4H_TSS2, CAR.LEXUS_CTH, CAR.MIRAI, CAR.LEXUS_ESH, CAR.LEXUS_ESH_TSS2, CAR.LEXUS_NXH, CAR.LEXUS_RXH,
CAR.LEXUS_RXH_TSS2, CAR.PRIUS_TSS2}
# no resume button press required
NO_STOP_TIMER_CAR = TSS2_CAR | {CAR.PRIUS_V, CAR.RAV4H, CAR.HIGHLANDERH, CAR.HIGHLANDER, CAR.SIENNA, CAR.LEXUS_ESH}
| 42.501734 | 260 | 0.651629 |
acf3f712868cb3792b402af5940d08fd5e35650a | 2,190 | py | Python | eval/publish_msg.py | cysec-lab/verimqtt | 6327946d155948db8e3a5ba6b4ef9b9763ab0e9e | [
"Apache-2.0"
] | 1 | 2021-03-27T17:55:41.000Z | 2021-03-27T17:55:41.000Z | eval/publish_msg.py | cysec-lab/verimqtt | 6327946d155948db8e3a5ba6b4ef9b9763ab0e9e | [
"Apache-2.0"
] | 9 | 2020-05-25T02:14:20.000Z | 2020-09-23T02:17:34.000Z | eval/publish_msg.py | cysec-lab/verimqtt | 6327946d155948db8e3a5ba6b4ef9b9763ab0e9e | [
"Apache-2.0"
] | null | null | null | import socket
def get_topic_length(topic_name_length):
hex_str = '{0:04x}'.format(topic_name_length)
return hex_str[0:2] + ' ' + hex_str[2:4]
def encode_remaining_length(remaining_length):
remaining_length_hex_str = ''
while True:
d = remaining_length % 128
remaining_length //= 128
if remaining_length > 0:
d |= 128
remaining_length_hex_str += '{0:02x} '.format(d & 255)
if remaining_length == 0:
break
return remaining_length_hex_str
target_ip = "localhost"
target_port = 1883
buffer_size = 4096
tcp_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
tcp_client.connect((target_ip,target_port))
tcp_client.send(b"\x10\x10\x00\x04\x4d\x51\x54\x54\x05\x02\x00\x3c\x03\x21\x00\x14\x00\x00")
_ = tcp_client.recv(buffer_size)
i = 0
total_packet_size = 0
total_time = 0.0
n = 50
max_interval = 268435460
interval = 268435460 // (n - 1)
j = 0
x = []
y = []
while True:
# 1,65535
# topic_name_length = random.randint(1, 65535)
topic_name_length = 1
# 0, 268435455
# payload_length = random.randint(0, 268435455)
payload_length = j
# print(encode_remaining_length(topic_name_length + payload_length + 3))
data = '30{}{}{}00{}'.format(
encode_remaining_length(topic_name_length + payload_length + 3),
get_topic_length(topic_name_length),
'61' * topic_name_length,
'61' * payload_length
)
mqtt_packet_data = bytearray.fromhex(data)
if len(mqtt_packet_data) <= max_interval:
pass
else:
print('done')
break
tcp_client.send(mqtt_packet_data)
total_packet_size += len(mqtt_packet_data)
i += 1
j += interval
totol_packet_count = i
print('計測パケット合計数:', totol_packet_count, '個')
average_packet_size_b = total_packet_size / totol_packet_count
print('平均パケットサイズ: {:.15f}'.format(average_packet_size_b), 'byte')
x.append(payload_length/1000/1000) # mb
# tcp_client.send(b"\x30\x30\x00\x0D\x65\x78\x61\x6D\x70\x6C\x65\x2F\x74\x6F\x70\x69\x63\x00\x68\x65\x6C\x6C\x6F\x20\x6D\x71\x74\x74\x21\x20\xE3\x81\x93\xE3\x82\x93\xE3\x81\xAB\xE3\x81\xA1\xE3\x81\xAF\x4D\x51\x54\x54\x21")
print(x)
tcp_client.send(b"\xe0\x00")
| 25.465116 | 222 | 0.694521 |
acf3f723e4aba20abdb9baa7b67a4c975a0f64e6 | 2,446 | py | Python | tests/algos/test_bcq.py | ningyixue/AIPI530_Final_Project | b95353ffd003692a37a59042dfcd744a18b7e802 | [
"MIT"
] | 565 | 2020-08-01T02:44:28.000Z | 2022-03-30T15:00:54.000Z | tests/algos/test_bcq.py | ningyixue/AIPI530_Final_Project | b95353ffd003692a37a59042dfcd744a18b7e802 | [
"MIT"
] | 144 | 2020-08-01T03:45:10.000Z | 2022-03-30T14:51:16.000Z | tests/algos/test_bcq.py | ningyixue/AIPI530_Final_Project | b95353ffd003692a37a59042dfcd744a18b7e802 | [
"MIT"
] | 103 | 2020-08-26T13:27:34.000Z | 2022-03-31T12:24:27.000Z | import pytest
from d3rlpy.algos.bcq import BCQ, DiscreteBCQ
from tests import performance_test
from .algo_test import (
algo_cartpole_tester,
algo_pendulum_tester,
algo_tester,
algo_update_tester,
)
@pytest.mark.parametrize("observation_shape", [(100,), (4, 84, 84)])
@pytest.mark.parametrize("action_size", [2])
@pytest.mark.parametrize("q_func_factory", ["mean", "qr", "iqn", "fqf"])
@pytest.mark.parametrize(
"scalers", [(None, None, None), ("min_max", "min_max", "min_max")]
)
def test_bcq(
observation_shape,
action_size,
q_func_factory,
scalers,
):
scaler, action_scaler, reward_scaler = scalers
bcq = BCQ(
q_func_factory=q_func_factory,
scaler=scaler,
action_scaler=action_scaler,
reward_scaler=reward_scaler,
rl_start_epoch=0,
)
algo_tester(bcq, observation_shape, test_q_function_copy=True)
algo_update_tester(
bcq,
observation_shape,
action_size,
test_q_function_optim_copy=True,
test_policy_optim_copy=True,
)
@pytest.mark.skip(reason="BCQ is computationally expensive.")
def test_bcq_performance():
bcq = BCQ(use_batch_norm=False)
algo_pendulum_tester(bcq, n_trials=5)
@pytest.mark.parametrize("observation_shape", [(100,), (4, 84, 84)])
@pytest.mark.parametrize("action_size", [2])
@pytest.mark.parametrize("n_critics", [1])
@pytest.mark.parametrize("q_func_factory", ["mean", "qr", "iqn", "fqf"])
@pytest.mark.parametrize("scalers", [(None, None), ("min_max", "min_max")])
@pytest.mark.parametrize("target_reduction_type", ["min", "none"])
def test_discrete_bcq(
observation_shape,
action_size,
n_critics,
q_func_factory,
scalers,
target_reduction_type,
):
scaler, reward_scaler = scalers
bcq = DiscreteBCQ(
n_critics=n_critics,
q_func_factory=q_func_factory,
scaler=scaler,
reward_scaler=reward_scaler,
target_reduction_type=target_reduction_type,
)
algo_tester(bcq, observation_shape, test_q_function_copy=True)
algo_update_tester(
bcq,
observation_shape,
action_size,
discrete=True,
test_q_function_optim_copy=True,
)
@performance_test
@pytest.mark.parametrize("q_func_factory", ["mean", "qr", "iqn", "fqf"])
def test_discrete_bcq_performance(q_func_factory):
bcq = DiscreteBCQ(q_func_factory=q_func_factory)
algo_cartpole_tester(bcq)
| 28.114943 | 75 | 0.694195 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.