id
stringlengths 3
8
| content
stringlengths 100
981k
|
|---|---|
1605354
|
from __future__ import absolute_import
#!/usr/bin/env python
import sys
import unittest
sys.path.append('xypath')
import xypath
import messytables
try:
import hamcrest
except ImportError:
hamcrest = None
import re
import tcore
class Test_Import_Missing(tcore.TMissing):
    """Smoke test against the 'missing' fixture table provided by tcore.TMissing."""

    def test_table_has_properties_at_all(self):
        # Merely accessing .sheet must not raise, even for this fixture.
        self.table.sheet
class Test_Import(tcore.TCore):
    """Exercise every way of constructing an xypath.Table from a file.

    Relies on fixtures from tcore.TCore: self.table, self.wpp_filename and
    self.messy (a messytables table set).
    """

    def test_table_has_sheet_properties(self):
        # The backing sheet comes from xlrd; its repr should say so.
        self.assertIn('xlrd', repr(self.table.sheet))

    def test_from_filename_with_table_name(self):
        """Can we specify only the filename and 'name' of the table?"""
        if hamcrest is None:
            raise unittest.SkipTest("Requires Hamcrest")
        table = xypath.Table.from_filename(
            self.wpp_filename,
            table_name='NOTES')
        self.assertEqual(32, len(table))
        table.filter(
            hamcrest.contains_string('(2) Including Zanzibar.')).assert_one()

    def test_from_filename_with_table_index(self):
        """Can we specify only the filename and index of the table?"""
        new_table = xypath.Table.from_filename(self.wpp_filename,
                                               table_index=5)
        self.assertEqual(1, len(new_table.filter('(2) Including Zanzibar.')))

    def test_from_file_object_table_index(self):
        # from_file_object needs the extension passed explicitly.
        with open(self.wpp_filename, 'rb') as f:
            extension = tcore.get_extension(self.wpp_filename)
            new_table = xypath.Table.from_file_object(
                f, extension, table_index=5)
            self.assertEqual(1, len(new_table.filter('(2) Including Zanzibar.')))

    def test_from_file_object_table_name(self):
        with open(self.wpp_filename, 'rb') as f:
            extension = tcore.get_extension(self.wpp_filename)
            new_table = xypath.Table.from_file_object(
                f, extension, table_name='NOTES')
            self.assertEqual(1, len(new_table.filter('(2) Including Zanzibar.')))

    def test_from_file_object_no_table_specifier(self):
        # Exactly one of table_name / table_index is required.
        with open(self.wpp_filename, 'rb') as f:
            extension = tcore.get_extension(self.wpp_filename)
            self.assertRaises(
                TypeError,
                lambda: xypath.Table.from_file_object(f, extension))

    def test_from_file_object_ambiguous_table_specifier(self):
        # Passing both specifiers is rejected as ambiguous.
        with open(self.wpp_filename, 'rb') as f:
            extension = tcore.get_extension(self.wpp_filename)
            self.assertRaises(
                TypeError,
                lambda: xypath.Table.from_file_object(
                    f, extension, table_name='NOTES', table_index=4))

    def test_from_messy(self):
        # Direct conversion from an already-parsed messytables table.
        new_table = xypath.Table.from_messy(self.messy.tables[0])
        self.assertEqual(265, len(new_table.filter('Estimates')))
|
1605368
|
import numpy as np
import scipy.sparse as sp
from pySDC.implementations.problem_classes.boussinesq_helpers.build2DFDMatrix import get2DMatrix, getBCHorizontal, \
get2DUpwindMatrix
def getBoussinesq2DUpwindMatrix(N, dx, u_adv, order):
    """Assemble the advection operator for all four Boussinesq unknowns.

    The same upwind x-derivative acts independently on each of the four
    fields, so the system matrix is 4x4 block diagonal with -u_adv * Dx on
    the diagonal.  The minus sign brings u_t + u_adv * D_x u = ... into the
    form u_t = M u.
    """
    Dx = get2DUpwindMatrix(N, dx, order)
    advection_block = sp.csr_matrix(-u_adv * Dx)
    M = sp.block_diag((advection_block,) * 4, format="csr")
    return sp.csc_matrix(M)
def getBoussinesq2DMatrix(N, h, bc_hor, bc_ver, c_s, Nfreq, order):
    """Build identity and system matrix for the linear Boussinesq system.

    The unknown ordering is (u, w, b, p); rows M1..M4 are the equations for
    u, w, b and p respectively.  Returns (Id, M) as CSC matrices, both of
    size 4*N[0]*N[1].
    """
    Dx_u, Dz_u = get2DMatrix(N, h, bc_hor[0], bc_ver[0], order)
    Dx_w, Dz_w = get2DMatrix(N, h, bc_hor[1], bc_ver[1], order)
    # Dx_b, Dz_b = get2DMatrix(N, h, bc_hor[2], bc_ver[2], order)
    Dx_p, Dz_p = get2DMatrix(N, h, bc_hor[3], bc_ver[3], order)
    # Id_N = sp.eye(N[0] * N[1])
    Zero = np.zeros((N[0] * N[1], N[0] * N[1]))
    Id_w = sp.eye(N[0] * N[1])
    # Note: Bring all terms to right hand side, therefore a couple of minus signs
    # are needed
    M1 = sp.hstack((Zero, Zero, Zero, -Dx_p), format="csr")
    M2 = sp.hstack((Zero, Zero, Id_w, -Dz_p), format="csr")
    M3 = sp.hstack((Zero, -Nfreq ** 2 * Id_w, Zero, Zero), format="csr")
    M4 = sp.hstack((-c_s ** 2 * Dx_u, -c_s ** 2 * Dz_w, Zero, Zero), format="csr")
    M = sp.vstack((M1, M2, M3, M4), format="csr")
    Id = sp.eye(4 * N[0] * N[1])
    return sp.csc_matrix(Id), sp.csc_matrix(M)
def getBoussinesqBCHorizontal(value, N, dx, bc_hor):
    """Assemble left/right horizontal boundary vectors for the Boussinesq system.

    `value` holds per-field boundary values; the buoyancy field (index 2)
    contributes nothing here (its line is commented out, matching the zero
    buoyancy derivative rows in getBoussinesq2DMatrix).
    """
    bu_left, bu_right = getBCHorizontal(value[0], N, dx, bc_hor[0])
    bw_left, bw_right = getBCHorizontal(value[1], N, dx, bc_hor[1])
    # bb_left, bb_right = getBCHorizontal(value[2], N, dx, bc_hor[2])
    bp_left, bp_right = getBCHorizontal(value[3], N, dx, bc_hor[3])
    # NOTE(review): bp_left appears twice while bu and bw are summed into one
    # segment -- presumably mirroring the (p, p, u+w) structure of rows that
    # carry pressure/velocity derivatives, but confirm against the solver that
    # consumes these vectors before relying on the ordering.
    b_left = np.concatenate((bp_left, bp_left, bu_left + bw_left))
    b_right = np.concatenate((bp_right, bp_right, bu_right + bw_right))
    return b_left, b_right
def getBoussinesqBCVertical():
    """Vertical boundary contribution; homogeneous, hence identically zero."""
    return 0.0
|
1605381
|
import re
from fontTools.agl import AGL2UV
import defcon
from . import registry
from .wrappers import *
# Pattern for "uniXXXX" style glyph names: exactly four hex digits after the
# literal prefix "uni" (anchored at the end of the string).
uniNamePattern = re.compile(
    "uni"
    "([0-9A-Fa-f]{4})"
    "$"
)
def testUnicodeValue(glyph):
    """
    Validate a glyph's Unicode value.

    Checks three things: a "uniXXXX" name must match the assigned codepoint,
    otherwise the name is checked against the Adobe Glyph List (AGL2UV), and
    the codepoint must not be assigned to any other glyph in the font.
    Returns a list of human-readable problem strings (empty when clean).
    """
    # Re-resolve the glyph through the wrapped font/layer so the test operates
    # on the wrapper API used by the rest of this module.
    font = wrapFont(glyph.font)
    layer = font.getLayer(glyph.layer.name)
    glyph = layer[glyph.name]
    report = []
    uni = glyph.unicode
    name = glyph.name
    # test for uniXXXX name
    m = uniNamePattern.match(name)
    if m is not None:
        uniFromName = m.group(1)
        uniFromName = int(uniFromName, 16)
        if uni != uniFromName:
            report.append("The Unicode value for this glyph does not match its name.")
    # test against AGLFN
    else:
        expectedUni = AGL2UV.get(name)
        if expectedUni != uni:
            report.append("The Unicode value for this glyph may not be correct.")
    # look for duplicates
    if uni is not None:
        duplicates = []
        # Note: this loop deliberately reuses (shadows) `name` for iteration;
        # the original glyph name is still available as glyph.name.
        for name in sorted(font.keys()):
            if name == glyph.name:
                continue
            other = font[name]
            if other.unicode == uni:
                duplicates.append(name)
        if duplicates:
            report.append("The Unicode for this glyph is also used by: %s." % " ".join(duplicates))
    return report
# Register the check with the glyph-info test registry; re-run whenever the
# glyph's Unicode assignments change.
registry.registerTest(
    identifier="unicodeValue",
    level="glyphInfo",
    title="Unicode Value",
    description="Unicode value may have problems.",
    testFunction=testUnicodeValue,
    defconClass=defcon.Glyph,
    destructiveNotifications=["Glyph.UnicodesChanged"]
)
|
1605395
|
import os
from typing import List
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from service.github_service import get_contributors
from service.stats_service import compute_stat, get_stats, get_stats_summary
from service.twitch_service import TwitchService
from view_model.stats_viewmodel import StatsViewModel
from view_model.stream_viewmodel import StreamViewModel
from view_model.tag_viewmodel import TagViewModel
from view_model.vod_viewmodel import VodViewModel
from twitchAPI.twitch import Twitch
# This is a public API, so any origin is allowed.
origins = ["*"]
# NOTE(review): `openapi_prefix` is deprecated in newer FastAPI releases in
# favour of `root_path` -- confirm the pinned FastAPI version before changing.
app_public = FastAPI(openapi_prefix="/public")
# Twitch client credentials come from the environment; raises KeyError at
# startup if either variable is unset.
twitch = Twitch(os.environ["CLIENT_ID"], os.environ["CLIENT_SECRET"])
app_public.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
@app_public.get("/streams", response_model=List[StreamViewModel])
async def streams():
    """Live streams, fetched from the Twitch service."""
    twitch_service = TwitchService(twitch)
    return twitch_service.get_streamers()


@app_public.get("/vods", response_model=List[VodViewModel])
async def vods():
    """Recorded videos (VODs), fetched from the Twitch service."""
    twitch_service = TwitchService(twitch)
    return twitch_service.get_vods()


@app_public.get("/stats", response_model=List[StatsViewModel])
async def stats():
    """Stored statistics from the stats service."""
    return get_stats()


@app_public.get("/tags", response_model=List[TagViewModel])
async def tags():
    """Stream tags, fetched from the Twitch service."""
    twitch_service = TwitchService(twitch)
    return twitch_service.get_tags()


@app_public.get("/stats/summary")
async def stats_summary():
    """Summary of the stored statistics."""
    return get_stats_summary()


@app_public.get("/contributors")
async def contributors():
    """Project contributors, fetched from the GitHub service."""
    return get_contributors()
|
1605424
|
from datetime import datetime
from peewee import SqliteDatabase, CharField, DateTimeField, Model
db = SqliteDatabase("clipboard.db")
class Paste(Model):
    """A single clipboard entry persisted in the local SQLite database."""
    # Pasted text content.
    text = CharField()
    # When the paste was recorded.
    date = DateTimeField()

    class Meta:
        database = db


# Ensure the schema exists at import time (idempotent in peewee).
db.create_tables([Paste])
def save_new_paste(text):
    """Persist *text* as a new Paste stamped with the current local time."""
    paste = Paste(text=text, date=datetime.now())
    paste.save()
def get_lastest_paste():
    """Return the text of the most recent paste, or None when empty.

    (The misspelled name is kept -- it is this module's public interface.)
    """
    newest = next(iter(Paste.select().order_by(Paste.date.desc())), None)
    return newest.text if newest is not None else None
def get_pastes():
    """Return all pastes as a query, newest first."""
    return Paste.select().order_by(Paste.date.desc())


def delete_all():
    """Remove every paste from the database."""
    Paste.delete().execute()
|
1605449
|
from pandas import DataFrame
import pandas as pd

# First frame: closing price and volume per trading day.
dates = ['2019-06-21', '2019-06-20']
price_volume = {
    '종가': [113000, 111500],
    '거래량': [555850, 282163]
}
df1 = DataFrame(data=price_volume, index=dates)

# Second frame: open / high / low for the same dates.
open_high_low = {
    '시가': [112500, 110000],
    '고가': [115000, 112000],
    '저가': [111500, 109000]
}
df2 = DataFrame(data=open_high_low, index=dates)

# Join the two frames side by side on their shared date index.
df = pd.concat([df1, df2], axis=1)
print(df)
|
1605450
|
class ConfigError(Exception):
    """Raised when the Raven configuration is invalid from RavenPy's perspective."""
|
1605475
|
from setuptools import setup
setup(name='fmq',
version='0.1',
description='Fast MP Queue, Feed Me Queue',
url='https://github.com/weitang114/FMQ',
author='<NAME>',
author_email='<EMAIL>',
license='MIT',
packages=['fmq'],
zip_safe=False)
|
1605539
|
from __future__ import print_function
try:
from SimpleHTTPServer import SimpleHTTPRequestHandler
except ImportError:
from http.server import SimpleHTTPRequestHandler
try:
import SocketServer as socketserver
except ImportError:
import socketserver
import logging
import cgi
PORT = 8080
class ServerHandler(SimpleHTTPRequestHandler):
    """Static-file handler that logs request headers (and POST form fields).

    Logging goes through logging.error so it is visible without configuring
    the logging module.
    """

    def do_GET(self):
        logging.error(self.headers)
        # Bug fix: zero-argument super() is Python-3-only syntax, which
        # contradicted the Python 2/3 compatibility imports at the top of the
        # file (and SimpleHTTPRequestHandler is an old-style class on py2).
        # Call the base class explicitly so both interpreters work.
        SimpleHTTPRequestHandler.do_GET(self)

    def do_POST(self):
        logging.error(self.headers)
        # NOTE(review): the cgi module is deprecated since Python 3.11 and
        # removed in 3.13 -- consider urllib.parse/email.message instead.
        form = cgi.FieldStorage(
            fp=self.rfile,
            headers=self.headers,
            environ={'REQUEST_METHOD': 'POST',
                     'CONTENT_TYPE': self.headers['Content-Type'],
                     })
        for item in form.list:
            logging.error(item)
        # A POST is answered like a GET: log the form, then serve the path.
        SimpleHTTPRequestHandler.do_GET(self)
# Bind the handler to a blocking TCP server on all interfaces and serve until
# interrupted.
Handler = ServerHandler
httpd = socketserver.TCPServer(("", PORT), Handler)
print("serving at port", PORT)
httpd.serve_forever()
|
1605542
|
import json
from collections import defaultdict
import numpy as np
import tensorflow as tf
from matplotlib import pyplot as plt, patches
from dataset_utils.kitti_datum import KITTIDataset
from dataset_utils.mot_datum import MOTDataset
from trainer.dataset_info import kitti_classes_reverse
from vis_utils.vis_datum import ImageBoxes
def init_track_json(kind="KITTI",
                    path="/Users/kanchana/Documents/current/FYP/data/KITTI_tracking/data_tracking_image_2/training",
                    o_path="/Users/kanchana/Documents/current/FYP/data/KITTI_tracking/generate/tracks.json"):
    """Convert a KITTI or MOT tracking dataset into per-track JSON files.

    Each track is keyed "<sequence>_<track id>" and maps frame number -> a
    dict of normalized box centre/size, aspect ratio, class id and image path.
    The last 80 tracks are written to the "val" file, the rest to "train"
    (o_path is expected to contain a '{}' placeholder for the split name).
    """
    if kind == "KITTI":
        dataset = KITTIDataset(path)
    elif kind == "MOT":
        dataset = MOTDataset(path)
    else:
        # NOTE(review): an unknown kind leaves dataset = None and crashes with
        # AttributeError below -- consider raising ValueError instead.
        dataset = None
    tracks = defaultdict(dict)
    for seq_id, sequence in enumerate(dataset.sequences):
        t_id = 0
        for datum in sequence.datums():
            t_id += 1
            i_w, i_h = datum.image.size
            for obj in datum.objects:
                # Box centre and size normalized by image dimensions.
                x, y = (obj.x_min + obj.x_max) / (2 * i_w), (obj.y_min + obj.y_max) / (2 * i_h)
                w = (obj.x_max - obj.x_min) / i_w
                h = (obj.y_max - obj.y_min) / i_h
                ar = h / w
                if kind == "MOT":
                    # MOT boxes are all pedestrians; use a single class id.
                    category = 0
                else:
                    category = kitti_classes_reverse[obj.category]
                data = {'x': x, 'y': y, 'h': h, 'w': w, 'ar': ar, 'class': category, 'im': datum.im_path}
                tracks["{}_{}".format(seq_id, obj.track)][str(t_id)] = data
    with open(o_path.format("train"), 'w+') as fo:
        json.dump(dict(list(tracks.items())[:-80]), fo)
    with open(o_path.format("val"), 'w+') as fo:
        json.dump(dict(list(tracks.items())[-80:]), fo)
def run_init():
    """Generate track JSON files for both KITTI and MOT16 under base_path."""
    base_path = "/Users/kanchana/Documents/current/FYP/"
    init_track_json(
        kind="KITTI",
        path="{}/data/KITTI_tracking/data_tracking_image_2/training".format(base_path),
        o_path="{}/fyp_2019/LSTM_Kanchana/data/kitti_tracks_{}.json".format(base_path, "{}")
    )
    init_track_json(
        kind="MOT",
        path="{}/data/MOT16/train".format(base_path),
        o_path="{}/fyp_2019/LSTM_Kanchana/data/mot_tracks_{}.json".format(base_path, "{}")
    )
def kitti_data_gen(path="/Users/kanchana/Documents/current/FYP/fyp_2019/LSTM_Kanchana/data/kitti_tracks_{}.json",
                   split="train", testing=False, one_hot_classes=False, anchors=False, num_classes=9):
    """Yield sliding 10-frame windows from the KITTI track JSON.

    Args:
        path: track JSON path with a '{}' placeholder for the split name.
        split: train / val
        testing: also yield the image paths for the window and the target.
        one_hot_classes: one-hot encode the class column of x (testing only).
        anchors: encode y as per-coordinate anchor confidence + offset.
        num_classes: number of classes for one-hot encoding.
    Returns:
        Tuple containing np.arrays of shape [10, 5] and [5,]
        (shapes differ when one_hot_classes/anchors are set; see below).
    """
    assert split in ["train", "val"], "invalid split type: {}".format(split)
    # path may arrive as bytes when driven through tf.data.
    if isinstance(path, bytes):
        path = path.decode()
    tracks = json.load(tf.gfile.GFile(path.format(split), "r"))
    valid_tracks = list(tracks.keys())
    if split == "train":
        np.random.shuffle(valid_tracks)
    for track_id in valid_tracks:
        track = tracks[track_id]
        f_step = len(track.keys())
        # Need 10 input frames plus 1 target frame.
        if f_step < 11:
            continue
        x = np.zeros(shape=(10, 5), dtype=np.float32)
        if testing:
            x_im = []
        # NOTE(review): range(0, f_step - 11) skips the final valid window
        # (f_step == 11 yields nothing), and x_im accumulates across windows
        # of the same track instead of being reset per window -- confirm both
        # are intentional.
        for start in range(0, f_step - 11):
            # Frame key just before the window, used to assert ordering.
            l_step = int(sorted(list(track.keys()), key=lambda a: int(a))[start]) - 1
            i = 0
            for t_step, data in sorted(list(track.items())[start:start + 10], key=lambda vid: int(vid[0])):
                assert int(t_step) > l_step, "order error; keys t-{} & l-{}".format(int(t_step), l_step)
                l_step = int(t_step)
                x[i] = np.array([data['x'], data['y'], data['h'], data['w'], data['class']])
                if testing:
                    x_im.append(data['im'])
                i += 1
            # The 11th frame is the prediction target.
            _, data = list(track.items())[start + 10]
            y = np.array([data['x'], data['y'], data['h'], data['w'], data['class']], dtype=np.float32)
            if testing:
                y_im = data["im"]
            if one_hot_classes:
                temp = np.zeros(shape=(x.shape[0], num_classes))
                temp[np.array(range(len(x[:, 4]))), x[:, 4].astype(int)] = 1
                x_ = np.concatenate([x[:, :4], temp.astype(float)], axis=-1)
                assert x_.shape == (10, 4 + num_classes), "wrong shape for x"
            if anchors:
                # Encode the delta from the last observed box as anchor
                # confidences plus residual offsets (see make_anchors).
                y_x, y_y, y_h, y_w = (y[:4] - x[-1, :4])
                y_x, y_y = make_anchors(y_x, (-0.5, 0, 0.1, 0.2, 0.5)), make_anchors(y_y, (-0.5, 0, 0.1, 0.2, 0.5))
                y_h, y_w = make_anchors(y_h, (-0.5, 0, 0.1, 0.2, 0.5)), make_anchors(y_w, (-0.5, 0, 0.1, 0.2, 0.5))
                y = np.array([y_x, y_y, y_h, y_w])
            if testing:
                if one_hot_classes:
                    yield (x_, y, x_im, y_im)
                else:
                    yield (x, y, x_im, y_im)
            else:
                assert x.shape == (10, 5), "invalid shape"
                yield (x, y)
def mot_data_gen(path="/Users/kanchana/Documents/current/FYP/fyp_2019/LSTM_Kanchana/data/mot_tracks_{}.json",
                 split="train", testing=False, one_hot_classes=False, anchors=False, num_classes=9):
    """Yield sliding 10-frame windows from the MOT track JSON.

    Args:
        path: track JSON path with a '{}' placeholder for the split name.
        split: train / val
        testing: True to get image path
        one_hot_classes: one-hot encode the class column of x (testing only).
        anchors: encode y as per-coordinate anchor confidence + offset.
        num_classes: number of classes for one-hot encoding.
    Returns:
        Tuple containing np.arrays of shape [10, 5] and [5,]
    """
    # The MOT and KITTI track JSONs share one format, and this function's
    # body was a statement-for-statement copy of kitti_data_gen.  Delegate to
    # it so the windowing/encoding logic lives in exactly one place.
    yield from kitti_data_gen(
        path=path,
        split=split,
        testing=testing,
        one_hot_classes=one_hot_classes,
        anchors=anchors,
        num_classes=num_classes,
    )
def make_anchors(val, anchor_centres=(-0.5, 0, 0.1, 0.2, 0.5)):
    """Encode a scalar as a one-hot anchor choice plus residual offset.

    Args:
        val: value to anchor
        anchor_centres: tuple of anchor centres

    Returns:
        np.array of shape (2 * len(anchor_centres),): the first half is a
        one-hot confidence vector marking the nearest centre, the second
        half holds the signed residual (val - centre) at that same index.
        With the default 5 centres the output has shape (10,) -- not (6,)
        with 3 confidences as previously documented.
    """
    centres = np.asarray(anchor_centres, dtype=float)
    idx = int(np.argmin(np.abs(val - centres)))
    conf = np.zeros(shape=len(anchor_centres))
    dist = np.zeros(shape=len(anchor_centres))
    conf[idx] = 1
    dist[idx] = val - centres[idx]
    return np.concatenate((conf, dist), axis=0)
def joint_data_gen(paths=("/Users/kanchana/Documents/current/FYP/fyp_2019/LSTM_Kanchana/data/kitti_tracks_{}.json",
                          "/Users/kanchana/Documents/current/FYP/fyp_2019/LSTM_Kanchana/data/mot_tracks_{}.json"),
                   split="train", num_classes=9, anchors=True, one_hot_classes=True):
    """
    Args:
        paths: list/tuple of str to KITTI path, MOT path respectively
        split: train / val
        num_classes: number of classes
        anchors: output y as anchors
        one_hot_classes: output x with classes in one hot encoding
    Returns:
        generator with each iteration yielding a tuple containing (x,y), ie the ground truth and label for a track.
        If anchors, output y is of shape (4, 6). 6 corresponds to 3 anchors confidence and distance respectively. Else,
        shape is (4,) for [centre_x, centre_y, height, width].
        If one_hot_classes, output x is of shape (10, 4 + num_classes). Else, shape is (10, 5). 10 corresponds to the
        time steps in both cases.
    """
    # split may arrive as bytes when driven through tf.data.
    if isinstance(split, bytes):
        split = split.decode()
    assert split in ["train", "val"], "invalid split type: {}".format(split)
    # The underlying generators yield raw (x, y); encoding happens below.
    gens = (kitti_data_gen, mot_data_gen)
    gens = [gen(path=path, split=split) for gen, path in zip(gens, paths)]
    while True:
        # Sample MOT 3x as often as KITTI to roughly match dataset sizes.
        a = np.random.choice(range(4))  # MOT has over 3 times tracks as KITTI
        if a < 1:
            x, y = next(gens[1])
        else:
            x, y = next(gens[0])
        if one_hot_classes:
            # Replace the class column with a one-hot block.
            temp = np.zeros(shape=(x.shape[0], num_classes))
            temp[np.array(range(len(x[:, 4]))), x[:, 4].astype(int)] = 1
            x = np.concatenate([x[:, :4], temp.astype(float)], axis=-1)
            assert x.shape == (10, 4 + num_classes), "wrong shape for x"
        if anchors:
            # Anchor-encode the delta from the last observed box.
            y_x, y_y, y_h, y_w = (y[:4] - x[-1, :4])
            y_x, y_y = make_anchors(y_x, (-0.5, 0, 0.1, 0.2, 0.5)), make_anchors(y_y, (-0.5, 0, 0.1, 0.2, 0.5))
            y_h, y_w = make_anchors(y_h, (-0.5, 0, 0.1, 0.2, 0.5)), make_anchors(y_w, (-0.5, 0, 0.1, 0.2, 0.5))
            y = np.array([y_x, y_y, y_h, y_w])
        else:
            y = y[:4]
        yield x, y
def val_data_gen(paths=("/Users/kanchana/Documents/current/FYP/data/KITTI_tracking/generate/tracks.json",
                        "/Users/kanchana/Documents/current/FYP/data/MOT16/generate/tracks.json"),
                 split="train", num_classes=9):
    """
    Method to return images for validation data gen.

    NOTE(review): the image paths (x_im, y_im) are fetched from the underlying
    generators but discarded -- only (x, y) is yielded.  Confirm whether the
    images were meant to be yielded as well.
    """
    gens = (kitti_data_gen, mot_data_gen)
    gens = [gen(path=path, split=split, testing=True, one_hot_classes=True, anchors=True, num_classes=num_classes)
            for gen, path in zip(gens, paths)]
    while True:
        # Same 3:1 MOT/KITTI sampling ratio as joint_data_gen.
        a = np.random.choice(range(4))  # MOT has over 3 times tracks as KITTI
        if a < 1:
            x, y, x_im, y_im = next(gens[1])
        else:
            x, y, x_im, y_im = next(gens[0])
        yield x, y
def to_bbox(x, y):
    """Decode an anchor-encoded prediction back into box coordinates.

    x: history array; only the last two rows' first four columns are read.
    y: one row per coordinate, laid out as [confidences..., offsets...]
       (three of each here, per the y[:, :3] / y[:, 3:] split).
    """
    best = np.argmax(y[:, :3], axis=-1)
    offsets = y[:, 3:][np.arange(len(best)), best]
    # Extrapolate from the last box using the last observed frame-to-frame
    # change (epsilon keeps a zero delta from collapsing the anchor term).
    velocity = x[-1, :4] - x[-2, :4] + 1e-5
    return (offsets + best) * velocity + x[-1, :4]
def vis_gen(auto_time=False):
    """Interactively visualize generated windows: blue boxes for the 10 input
    frames, default-colour box for the target frame.

    auto_time: advance automatically every 0.1 s instead of waiting for a key.
    """
    gen = kitti_data_gen(testing=True)
    # gen = mot_data_gen(testing=True)
    while True:
        x, y, x_im, y_im = next(gen)
        fig, ax = plt.subplots(1)
        image = ImageBoxes(path=y_im)
        plt.axis("off")
        # Draw every input-frame box in blue on the target image.
        for num, i in enumerate(x):
            im = plt.imread(x_im[num])
            ih, iw, _ = im.shape
            cx, cy, h, w = i[:4]
            image.add_box([cx, cy, w, h], color='blue')
        # Draw the target box (single-element loop keeps the symmetry).
        for i in [y]:
            im = plt.imread(y_im)
            ih, iw, _ = im.shape
            cx, cy, h, w = i[:4]
            image.add_box([cx, cy, w, h])
        ax.imshow(np.array(image.get_final()))
        if auto_time:
            plt.pause(0.1)
        else:
            plt.waitforbuttonpress()
        plt.close()
|
1605577
|
import pytest
from pandagg.document import DocumentSource
from pandagg.node.mappings import Text, Keyword
from pandagg.utils import equal_queries, equal_search, is_subset, get_action_modifier
def test_equal():
    """equal_queries/equal_search ignore ordering inside bool clauses but not
    the ordering of sort keys."""
    q1 = {"bool": {"must": [{"term": {"field_A": 1}}, {"term": {"field_B": 2}}]}}
    q2 = {"bool": {"must": [{"term": {"field_B": 2}}, {"term": {"field_A": 1}}]}}
    non_equal_q = {
        "bool": {"must": [{"term": {"field_B": 2}}, {"term": {"field_A": 123}}]}
    }
    assert equal_queries(q1, q2)
    assert not equal_queries(q1, non_equal_q)
    assert equal_search(
        {"query": q1, "sort": ["title", {"category": {"order": "desc"}}, "_score"]},
        {"query": q2, "sort": ["title", {"category": {"order": "desc"}}, "_score"]},
    )
    # Sort order is significant, so swapping sort keys breaks equality.
    assert not equal_search(
        {"query": q1, "sort": ["title", {"category": {"order": "desc"}}, "_score"]},
        {"query": q2, "sort": ["title", "_score", {"category": {"order": "desc"}}]},
    )


def test_is_subset():
    """is_subset works on scalars, sets, dicts and (unordered) lists."""
    assert is_subset(1, 1)
    assert is_subset({1}, {1, 2})
    assert not is_subset({1, 2}, {1})
    assert is_subset({"1": 1}, {"1": 1, "2": 2})
    assert not is_subset({"1": 1, "3": 3}, {"1": 1, "2": 2})
    assert is_subset([1, 2], [3, 2, 1])
    assert not is_subset([1, 2, 5], [3, 2, 1])


def test_get_action_modifier():
    """get_action_modifier injects index/op-type and serializes documents."""
    modifier = get_action_modifier(index_name="test-index")
    update_modifier = get_action_modifier(
        index_name="test-index", _op_type_overwrite="update"
    )
    # simple source
    assert modifier({"_source": {"stuff": 1}}) == {
        "_index": "test-index",
        "_source": {"stuff": 1},
    }
    assert update_modifier({"_id": 1, "doc": {"stuff": 1}}) == {
        "_id": 1,
        "_index": "test-index",
        "_op_type": "update",
        "doc": {"stuff": 1},
    }

    class Article(DocumentSource):
        name = Text()
        type = Keyword()

    # document instances are serialized to plain dicts
    article = Article(name="hello", type="test")
    assert modifier(article) == {
        "_index": "test-index",
        "_source": {"name": "hello", "type": "test"},
    }
    assert modifier({"_source": article}) == {
        "_index": "test-index",
        "_source": {"name": "hello", "type": "test"},
    }
    # update operations must carry an _id
    with pytest.raises(TypeError) as e:
        update_modifier(article)
    assert e.value.args == ("Update operation requires an '_id'",)
    assert update_modifier({"doc": article, "_id": 1}) == {
        "_index": "test-index",
        "_id": 1,
        "_op_type": "update",
        "doc": {"name": "hello", "type": "test"},
    }
|
1605634
|
from django.test import TestCase
from django.contrib.auth import get_user_model
from django.test import Client
from suite.views import ClubEmails
from django.urls import reverse
from suite.models import Club
class View_ClubEmails_TestCase(TestCase):
    """Tests for the ClubEmails view: access control and member-type filtering.

    Fixture layout built in setUp: user_list[0] is the original owner,
    every 5th added user (indices 5, 10, 15) is an officer, ownership is then
    transferred to user_list[1], and everyone else is a plain member.
    """

    def setUp(self):
        self.client = Client()
        # create club owner
        self.owner = get_user_model().objects.create(first_name="Owner", last_name="McPerson", email="<EMAIL>")
        self.owner.set_password("<PASSWORD>")
        self.owner.save()
        # Create club
        self.club = Club.objects.create(club_name="club", club_type="PUB", club_description="a club")
        self.club._create_permissions()
        self.club._set_owner(self.owner)
        self.user_list = []
        self.user_list.append(self.owner)
        # add 19 users to club
        for i in range(1, 20):
            # create users
            user = get_user_model().objects.create(first_name="Person" + str(i), last_name="McPerson", email="<EMAIL>" + str(i) + "<EMAIL>")
            user.set_password("<PASSWORD>")
            user.save()
            # add user to club
            self.club.add_member(self.owner, user)
            if (i % 5 == 0):
                self.club.promote_to_officer(self.owner, user)
            self.user_list.append(user)
        # Transfer ownership away from user_list[0] to user_list[1].
        self.club._set_owner(self.user_list[1])

    def test_get_login(self):
        # A logged-in member can view the email list.
        self.client.force_login(get_user_model().objects.get(first_name='Owner'))
        response = self.client.get(reverse('suite:club_emails', kwargs={'club_id': self.club.id}), club_id=self.club.pk)
        self.assertEqual(response.status_code, 200)

    def test_get_not_logged_in(self):
        # Anonymous requests are rejected.
        response = self.client.get(reverse('suite:club_emails', kwargs={'club_id': self.club.id}), club_id=self.club.pk)
        self.assertEqual(response.status_code, 403)

    def test_post(self):
        self.client.force_login(get_user_model().objects.get(first_name='Owner'))
        # test member filter: plain members only (non-multiples of 5)
        response = self.client.post(reverse('suite:club_emails', kwargs={'club_id': self.club.id}), club_id=self.club.pk, data={'member_type': "Member"}, follow=True)
        self.assertEqual(response.status_code, 200)
        for i in range(2, 20):
            if (i % 5 != 0):
                self.assertContains(response, self.user_list[i].email)
        # test officer filter (users promoted at indices 5, 10, 15)
        response = self.client.post(reverse('suite:club_emails', kwargs={'club_id': self.club.id}), club_id=self.club.pk, data={'member_type': "Officer"}, follow=True)
        self.assertContains(response, self.user_list[5].email)
        self.assertContains(response, self.user_list[10].email)
        self.assertContains(response, self.user_list[15].email)
        # test owner filter: only the current owner (user_list[1]) matches
        response = self.client.post(reverse('suite:club_emails', kwargs={'club_id': self.club.id}), club_id=self.club.pk, data={'member_type': "Owner"}, follow=True)
        self.assertNotContains(response, self.user_list[0].email)
        self.assertContains(response, self.user_list[1].email)
|
1605642
|
from collections import namedtuple
'''
A datatype defining either an array of qubits or classical bits.
These are defined in-program using the following syntax:
(def blocka 1 2) # A two-qubit block using qubits one and two
(def blockb 1 3) # Qubits 1-3
(def blockc 1 2 classical) # Classical two-bit block for measurement
(def blocka 1 2 quantum) # Explicit syntax for two-qubit block
'''
Block = namedtuple('Block', ['start', 'end', 'type'])
# Only 'type' defaults; 'start' and 'end' are required.
Block.__new__.__defaults__ = ('quantum',)
# The qubit range is inclusive of both endpoints: per the module docstring,
# "(def blocka 1 2)" is a TWO-qubit block covering qubits 1 and 2, and
# "(def blockb 1 3)" covers qubits 1-3.  range(start, end) dropped the final
# qubit (off-by-one); range(start, end + 1) matches the documented syntax.
Block.expand = lambda self: ' '.join(map(str, range(int(self.start), int(self.end) + 1)))
Block.__str__ = lambda self: self.expand()
|
1605681
|
from dataclasses import dataclass
from typing import Any, List
@dataclass
class TaskUnit:
    """A single unit of work handed to an extractor; run() yields its result."""
    request: Any

    def run(self, extractor):
        # Lazily produce the single extraction result as a one-element stream.
        yield extractor.run(self)
@dataclass
class MultiTaskUnit(TaskUnit):
    """A unit whose extraction fans out to multiple results at once."""
    request: Any

    def run(self, extractor):
        # Unlike TaskUnit.run (a generator), this returns whatever the
        # extractor's multi-run produces, unchanged.
        return extractor.run_multiple(self)
@dataclass
class Task:
    """A batch of task units bound to one extractor; run() streams results."""
    units: List[TaskUnit]
    extractor: Any

    def _run_unit(self, unit: TaskUnit):
        # Dispatch to the unit so TaskUnit/MultiTaskUnit keep their own semantics.
        return unit.run(self.extractor)

    def run(self):
        # Lazy: each unit is only executed when the caller advances the stream.
        yield from (self._run_unit(unit) for unit in self.units)
|
1605686
|
from .config import *
def test_getClashTournaments():
    """List all clash tournaments and sanity-check the payload shape."""
    # NOTE(review): if the call raises, the exception is only printed and
    # `data` stays unbound, so the asserts below fail with NameError rather
    # than a useful message.  Consider letting the exception propagate.
    try:
        data = loop.run_until_complete(panth.getClashTournaments())
    except Exception as e:
        print(e)
    assert type(data) == list
    if len(data) > 0:
        assert "id" in data[0]
        assert "themeId" in data[0]
        assert "schedule" in data[0]


def test_getClashTournamentById():
    """Fetch one tournament by id (same unbound-`data` caveat as above)."""
    try:
        data = loop.run_until_complete(panth.getClashTournamentById(clash_tournamentId))
    except Exception as e:
        print(e)
    assert "id" in data
    assert "themeId" in data
    assert "schedule" in data


def test_getClashTournamentByTeamId():
    """Fetch a tournament via a participating team id."""
    try:
        data = loop.run_until_complete(panth.getClashTournamentByTeamId(clash_teamId))
    except Exception as e:
        print(e)
    assert "id" in data
    assert "themeId" in data
    assert "schedule" in data


def test_getClashTeamById():
    """Fetch a clash team and check its tournament/players fields."""
    try:
        data = loop.run_until_complete(panth.getClashTeamById(clash_teamId))
    except Exception as e:
        print(e)
    assert "id" in data
    assert "tournamentId" in data
    assert "players" in data


def test_getClashPlayersBySummonerId():
    """List clash registrations for a summoner."""
    try:
        data = loop.run_until_complete(panth.getClashPlayersBySummonerId(clash_summonerId))
    except Exception as e:
        print(e)
    assert type(data) == list
    if len(data) > 0:
        assert "teamId" in data[0]
        assert "role" in data[0]
|
1605698
|
from tensorflow.python.ops import nn_ops, gen_nn_ops
import tensorflow as tf
class MNIST_NN:
    """Small 3-layer fully-connected MNIST classifier (TF1 layers API).

    __call__ returns ([dense1, dense2, prediction], logits) so callers can
    inspect intermediate activations (e.g. for relevance propagation).
    """

    def __init__(self, name):
        # Variable-scope name; also used to collect the trainable variables.
        self.name = name

    def __call__(self, X, reuse=False):
        with tf.variable_scope(self.name) as scope:
            if reuse:
                scope.reuse_variables()
            dense1 = tf.layers.dense(inputs=X, units=512, activation=tf.nn.relu, use_bias=True, name='layer1')
            dense2 = tf.layers.dense(inputs=dense1, units=128, activation=tf.nn.relu, use_bias=True, name='layer2')
            logits = tf.layers.dense(inputs=dense2, units=10, activation=None, use_bias=True, name='layer3')
            prediction = tf.nn.softmax(logits)
            return [dense1, dense2, prediction], logits

    @property
    def vars(self):
        # All trainable variables created under this model's scope.
        return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=self.name)
class MNIST_DNN:
    """Deeper 5-layer fully-connected MNIST classifier (TF1 layers API).

    __call__ returns ([dense1..dense4, prediction], logits); layers here are
    unnamed, unlike MNIST_NN.
    """

    def __init__(self, name):
        # Variable-scope name; also used to collect the trainable variables.
        self.name = name

    def __call__(self, X, reuse=False):
        with tf.variable_scope(self.name) as scope:
            if reuse:
                scope.reuse_variables()
            dense1 = tf.layers.dense(inputs=X, units=512, activation=tf.nn.relu, use_bias=True)
            dense2 = tf.layers.dense(inputs=dense1, units=512, activation=tf.nn.relu, use_bias=True)
            dense3 = tf.layers.dense(inputs=dense2, units=512, activation=tf.nn.relu, use_bias=True)
            dense4 = tf.layers.dense(inputs=dense3, units=512, activation=tf.nn.relu, use_bias=True)
            logits = tf.layers.dense(inputs=dense4, units=10, activation=None, use_bias=True)
            prediction = tf.nn.softmax(logits)
            return [dense1, dense2, dense3, dense4, prediction], logits

    @property
    def vars(self):
        # All trainable variables created under this model's scope.
        return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=self.name)
class LRP:
    """Layer-wise Relevance Propagation (alpha-beta rule) over recorded layers.

    Walks `activations` backwards from a chosen output logit and redistributes
    relevance through dense, reshape, conv and pooling layers, dispatching on
    each activation tensor's name.
    """

    def __init__(self, alpha, activations, weights, biases, conv_ksize, pool_ksize, conv_strides, pool_strides, name):
        self.alpha = alpha              # weight of the positive contribution (beta = 1 - alpha)
        self.activations = activations  # forward activations, input-first
        self.weights = weights          # kernels for dense/conv layers, same order
        self.biases = biases
        self.conv_ksize = conv_ksize
        self.pool_ksize = pool_ksize
        self.conv_strides = conv_strides
        self.pool_strides = pool_strides
        self.name = name

    def __call__(self, logit):
        """Return the input-level relevance map for output unit `logit`."""
        with tf.name_scope(self.name):
            Rs = []
            j = 0
            for i in range(len(self.activations) - 1):
                # Bug fix: was `i is 0`, which tests object identity and only
                # worked because CPython interns small integers.
                if i == 0:
                    # Seed with the chosen logit column, then backprop through
                    # the final dense layer restricted to that column.
                    Rs.append(self.activations[i][:, logit, None])
                    Rs.append(self.backprop_dense(self.activations[i + 1], self.weights[j][:, logit, None], self.biases[j][logit, None], Rs[-1]))
                    j += 1
                    continue
                elif 'dense' in self.activations[i].name.lower():
                    Rs.append(self.backprop_dense(self.activations[i + 1], self.weights[j], self.biases[j], Rs[-1]))
                    j += 1
                elif 'reshape' in self.activations[i].name.lower():
                    shape = self.activations[i + 1].get_shape().as_list()
                    shape[0] = -1
                    Rs.append(tf.reshape(Rs[-1], shape))
                elif 'conv' in self.activations[i].name.lower():
                    Rs.append(self.backprop_conv(self.activations[i + 1], self.weights[j], self.biases[j], Rs[-1], self.conv_strides))
                    j += 1
                elif 'pooling' in self.activations[i].name.lower():
                    if 'max' in self.activations[i].name.lower():
                        pooling_type = 'max'
                    else:
                        pooling_type = 'avg'
                    Rs.append(self.backprop_pool(self.activations[i + 1], Rs[-1], self.pool_ksize, self.pool_strides, pooling_type))
                else:
                    # Bug fix: `Error` was an undefined name (NameError at
                    # raise time); use a proper built-in exception.
                    raise ValueError('Unknown operation.')
            return Rs[-1]

    def backprop_conv(self, activation, kernel, bias, relevance, strides, padding='SAME'):
        """Alpha-beta relevance rule for a conv layer (positive/negative split)."""
        W_p = tf.maximum(0., kernel)
        b_p = tf.maximum(0., bias)
        z_p = nn_ops.conv2d(activation, W_p, strides, padding) + b_p
        s_p = relevance / z_p
        c_p = nn_ops.conv2d_backprop_input(tf.shape(activation), W_p, s_p, strides, padding)
        W_n = tf.minimum(0., kernel)
        b_n = tf.minimum(0., bias)
        z_n = nn_ops.conv2d(activation, W_n, strides, padding) + b_n
        s_n = relevance / z_n
        c_n = nn_ops.conv2d_backprop_input(tf.shape(activation), W_n, s_n, strides, padding)
        return activation * (self.alpha * c_p + (1 - self.alpha) * c_n)

    def backprop_pool(self, activation, relevance, ksize, strides, pooling_type, padding='SAME'):
        """Redistribute relevance through an avg or max pooling layer."""
        # Bug fix: was `pooling_type.lower() is 'avg'` -- string identity, not
        # equality; .lower() returns a fresh object, so the comparison was
        # always False and average pooling silently took the max-pool path.
        if pooling_type.lower() == 'avg':
            z = nn_ops.avg_pool(activation, ksize, strides, padding) + 1e-10
            s = relevance / z
            c = gen_nn_ops._avg_pool_grad(tf.shape(activation), s, ksize, strides, padding)
            return activation * c
        else:
            z = nn_ops.max_pool(activation, ksize, strides, padding) + 1e-10
            s = relevance / z
            c = gen_nn_ops._max_pool_grad(activation, z, s, ksize, strides, padding)
            return activation * c

    def backprop_dense(self, activation, kernel, bias, relevance):
        """Alpha-beta relevance rule for a dense layer."""
        W_p = tf.maximum(0., kernel)
        b_p = tf.maximum(0., bias)
        z_p = tf.matmul(activation, W_p) + b_p
        s_p = relevance / z_p
        c_p = tf.matmul(s_p, tf.transpose(W_p))
        W_n = tf.minimum(0., kernel)
        b_n = tf.minimum(0., bias)
        z_n = tf.matmul(activation, W_n) + b_n
        s_n = relevance / z_n
        c_n = tf.matmul(s_n, tf.transpose(W_n))
        return activation * (self.alpha * c_p + (1 - self.alpha) * c_n)
|
1605766
|
from django.core.management.base import BaseCommand, CommandError
import snippets.contact.fixtures as contact_fixtures
'''
Loads service_page fixtures into your joplin environment.
Run with:
pipenv run python joplin/manage.py load_test_contacts
'''
class Command(BaseCommand):
    """Management command: load the contact fixtures into this environment."""
    help = "Loads test data for manual exploration of test contacts"

    def handle(self, *args, **options):
        # Delegates entirely to the fixtures module.
        contact_fixtures.load_all()
|
1605767
|
def fib(n):
    """Return the n-th Fibonacci number (fib(0) == 0, fib(1) == 1)."""
    a = 0
    b = 1
    for _ in range(n):
        a, b = b, a + b
    return a


if __name__ == "__main__":
    n = int(input())
    # Bug fixes: the bound was written `10 ^ 7`, which is XOR (== 13), not
    # ten million; and input validation now raises instead of using `assert`
    # (asserts are stripped under `python -O`).
    if not (0 <= n <= 10 ** 7):
        raise ValueError("n should be in range 0 ≤ n ≤ 10^7")
    # Bug fix: `print fib(n)` was Python 2 statement syntax.
    print(fib(n))
|
1605782
|
import pickle
from collections import deque, defaultdict
from numbers import Number
from time import perf_counter
from typing import Dict, Text, List
import numpy as np
from prl.utils.misc import colors
# Upper bound for every queue kept by Logger; caps memory during long runs.
DEQUE_MAX_LEN = 10 ** 6


def limited_deque():
    """Auxiliary function for Logger class.

    Returns: Deque with maximum length set to DEQUE_MAX_LEN
    """
    return deque(maxlen=DEQUE_MAX_LEN)
class Logger:
    """
    Class for logging scalar values to limited queues. Logged data sent to each
    client is tracked by the Logger, so each client can ask for unseen data and
    receive it.
    """

    def __init__(self):
        # Per-key bounded queues of values and of matching perf_counter stamps.
        self._data = defaultdict(limited_deque)
        self._timestamps = defaultdict(limited_deque)
        self._consumer_list = list()
        # consumer_id -> {key: index of the first not-yet-delivered element}.
        self._flush_indicies = dict()
        self._new_consumer_id = 0

    def add(self, key: str, value: Number):
        """Add a value to queue assigned to key value.

        Args:
            key: logged value name
            value: logged number
        """
        self._data[key].append(value)
        self._timestamps[key].append(perf_counter())

    def save(self, path: str):
        """Saves data to file and clears the logged data.

        Args:
            path: path to the file.
        """
        # BUG FIX: use a context manager so the file handle is closed even if
        # pickling raises; `pickle.dump(self._data, open(path, "wb"))` leaked
        # the handle.
        with open(path, "wb") as f:
            pickle.dump(self._data, f)
        self._clear()

    def register(self) -> int:
        """Registers client in order to receive data from Logger object.

        Returns:
            client ID used to identify client while requesting for a new data.
        """
        consumer_id = self._new_consumer_id
        self._new_consumer_id += 1
        self._consumer_list.append(consumer_id)
        self._flush_indicies[consumer_id] = dict()
        return consumer_id

    def flush(
        self, consumer_id: int
    ) -> (Dict[str, List], Dict[str, range], Dict[str, List]):
        """Method used by clients to receive only new unseen data from logger.

        Args:
            consumer_id: value returned by register method.

        Returns:
            tuple of dicts keyed by value name: new values, their indices,
            and their timestamps.
        """
        data_to_flush = dict()
        indicies_to_flush = dict()
        timestamps_to_flush = dict()
        for k in self._data:
            length = len(self._data[k])
            last_index = self._flush_indicies[consumer_id].get(k, 0)
            if last_index < length:
                data_to_flush[k] = list(self._data[k])[last_index:]
                indicies_to_flush[k] = tuple(range(last_index, length))
                timestamps_to_flush[k] = list(self._timestamps[k])[last_index:]
                # Remember how far this consumer has read.
                self._flush_indicies[consumer_id][k] = length
        return data_to_flush, indicies_to_flush, timestamps_to_flush

    def get_data(self) -> Dict[str, deque]:
        """
        Returns:
            all logged data.
        """
        return self._data

    def _clear(self):
        """Clears the logged data"""
        self._data = defaultdict(limited_deque)
        self._timestamps = defaultdict(limited_deque)

    def __repr__(self):
        return "_data:\n%r\n_timestamps:\n%r\n_flush_indicies:\n%r\n" % (
            str(self._data),
            str(self._timestamps),
            str(self._flush_indicies),
        )
class TimeLogger(Logger):
    """
    Storage for measurements of function and methods execution time. Used by
    timeit function/decorator. Can be used to print summary of a time
    profiling or save all data to generate a plot how execution times are
    changing during the program execution.
    """

    def __str__(self):
        """Render a fixed-width summary table of all timing measurements."""
        string = (
            "\n"
            + "_" * 27
            + "function_name_|__N_samp_|_mean_time"
            + "_" * 19
            + "|_total_time___\n"
        )
        # \xb1 is the plus-minus sign.
        format_str = "%s%40s | %7d | %12.4fms \xb1 %8.4fms | %10.2fs%s\n"
        for k, v in self._data.items():
            len_v = len(v)
            # A deque never exceeds its maxlen, so exactly one branch runs and
            # `color` is always bound; RED marks a saturated (full) buffer.
            if v.maxlen > len_v:
                color = colors.END_FORMAT
            elif v.maxlen == len_v:
                color = colors.RED
            # With more than one sample, skip the first (warm-up) measurement.
            if len(v) > 1:
                string += format_str % (
                    color,
                    k,
                    len_v,
                    np.mean(list(v)[1:]) * 1000,
                    np.std(list(v)[1:]) * 1000,
                    np.sum(list(v)[1:]),
                    colors.END_FORMAT,
                )
            else:
                string += format_str % (
                    color,
                    k,
                    len_v,
                    np.mean(list(v)[:]) * 1000,
                    np.std(list(v)[:]) * 1000,
                    np.sum(list(v)[:]),
                    colors.END_FORMAT,
                )
        string += "_________________________________________|_________|_____________________________|______________\n\n"
        # NOTE(review): "fo times buffer" in the footer is a user-visible typo
        # ("of"); left untouched here since this edit changes comments only.
        string += (
            "Rows with maximum length fo times buffer reached are marked in "
            + colors.RED
            + "RED.\n"
            + colors.END_FORMAT
        )
        return string
# Module-level singleton loggers shared across the package.
time_logger = TimeLogger()  # function/method execution times (timeit helper)
memory_logger = Logger()
agent_logger = Logger()
nn_logger = Logger()
misc_logger = Logger()
|
1605810
|
import pytest
import torch
from torch_geometric.nn import GraphConv, GroupAddRev, SAGEConv
from torch_geometric.nn.dense.linear import Linear
@pytest.mark.parametrize('num_groups', [2, 4, 8, 16])
def test_revgnn_forward_inverse(num_groups):
    """Forward frees the input storage; inverse() reconstructs the input."""
    x = torch.randn(4, 32)
    edge_index = torch.tensor([[0, 1, 2, 3], [0, 0, 1, 1]])
    lin = Linear(32, 32)
    # Each group processes 32 // num_groups channels.
    conv = SAGEConv(32 // num_groups, 32 // num_groups)
    conv = GroupAddRev(conv, num_groups=num_groups)
    assert str(conv) == (f'GroupAddRev(SAGEConv({32 // num_groups}, '
                         f'{32 // num_groups}, aggr=mean), '
                         f'num_groups={num_groups})')
    h = lin(x)
    h_o = h.clone().detach()
    out = conv(h, edge_index)
    # The reversible layer releases the input buffer to save memory.
    assert h.storage().size() == 0
    h_rev = conv.inverse(out, edge_index)
    # inverse() reconstructs the input up to numerical tolerance.
    assert torch.allclose(h_o, h_rev, atol=0.001)
@pytest.mark.parametrize('num_groups', [2, 4, 8, 16])
def test_revgnn_backward(num_groups):
    """A single backward pass through the reversible layer succeeds."""
    x = torch.randn(4, 32)
    edge_index = torch.tensor([[0, 1, 2, 3], [0, 0, 1, 1]])
    lin = Linear(32, 32)
    conv = SAGEConv(32 // num_groups, 32 // num_groups)
    conv = GroupAddRev(conv, num_groups=num_groups)
    h = lin(x)
    out = conv(h, edge_index)
    target = out.mean()
    target.backward()
@pytest.mark.parametrize('num_groups', [2, 4, 8, 16])
def test_revgnn_multi_backward(num_groups):
    """num_bwd_passes=4 allows four differentiations through the layer."""
    x = torch.randn(4, 32)
    edge_index = torch.tensor([[0, 1, 2, 3], [0, 0, 1, 1]])
    lin = Linear(32, 32)
    conv = SAGEConv(32 // num_groups, 32 // num_groups)
    conv = GroupAddRev(conv, num_groups=num_groups, num_bwd_passes=4)
    h = lin(x)
    out = conv(h, edge_index)
    target = out.mean()
    # Four backward/grad passes in total, matching num_bwd_passes.
    target.backward(retain_graph=True)
    target.backward(retain_graph=True)
    torch.autograd.grad(outputs=target, inputs=[h] + list(conv.parameters()),
                        retain_graph=True)
    torch.autograd.grad(outputs=target, inputs=[h] + list(conv.parameters()))
@pytest.mark.parametrize('num_groups', [2, 4, 8, 16])
def test_revgnn_diable(num_groups):
    """With disable=True the layer keeps the input buffer alive."""
    # NOTE(review): "diable" is likely a typo for "disable"; renaming would
    # change the collected test id, so the name is left as-is.
    x = torch.randn(4, 32)
    edge_index = torch.tensor([[0, 1, 2, 3], [0, 0, 1, 1]])
    lin = Linear(32, 32)
    conv = SAGEConv(32 // num_groups, 32 // num_groups)
    conv = GroupAddRev(conv, num_groups=num_groups, disable=True)
    h = lin(x)
    out = conv(h, edge_index)
    target = out.mean()
    target.backward()
    # Memory will not be freed if disable:
    assert h.storage().size() == 4 * 32
@pytest.mark.parametrize('num_groups', [2, 4, 8, 16])
def test_revgnn_with_args(num_groups):
    """Extra positional args (edge_weight) are forwarded to the wrapped conv."""
    x = torch.randn(4, 32)
    edge_index = torch.tensor([[0, 1, 2, 3], [0, 0, 1, 1]])
    edge_weight = torch.rand(4)
    lin = Linear(32, 32)
    conv = GraphConv(32 // num_groups, 32 // num_groups)
    conv = GroupAddRev(conv, num_groups=num_groups)
    h = lin(x)
    out = conv(h, edge_index, edge_weight)
    target = out.mean()
    target.backward()
|
1605850
|
import click
from virl.api import VIRLServer
from subprocess import call
from virl import helpers
from virl.helpers import get_mgmt_lxc_ip, get_node_from_roster, get_cml_client, get_current_lab, safe_join_existing_lab, get_node_mgmt_ip
from virl2_client.exceptions import NodeNotFound
@click.command()
@click.argument("node", nargs=1)
def ssh(node):
    """
    ssh to a node
    """
    server = VIRLServer()
    client = get_cml_client(server)
    # Default ssh username can be overridden via config.
    username = server.config.get("VIRL_SSH_USERNAME", "cisco")
    current_lab = get_current_lab()
    if current_lab:
        lab = safe_join_existing_lab(current_lab, client)
        if lab:
            try:
                node_obj = lab.get_node_by_label(node)
            except NodeNotFound:
                click.secho("Node {} was not found in lab {}".format(node, current_lab), fg="red")
                exit(1)
            if node_obj.is_active():
                mgmtip = get_node_mgmt_ip(node_obj)
                if mgmtip:
                    if "VIRL_SSH_COMMAND" in server.config:
                        # A user-supplied ssh command template wins over plain ssh.
                        cmd = server.config["VIRL_SSH_COMMAND"]
                        cmd = cmd.format(host=mgmtip, username=username)
                        print("Calling user specified command: {}".format(cmd))
                        exit(call(cmd.split()))
                    else:
                        # Typo fix: "Attemping" -> "Attempting".
                        click.secho("Attempting ssh connection to {} at {}".format(node_obj.label, mgmtip))
                        exit(call(["ssh", "{}@{}".format(username, mgmtip)]))
                else:
                    click.secho("Node {} does not have an external management IP".format(node_obj.label))
            else:
                click.secho("Node {} is not active".format(node_obj.label), fg="yellow")
        else:
            click.secho("Unable to find lab {}".format(current_lab), fg="red")
            exit(1)
    else:
        click.secho("No current lab set", fg="red")
        exit(1)
@click.command()
@click.argument("node", nargs=-1)
def ssh1(node):
    """
    ssh to a node
    """
    if len(node) == 2:
        # we received env and node name
        env = node[0]
        running = helpers.check_sim_running(env)
        node = node[1]
    elif len(node) == 1:
        # assume default env
        env = "default"
        running = helpers.check_sim_running(env)
        node = node[0]
    else:
        exit(call(["virl", "ssh", "--help"]))
    if running:
        sim_name = running
        server = VIRLServer()
        details = server.get_sim_roster(sim_name)
        # default ssh username can be overriden
        username = server.config.get("VIRL_SSH_USERNAME", "cisco")
        if node:
            try:
                node_dict = get_node_from_roster(node, details)
                node_name = node_dict.get("NodeName")
                ip = node_dict["managementIP"]
                proxy = node_dict.get("managementProxy")
                if "VIRL_SSH_COMMAND" in server.config:
                    cmd = server.config["VIRL_SSH_COMMAND"]
                    cmd = cmd.format(host=ip, username=username)
                    print("Calling user specified command: {}".format(cmd))
                    exit(call(cmd.split()))
                if proxy == "lxc":
                    lxc = get_mgmt_lxc_ip(details)
                    if lxc:
                        # Fixes: "Attemping" -> "Attempting" and the missing
                        # space where adjacent literals concatenated into
                        # "connectionto".
                        click.secho("Attempting ssh connection to {} at {} via {}".format(node_name, ip, lxc))
                        # Jump through the management LXC via ProxyCommand.
                        cmd = 'ssh -o "ProxyCommand ssh -W %h:%p {}@{}" {}@{}'
                        cmd = cmd.format(server.user, lxc, username, ip)
                        exit(call(cmd, shell=True))
                else:
                    # handle the "flat" networking case
                    click.secho("Attempting ssh connection to {} at {}".format(node_name, ip))
                    exit(call(["ssh", "{}@{}".format(username, ip)]))
            except AttributeError:
                click.secho("Could not find management info" " for {}:{}".format(env, node), fg="red")
            except KeyError:
                click.secho("Unknown node {}:{}".format(env, node), fg="red")
        else:
            # NOTE(review): returning the roster JSON from a click command is
            # only useful to programmatic callers — confirm intent.
            return details.json()
|
1605862
|
from yacs.config import CfgNode as CN
# -----------------------------------------------------------------------------
# Convention about Training / Test specific parameters
# -----------------------------------------------------------------------------
# Whenever an argument can be either used for training or for testing, the
# corresponding name will be post-fixed by a _TRAIN for a training parameter,
# or _TEST for a test-specific parameter.
# For example, the number of images during training will be
# IMAGES_PER_BATCH_TRAIN, while the number of images for testing will be
# IMAGES_PER_BATCH_TEST
_C = CN()
# -----------------------------------------------------------------------------
# MODEL
# -----------------------------------------------------------------------------
_C.MODEL = CN()
# Name of backbone
_C.MODEL.NAME = "resnet50"
# Size of embeddings from backbone
_C.MODEL.BACKBONE_EMB_SIZE = 2048
# Last stride of backbone
_C.MODEL.LAST_STRIDE = 1
# Use ImageNet pretrained model to initialize backbone or use 'self' trained
# model to initialize the whole model
# Options: True | False
_C.MODEL.PRETRAINED = True
# Path to weights to load
_C.MODEL.PRETRAIN_PATH = ""
# Create centroids
_C.MODEL.USE_CENTROIDS = False
# Ensures images to build centroids during retrieval
# do not come from the same camera as the query
_C.MODEL.KEEP_CAMID_CENTROIDS = True
# Set True if Pre-traing path points to previously trained/aborted model
_C.MODEL.RESUME_TRAINING = False
# -----------------------------------------------------------------------------
# INPUT
# -----------------------------------------------------------------------------
_C.INPUT = CN()
# Size of the image during training
_C.INPUT.SIZE_TRAIN = [256, 128]
# Size of the image during test
_C.INPUT.SIZE_TEST = [256, 128]
# Random probability for image horizontal flip
_C.INPUT.PROB = 0.5
# Random probability for random erasing
_C.INPUT.RE_PROB = 0.5
# Values to be used for image normalization
_C.INPUT.PIXEL_MEAN = [0.485, 0.456, 0.406]
# Values to be used for image normalization
_C.INPUT.PIXEL_STD = [0.229, 0.224, 0.225]
# Value of padding size
_C.INPUT.PADDING = 10
# -----------------------------------------------------------------------------
# DATASET
# -----------------------------------------------------------------------------
_C.DATASETS = CN()
# List of the dataset names for training, as present in paths_catalog.py
_C.DATASETS.NAMES = "market1501"
# Root directory where datasets should be used (and downloaded if not found)
_C.DATASETS.ROOT_DIR = "/home/data"
# Path to json train file for datasets that require it
_C.DATASETS.JSON_TRAIN_PATH = ""
# -----------------------------------------------------------------------------
# DATALOADER
# -----------------------------------------------------------------------------
_C.DATALOADER = CN()
# Number of data loading threads
_C.DATALOADER.NUM_WORKERS = 6
# Sampler for data loading
_C.DATALOADER.SAMPLER = "random_identity"
# Number of instance for one batch
_C.DATALOADER.NUM_INSTANCE = 4
# Whether to drop last not full batch
_C.DATALOADER.DROP_LAST = True
# Whether to use resampling in case when number of samples < DATALOADER.NUM_INSTANCE:
# True for Baseline. False for CTLModel
_C.DATALOADER.USE_RESAMPLING = True
# ---------------------------------------------------------------------------- #
# SOLVER
# ---------------------------------------------------------------------------- #
_C.SOLVER = CN()
# Name of optimizer
_C.SOLVER.OPTIMIZER_NAME = "Adam"
# Number of max epoches
_C.SOLVER.MAX_EPOCHS = 120
# Base learning rate
_C.SOLVER.BASE_LR = 1e-4
# Momentum
_C.SOLVER.MOMENTUM = 0.9
# Margin of triplet loss
_C.SOLVER.MARGIN = 0.5
# Function used to compute distance (euclidean or cosine for now)
_C.SOLVER.DISTANCE_FUNC = "euclidean"
# # Margin of cluster
_C.SOLVER.CLUSTER_MARGIN = 0.3
# # Learning rate of SGD to learn the centers of center loss
_C.SOLVER.CENTER_LR = 0.5
# # Balanced weight of center loss
_C.SOLVER.CENTER_LOSS_WEIGHT = 0.0005
# Settings of weight decay
_C.SOLVER.WEIGHT_DECAY = 0.0005
_C.SOLVER.WEIGHT_DECAY_BIAS = 0.0005
# name of LR scheduler
_C.SOLVER.LR_SCHEDULER_NAME = "multistep_lr"
# decay rate of learning rate
_C.SOLVER.GAMMA = 0.1
# decay step of learning rate
_C.SOLVER.LR_STEPS = (40, 70)
# warm up factor
_C.SOLVER.USE_WARMUP_LR = True
# epochs of warm up
_C.SOLVER.WARMUP_EPOCHS = 10
# Metric name for checkpointing best model
_C.SOLVER.MONITOR_METRIC_NAME = "mAP"
# Metric value mode used for checkpointing (max, min, auto)
_C.SOLVER.MONITOR_METRIC_MODE = "max"
# epoch number of saving checkpoints
_C.SOLVER.CHECKPOINT_PERIOD = 50
# epoch number of validation
_C.SOLVER.EVAL_PERIOD = 5
# Number of images per batch PER GPU
_C.SOLVER.IMS_PER_BATCH = 64
# 'dp', 'ddp', 'ddp2', 'ddp_spawn' - see pytorch lighning options
_C.SOLVER.DIST_BACKEND = "ddp"
# Losses weights
# Weight of classification loss on query vectors
_C.SOLVER.QUERY_XENT_WEIGHT = 1.0
# Weight of contrastive loss on query vectors
_C.SOLVER.QUERY_CONTRASTIVE_WEIGHT = 1.0
# Weight of contrastive loss on centroids-query vectors
_C.SOLVER.CENTROID_CONTRASTIVE_WEIGHT = 1.0
# Whether to use automatic Python Lightning optimization
_C.SOLVER.USE_AUTOMATIC_OPTIM = False
# ---------------------------------------------------------------------------- #
# TEST
# ---------------------------------------------------------------------------- #
_C.TEST = CN()
# Number of images per batch during test
_C.TEST.IMS_PER_BATCH = 128
# Path to trained model
_C.TEST.WEIGHT = ""
# # Whether feature is nomalized before test, if yes, it is equivalent to cosine distance
_C.TEST.FEAT_NORM = True
# Only run test
_C.TEST.ONLY_TEST = False
# If to visualize rank results
_C.TEST.VISUALIZE = "no"
# What top-k results to rank
_C.TEST.VISUALIZE_TOPK = 10
# Max number of query images plotted
_C.TEST.VISUALIZE_MAX_NUMBER = 1000000
# ---------------------------------------------------------------------------- #
# MISC
# ---------------------------------------------------------------------------- #
# Ids of GPU devices to use during training, especially when using ddp backend
_C.GPU_IDS = [0]
# Log root directory
_C.LOG_DIR = "logs"
# Whether to use mixed precision
_C.USE_MIXED_PRECISION = True
# If output dir is specified it overrides automatic output path creation
_C.OUTPUT_DIR = ""
# ---------------------------------------------------------------------------- #
# REPRODUCIBLE EXPERIMENTS
# ---------------------------------------------------------------------------- #
# Whether to seed everything
_C.REPRODUCIBLE = False
# Number of runs with seeded generators
_C.REPRODUCIBLE_NUM_RUNS = 3
# Seed to start with
_C.REPRODUCIBLE_SEED = 0
|
1605922
|
import tarfile
from metaflow import S3
# Fetch the Yelp reviews archive from S3 and print the first two lines of the
# training CSV, reading straight from the tarball without a full extraction.
with S3() as s3:
    res = s3.get('s3://fast-ai-nlp/yelp_review_full_csv.tgz')
    with tarfile.open(res.path) as tar:
        datafile = tar.extractfile('yelp_review_full_csv/train.csv')
        # tar members yield bytes; decode each line to text.
        reviews = [line.decode('utf-8') for line in datafile]
        print('\n'.join(reviews[:2]))
|
1605930
|
import argparse
import os
import re
import time
import numpy as np
from time import sleep
from datasets import audio
import tensorflow as tf
from hparams import hparams, hparams_debug_string
from infolog import log
from tacotron.synthesizer import Synthesizer
from tqdm import tqdm
def generate_fast(model, text):
    """Synthesize `text` without persisting any outputs (all sinks are None).

    NOTE(review): the None arguments presumably map to basenames/output dirs
    of Synthesizer.synthesize — confirm against its signature.
    """
    model.synthesize(text, None, None, None, None)
def run_live(args, checkpoint_path, hparams):
    """Interactive loop: read lines from stdin and synthesize until Ctrl-C."""
    #Log to Terminal without keeping any records in files
    log(hparams_debug_string())
    synth = Synthesizer()
    synth.load(checkpoint_path, hparams)
    synth.session_open()
    #Generate fast greeting message
    greetings = 'Hello, Welcome to the Live testing tool. Please type a message and I will try to read it!'
    log(greetings)
    generate_fast(synth, greetings)
    #Interaction loop
    while True:
        try:
            text = input()
            generate_fast(synth, text)
        except KeyboardInterrupt:
            # Say goodbye, give playback time to finish, then exit the loop.
            leave = 'Thank you for testing our features. see you soon.'
            log(leave)
            generate_fast(synth, leave)
            sleep(2)
            break
    synth.session_close()
def run_eval(args, checkpoint_path, output_dir, hparams, sentences):
    """Synthesize mel spectrograms and wavs for `sentences` into <output_dir>/eval."""
    eval_dir = os.path.join(output_dir, 'eval')
    log_dir = os.path.join(output_dir, 'logs-eval')
    if args.model == 'Tacotron-2':
        assert os.path.normpath(eval_dir) == os.path.normpath(args.mels_dir) #mels_dir = wavenet_input_dir
    #Create output path if it doesn't exist
    os.makedirs(eval_dir, exist_ok=True)
    os.makedirs(log_dir, exist_ok=True)
    os.makedirs(os.path.join(log_dir, 'wavs'), exist_ok=True)
    os.makedirs(os.path.join(log_dir, 'plots'), exist_ok=True)
    log(hparams_debug_string())
    synth = Synthesizer()
    synth.load(checkpoint_path, hparams)
    synth.session_open()
    sentences = list(map(lambda s: s.strip(), sentences))
    # NOTE(review): slices are tacotron_synthesis_batch_size wide but advance
    # by delta_size; the two only differ when the batch size exceeds the
    # sentence count (then one full batch results) — confirm this is intended.
    delta_size = hparams.tacotron_synthesis_batch_size if hparams.tacotron_synthesis_batch_size < len(sentences) else len(sentences)
    batch_sentences = [sentences[i: i+hparams.tacotron_synthesis_batch_size] for i in range(0, len(sentences), delta_size)]
    start = time.time()
    for i, batch in enumerate(tqdm(batch_sentences)):
        mel_filename = os.path.join(eval_dir, f'{i:03d}.npy')
        mel = synth.eval(batch, args.speaker_id)
        np.save(mel_filename, mel.T, allow_pickle=False)
        # Reconstruct audio from the mel spectrogram for listening checks.
        wav = audio.inv_mel_spectrogram(mel.T, hparams)
        audio.save_wav(wav, os.path.join(eval_dir, f'{i:03d}.wav'), hparams)
    end = time.time() - start
    log(f'Generated total batch of {delta_size} in {end:.3f} sec')
    synth.session_close()
def run_synthesis(args, checkpoint_path, output_dir, hparams):
    """Batch-synthesize mels for every training utterance (GTA or natural)."""
    GTA = (args.GTA == 'True')
    if GTA:
        synth_dir = os.path.join(output_dir, 'gta')
        #Create output path if it doesn't exist
        os.makedirs(synth_dir, exist_ok=True)
    else:
        synth_dir = os.path.join(output_dir, 'natural')
        #Create output path if it doesn't exist
        os.makedirs(synth_dir, exist_ok=True)
    log(hparams_debug_string())
    synth = Synthesizer()
    synth.load(checkpoint_path, hparams, gta=GTA)
    synth.session_open()
    # Seconds of audio per spectrogram frame.
    frame_shift_ms = hparams.hop_size / hparams.sample_rate
    for speaker_id, anchor_dir in enumerate(hparams.anchor_dirs):
        metadata_filename = os.path.join(args.input_dir, anchor_dir, 'train.txt')
        with open(metadata_filename, encoding='utf-8') as f:
            metadata = [line.strip().split('|') for line in f]
            # metadata columns: presumably [mel file, ..., frame count, text]
            # — confirm against the preprocessing script.
            hours = sum([int(x[2]) for x in metadata]) * frame_shift_ms / 3600
            log(f'Loaded {anchor_dir} for {len(metadata)} examples ({hours:.2f} hours)')
        metadata = [metadata[i: i+hparams.tacotron_synthesis_batch_size] for i in range(0, len(metadata), hparams.tacotron_synthesis_batch_size)]
        mel_dir = os.path.join(args.input_dir, anchor_dir, 'mels')
        for meta in tqdm(metadata):
            texts = [m[3] for m in meta]
            mel_filenames = [os.path.join(mel_dir, m[0]) for m in meta]
            basenames = [os.path.basename(m).replace('.npy', '').replace('mel-', '') for m in mel_filenames]
            synth.synthesize(texts, basenames, synth_dir, None, mel_filenames, speaker_id)
    log(f'synthesized mel spectrograms at {synth_dir}')
    synth.session_close()
def tacotron_synthesize(args, hparams, checkpoint, sentences=None):
    """Dispatch to eval / synthesis / live mode with the given checkpoint.

    Raises:
        RuntimeError: If the checkpoint cannot be resolved.
    """
    output_dir = 'tacotron_' + args.output_dir
    try:
        checkpoint_path = tf.train.get_checkpoint_state(checkpoint).model_checkpoint_path
        log(f'loaded model at {checkpoint_path}')
    except Exception as e:
        # Narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit) and chain the original error.
        raise RuntimeError(f'Failed to load checkpoint at {checkpoint}') from e
    if args.mode == 'eval':
        run_eval(args, checkpoint_path, output_dir, hparams, sentences)
    elif args.mode == 'synthesis':
        run_synthesis(args, checkpoint_path, output_dir, hparams)
    else:
        run_live(args, checkpoint_path, hparams)
|
1605959
|
from distutils.core import setup, Extension
import os
import numpy
# Relative path to the H2Pack source tree (headers + static library).
H2PACK_DIR = ".."
extra_cflags = ["-I"+H2PACK_DIR+"/include"]
extra_cflags += ["-g", "-std=gnu99", "-O3"]
# Intel-compiler specific flags (-qopenmp, -xHost, -mkl).
extra_cflags += ["-DUSE_MKL", "-qopenmp", "-xHost", "-mkl"]
LIB = [H2PACK_DIR+"/lib/libH2Pack.a"]
# NOTE(review): "-mkl_rt" looks like it was meant to be "-lmkl_rt"; also
# "${MKLROOT}" is not expanded by the linker itself — confirm the link line.
extra_lflags = LIB + ["-g", "-O3", "-qopenmp", "-L${MKLROOT}/lib/intel64", "-mkl_rt", "-lpthread"]
def main():
    """Build/install the pyh2pack C extension against H2Pack and NumPy.

    NOTE(review): distutils is deprecated (removed in Python 3.12); consider
    migrating to setuptools.
    """
    setup(name="pyh2pack",
          version="1.0.0",
          description="Python interface for H2Pack",
          author="<NAME>, <NAME>, and <NAME>",
          author_email="<EMAIL>",
          ext_modules=[Extension(
              name = "pyh2pack",
              sources = ["pyh2pack.c"],
              include_dirs=[H2PACK_DIR+"/include", numpy.get_include()],
              extra_compile_args = extra_cflags,
              extra_link_args= extra_lflags,
          )
          ]
          )


if __name__ == "__main__":
    main()
|
1605996
|
from typing import (
Union,
)
from .abc import (
BackendAPI,
PreImageAPI,
)
class Keccak256:
    """Callable wrapper around a keccak-256 backend.

    `hasher` and `preimage` start as first-run methods; after the first call
    each rebinds itself to the backend function so subsequent calls skip the
    validation/lookup overhead.
    """

    def __init__(self, backend: BackendAPI) -> None:
        self._backend = backend
        self.hasher = self._hasher_first_run
        self.preimage = self._preimage_first_run

    def _hasher_first_run(self, in_data: Union[bytearray, bytes]) -> bytes:
        """
        Validate, on first-run, that the hasher backend is valid.
        After first run, replace this with the new hasher method.
        This is a bit of a hacky way to minimize overhead on hash calls after
        this first one.
        """
        new_hasher = self._backend.keccak256
        # Known keccak-256 digest of the empty input.
        assert new_hasher(b'') == b"\xc5\xd2F\x01\x86\xf7#<\x92~}\xb2\xdc\xc7\x03\xc0\xe5\x00\xb6S\xca\x82';\x7b\xfa\xd8\x04]\x85\xa4p" # noqa: E501
        self.hasher = new_hasher
        return new_hasher(in_data)

    def _preimage_first_run(self, in_data: Union[bytearray, bytes]) -> PreImageAPI:
        # Same lazy-rebind trick as _hasher_first_run, without validation.
        new_preimage = self._backend.preimage
        self.preimage = new_preimage
        return new_preimage(in_data)

    def __call__(self, preimage: Union[bytearray, bytes]) -> bytes:
        """Return the keccak-256 digest of `preimage`."""
        if not isinstance(preimage, (bytearray, bytes)):
            raise TypeError(
                "Can only compute the hash of `bytes` or `bytearray` values, not %r" % preimage
            )
        return self.hasher(preimage)

    def new(self, preimage: Union[bytearray, bytes]) -> PreImageAPI:
        """Return a backend pre-image object initialized with `preimage`."""
        if not isinstance(preimage, (bytearray, bytes)):
            raise TypeError(
                "Can only compute the hash of `bytes` or `bytearray` values, not %r" % preimage
            )
        return self.preimage(preimage)
|
1605999
|
import argparse
import logging
from enum import Enum
from stix2patterns_translator.parser import generate_query
from stix2patterns_translator import data_models
from stix2patterns_translator import search_platforms
logger = logging.getLogger(__name__)
class SearchPlatforms(Enum):
    """Supported target search platforms for translated queries."""
    ELASTIC = 'elastic'
    SPLUNK = 'splunk'
class DataModels(Enum):
    """Supported data models used to map STIX object paths to fields."""
    CAR = 'car'
    CIM = 'cim'
class InputToEnumAction(argparse.Action):
    """ This is used to resolve user / string input into one of the types defined by the Enums above."""

    def __init__(self, option_strings, dest, **kwargs):
        # Build a single lookup table from CLI string (enum value) to member,
        # covering both SearchPlatforms and DataModels.
        self.value_to_enum_mapping = {}
        for e in (SearchPlatforms, DataModels):
            self.value_to_enum_mapping.update({c.value: c for c in e})
        super(InputToEnumAction, self).__init__(option_strings, dest, **kwargs)

    def __call__(self, parser, namespace, value, option_string=None):
        # Store the enum member rather than the raw string on the namespace.
        setattr(namespace, self.dest, self.value_to_enum_mapping[value])
STIX2SearchPlatforms = {SearchPlatforms.ELASTIC,SearchPlatforms.SPLUNK}
STIX2DataModels = {DataModels.CAR, DataModels.CIM}
def translate(pattern: str, search_platform: SearchPlatforms = SearchPlatforms.ELASTIC,
              data_model: DataModels = DataModels.CAR) -> str:
    """Translate a STIX2 pattern into a native query for a search platform.

    Annotations fixed: the originals referenced module-level *sets*
    (STIX2SearchPlatforms / STIX2DataModels), which are not types.

    Args:
        pattern: STIX2 pattern string.
        search_platform: Target platform (ELASTIC or SPLUNK).
        data_model: Data model to map object paths through (CAR or CIM).

    Returns:
        The translated query string.

    Raises:
        NotImplementedError: For unsupported platform/data-model values.
    """
    logger.info("Converting STIX2 Pattern to {}-{}".format(data_model, search_platform))
    query_object = generate_query(pattern)
    if data_model == DataModels.CAR:
        data_model_mapper = data_models.CarDataMapper()
    elif data_model == DataModels.CIM:
        data_model_mapper = data_models.CimDataMapper()
    else:
        raise NotImplementedError("{}".format(data_model))
    if search_platform == SearchPlatforms.ELASTIC:
        res = search_platforms.elastic_query_string.translate_pattern(query_object,
                                                                      data_model_mapper)
    elif search_platform == SearchPlatforms.SPLUNK:
        res = search_platforms.splunk_search.translate_pattern(query_object,
                                                               data_model_mapper)
    else:
        raise NotImplementedError
    return res
def main():
    """CLI entry point: parse arguments and print the translated query."""
    parser = argparse.ArgumentParser(description='<placeholder description>')
    # `.value` is the public Enum accessor; the original used private `_value_`.
    parser.add_argument("--output-language", help="language of translated query",
                        choices=[s.value for s in SearchPlatforms], required=True,
                        action=InputToEnumAction)
    parser.add_argument("--output-data-model", help="Translate to this Data Model",
                        choices=[d.value for d in DataModels], required=True,
                        action=InputToEnumAction)
    parser.add_argument("pattern", help="The Query or Pattern to be translated.")
    args = parser.parse_args()
    result = translate(args.pattern, args.output_language, args.output_data_model)
    print(result)
    # Redundant `exit()` removed: falling off the end already exits with 0.
|
1606008
|
import pytest
from kiez.evaluate import hits
@pytest.mark.parametrize(
    "nn_ind, gold, k, expected",
    [
        # List input: gold found at ranks 2/1/1/1 -> increasing hits@k.
        (
            [[1, 2, 3], [2, 3, 4], [3, 4, 5], [4, 5, 6]],
            {0: 2, 1: 4, 2: 3, 3: 4},
            [1, 2, 3],
            {1: 0.5, 2: 0.75, 3: 1.0},
        ),
        # Gold entities never retrieved -> all-zero scores at default k values.
        (
            [[1, 2, 3], [2, 3, 4], [3, 4, 5], [4, 5, 6]],
            {0: 5, 1: 6, 2: 7, 3: 8},
            None,
            {1: 0.0, 5: 0.0, 10: 0.0},
        ),
        # Dict input behaves the same as list input.
        (
            {0: [1, 2, 3], 1: [2, 3, 4], 2: [3, 4, 5], 3: [4, 5, 6]},
            {0: 2, 1: 4, 2: 3, 3: 4},
            [1, 2, 3],
            {1: 0.5, 2: 0.75, 3: 1.0},
        ),
        (
            {0: [1, 2, 3], 1: [2, 3, 4], 2: [3, 4, 5], 3: [4, 5, 6]},
            {0: 5, 1: 6, 2: 7, 3: 8},
            None,
            {1: 0.0, 5: 0.0, 10: 0.0},
        ),
        # String keys/values are supported as well.
        (
            {
                "0": ["1", "2", "3"],
                "1": ["2", "3", "4"],
                "2": ["3", "4", "5"],
                "3": ["4", "5", "6"],
            },
            {"0": "2", "1": "4", "2": "3", "3": "4"},
            [1, 2, 3],
            {1: 0.5, 2: 0.75, 3: 1.0},
        ),
    ],
)
def test_hits(nn_ind, gold, k, expected):
    """hits() returns hits@k ratios keyed by k for every input style."""
    res = hits(nn_ind, gold, k)
    assert res == expected
|
1606033
|
import math
import torch
import torch.nn as nn
from src.model.nets.base_net import BaseNet
class EDSRNet(BaseNet):
    """The implementation of Enhanced Deep Residual Networks (ref: https://arxiv.org/pdf/1707.02921.pdf).

    Args:
        in_channels (int): The input channels.
        out_channels (int): The output channels.
        num_resblocks (int): The number of the resblocks.
        num_features (int): The number of the internel feature maps.
        upscale_factor (int): The upscale factor (2, 3 ,4 or 8).
        res_scale (float): The residual scaling factor of the resblocks. Default: `0.1`.
    """
    def __init__(self, in_channels, out_channels, num_resblocks, num_features, upscale_factor, res_scale=0.1):
        super().__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.num_resblocks = num_resblocks
        self.num_features = num_features
        self.upscale_factor = upscale_factor
        self.res_scale = res_scale
        # Head: single conv lifting input channels to num_features.
        self.head = nn.Sequential(nn.Conv2d(self.in_channels, self.num_features, kernel_size=3, padding=1))
        # Body: num_resblocks residual blocks followed by one conv.
        self.body = nn.Sequential(*[_ResBlock(self.num_features, self.res_scale) for _ in range(self.num_resblocks)])
        self.body.add_module('conv', nn.Conv2d(self.num_features, self.num_features, kernel_size=3, padding=1))
        # Tail: upsampling block, then a conv back down to out_channels.
        self.tail = nn.Sequential(_UpBlock(self.num_features, self.upscale_factor))
        self.tail.add_module('conv', nn.Conv2d(self.num_features, self.out_channels, kernel_size=3, padding=1))

    def forward(self, input):
        head = self.head(input)
        # Global residual connection around the body.
        body = self.body(head) + head
        output = self.tail(body)
        return output
class _ResBlock(nn.Module):
def __init__(self, num_features, res_scale):
super().__init__()
self.body = nn.Sequential()
self.body.add_module('conv1', nn.Conv2d(num_features, num_features, kernel_size=3, padding=1))
self.body.add_module('relu1', nn.ReLU())
self.body.add_module('conv2', nn.Conv2d(num_features, num_features, kernel_size=3, padding=1))
self.res_scale = res_scale
def forward(self, x):
res = self.body(x).mul(self.res_scale)
res += x
return res
class _UpBlock(nn.Sequential):
def __init__(self, num_features, upscale_factor):
super().__init__()
if (math.log(upscale_factor, 2) % 1) == 0:
for i in range(int(math.log(upscale_factor, 2))):
self.add_module(f'conv{i+1}', nn.Conv2d(num_features, 4 * num_features, kernel_size=3, padding=1))
self.add_module(f'deconv{i+1}', nn.PixelShuffle(2))
elif upscale_factor == 3:
self.add_module(f'conv1', nn.Conv2d(num_features, 9 * num_features, kernel_size=3, padding=1))
self.add_module(f'deconv1', nn.PixelShuffle(3))
else:
raise NotImplementedError
|
1606067
|
from django.views.generic import CreateView
from .forms import SampleModelForm
from .models import SampleModel
class SampleModelCreate(CreateView):
    """Generic create view for SampleModel backed by SampleModelForm."""
    model = SampleModel
    form_class = SampleModelForm
|
1606075
|
import argparse
import matplotlib.pyplot as plt
import matplotlib as mpl
import numpy as np
import scipy as sp
import scipy.stats
import pyemma
from pyemma.util.contexts import settings
import MDAnalysis as mda
# My own functions
from pensa import *
def workflow_torsions_jsd(args, feat_a, feat_b, data_a, data_b, tors='bb'):
    """Compare torsion distributions of two ensembles via Jensen-Shannon distance.

    Writes per-feature relative-entropy results to CSV, saves a per-residue
    visualization (PDF plot + JSD-as-B-factor PDB), and prints the strongest
    deviations.

    Args:
        args: Parsed CLI options (output paths, feature-selection flags).
        feat_a, feat_b: Feature-name dicts for ensembles A and B.
        data_a, data_b: Matching feature-value arrays for A and B.
        tors: Torsion type key, 'bb' (backbone) or 'sc' (side chain).

    Returns:
        (names, jsd): feature names and their Jensen-Shannon distances.
    """
    # Use only features that are common to both ensembles
    if args.only_common_sctors and tors=='sc':
        select_a, select_b = select_common_features(feat_a[tors+'-torsions'],
                                                    feat_b[tors+'-torsions'])
    else:
        select_a = np.arange(len(feat_a[tors+'-torsions']))
        select_b = np.arange(len(feat_b[tors+'-torsions']))
    # Relative Entropy analysis with BB torsions
    relen = relative_entropy_analysis(list(np.array(feat_a[tors+'-torsions'])[select_a]),
                                      list(np.array(feat_b[tors+'-torsions'])[select_b]),
                                      data_a[tors+'-torsions'][:,select_a],
                                      data_b[tors+'-torsions'][:,select_b],
                                      bin_width=None, bin_num=10, verbose=False,
                                      override_name_check=args.override_name_check)
    names, jsd, kld_ab, kld_ba = relen
    # Save all results (per feature) in CSV files
    np.savetxt(args.out_results+'_'+tors+'-torsions_relative-entropy.csv', np.array(relen).T,
               fmt='%s', delimiter=',', header='Name, JSD(A,B), KLD(A,B), KLD(B,A)')
    # Save the Jensen-Shannon distance as "B factor" in a PDB file
    vis = residue_visualization(names, jsd, args.ref_file_a,
                                args.out_plots+"_"+tors+"-torsions_jsd.pdf",
                                args.out_vispdb+"_"+tors+"-torsions_jsd.pdb",
                                y_label='max. JS dist. of '+tors.upper()+' torsions')
    # Print the features with the highest values
    print(tors.upper()+" torsions with the strongest deviations (JSD):")
    sf = sort_features(names, jsd)
    for f in sf[:args.print_num]: print(f[0], f[1])
    return names, jsd
def workflow_torsions_kss(args, feat_a, feat_b, data_a, data_b, tors='bb'):
    """Compare torsion distributions of two ensembles via Kolmogorov-Smirnov.

    Writes per-feature KS statistics/p-values to CSV, saves a per-residue
    visualization (PDF plot + KSS-as-B-factor PDB), and prints the strongest
    deviations.

    Args:
        args: Parsed CLI options (output paths, feature-selection flags).
        feat_a, feat_b: Feature-name dicts for ensembles A and B.
        data_a, data_b: Matching feature-value arrays for A and B.
        tors: Torsion type key, 'bb' (backbone) or 'sc' (side chain).

    Returns:
        (names, kss): feature names and their Kolmogorov-Smirnov statistics.
    """
    # Use only features that are common to both ensembles
    if args.only_common_sctors and tors=='sc':
        select_a, select_b = select_common_features(feat_a[tors+'-torsions'],
                                                    feat_b[tors+'-torsions'])
    else:
        select_a = np.arange(len(feat_a[tors+'-torsions']))
        select_b = np.arange(len(feat_b[tors+'-torsions']))
    # Kolmogorov-Smirnov analysis with BB torsions
    ksana = kolmogorov_smirnov_analysis(list(np.array(feat_a[tors+'-torsions'])[select_a]),
                                        list(np.array(feat_b[tors+'-torsions'])[select_b]),
                                        data_a[tors+'-torsions'][:,select_a],
                                        data_b[tors+'-torsions'][:,select_b],
                                        verbose=False,
                                        override_name_check=args.override_name_check)
    names, kss, ksp = ksana
    # Save all results (per feature) in CSV files
    np.savetxt(args.out_results+'_'+tors+'-torsions_kolmogorov-smirnov.csv', np.array(ksana).T,
               fmt='%s', delimiter=',', header='Name, KSS(A,B), p-value')
    # Save the Kolmogorov-Smirnov statistic as "B factor" in a PDB file
    vis = residue_visualization(names, kss, args.ref_file_a,
                                args.out_plots+"_"+tors+"-torsions_kss.pdf",
                                args.out_vispdb+"_"+tors+"-torsions_kss.pdb",
                                y_label='max. KS stat. of '+tors.upper()+' torsions')
    # Print the features with the highest values
    print(tors.upper()+" torsions with the strongest deviations (KSS):")
    sf = sort_features(names, kss)
    for f in sf[:args.print_num]: print(f[0], f[1])
    return names, kss
def workflow_torsions_ssi(args, feat_a, feat_b, data_a, data_b, tors='bb'):
    """State-specific information (SSI) analysis of torsions between ensembles
    A and B; saves CSV/PDF/PDB outputs and prints top deviations.

    Returns: (residue names, SSI values).
    """
    # Use only features that are common to both ensembles
    if args.only_common_sctors and tors=='sc':
        select_a, select_b = select_common_features(feat_a[tors+'-torsions'],
                                                    feat_b[tors+'-torsions'])
    else:
        select_a = np.arange(len(feat_a[tors+'-torsions']))
        select_b = np.arange(len(feat_b[tors+'-torsions']))
    # NOTE(review): select_a/select_b are computed but never passed to the
    # analysis below — unlike the JSD/KSS workflows, the common-feature
    # selection has no effect here. Confirm whether ssi_ensemble_analysis
    # should receive the selection.
    # SSI analysis with BB torsions
    ana = ssi_ensemble_analysis(feat_a, feat_b, data_a, data_b, torsions = tors,
                                verbose=False, override_name_check=args.override_name_check)
    resnames, ssi = ana
    # Save all results (per feature) in CSV files
    np.savetxt(args.out_results+'_'+tors+'-torsions_state-specific-information.csv', np.array(ana).T,
               fmt='%s', delimiter=',', header='Name, SSI(A,B)')
    # Save the state-specific information as "B factor" in a PDB file
    vis = residue_visualization(resnames, ssi, args.ref_file_a,
                                args.out_plots+"_"+tors+"-torsions_ssi.pdf",
                                args.out_vispdb+"_"+tors+"-torsions_ssi.pdb",
                                y_label='SSI of '+tors.upper()+' torsions')
    # Print the features with the highest values
    print(tors.upper()+" torsions with the strongest deviations (SSI):")
    sf = sort_features(resnames, ssi)
    for f in sf[:args.print_num]: print(f[0], f[1])
    return resnames, ssi
# -------------#
# --- MAIN --- #
# -------------#
# Entry point: compare two receptor ensembles (A: arrestin-bound, B: Gi-bound
# rhodopsin by default) across torsions and C-alpha distances.
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # Reference structures and trajectories for both ensembles
    parser.add_argument( "--ref_file_a", type=str, default='traj/rhodopsin_arrbound_receptor.gro')
    parser.add_argument( "--trj_file_a", type=str, default='traj/rhodopsin_arrbound_receptor.xtc')
    parser.add_argument( "--ref_file_b", type=str, default='traj/rhodopsin_gibound_receptor.gro')
    parser.add_argument( "--trj_file_b", type=str, default='traj/rhodopsin_gibound_receptor.xtc')
    # Output path prefixes (plots, PDB visualizations, CSV results)
    parser.add_argument( "--out_plots", type=str, default='plots/rhodopsin_receptor' )
    parser.add_argument( "--out_vispdb", type=str, default='vispdb/rhodopsin_receptor' )
    parser.add_argument( "--out_results", type=str, default='results/rhodopsin_receptor' )
    parser.add_argument( "--start_frame", type=int, default=0 )
    parser.add_argument( "--print_num", type=int, default=12 )
    parser.add_argument( "--override_name_check", dest="override_name_check", default=False, action="store_true")
    parser.add_argument( "--only_common_sctors", dest="only_common_sctors", default=False, action="store_true")
    args = parser.parse_args()
    # -- FEATURES --
    # Load Features
    feat_a, data_a = get_structure_features(args.ref_file_a, args.trj_file_a, args.start_frame)
    feat_b, data_b = get_structure_features(args.ref_file_b, args.trj_file_b, args.start_frame)
    # Report dimensions
    print('Feature dimensions from', args.trj_file_a)
    for k in data_a.keys():
        print(k, data_a[k].shape)
    print('Feature dimensions from', args.trj_file_b)
    for k in data_b.keys():
        print(k, data_b[k].shape)
    # -- TORSIONS -- #
    # Run all three torsion analyses (JSD, KSS, SSI) for backbone and side chains
    print('BACKBONE TORSIONS')
    names, jsd = workflow_torsions_jsd(args, feat_a, feat_b, data_a, data_b, tors='bb')
    names, kss = workflow_torsions_kss(args, feat_a, feat_b, data_a, data_b, tors='bb')
    names, ssi = workflow_torsions_ssi(args, feat_a, feat_b, data_a, data_b, tors='bb')
    print('SIDECHAIN TORSIONS')
    names, jsd = workflow_torsions_jsd(args, feat_a, feat_b, data_a, data_b, tors='sc')
    names, kss = workflow_torsions_kss(args, feat_a, feat_b, data_a, data_b, tors='sc')
    names, ssi = workflow_torsions_ssi(args, feat_a, feat_b, data_a, data_b, tors='sc')
    # -- BACKBONE C-ALPHA DISTANCES --
    print('BACKBONE C-ALPHA DISTANCES')
    # Relative entropy analysis for C-alpha distances
    relen = relative_entropy_analysis(feat_a['bb-distances'], feat_b['bb-distances'],
                                      data_a['bb-distances'], data_b['bb-distances'],
                                      bin_width=0.01, verbose=False)
    names, jsd, kld_ab, kld_ba = relen
    # Save all results (per feature) in a CSV file
    np.savetxt(args.out_results+'_bb-distances_relative-entropy.csv', np.array(relen).T,
               fmt='%s', delimiter=',', header='Name, JSD(A,B), KLD(A,B), KLD(B,A)')
    # Print the features with the highest values
    print("Backbone C-alpha distances with the strongest deviations:")
    sf = sort_features(names, jsd)
    for f in sf[:args.print_num]: print(f[0], f[1])
    # Visualize the deviations in a matrix plot
    matrix = distances_visualization(names, jsd,
                                     args.out_plots+"_bb-distances-distributions_jsd.pdf",
                                     vmin = 0.0, vmax = 1.0)
    # Difference-of-the-mean analysis for C-alpha distances
    meanda = mean_difference_analysis(feat_a['bb-distances'], feat_b['bb-distances'],
                                      data_a['bb-distances'], data_b['bb-distances'],
                                      verbose=False)
    names, avg, diff = meanda
    # Save all results (per feature) in a CSV file
    np.savetxt(args.out_results+'_bb-distances_difference-of-mean.csv', np.array(meanda).T,
               fmt='%s', delimiter=',', header='Name, average, difference')
    # Sort the distances by their differences
    print("Backbone C-alpha distances with the strongest differences of their mean value:")
    sf = sort_features(names, diff)
    for f in sf[:args.print_num]: print(f[0], f[1])
    # Visualize the deviations in a matrix plot
    # NOTE(review): "bb-diststances" below looks like a typo for "bb-distances" —
    # confirm no downstream consumer depends on the misspelled filename before fixing.
    matrix = distances_visualization(names, diff, args.out_plots+"_bb-diststances_difference-of-mean.pdf")
|
1606076
|
class Error(Exception):
    """Base class for exceptions in this module"""
    pass
class InvalidTransactionError(Error):
    """Error subtype raised for invalid transactions."""
    pass
class InvalidConnectorError(Error):
    """Error subtype raised for invalid connectors."""
    pass
class InvalidCertificateError(Error):
    """Error subtype raised for invalid certificates."""
    pass
|
1606146
|
import math
from pyblox.math.facing import Facing
from pyblox.math.vector2 import Vector2
class Vector3:
    """A 3D vector with immutable-style arithmetic (operations return new
    Vector3 instances) plus Minecraft-style block/chunk helpers.

    Coordinate conventions (as used by the accessors below):
    x = right/south, y = up, z = forward/west.
    """

    def __init__(self, x=0, y=0, z=0):
        self.x = x
        self.y = y
        self.z = z

    def get_x(self):
        return self.x

    def get_y(self):
        return self.y

    def get_z(self):
        return self.z

    def get_floor_x(self) -> int:
        return math.floor(self.x)

    def get_floor_y(self) -> int:
        return math.floor(self.y)

    def get_floor_z(self) -> int:
        return math.floor(self.z)

    def get_chunk_x(self):
        # Chunks are 16 blocks wide, hence the shift by 4 bits.
        return self.get_floor_x() >> 4

    def get_chunk_z(self):
        return self.get_floor_z() >> 4

    def get_right(self):
        return self.x

    def get_up(self):
        return self.y

    def get_forward(self):
        return self.z

    def get_south(self):
        return self.x

    def get_west(self):
        return self.z

    def add(self, x, y=None, z=None):
        """Return self + other. Accepts either a Vector3 or up to three
        scalar components (missing components default to 0)."""
        if isinstance(x, Vector3):
            return Vector3(self.x + x.get_x(), self.y + x.get_y(), self.z + x.get_z())
        elif x is not None and y is None and z is None:
            return self.add(x, 0, 0)
        elif x is not None and y is not None and z is None:
            return self.add(x, y, 0)
        else:
            return Vector3(self.x + x, self.y + y, self.z + z)

    def substract(self, x=0, y=0, z=0):
        """Return self - other (Vector3 or scalar components).

        NOTE: method name is a historical misspelling of "subtract";
        kept for backward compatibility.
        """
        if isinstance(x, Vector3):
            return self.add(-x.x, -x.y, -x.z)
        else:
            return self.add(-x, -y, -z)

    def multiply(self, number: float):
        return Vector3(self.x * number, self.y * number, self.z * number)

    def divide(self, number: float):
        return Vector3(self.x / number, self.y / number, self.z / number)

    def ceil(self):
        return Vector3(math.ceil(self.x), math.ceil(self.y), math.ceil(self.z))

    def floor(self):
        return Vector3(math.floor(self.x), math.floor(self.y), math.floor(self.z))

    def round(self):
        return Vector3(round(self.x), round(self.y), round(self.z))

    def abs(self):
        return Vector3(abs(self.x), abs(self.y), abs(self.z))

    def get_side(self, side: int, step: int):
        """Return the vector shifted by `step` in the direction `side`
        (a Facing constant); returns self unchanged for unknown sides."""
        # Fixed: compare by value (==) instead of identity (is) — identity
        # comparison on constants is fragile for non-interned values.
        if side == Facing.DOWN:
            return Vector3(self.x, self.y - step, self.z)
        elif side == Facing.UP:
            return Vector3(self.x, self.y + step, self.z)
        elif side == Facing.NORTH:
            return Vector3(self.x, self.y, self.z - step)
        elif side == Facing.SOUTH:
            return Vector3(self.x, self.y, self.z + step)
        elif side == Facing.WEST:
            return Vector3(self.x - step, self.y, self.z)
        elif side == Facing.EAST:
            return Vector3(self.x + step, self.y, self.z)
        else:
            return self

    def up(self, step: int):
        return self.get_side(Facing.UP, step)

    def down(self, step: int):
        return self.get_side(Facing.DOWN, step)

    def north(self, step: int):
        return self.get_side(Facing.NORTH, step)

    def south(self, step: int):
        return self.get_side(Facing.SOUTH, step)

    def east(self, step: int):
        return self.get_side(Facing.EAST, step)

    def west(self, step: int):
        return self.get_side(Facing.WEST, step)

    def distance(self, pos):
        """Euclidean distance to another vector."""
        return math.sqrt(self.distance_squared(pos))

    def distance_squared(self, pos):
        return math.pow(self.x - pos.x, 2) + math.pow(self.y - pos.y, 2) + math.pow(self.z - pos.z, 2)

    def max_plain_distance(self, x=None, z=None):
        """Chebyshev distance in the x/z plane to a Vector2, Vector3, or
        scalar coordinates (missing coordinates default to 0)."""
        if isinstance(x, Vector2):
            return self.max_plain_distance(x.x, x.y)
        elif isinstance(x, Vector3):
            return self.max_plain_distance(x.x, x.z)
        elif x is None and z is None:
            return self.max_plain_distance(0, 0)
        elif x is not None and z is None:
            # Fixed: original condition was `x is not None and x is None`
            # (always False), so a lone x argument crashed on `self.z - None`.
            return self.max_plain_distance(x, 0)
        else:
            return max(abs(self.x - x), abs(self.z - z))

    def length(self):
        return math.sqrt(self.length_squared())

    def length_squared(self):
        return self.x * self.x + self.y * self.y + self.z * self.z

    def normalize(self):
        """Return a unit-length copy, or the zero vector if length is 0."""
        _len = self.length_squared()
        if _len > 0:
            return self.divide(math.sqrt(_len))
        return Vector3(0, 0, 0)

    def dot(self, v):
        return self.x * v.x + self.y * v.y + self.z * v.z

    def cross(self, v):
        return Vector3(
            self.y * v.z - self.z * v.y,
            self.z * v.x - self.x * v.z,
            self.x * v.y - self.y * v.x
        )

    def equals(self, v) -> bool:
        # Fixed: use value equality (==) instead of identity (is) — `is` on
        # floats/large ints compares object identity and gave wrong results.
        return self.x == v.x and self.y == v.y and self.z == v.z

    def get_intermediate_with_x_value(self, v, x: int):
        """Point on segment self->v with the given x, or None if the segment
        is (nearly) parallel to x or the point lies outside the segment."""
        xdiff = v.x - self.x
        ydiff = v.y - self.y
        zdiff = v.z - self.z
        if xdiff * xdiff < 0.0000001:
            return None
        f = (x - self.x) / xdiff
        if f < 0 or f > 1:
            return None
        else:
            return Vector3(self.x + xdiff * f, self.y + ydiff * f, self.z + zdiff * f)

    def get_intermediate_with_y_value(self, v, y: int):
        """Point on segment self->v with the given y (see x variant)."""
        xdiff = v.x - self.x
        ydiff = v.y - self.y
        zdiff = v.z - self.z
        if ydiff * ydiff < 0.0000001:
            return None
        f = (y - self.y) / ydiff
        if f < 0 or f > 1:
            return None
        else:
            return Vector3(self.x + xdiff * f, self.y + ydiff * f, self.z + zdiff * f)

    def get_intermediate_with_z_value(self, v, z: int):
        """Point on segment self->v with the given z (see x variant)."""
        xdiff = v.x - self.x
        ydiff = v.y - self.y
        zdiff = v.z - self.z
        if zdiff * zdiff < 0.0000001:
            return None
        f = (z - self.z) / zdiff
        if f < 0 or f > 1:
            return None
        else:
            return Vector3(self.x + xdiff * f, self.y + ydiff * f, self.z + zdiff * f)

    def set_components(self, x, y, z):
        """Mutate this vector in place; returns self for chaining."""
        self.x = x
        self.y = y
        self.z = z
        return self
|
1606154
|
from scene import *
import math
class MyScene (Scene):
    """Pinch-zoom demo scene (presumably for Pythonista's `scene` module —
    confirm): tracks the distance between two touches and derives a zoom
    delta each frame."""
    def __init__(self):
        Scene.__init__(self)
        # Distance between the two touch points when the pinch started / now
        self.distance_old = 0.0
        self.distance_new = 0.0
        # True until the first two-finger frame of a new pinch gesture
        self.reset = True
        # Positive when fingers move together, negative when they spread
        self.zoom = 0
    def draw (self):
        background(0, 0, 0)
        locations = [[0,0],[0,0]]
        i = 0
        # Only react while exactly two fingers are down
        if len(self.touches) == 2:
            for touch in self.touches.values():
                locations[i] = touch.location
                i += 1
            if self.reset:
                # First frame of the pinch: record the baseline distance
                self.reset = False
                self.distance_old = math.sqrt(math.pow((locations[1][0] - locations[0][0]),2) + pow((locations[1][1] - locations[0][1]),2))
            else:
                # Subsequent frames: zoom = baseline minus current distance
                self.distance_new = math.sqrt(math.pow((locations[1][0] - locations[0][0]),2) + pow((locations[1][1] - locations[0][1]),2))
                self.zoom = self.distance_old - self.distance_new
        #print self.zoom
    def touch_ended(self,touch):
        # Any finger lift re-arms the baseline for the next pinch
        self.reset = True
# Start the scene's run loop (blocks; provided by the `scene` module)
run(MyScene())
|
1606222
|
from unittest import TestCase, skip
from pykeg.core import testutils
from . import client
import requests
import requests_mock
# VCR cassette store for replaying recorded Twitter OAuth HTTP exchanges
vcr = testutils.get_vcr("contrib/twitter")
# Redacted OAuth 1.0a fixture values matching the recorded cassettes
FAKE_API_KEY = "<KEY>"
FAKE_API_SECRET = "<KEY>"
FAKE_REQUEST_TOKEN = "<KEY>"
FAKE_REQUEST_TOKEN_SECRET = "<KEY>"
FAKE_AUTH_URL = "https://api.twitter.com/oauth/authorize?oauth_token=<KEY>"
FAKE_CALLBACK_URL = "http://example.com/redirect?oauth_token=Y5DvLwAAAAAA1RVNAAABXOA_xQw&oauth_verifier=<KEY>"
FAKE_OAUTH_TOKEN = "<KEY>"
FAKE_OAUTH_TOKEN_SECRET = "<KEY>"
class TwitterClientTest(TestCase):
    """Tests for the Twitter OAuth client.

    Network traffic is replayed from VCR cassettes; connection failures are
    simulated with requests_mock. Several tests are skipped because their
    recorded fixtures are broken.
    """
    @vcr.use_cassette()
    @skip("fixtures broken")
    def test_fetch_request_token_with_invalid_keys(self):
        # Bad credentials should surface as AuthError
        c = client.TwitterClient("test", "test_secret")
        with self.assertRaises(client.AuthError):
            c.fetch_request_token("http://example.com")
    @vcr.use_cassette()
    def test_fetch_request_token_with_no_connection(self):
        # A transport-level timeout should be wrapped in RequestError
        c = client.TwitterClient("test", "test_secret")
        with self.assertRaises(client.RequestError):
            with requests_mock.Mocker() as m:
                m.post(
                    client.TwitterClient.REQUEST_TOKEN_URL, exc=requests.exceptions.ConnectTimeout
                )
                c.fetch_request_token("http://example.com")
    @vcr.use_cassette()
    @skip("fixtures broken")
    def test_fetch_request_token_with_valid_keys(self):
        c = client.TwitterClient(FAKE_API_KEY, FAKE_API_SECRET)
        result = c.fetch_request_token("http://example.com/redirect")
        request_token, request_token_secret = result
        self.assertEqual(FAKE_REQUEST_TOKEN, request_token)
        self.assertEqual(FAKE_REQUEST_TOKEN_SECRET, request_token_secret)
    @vcr.use_cassette()
    def test_get_authorization_url_with_valid_keys(self):
        c = client.TwitterClient(FAKE_API_KEY, FAKE_API_SECRET)
        result = c.get_authorization_url(FAKE_REQUEST_TOKEN, FAKE_REQUEST_TOKEN_SECRET)
        self.assertEqual(FAKE_AUTH_URL, result)
    @vcr.use_cassette()
    @skip("fixtures broken")
    def test_handle_authorization_callback(self):
        # Exchanging the request token via the callback URL yields access tokens
        c = client.TwitterClient(FAKE_API_KEY, FAKE_API_SECRET)
        token, token_secret = c.handle_authorization_callback(
            FAKE_REQUEST_TOKEN, FAKE_REQUEST_TOKEN_SECRET, uri=FAKE_CALLBACK_URL
        )
        self.assertEqual(FAKE_OAUTH_TOKEN, token)
        self.assertEqual(FAKE_OAUTH_TOKEN_SECRET, token_secret)
    @vcr.use_cassette()
    def test_handle_authorization_callback_with_no_connection(self):
        c = client.TwitterClient(FAKE_API_KEY, FAKE_API_SECRET)
        with self.assertRaises(client.RequestError):
            with requests_mock.Mocker() as m:
                m.post(
                    client.TwitterClient.ACCESS_TOKEN_URL, exc=requests.exceptions.ConnectTimeout
                )
                c.handle_authorization_callback(
                    FAKE_REQUEST_TOKEN, FAKE_REQUEST_TOKEN_SECRET, uri=FAKE_CALLBACK_URL
                )
|
1606229
|
import numpy as np
from scipy.linalg import solveh_banded
def als_baseline(intensities, asymmetry_param=0.05, smoothness_param=1e6,
                 max_iters=10, conv_thresh=1e-5, verbose=False):
    '''Computes the asymmetric least squares baseline.
    * http://www.science.uva.nl/~hboelens/publications/draftpub/Eilers_2005.pdf

    intensities: 1-D array of signal values.
    smoothness_param: Relative importance of smoothness of the predicted response.
    asymmetry_param (p): if y > z, w = p, otherwise w = 1-p.
                         Setting p=1 is effectively a hinge loss.
    max_iters: maximum number of reweighting iterations (must be >= 1).
    conv_thresh: stop when the weight update's L2 norm falls below this.
    verbose: print per-iteration convergence values.

    Returns the estimated baseline z (same shape as intensities).
    '''
    # Fixed: Python 2 `xrange`/print statements converted to Python 3;
    # guard max_iters so `z` is always defined before being returned.
    if max_iters < 1:
        raise ValueError('max_iters must be at least 1')
    smoother = WhittakerSmoother(intensities, smoothness_param, deriv_order=2)
    # Rename p for concision.
    p = asymmetry_param
    # Initialize weights.
    w = np.ones(intensities.shape[0])
    for i in range(max_iters):
        z = smoother.smooth(w)
        mask = intensities > z
        # Points above the baseline get weight p, points below get 1-p.
        new_w = p*mask + (1-p)*(~mask)
        conv = np.linalg.norm(new_w - w)
        if verbose:
            print(i + 1, conv)
        if conv < conv_thresh:
            break
        w = new_w
    else:
        # Loop exhausted without hitting the convergence threshold.
        print('ALS did not converge in %d iterations' % max_iters)
    return z
class WhittakerSmoother(object):
    """Whittaker smoother: solves (diag(w) + s * D'D) z = w*y for z, where D
    is the `deriv_order`-th finite-difference operator and s the smoothness
    penalty. The banded normal matrix is precomputed once in __init__ so that
    repeated calls to smooth() with different weights are cheap.
    """
    def __init__(self, signal, smoothness_param, deriv_order=1):
        """signal: 1-D array y to smooth.
        smoothness_param: penalty weight s (>= 0).
        deriv_order: order of the difference penalty (int > 0).
        """
        self.y = signal
        assert deriv_order > 0, 'deriv_order must be an int > 0'
        # Compute the fixed derivative of identity (D).
        d = np.zeros(deriv_order*2 + 1, dtype=int)
        d[deriv_order] = 1
        d = np.diff(d, n=deriv_order)
        n = self.y.shape[0]
        k = len(d)
        s = float(smoothness_param)
        # Here be dragons: essentially we're faking a big banded matrix D,
        # doing s * D.T.dot(D) with it, then taking the upper triangular bands.
        # Fixed: Python 2 `xrange` -> `range` (NameError under Python 3).
        diag_sums = np.vstack([
            np.pad(s*np.cumsum(d[-i:]*d[:i]), ((k-i, 0),), 'constant')
            for i in range(1, k+1)])
        upper_bands = np.tile(diag_sums[:, -1:], n)
        upper_bands[:, :k] = diag_sums
        # Mirror the band pattern at the right edge of the matrix.
        for i, ds in enumerate(diag_sums):
            upper_bands[i, -i-1:] = ds[::-1][:i+1]
        self.upper_bands = upper_bands
    def smooth(self, w):
        """Solve for the smoothed signal given per-point weights w (1-D,
        same length as the signal); returns the smoothed array z."""
        foo = self.upper_bands.copy()
        foo[-1] += w  # last row is the diagonal
        return solveh_banded(foo, w * self.y, overwrite_ab=True, overwrite_b=True)
|
1606240
|
import unittest
from src import inetaccess
class ScanDataTest(unittest.TestCase):
    """Tests for inetaccess.scan_site using a local JSON fixture."""
    def test_no_response(self):
        # A host that answers neither HTTP nor HTTPS should yield a zero
        # result score and "No Response" markers for both protocols.
        expected_results = {
            "result_score": "0.0",
            "scan_data": [
                {
                    "fqdn": "testing.notworking.com.co",
                    "domain": "notworking.com.co",
                    "ip": "0.0.0.0",
                    "cidr": "0.0.0.0/24",
                    "loc": "TESTING",
                    "http": "No Response",
                    "https": "No Response"
                },
            ]
        }
        scan_results = inetaccess.scan_site('./tests/test_resources/scan_test.json')
        self.assertEqual(scan_results, expected_results)
|
1606248
|
import hashlib
from django.conf import settings
from blocktools import *
from gcoin.transaction import serialize
# Network-specific magic number that prefixes every block on disk
MAGIC = MAGIC_NUMBER[settings.NET]
# Max bytes of zero padding tolerated between blocks before giving up
SKIP_LIMIT = 100
class BlockHeader:
    # Parses an 80-byte block header from a binary stream using the uint4/
    # hash32 readers from blocktools.
    # NOTE(review): this module is Python 2 only (`print` statements and the
    # 'hex' codec via str.decode/encode); it will not run under Python 3.
    def __init__(self, blockchain):
        self.version = uint4(blockchain)
        self.previousHash = hash32(blockchain)
        self.merkleHash = hash32(blockchain)
        self.time = uint4(blockchain)
        self.bits = uint4(blockchain)
        self.nonce = uint4(blockchain)
    @property
    def difficulty(self):
        # Expand the compact "bits" representation: exponent in the top byte,
        # mantissa in the low three bytes; difficulty is relative to the
        # maximum target 0x0000FFFF at exponent 29.
        nShift = int((self.bits >> 24) & 0xFF)
        dDiff = float(0x0000FFFF) / (self.bits & 0x00FFFFFF)
        while nShift < 29:
            dDiff *= 256
            nShift += 1
        while nShift > 29:
            dDiff /= 256
            nShift -= 1
        return dDiff
    @property
    def blockHash(self):
        # Double SHA-256 of the little-endian serialized header, displayed
        # byte-reversed as is conventional for block hashes.
        headerHex = (intLE(self.version) + hashStrLE(self.previousHash) + hashStrLE(self.merkleHash)
                     + uintLE(self.time) + uintLE(self.bits) + uintLE(self.nonce))
        headerBin = headerHex.decode('hex')
        hash_ = hashlib.sha256(hashlib.sha256(headerBin).digest()).digest()
        return hash_[::-1].encode('hex_codec')
    @property
    def blockWork(self):
        # Work = 2^256 / (target + 1), with target expanded from compact bits.
        lastSixBits = self.bits & 0x00FFFFFF
        firstTwoBits = (self.bits >> 24) & 0xFF
        target = lastSixBits * 2**(8 * (firstTwoBits - 3))
        return 2**256 / (target + 1)
    def toString(self):
        # Dump all header fields to stdout (debugging aid).
        print "Version:\t %d" % self.version
        print "Previous Hash\t %s" % hashStr(self.previousHash)
        print "Merkle Root\t %s" % hashStr(self.merkleHash)
        print "Time\t\t %d" % self.time
        print "Bits\t\t %8x" % self.bits
        print "Difficulty\t %.8f" % self.difficulty
        print "Nonce\t\t %s" % self.nonce
        print "Hash\t\t %s" % self.blockHash
        print "Work\t\t %x" % self.blockWork
class Block:
    # Parses one block (magic, size, header, transactions) from a blk*.dat
    # stream, tolerating zero padding between blocks.
    # NOTE(review): the instance attribute `continueParsing` set in __init__
    # shadows the method of the same name below — calling b.continueParsing()
    # would raise TypeError ('bool' object is not callable). Callers appear
    # expected to read the attribute directly; consider renaming one of them.
    def __init__(self, blockchain):
        self.continueParsing = True
        self.magicNum = 0
        self.blocksize = 0
        self.blockHeader = ''
        self.txCount = 0
        self.Txs = []
        self.scriptSig = ''
        # Skip bytes with all 0 between blocks
        # Note: I assume bytes with all 0 between blocks will no more than SKIP_LIMIT
        skip_bytes = 0
        while self.hasLength(blockchain, 8) and skip_bytes < SKIP_LIMIT:
            self.magicNum = uint4(blockchain)
            if self.magicNum == MAGIC:
                # this is normal situation
                self.blocksize = uint4(blockchain)
                break
            elif self.magicNum == 0:
                # skip 4 bytes
                skip_bytes += 4
            else:
                # assume blk file is broken when magic number is not GC30 and 0
                self.continueParsing = False
                return
        if skip_bytes >= SKIP_LIMIT:
            self.continueParsing = False
            return
        if self.blocksize > 0 and self.hasLength(blockchain, self.blocksize):
            self.setHeader(blockchain)
            self.txCount = varint(blockchain)
            self.Txs = []
            for i in range(0, self.txCount):
                tx = Tx(blockchain)
                self.Txs.append(tx)
        else:
            self.continueParsing = False
    def continueParsing(self):
        # Unreachable in practice: shadowed by the __init__ attribute (see
        # class note above).
        return self.continueParsing
    def getBlocksize(self):
        return self.blocksize
    def hasLength(self, blockchain, size):
        # Check whether `size` bytes remain in the file without consuming
        # them (seeks to EOF and restores the position).
        curPos = blockchain.tell()
        blockchain.seek(0, 2)
        fileSize = blockchain.tell()
        blockchain.seek(curPos)
        tempBlockSize = fileSize - curPos
        if tempBlockSize < size:
            return False
        return True
    def setHeader(self, blockchain):
        self.blockHeader = BlockHeader(blockchain)
    def toString(self):
        # Dump block metadata, header, and all transactions to stdout.
        print ""
        print "Magic No: \t%8x" % self.magicNum
        print "Blocksize: \t", self.blocksize
        print ""
        print "#" * 10 + " Block Header " + "#" * 10
        self.blockHeader.toString()
        print
        print "Script Sig: \t %s" % hashStr(self.scriptSig)
        print "##### Tx Count: %d" % self.txCount
        for t in self.Txs:
            t.toString()
class Tx:
    # Parses one transaction, including the optional segwit marker/flag
    # encoding (inCount == 0 signals the witness flag byte follows).
    def __init__(self, blockchain):
        txStart = blockchain.tell()
        self.version = uint4(blockchain)
        self.inCount = varint(blockchain)
        self.inputs = []
        self.witness_flag = 0
        if self.inCount == 0:
            # Segwit serialization: zero marker, then flag, then real inCount.
            self.witness_flag = varint(blockchain)
            self.inCount = varint(blockchain)
        for i in range(0, self.inCount):
            input = txInput(blockchain)
            self.inputs.append(input)
        self.outCount = varint(blockchain)
        self.outputs = []
        if self.outCount > 0:
            for i in range(0, self.outCount):
                output = txOutput(blockchain)
                self.outputs.append(output)
        if not self.witness_flag == 0:
            # Witness stacks follow the outputs, one per input.
            for i in range(0, self.inCount):
                self.inputs[i].parse_witness(blockchain)
        self.lockTime = uint4(blockchain)
        self.size = blockchain.tell() - txStart
    @property
    def txHex(self):
        # Re-serialize via gcoin's serializer (includes witness data).
        return serialize(self.toDict())
    @property
    def txHash(self):
        # Double SHA-256 of the full serialization, byte-reversed for display.
        headerBin = self.txHex.decode('hex')
        hash_ = hashlib.sha256(hashlib.sha256(headerBin).digest()).digest()
        return hash_[::-1].encode('hex_codec')
    @property
    def txID(self):
        # Like txHash, but serialized with flag forced to 0 so the ID is
        # independent of witness data.
        headerBin = serialize(self.toDict(True)).decode('hex')
        hash_ = hashlib.sha256(hashlib.sha256(headerBin).digest()).digest()
        return hash_[::-1].encode('hex_codec')
    def toString(self):
        print ""
        print "=" * 10 + " New Transaction " + "=" * 10
        print "TX Hex:\t %s" % self.txHex
        print "TX Hash:\t %s" % self.txHash
        print "TX ID:\t %s" % self.txID
        print "Tx Version:\t %d" % self.version
        print "Inputs:\t\t %d" % self.inCount
        for i in self.inputs:
            i.toString()
        print "Outputs:\t %d" % self.outCount
        for o in self.outputs:
            o.toString()
        print "Lock Time:\t %d" % self.lockTime
        print "TX Size:\t %d" % self.size
    def toDict(self, flag_txid=False):
        # Build the dict shape expected by gcoin.transaction.serialize;
        # flag_txid=True zeroes the witness flag for txID computation.
        txDict = {
            'locktime': self.lockTime, 'version': self.version, 'flag': 0 if flag_txid else self.witness_flag ,
            'ins': [], 'outs': []
        }
        for txin in self.inputs:
            txDict['ins'].append(txin.toDict())
        for txout in self.outputs:
            txDict['outs'].append(txout.toDict())
        return txDict
class txInput:
    # Parses one transaction input (outpoint, scriptSig, sequence); witness
    # data is attached later via parse_witness() when the tx is segwit.
    def __init__(self, blockchain):
        self.prevhash = hash32(blockchain)
        self.txOutId = uint4(blockchain)
        self.scriptLen = varint(blockchain)
        self.scriptSig = blockchain.read(self.scriptLen)
        self.seqNo = uint4(blockchain)
        self.witnessCount = 0
        self.witnesses = []
    def toString(self):
        print "--------------TX IN------------------------"
        print "Tx Previous Hash:\t %s" % hashStr(self.prevhash)
        print "Tx Out Index:\t %8x" % self.txOutId
        print "Script Length:\t %d" % self.scriptLen
        print "Script Sig:\t %s" % hashStr(self.scriptSig)
        print "Sequence:\t %8x" % self.seqNo
        print "Size of Witness stack:\t %8x" % self.witnessCount
        for w in self.witnesses:
            w.toString()
        print "--------------------------------------------"
    def toDict(self):
        # Shape expected by gcoin.transaction.serialize.
        dict_ = {
            'outpoint': {
                'hash': hashStr(self.prevhash),
                'index': self.txOutId
            },
            'script': hashStr(self.scriptSig),
            'sequence': self.seqNo,
            'witness': []
        }
        for witness in self.witnesses:
            dict_['witness'].append(witness.toDict())
        return dict_
    def parse_witness(self, blockchain):
        # Read this input's witness stack (count followed by items).
        self.witnessCount = varint(blockchain)
        for i in range(0, self.witnessCount):
            witness = Witness(blockchain)
            self.witnesses.append(witness)
class txOutput:
    # Parses one transaction output (value + scriptPubKey).
    def __init__(self, blockchain):
        self.value = uint8(blockchain)
        self.scriptLen = varint(blockchain)
        self.pubkey = blockchain.read(self.scriptLen)
    @property
    def address(self):
        # Decode the destination address from the scriptPubKey
        # (addressFromScriptPubKey comes from `from blocktools import *`).
        return addressFromScriptPubKey(hashStr(self.pubkey))
    def toString(self):
        print "--------------TX OUT------------------------"
        print "Value:\t\t %d" % self.value
        print "Script Len:\t %d" % self.scriptLen
        print "Pubkey:\t\t %s" % hashStr(self.pubkey)
        print "Addr:\t\t %s" % self.address
        print "---------------------------------------------"
    def toDict(self):
        dict_ = {
            'script': hashStr(self.pubkey),
            'value': self.value
        }
        return dict_
class Witness:
    # One item of a segwit witness stack (length-prefixed byte string).
    def __init__(self, blockchain):
        self.scriptLen = varint(blockchain)
        self.scriptSig = blockchain.read(self.scriptLen)
    def toString(self):
        print "--------------WITNESS-----------------------"
        print "Script Length:\t %d" % self.scriptLen
        print "Script Sig:\t %s" % hashStr(self.scriptSig)
        print "--------------------------------------------"
    def toDict(self):
        dict_ = {
            'script': hashStr(self.scriptSig),
        }
        return dict_
|
1606258
|
from .linked_data_proof import LinkedDataProof
from .linked_data_signature import LinkedDataSignature
from .jws_linked_data_signature import JwsLinkedDataSignature
from .ed25519_signature_2018 import Ed25519Signature2018
from .bbs_bls_signature_2020 import BbsBlsSignature2020
from .bbs_bls_signature_proof_2020 import BbsBlsSignatureProof2020
# Public API of the signature-suites package (re-exported from submodules).
__all__ = [
    "LinkedDataProof",
    "LinkedDataSignature",
    "JwsLinkedDataSignature",
    "Ed25519Signature2018",
    "BbsBlsSignature2020",
    "BbsBlsSignatureProof2020",
]
|
1606287
|
import abc
from carbonserver.api import schemas
class Runs(abc.ABC):
    """Abstract repository interface for run records."""
    @abc.abstractmethod
    def add_run(self, run: schemas.RunCreate):
        """Persist a new run; concrete repositories must implement this."""
        raise NotImplementedError
|
1606341
|
import torch
import torch.nn as nn
import torch.nn.functional as F
class FocalLoss_Ori(nn.Module):
    """
    This is a implementation of Focal Loss with smooth label cross entropy supported which is proposed in
    'Focal Loss for Dense Object Detection. (https://arxiv.org/abs/1708.02002)'
        Focal_Loss= -1*alpha*(1-pt)*log(pt)
    :param num_class: number of classes K.
    :param alpha: per-class weights — a sequence of length num_class, a scalar
        in (0, 1) (requires balance_index to pick the weighted class), or a
        torch.Tensor. Default (0.25, 0.75).
    :param gamma: (float,double) gamma > 0 reduces the relative loss for well-classified examples (p>0.5) putting more
                    focus on hard misclassified example
    :param balance_index: index of the class receiving `alpha` when alpha is scalar.
    :param size_average: (bool, optional) By default, the losses are averaged over each loss element in the batch.
    """

    # Fixed: default alpha was a mutable list ([0.25, 0.75]); a tuple avoids
    # the shared-mutable-default anti-pattern and is accepted identically.
    def __init__(self, num_class, alpha=(0.25, 0.75), gamma=2, balance_index=-1, size_average=True):
        super(FocalLoss_Ori, self).__init__()
        self.num_class = num_class
        self.alpha = alpha
        self.gamma = gamma
        self.size_average = size_average
        self.eps = 1e-6  # added to pt so log(pt) never sees exact zero

        if isinstance(self.alpha, (list, tuple)):
            assert len(self.alpha) == self.num_class
            self.alpha = torch.tensor(list(self.alpha), dtype=torch.float32)
        elif isinstance(self.alpha, (float, int)):
            # Scalar alpha: weighted class gets alpha, all others 1 - alpha.
            assert 0 < self.alpha < 1.0, 'alpha should be in `(0,1)`)'
            assert balance_index > -1
            alpha = torch.ones(self.num_class)
            alpha *= 1 - self.alpha
            alpha[balance_index] = self.alpha
            self.alpha = alpha
        elif isinstance(self.alpha, torch.Tensor):
            self.alpha = self.alpha
        else:
            raise TypeError('Not support alpha type, expect `int|float|list|tuple|torch.Tensor`')

    def forward(self, logit, target):
        """ logit: softmax scores (N, K+1)
            target: integeral labels (N, 1) \in [0,...,K]
        """
        if logit.dim() > 2:
            # N,C,d1,d2 -> N,C,m (m=d1*d2*...)
            logit = logit.view(logit.size(0), logit.size(1), -1)
            logit = logit.transpose(1, 2).contiguous()  # [N,C,d1*d2..] -> [N,d1*d2..,C]
            logit = logit.view(-1, logit.size(-1))  # [N,d1*d2..,C]-> [N*d1*d2..,C]
        # Cast to long so gather() accepts targets supplied as other int dtypes.
        target = target.view(-1, 1).long()  # [N,d1,d2,...]->[N*d1*d2*...,1]

        # Memory-saving alternative to building a one-hot matrix: gather the
        # probability of the true class directly.
        pt = logit.gather(1, target).view(-1) + self.eps  # avoid log(0)
        logpt = pt.log()

        if self.alpha.device != logpt.device:
            self.alpha = self.alpha.to(logpt.device)
        # Per-sample class weight alpha[target].
        alpha_class = self.alpha.gather(0, target.view(-1))
        logpt = alpha_class * logpt
        # Focal modulation: down-weight well-classified samples by (1-pt)^gamma.
        loss = -1 * torch.pow(torch.sub(1.0, pt), self.gamma) * logpt

        if self.size_average:
            loss = loss.mean()
        else:
            loss = loss.sum()
        return loss
class EvidenceLoss(nn.Module):
    """Evidential deep learning (EDL) classification loss with optional
    gradient-harmonizing (GHM) or influence-balanced (IBM) re-weighting.

    cfg keys used: 'loss_type' ('mse'|'log'|'digamma'), 'evidence'
    ('relu'|'exp'|'softplus'), and optionally 'iou_aware', 'with_ghm'
    (+ 'num_bins', 'momentum', 'ghm_start'), 'with_ibm' (+ 'ibm_start',
    'ibm_coeff').
    """
    def __init__(self, num_cls, cfg, size_average=False):
        super(EvidenceLoss, self).__init__()
        self.num_cls = num_cls
        self.loss_type = cfg['loss_type']
        self.evidence = cfg['evidence']
        self.iou_aware = cfg['iou_aware'] if 'iou_aware' in cfg else False
        self.with_ghm = cfg['with_ghm'] if 'with_ghm' in cfg else False
        self.with_ibm = cfg['with_ibm'] if 'with_ibm' in cfg else False
        self.eps = 1e-10
        if self.with_ghm:
            # GHM: histogram of gradient norms over [0,1] with num_bins bins;
            # last edge nudged so grad_norm == 1 falls in the final bin.
            self.num_bins = cfg['num_bins']
            self.momentum = cfg['momentum']
            self.ghm_start = cfg['ghm_start'] if 'ghm_start' in cfg else 0
            self.edges = [float(x) / self.num_bins for x in range(self.num_bins+1)]
            self.edges[-1] += 1e-6
            if self.momentum > 0:
                # Running (EMA) per-bin counts used in place of raw counts.
                self.acc_sum = [0.0 for _ in range(self.num_bins)]
        if self.with_ibm:
            self.ibm_start = cfg['ibm_start'] if 'ibm_start' in cfg else 0
            self.coeff = cfg['ibm_coeff'] if 'ibm_coeff' in cfg else 10
        # epoch is expected to be advanced externally; gates GHM/IBM start.
        self.epoch, self.total_epoch = 0, 25
        self.size_average = size_average
    def iou_calib(self, logits, ious, mean=False):
        """ logit, shape=(N, K)
            ious, shape=(N)
        """
        # Cross-entropy between IoU and the evidential uncertainty u.
        ious[ious < 0] = 1e-3
        pred_alpha = self.evidence_func(logits) + 1  # (alpha = e + 1)
        uncertainty = self.num_cls / pred_alpha.sum(dim=-1)  # (N,)
        iou_reg = - ious * torch.log(1-uncertainty) - (1-ious) * torch.log(uncertainty)
        iou_reg = torch.mean(iou_reg) if mean else torch.sum(iou_reg)
        return iou_reg
    def forward(self, logit, target):
        """ logit, shape=(N, K+1)
            target, shape=(N, 1)
        """
        if logit.dim() > 2:
            # N,C,d1,d2 -> N,C,m (m=d1*d2*...)
            logit = logit.view(logit.size(0), logit.size(1), -1)
            logit = logit.transpose(1, 2).contiguous()  # [N,C,d1*d2..] -> [N,d1*d2..,C]
            logit = logit.view(-1, logit.size(-1))  # [N,d1*d2..,C]-> [N*d1*d2..,C]
        target = target.view(-1)  # [N,d1,d2,...]->[N*d1*d2*...,]
        out_dict = dict()
        # one-hot embedding for the target
        y = torch.eye(self.num_cls).to(logit.device, non_blocking=True)
        y = y[target]  # (N, K+1)
        # get loss func
        loss, func = self.get_loss_func()
        # L_1 norm of feature
        feat_norm = torch.sum(torch.abs(logit), 1).reshape(-1) if self.with_ibm else None
        # compute losses
        pred_alpha = self.evidence_func(logit) + 1  # (alpha = e + 1)
        loss_out = loss(y, pred_alpha, func=func, target=target, feat_norm=feat_norm)
        out_dict.update(loss_out)
        # accumulate total loss
        total_loss = 0
        for k, v in loss_out.items():
            if 'loss' in k:
                total_loss += v
        out_dict.update({'total_loss': total_loss})
        return total_loss
    def get_loss_func(self):
        # Returns (loss callable, link function) for the configured loss_type.
        if self.loss_type == 'mse':
            return self.mse_loss, None
        elif self.loss_type == 'log':
            return self.edl_loss, torch.log
        elif self.loss_type == 'digamma':
            return self.edl_loss, torch.digamma
        else:
            raise NotImplementedError
    def evidence_func(self, logit):
        # Maps raw logits to non-negative evidence; exp is clamped for
        # numerical stability.
        if self.evidence == 'relu':
            return F.relu(logit)
        if self.evidence == 'exp':
            return torch.exp(torch.clamp(logit, -10, 10))
        if self.evidence == 'softplus':
            return F.softplus(logit)
    def mse_loss(self, y, alpha, func=None, target=None, feat_norm=None):
        """Used only for loss_type == 'mse'
            y: the one-hot labels (batchsize, num_classes)
            alpha: the predictions (batchsize, num_classes)
            annealing_coef: dependent on training epoch
        """
        losses = {}
        # compute loss by considering the temporal penalty
        loglikelihood_err, loglikelihood_var = self.loglikelihood_loss(y, alpha)
        if self.size_average:
            loglikelihood_err = torch.mean(loglikelihood_err)
            loglikelihood_var = torch.mean(loglikelihood_var)
        else:
            loglikelihood_err = torch.sum(loglikelihood_err)
            loglikelihood_var = torch.sum(loglikelihood_var)
        losses.update({'cls_loss': loglikelihood_err, 'var_loss': loglikelihood_var})
        return losses
    def edl_loss(self, y, alpha, func=torch.log, target=None, feat_norm=None):
        """Used for both loss_type == 'log' and loss_type == 'digamma'
            y: the one-hot labels (batchsize, num_classes)
            alpha: the predictions (batchsize, num_classes)
            annealing_coef: dependent on training epoch
            func: function handler (torch.log, or torch.digamma)
        """
        losses = {}
        S = torch.sum(alpha, dim=1, keepdim=True)  # (B, 1)
        if self.with_ghm and self.epoch >= self.ghm_start:
            # GHM branch: weight each sample inversely to the density of its
            # gradient-norm bin (rare gradients get larger weight).
            alpha_pred = alpha.detach().clone()  # (N, K)
            uncertainty = self.num_cls / alpha_pred.sum(dim=-1, keepdim=True)  # (N, 1)
            # gradient length
            grad_norm = torch.abs(1 / alpha_pred - uncertainty) * y  # y_ij * (1/alpha_ij - u_i)
            n = 0  # n valid bins
            weights = torch.zeros_like(alpha)
            for i in range(self.num_bins):
                inds = (grad_norm >= self.edges[i]) & (grad_norm < self.edges[i+1])
                num_in_bin = inds.sum().item()
                if num_in_bin > 0:
                    if self.momentum > 0:
                        self.acc_sum[i] = self.momentum * self.acc_sum[i] \
                            + (1 - self.momentum) * num_in_bin
                        weights[inds] = 1.0 / self.acc_sum[i]
                    else:
                        weights[inds] = 1.0 / num_in_bin
                    n += 1
            if n > 0:
                weights = weights / n
            # compute the weighted EDL loss
            cls_loss = torch.sum(y * weights * (func(S) - func(alpha)), dim=1)
        elif self.with_ibm and self.epoch >= self.ibm_start:
            # IBM branch: down-weight samples with large feature norm and
            # large gradient norm (influence-balanced re-weighting).
            alpha_pred = alpha.detach().clone()  # (N, K)
            uncertainty = self.num_cls / alpha_pred.sum(dim=-1, keepdim=True)  # (N, 1)
            grad_norm = torch.sum(torch.abs(1 / alpha_pred - uncertainty) * y, dim=1)  # sum_j|y_ij * (1/alpha_ij - u_i)|, (N)
            weights = 1.0 / (feat_norm * torch.exp(self.coeff * grad_norm) + self.eps)  # influence-balanced weight
            # compute the weighted EDL loss
            cls_loss = weights * torch.sum(y * (func(S) - func(alpha)), dim=1)
        else:
            # Plain EDL loss: sum_j y_ij * (func(S_i) - func(alpha_ij)).
            cls_loss = torch.sum(y * (func(S) - func(alpha)), dim=1)
        if self.size_average:
            cls_loss = torch.mean(cls_loss)
        else:
            cls_loss = torch.sum(cls_loss)
        losses.update({'cls_loss': cls_loss})
        return losses
    def loglikelihood_loss(self, y, alpha):
        # Squared-error and variance terms of the EDL Gaussian likelihood.
        S = torch.sum(alpha, dim=1, keepdim=True)
        loglikelihood_err = torch.sum((y - (alpha / S)) ** 2, dim=1, keepdim=True)
        loglikelihood_var = torch.sum(alpha * (S - alpha) / (S * S * (S + 1)), dim=1, keepdim=True)
        return loglikelihood_err, loglikelihood_var
class ActionnessLoss(nn.Module):
    """Binary actionness loss.

    BCE over all positives plus the cleanest (lowest-scoring) negatives,
    combined with a margin rank term that pushes the noisiest negative
    above the strongest positive (the positive side is detached).
    """

    def __init__(self, size_average=False, weight=0.1, margin=1.0):
        super(ActionnessLoss, self).__init__()
        self.size_average = size_average  # mean vs. sum BCE reduction
        self.weight = weight              # weight of the rank term
        self.margin = margin              # rank-loss margin

    def forward(self, logit, target):
        """logit, shape=(N, 1), unbounded logits
        target, shape=(N, 1) binary values
        """
        if logit.dim() > 2:
            # N,C,d1,d2 -> N*d1*d2...,C
            logit = logit.view(logit.size(0), logit.size(1), -1)
            logit = logit.transpose(1, 2).contiguous()
            logit = logit.view(-1, logit.size(-1))
        label = target.view(-1)
        pred = logit.view(-1) if logit.size(-1) == 1 else logit

        # Partition predictions into positive and negative sets.
        pos_pred, pos_label = pred[label > 0], label[label > 0]
        neg_pred, neg_label = pred[label == 0], label[label == 0]
        num_pos = pos_pred.numel()
        num_neg = neg_pred.numel()

        keep = min(num_pos, num_neg) - 1  # reserve one negative for the rank loss
        if keep > 0:
            # Ascending sort: the lowest-scoring negatives are the "cleanest".
            _, order = neg_pred.sort()
            clean_neg_pred = neg_pred[order[:keep]]
            clean_neg_label = neg_label[order[:keep]]
            pred = torch.cat((pos_pred, clean_neg_pred), dim=0)
            label = torch.cat((pos_label, clean_neg_label), dim=0)
            num_neg = keep

        # BCE over positives + kept negatives.
        reduction = 'mean' if self.size_average else 'sum'
        loss_bce = F.binary_cross_entropy_with_logits(pred, label, reduction=reduction)

        # Rank term: computed from the *full* negative set.
        loss_rank = 0
        if keep > 0:
            noisiest_neg, _ = torch.max(neg_pred, dim=0)
            best_pos, _ = torch.max(pos_pred, dim=0)
            zero = torch.tensor(0.0).to(pred.device)
            loss_rank = torch.maximum(zero, self.margin - noisiest_neg + best_pos.detach())

        return loss_bce + self.weight * loss_rank, num_pos + num_neg
|
1606395
|
from rxbp.mixins.flowablemixin import FlowableMixin
from rxbp.observables.tolistobservable import ToListObservable
from rxbp.subscriber import Subscriber
from rxbp.subscription import Subscription
class ToListFlowable(FlowableMixin):
    """Flowable that buffers every element of ``source`` and emits one list."""

    def __init__(self, source: FlowableMixin):
        super().__init__()
        self._source = source

    def unsafe_subscribe(self, subscriber: Subscriber) -> Subscription:
        # Subscribe upstream first, then wrap its observable so that all
        # emitted items are collected into a single list.
        upstream = self._source.unsafe_subscribe(subscriber=subscriber)
        list_observable = ToListObservable(source=upstream.observable)
        return upstream.copy(observable=list_observable)
|
1606420
|
from sys import stdin
from collections import deque
INFINITE = 999999999
def read_scenario():
    """Read one scenario from stdin.

    Header line: paper count and author count; then the raw paper lines,
    then the queried author names (trailing newline stripped).
    """
    npapers, nauthors = (int(tok) for tok in stdin.readline().split())
    papers = [stdin.readline() for _ in range(npapers)]
    authors = [stdin.readline().rstrip() for _ in range(nauthors)]
    return papers, authors
def extract_paper_authors(paper):
    """Parse a '"Last, I., Last2, I2.: title"' line into author strings.

    Returns authors formatted as "Last, Initials" (whitespace-stripped).
    """
    tokens = paper.replace(':', ',').split(', ')
    # Tokens alternate last-name / initials; the final token is the title.
    author_count = (len(tokens) - 1) // 2
    return [
        tokens[2 * i] + ', ' + tokens[2 * i + 1].rstrip()
        for i in range(author_count)
    ]
def build_author_ref(papers):
    """Assign a unique integer id to every author found in *papers*.

    papers: list of author-name lists (output of extract_paper_authors).
    Returns a dict mapping "Last, Initials" -> id. Erdos is pre-seeded at
    id 0 so the BFS has a start node even before his papers are scanned.
    """
    # NOTE(review): this entry was an anonymized '<NAME>.' placeholder;
    # restored to the "Last, Initials" form the rest of the code expects.
    author_ref = {'Erdos, P.': 0}  # Assume HE is in some paper
    next_id = 1
    for paper in papers:
        for author in paper:
            if author not in author_ref:
                author_ref[author] = next_id
                next_id += 1
    return author_ref
def find_erdos_bfs(papers, authors):
    """Compute the Erdos number of each name in *authors* via BFS.

    papers: raw paper lines; authors: queried author names.
    Returns a list of numbers (INFINITE when an author is unreachable
    or unknown).
    """
    paper_authors = [extract_paper_authors(paper) for paper in papers]
    author_ref = build_author_ref(paper_authors)
    # Create paper_authors/author_paper tables using author references
    paper_authors = [[author_ref[author] for author in paper] for paper in paper_authors]
    author_papers = [[] for _ in author_ref.keys()]
    for num, paper in enumerate(paper_authors):
        for author in paper:
            author_papers[author].append(num)
    # Initial erdos for each author and paper
    author_erdos = {ref: INFINITE for author, ref in author_ref.items()}
    paper_erdos = [INFINITE for paper_id in range(len(papers))]
    author_erdos[0]=0 # HE was the beginning
    # Use BFS to find erdos numbers starting by ERDOS HIMSELF
    queue = deque([0])
    while queue:
        author = queue.popleft()
        for paper in author_papers[author]:
            # BFS visits authors in nondecreasing Erdos order, so a paper
            # that already has a number <= this author's can be skipped.
            if author_erdos[author] >= paper_erdos[paper]:
                continue
            paper_erdos[paper] = author_erdos[author]
            for co_author in paper_authors[paper]:
                if author_erdos[co_author] == INFINITE:
                    # First time reached: one step farther than the current author.
                    author_erdos[co_author] = author_erdos[author]+1
                    queue.append(co_author)
    # Obtain requested authors erdos number
    erdos = []
    for author in authors:
        if author not in author_ref:
            erdos.append(INFINITE)
        else:
            erdos.append(author_erdos[author_ref[author]])
    return erdos
if __name__ == '__main__':
    # One scenario per test case: read it, compute Erdos numbers, report.
    nscenarios = int(stdin.readline())
    for s in range(nscenarios):
        papers, authors = read_scenario()
        erdos = find_erdos_bfs(papers, authors)
        print("Scenario {}".format(s+1))
        for author, erd in zip(authors, erdos):
            if erd == INFINITE:
                # Unreachable authors are reported as 'infinity'.
                print(author, 'infinity')
            else:
                print(author, erd)
|
1606455
|
import requests, re, os, time
from bs4 import BeautifulSoup
import http.cookiejar
def search(keyword, headers, cookies=".cookies\\ssd.txt"):
    """Search springsunday.net for *keyword* and return its douban/imdb ids.

    keyword: release file name; headers: HTTP headers for the requests.
    cookies: path to a Mozilla-format cookie file; returns False when missing.
    Returns {'douban': str, 'imdb': str} on an exact title match, else False.
    """
    if not os.path.exists(cookies):
        return False
    # Progressively normalize the file name into a search query.
    re_subname = re.match(r'(.+?)\.(mkv|mp4|ts|avi)', keyword)  # strip file extension
    key1 = key2 = re_subname.group(1) if re_subname else keyword
    re_brackets = re.search(r'\[(.+?)(\(.+\))?\].+(\d{4})', key2)  # strip [brackets]
    key2 = "{} {}".format(re_brackets.group(1), re_brackets.group(3)) if re_brackets else key2
    ssd_movie = re.search(r'(.+?)\d{0,2}(\(.+\))?\.(\d{4})(\..+)?.?£.+', key2)  # SSD-Movie
    key2 = "{} {} CMCT".format(ssd_movie.group(1), ssd_movie.group(3)) if ssd_movie else key2
    ssd_tv = re.search(r'(.+)\.全\d+集|话\.(\d{4})\..+£.+', key1)  # SSD-TV
    # NOTE(review): this line was corrupted (a '<KEY>' placeholder); restored
    # as a plain substitution of the series title -- confirm against original.
    key2 = "{}".format(ssd_tv.group(1)) if ssd_tv else key2
    ssd_version = re.search(r'(.+)( |\.)(.+版)', key2)  # strip edition suffix
    key2 = key2.replace(ssd_version.group(3), "") if ssd_version else key2
    key2 = key2.replace("!", " ").replace("!", " ").replace("-", " ").replace("\'", " ")
    url = "https://springsunday.net/torrents.php?search=" + key2
    # Renamed the loaded jar so it no longer shadows the `cookies` path param.
    cookie_jar = http.cookiejar.MozillaCookieJar(cookies)
    cookie_jar.load()
    response = requests.get(url, headers=headers, cookies=cookie_jar)
    response.encoding = 'UTF-8'
    if response.status_code != 200:
        print(response.status_code)
    soup = BeautifulSoup(response.text, 'lxml')
    results = soup.find_all("table", {"class": "torrentname"})
    # Collect the detail-page link of every search result.
    reslinks = ["https://springsunday.net/" + result.find("a").get("href") for result in results]
    for reslink in reslinks:
        res = requests.get(reslink, headers=headers, cookies=cookie_jar)
        res.encoding = 'UTF-8'
        if res.status_code != 200:
            print(res.status_code)
        soup = BeautifulSoup(res.text, 'lxml')
        title = soup.find("a", {"class": "index"}).getText().replace(".torrent", "").replace("[SSD].", "")
        if title == keyword:
            imdb_search = re.search(r"(http|https)://(www|us)\.imdb\.com/title/(tt\d+)", res.text)
            imdb_search2 = re.search(r'tt\d{6,}', res.text)
            db_search = re.search(r"https:\/\/(movie\.|www\.)?douban\.com\/(subject|movie)\/(\d+)", res.text)
            dblink = db_search.group() if db_search else ""
            imdbid = imdb_search.group(3) if imdb_search else ""
            imdbid = imdb_search2.group() if not imdbid and imdb_search2 else imdbid
            if dblink or imdbid:
                return {'douban': dblink, 'imdb': imdbid}
        time.sleep(0.5)  # be polite to the tracker between detail-page requests
    if len(reslinks) == 0:
        print(url)  # no results: show the query URL to help tune the search logic
    return False
if __name__ == '__main__':
    # Smoke test: look up a known release and print the resolved ids.
    headers = {'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36'}
    x = search("[1922].1922.2017.1080p.NF.WEB-DL.DDP.5.1.x264-CMCTV.mkv", headers)
    print(x)
|
1606501
|
from .base import Brain
from .brain_nn import BrainNN
from .rlpower_splines import BrainRLPowerSplines
from .bo_cpg import BrainCPGBO
|
1606527
|
from __future__ import unicode_literals
from django.conf.urls import *
from .views import datatable_manager
app_name = 'django_datatables'  # URL namespace: {% url 'django_datatables:...' %}
# Single endpoint that serves datatable AJAX requests.
# NOTE(review): `url()` is removed in Django 4.0 -- migrate to re_path() when upgrading.
urlpatterns = [
    url(r'^data/$', datatable_manager, name="datatable_manager")
]
|
1606569
|
import os
import yaml
import click
import shutil
import subprocess
import venv as ve
lab_project = ['experiments', 'data', 'logs', 'notebooks', 'config']
# Project
def check_minio_config(minio_tag):
    """Check that the global lab config contains *minio_tag*.

    Reads ~/.lab/config.yaml and indexes it with the tag; any failure
    (missing file, bad YAML, unknown tag) aborts the CLI with a message.
    """
    home_dir = os.path.expanduser('~')
    lab_dir = os.path.join(home_dir, '.lab')
    try:
        with open(os.path.join(lab_dir, 'config.yaml'), 'r') as file:
            # safe_load: yaml.load without an explicit Loader is deprecated
            # and can construct arbitrary objects from untrusted YAML.
            yaml.safe_load(file)[minio_tag]
    except Exception as e:
        print(str(e))
        click.secho('Invalid global minio connection tag.', fg='red')
        raise click.Abort()
def is_venv(home_dir):
    """Ensure *home_dir* has a .venv directory, creating one if absent."""
    if os.path.exists(os.path.join(home_dir, '.venv')):
        return
    click.secho('Virtual environment not found. '
                'Creating one for this project',
                fg='blue')
    create_venv(home_dir)
def is_empty_project():
    """Abort with a hint when the project has no experiments yet."""
    # First directory listing under experiments/ = one entry per experiment.
    experiment_dirs = next(os.walk('experiments'))[1]
    if not experiment_dirs:
        click.secho("It looks like you've started a brand new project. "
                    'Run your first experiment to generate a list of metrics.',
                    fg='blue')
        raise click.Abort()
def is_lab_project():
    """Abort unless every expected lab directory exists in the cwd."""
    missing = [f for f in lab_project if not os.path.exists(f)]
    if missing:
        click.secho('This directory does not appear to be a valid '
                    'Lab Project.\nRun <lab init> to create one.',
                    fg='red')
        raise click.Abort()
def create_venv(project_name):
    """Create the project's .venv and install its base requirements.

    project_name: directory that will contain `.venv`.
    On any failure, removes the partial venv, reports, and aborts.
    """
    # Create a virtual environment
    venv_dir = os.path.join(project_name, '.venv')
    try:
        environment = ve.EnvBuilder(system_site_packages=False,
                                    symlinks=True, with_pip=True)
        environment.create(venv_dir)
        subprocess.call([venv_dir + '/bin/pip', 'install',
                         '--upgrade', 'pip'])
        subprocess.call([venv_dir + '/bin/pip',
                         'install', '--upgrade', 'lab-ml'])
        subprocess.call([venv_dir + '/bin/pip', 'install',
                         '-r', 'requirements.txt'])
    except Exception as e:
        # ignore_errors: the venv may not exist if EnvBuilder itself failed,
        # and a cleanup error must not mask the original exception.
        shutil.rmtree(venv_dir, ignore_errors=True)
        click.secho('Something went wrong during .venv creation.',
                    fg='red')
        print(str(e))
        raise click.Abort()
|
1606606
|
import pytest
import sqlalchemy as sa
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy_utils import get_hybrid_properties
@pytest.fixture
def Category(Base):
    """Declarative model exposing a hybrid property (Python + SQL sides)."""
    class Category(Base):
        __tablename__ = 'category'
        id = sa.Column(sa.Integer, primary_key=True)
        name = sa.Column(sa.Unicode(255))

        @hybrid_property
        def lowercase_name(self):
            # Instance-level accessor.
            return self.name.lower()

        @lowercase_name.expression
        def lowercase_name(cls):
            # Class-level SQL expression; must reuse the same attribute name.
            return sa.func.lower(cls.name)
    return Category
class TestGetHybridProperties(object):
    """get_hybrid_properties must find hybrids on models, mappers and aliases."""

    def test_declarative_model(self, Category):
        props = get_hybrid_properties(Category)
        assert list(props.keys()) == ['lowercase_name']

    def test_mapper(self, Category):
        props = get_hybrid_properties(sa.inspect(Category))
        assert list(props.keys()) == ['lowercase_name']

    def test_aliased_class(self, Category):
        props = get_hybrid_properties(sa.orm.aliased(Category))
        assert list(props.keys()) == ['lowercase_name']
|
1606615
|
from o3seespy.base_model import OpenSeesObject
class UniaxialMaterialBase(OpenSeesObject):
    """Base class for OpenSees ``uniaxialMaterial`` wrappers."""
    op_base_type = "uniaxialMaterial"
    op_type = None  # concrete subclasses set the material type string

    def set_parameter(self, osi, pstr, value, ele, eles):
        """Set parameter *pstr* to *value* on one element and/or a list of elements."""
        from o3seespy import set_parameter
        if ele is not None:
            set_parameter(osi, value=value, eles=[ele], args=[pstr, 1])
        if eles is not None:
            set_parameter(osi, value=value, eles=eles, args=[pstr, 1])

    def build(self, osi):
        """Register this material with *osi*, taking the next sequential tag."""
        self.osi = osi
        osi.n_mat += 1  # tags are allocated sequentially on the domain object
        self._tag = osi.n_mat
        self.to_process(osi)
        self.built = 1
|
1606669
|
from tensorflow import keras
from tensorflow.keras import backend as K
from ..function_approximator import FunctionApproximator
__all__ = (
'ConnectFourFunctionApproximator',
)
class ConnectFourFunctionApproximator(FunctionApproximator):
    """
    A :term:`function approximator` specifically designed for the
    :class:`ConnectFour <keras_gym.envs.ConnectFourEnv>` environment.

    Parameters
    ----------
    env : environment
        An Atari 2600 gym environment.

    optimizer : keras.optimizers.Optimizer, optional
        If left unspecified (``optimizer=None``), the function approximator's
        DEFAULT_OPTIMIZER is used. See `keras documentation
        <https://keras.io/optimizers/>`_ for more details.

    **optimizer_kwargs : keyword arguments
        Keyword arguments for the optimizer. This is useful when you want to
        use the default optimizer with a different setting, e.g. changing the
        learning rate.
    """
    def body(self, S):
        # Build the shared torso. S packs an available-actions mask in
        # row 0 and the board planes in rows 1+ of the first axis.
        def extract_state(S):
            # NOTE(review): 'float' presumably resolves to float32 -- confirm.
            return K.cast(S[:, 1:, :, :], 'float')

        def extract_available_actions_mask(S):
            # Row 0, channel 0 encodes which columns can still be played.
            return K.cast(S[:, 0, :, 0], 'bool')

        # extract the mask over available actions from the state observation
        self.available_actions_mask = keras.layers.Lambda(
            extract_available_actions_mask,
            name='extract_available_actions_mask')(S)

        layers = [
            keras.layers.Lambda(extract_state, name='extract_state'),
            keras.layers.Conv2D(
                name='conv1', filters=20, kernel_size=4, strides=1,
                activation='relu'),
            keras.layers.Conv2D(
                name='conv2', filters=40, kernel_size=2, strides=1,
                activation='relu'),
            keras.layers.Flatten(name='flatten'),
            keras.layers.Dense(
                name='dense1', units=64, activation='linear'),
        ]

        # forward pass
        X = S
        for layer in layers:
            X = layer(X)
        return X
|
1606697
|
from django.apps import AppConfig
from orchestra.core import services
class MailboxesConfig(AppConfig):
    """App config registering mailbox models as orchestra services."""
    name = 'orchestra.contrib.mailboxes'
    verbose_name = 'Mailboxes'

    def ready(self):
        # Imported here so models load only after the app registry is ready.
        from .models import Mailbox, Address
        services.register(Mailbox, icon='email.png')
        services.register(Address, icon='X-office-address-book.png')
        from . import signals  # imported for its signal-connection side effects
|
1606734
|
from app.core.result import Result
def get_success_result() -> Result:
    """Build a Result already marked successful."""
    outcome = Result()
    outcome.set_success()
    return outcome
def get_error_result() -> Result:
    """Build a Result carrying a generic error message."""
    outcome = Result()
    outcome.error('some error')
    return outcome
def get_failed_result() -> Result:
    """Build a Result left in its default (failed) state."""
    return Result()
|
1606786
|
# RPi-side tracking loop: reads a user-selected ROI from Firebase, locks onto
# the matching SSD detection, then tracks it and steers roll/pitch PWM outputs.
# Syntax repairs in this revision: missing ')' on the four ROI reads, a stray
# ')' on X2, and a missing 'and' in the lock-on condition.
import cv2, pyrebase, socket, struct
import numpy as np
from tracker import CentroidTracker
import RPi.GPIO as GPIO

# --- GPIO / PWM setup -------------------------------------------------------
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
GPIO.setup(33, GPIO.IN)
GPIO.setup(34, GPIO.IN)
GPIO.setup(36, GPIO.OUT)
GPIO.setup(37, GPIO.OUT)
RollOut = GPIO.PWM(36, 300)   # roll output, 300 Hz
PitchOut = GPIO.PWM(37, 300)  # pitch output, 300 Hz
RollOut.start(50)   # 50% duty = neutral
PitchOut.start(50)

# --- Command socket (this device is the server) -----------------------------
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host_name = socket.gethostname()
host_ip = socket.gethostbyname(host_name)
print('HOST IP:', host_ip)
port = 9999
socket_address = (host_ip, port)
server_socket.bind(socket_address)
server_socket.listen(5)
print("LISTENING AT:", socket_address)
# =============== Video Streamer ===============#
client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host_ip = '192.168.137.33'  # RPi ip
port = 9000
client_socket.connect((host_ip, port))
data = b""
payload_size = struct.calcsize("Q")
# ==============================================#
tracker = CentroidTracker(maxDisappeared=80)
config = {
    ############ Firebase details
}
firebase = pyrebase.initialize_app(config)
db = firebase.database()
# ========= Variables =========#
ROI_dif = 100        # max px drift allowed between user ROI and detection box
counter = 0          # 0 = acquiring target, 1 = tracking
Move_Dir = "null"
FrameCentre = 640    # half of the 1280 px frame width
PreArea = 0          # area of the user-selected ROI (depth reference)
# =============================#
# ===== Getting Video =====#
# FIXME(review): "19172.16.58.3" is not a valid IPv4 address (5-digit octet);
# restore the real camera address before deploying.
cap = cv2.VideoCapture("rtsp://19172.16.58.3:8554")
# cap = cv2.VideoCapture(0)
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter('TrackAI.avi', fourcc, 3.2, (640, 480), True)
# =========================#
# =========== Obj_Detection_Config ===========#
configPath = 'ssd_mobilenet_v3_large_coco_2020_01_14.pbtxt'
weightsPath = 'frozen_inference_graph.pb'
net = cv2.dnn_DetectionModel(weightsPath, configPath)
net.setInputSize(320, 320)
net.setInputScale(1.0 / 127.5)
net.setInputMean((127.5, 127.5, 127.5))
net.setInputSwapRB(True)
# ============================================#
RpiSocket, addr = server_socket.accept()
print("Got connection from", addr)
while True:
    #################################
    RollIN = 50   # note:
    PitchIN = 50  # note:
    # NOTE: The method I use to read PWM signals from the receiver is not even close to accurate. So, please suggest a better way.
    # RollIN reads PWM on pin 33
    # PitchIN reads PWM on pin 34
    #################################
    try:
        trackstatus = db.child("checkpoint").get().val()
        if (trackstatus == "track_proceed"):
            if (counter == 0):
                # Acquisition: read the user's ROI from Firebase and find the
                # detection class whose box overlaps it.
                Detected_ClassId = 0
                RoiX1 = int(db.child("coordinates").child("Xa1").get().val())
                RoiY1 = int(db.child("coordinates").child("Ya1").get().val())
                RoiX2 = int(db.child("coordinates").child("Xa2").get().val())
                RoiY2 = int(db.child("coordinates").child("Ya2").get().val())
                PreArea = abs((RoiX1 - RoiX2) * (RoiY1 - RoiY2))
                # ================ ROI & ObjD Initially =================#
                rects = []
                success, img = cap.read()
                classIds, confs, bbox = net.detect(img, confThreshold=0.4)
                bbox = list(bbox)
                confs = list(np.array(confs).reshape(1, -1)[0])
                confs = list(map(float, confs))
                indices = cv2.dnn.NMSBoxes(bbox, confs, 0.2, 0.1)
                if len(classIds) != 0:
                    i = indices[0][0]
                    box = bbox[i]
                    rects.append(box)
                    tracked = tracker.update(rects)  # renamed: `object` shadowed the builtin
                    X = tracked[1][0][0]
                    Y = tracked[1][0][1]
                    W = tracked[1][0][2]
                    H = tracked[1][0][3]
                    X2 = (X + W)
                    Y2 = (Y + H)
                    # Lock on only if every edge of the detection box lies
                    # within ROI_dif pixels of the user-selected ROI.
                    if abs(RoiX1 - X) <= ROI_dif and abs(RoiY1 - Y) <= ROI_dif and abs(RoiX2 - X2) <= ROI_dif and abs(RoiY2 - Y2) <= ROI_dif:
                        Detected_ClassId = classIds[0][0]
                # =========================================================#
                counter = (counter + 1)
            elif (counter == 1):
                # Tracking: follow the locked class and steer the PWM outputs.
                rects = []
                success, img = cap.read()
                classIds, confs, bbox = net.detect(img, confThreshold=0.4)
                if len(classIds) != 0:
                    if classIds[0][0] == Detected_ClassId:
                        bbox = list(bbox)
                        confs = list(np.array(confs).reshape(1, -1)[0])
                        confs = list(map(float, confs))
                        indices = cv2.dnn.NMSBoxes(bbox, confs, 0.2, 0.1)
                        for i in indices:
                            i = i[0]
                            box = bbox[i]
                            rects.append(box)
                        tracked = tracker.update(rects)
                        X = tracked[1][0][0]
                        Y = tracked[1][0][1]
                        W = tracked[1][0][2]
                        H = tracked[1][0][3]
                        PostArea = (W * H)
                        # print("ClassID:-", classIds[0][0])
                        cv2.rectangle(img, (X, Y), (X + W, Y + H), (0, 255, 0), 2)
                        cX = (X + (X + W)) / 2  # horizontal centre of the box
                        # extreme diff = 310
                        # minimum diff = 150
                        # RIGHT
                        if ((cX - FrameCentre) >= 150 and (cX - FrameCentre) < 310):
                            Move_Dir = "right"
                            RollOut.ChangeDutyCycle(58)
                        elif ((cX - FrameCentre) >= 310):
                            Move_Dir = "Right"
                            RollOut.ChangeDutyCycle(32)
                        # LEFT
                        elif ((FrameCentre - cX) >= 150 and (FrameCentre - cX) < 310):
                            Move_Dir = "left"
                            RollOut.ChangeDutyCycle(42)
                        elif ((FrameCentre - cX) >= 310):
                            Move_Dir = "Left"
                            RollOut.ChangeDutyCycle(68)
                        else:
                            RollOut.ChangeDutyCycle(RollIN)
                        #######################################################################
                        # Pitch from apparent size change (depth cue vs. the initial ROI).
                        if (PreArea - PostArea >= 50):
                            PitchOut.ChangeDutyCycle(58)
                        elif (PostArea - PreArea >= 50):
                            PitchOut.ChangeDutyCycle(42)
                        else:
                            PitchOut.ChangeDutyCycle(PitchIN)
        elif (trackstatus == "track_abort"):
            success, img = cap.read()
            counter = 0
        # out.write(img)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    except Exception:
        # Was a bare `except: pass`, which also swallowed KeyboardInterrupt;
        # narrowed so Ctrl-C still stops the loop. Transient Firebase/camera
        # glitches are still ignored to keep tracking alive.
        pass
cap.release()
# out.release() # save whole video on raspberry pi storage
cv2.destroyAllWindows()
|
1606804
|
import numpy as np
import pytest
import math
from sklearn.base import clone
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestRegressor
import doubleml as dml
from ._utils import draw_smpls
from ._utils_irm_manual import fit_irm, boot_irm, tune_nuisance_irm
# Module-scoped parametrized fixtures: each value listed in `params`
# produces one run of every test that consumes the fixture.
@pytest.fixture(scope='module',
                params=[RandomForestRegressor()])
def learner_g(request):
    """Learner used for the ml_g nuisance model."""
    return request.param


@pytest.fixture(scope='module',
                params=[LogisticRegression()])
def learner_m(request):
    """Learner used for the ml_m propensity model."""
    return request.param


@pytest.fixture(scope='module',
                params=['ATE', 'ATTE'])
def score(request):
    """IRM score function variant."""
    return request.param


@pytest.fixture(scope='module',
                params=['dml2'])
def dml_procedure(request):
    """Double-ML aggregation procedure variant."""
    return request.param


@pytest.fixture(scope='module',
                params=[True, False])
def tune_on_folds(request):
    """Whether hyperparameters are tuned per fold or once globally."""
    return request.param
def get_par_grid(learner):
    """Return the hyperparameter tuning grid matching the learner's class."""
    if learner.__class__ in [RandomForestRegressor]:
        return {'n_estimators': [5, 10, 20]}
    assert learner.__class__ in [LogisticRegression]
    return {'C': np.logspace(-4, 2, 10)}
@pytest.fixture(scope='module')
def dml_irm_fixture(generate_data_irm, learner_g, learner_m, score, dml_procedure, tune_on_folds):
    """Fit DoubleMLIRM with tuned learners and rebuild the same estimate
    manually; returns both result sets for the comparison tests below."""
    par_grid = {'ml_g': get_par_grid(learner_g),
                'ml_m': get_par_grid(learner_m)}
    n_folds_tune = 4
    boot_methods = ['normal']
    n_folds = 2
    n_rep_boot = 499

    # collect data
    (x, y, d) = generate_data_irm

    # Set machine learning methods for m & g
    ml_g = clone(learner_g)
    ml_m = clone(learner_m)

    np.random.seed(3141)
    obj_dml_data = dml.DoubleMLData.from_arrays(x, y, d)
    dml_irm_obj = dml.DoubleMLIRM(obj_dml_data,
                                  ml_g, ml_m,
                                  n_folds,
                                  score=score,
                                  dml_procedure=dml_procedure)
    # tune hyperparameters
    _ = dml_irm_obj.tune(par_grid, tune_on_folds=tune_on_folds, n_folds_tune=n_folds_tune)
    dml_irm_obj.fit()

    # Re-seed so the manual pipeline draws identical sample splits.
    np.random.seed(3141)
    n_obs = len(y)
    all_smpls = draw_smpls(n_obs, n_folds)
    smpls = all_smpls[0]

    if tune_on_folds:
        g0_params, g1_params, m_params = tune_nuisance_irm(y, x, d,
                                                           clone(learner_g), clone(learner_m), smpls, score,
                                                           n_folds_tune,
                                                           par_grid['ml_g'], par_grid['ml_m'])
    else:
        xx = [(np.arange(len(y)), np.array([]))]
        g0_params, g1_params, m_params = tune_nuisance_irm(y, x, d,
                                                           clone(learner_g), clone(learner_m), xx, score,
                                                           n_folds_tune,
                                                           par_grid['ml_g'], par_grid['ml_m'])
        # Global tuning: replicate the single parameter set on every fold.
        g0_params = g0_params * n_folds
        m_params = m_params * n_folds
        if score == 'ATE':
            g1_params = g1_params * n_folds
        else:
            assert score == 'ATTE'
            g1_params = None

    res_manual = fit_irm(y, x, d, clone(learner_g), clone(learner_m),
                         all_smpls, dml_procedure, score,
                         g0_params=g0_params, g1_params=g1_params, m_params=m_params)

    res_dict = {'coef': dml_irm_obj.coef,
                'coef_manual': res_manual['theta'],
                'se': dml_irm_obj.se,
                'se_manual': res_manual['se'],
                'boot_methods': boot_methods}

    for bootstrap in boot_methods:
        # Same seed for manual and package bootstrap so draws coincide.
        np.random.seed(3141)
        boot_theta, boot_t_stat = boot_irm(y, d, res_manual['thetas'], res_manual['ses'],
                                           res_manual['all_g_hat0'], res_manual['all_g_hat1'],
                                           res_manual['all_m_hat'], res_manual['all_p_hat'],
                                           all_smpls, score, bootstrap, n_rep_boot)
        np.random.seed(3141)
        dml_irm_obj.bootstrap(method=bootstrap, n_rep_boot=n_rep_boot)
        res_dict['boot_coef' + bootstrap] = dml_irm_obj.boot_coef
        res_dict['boot_t_stat' + bootstrap] = dml_irm_obj.boot_t_stat
        res_dict['boot_coef' + bootstrap + '_manual'] = boot_theta
        res_dict['boot_t_stat' + bootstrap + '_manual'] = boot_t_stat

    return res_dict
@pytest.mark.ci
def test_dml_irm_coef(dml_irm_fixture):
    # Package coefficient must match the manual re-implementation.
    assert math.isclose(dml_irm_fixture['coef'],
                        dml_irm_fixture['coef_manual'],
                        rel_tol=1e-9, abs_tol=1e-4)
@pytest.mark.ci
def test_dml_irm_se(dml_irm_fixture):
    # Package standard error must match the manual re-implementation.
    assert math.isclose(dml_irm_fixture['se'],
                        dml_irm_fixture['se_manual'],
                        rel_tol=1e-9, abs_tol=1e-4)
@pytest.mark.ci
def test_dml_irm_boot(dml_irm_fixture):
    # Bootstrap draws (same seed) must coincide element-wise.
    for bootstrap in dml_irm_fixture['boot_methods']:
        assert np.allclose(dml_irm_fixture['boot_coef' + bootstrap],
                           dml_irm_fixture['boot_coef' + bootstrap + '_manual'],
                           rtol=1e-9, atol=1e-4)
        assert np.allclose(dml_irm_fixture['boot_t_stat' + bootstrap],
                           dml_irm_fixture['boot_t_stat' + bootstrap + '_manual'],
                           rtol=1e-9, atol=1e-4)
|
1606867
|
import packerlicious.provisioner as provisioner
class TestBreakpointProvisioner(object):
    """The breakpoint provisioner takes no required fields."""

    def test_no_required_fields(self):
        # Constructing and serializing with no arguments must not raise.
        b = provisioner.Breakpoint()
        b.to_dict()
|
1606868
|
from ...utilities.units.dataclasses import DataClassWithPscale, distancefield
class ImageStats(DataClassWithPscale):
    """
    Gives statistics about an HPF image.

    n: id of the HPF
    mean, min, max, std: the average, minimum, maximum, and standard deviation
    of the pixel fluxes
    cx, cy: the center of the HPF in integer microns
    """
    # Unit passed to the distancefield descriptors below.
    pixelsormicrons = "microns"

    n: int
    mean: float
    min: float
    max: float
    std: float
    # distancefield yields pscale-aware distances; dtype=int keeps the
    # centers as integer micron values.
    cx: distancefield(pixelsormicrons=pixelsormicrons, dtype=int)
    cy: distancefield(pixelsormicrons=pixelsormicrons, dtype=int)
|
1606879
|
import unittest
import torch
from models import decoders
from templates import ModelTestsMixin, FrozenLayerCheckMixin
class TestDenseEncoder(ModelTestsMixin, unittest.TestCase):
    # NOTE(review): despite the name, this exercises decoders.DenseDecoder.
    def setUp(self):
        # 16 latent vectors of size 32, decoded to 16 single-channel 32x32 images.
        self.test_inputs = torch.randn(16, 32)
        self.output_shape = torch.Size((16, 1, 32, 32))
        self.net = decoders.DenseDecoder(32, 3, self.output_shape[1:])
class TestShallowDecoder(ModelTestsMixin, unittest.TestCase):
    def setUp(self):
        # 16 latent vectors of size 32, decoded to 16 single-channel 32x32 images.
        self.test_inputs = torch.randn(16, 32)
        self.output_shape = torch.Size((16, 1, 32, 32))
        self.net = decoders.ShallowDecoder(32, self.output_shape[1:])
class TestStackedDecoder(unittest.TestCase, FrozenLayerCheckMixin):
    def setUp(self):
        self.output_shape = torch.Size((16, 1, 32, 32))
        self.net = decoders.StackedDecoder(32, 3, self.output_shape[1:])

    def test_stacking(self):
        # Each stack level changes the expected input width, but the decoded
        # output shape must stay fixed.
        for i, num_features in enumerate([341, 113, 32]):
            with self.subTest(stack_level=i):
                inputs = torch.randn(16, num_features)
                outputs = self.net(inputs)
                self.assertEqual(self.output_shape, outputs.shape)
            self.net.stack_layer()

    def test_freezing(self):
        # After each stack_layer call, deeper layers must be frozen and the
        # remaining ones trainable.
        for i in range(3, 0, -1):
            with self.subTest(stack_level=i):
                self._check_frozen(self.net.layers[i:])
                self._check_frozen(self.net.layers[:i], should_be_frozen=False)
            self.net.stack_layer()

    def test_set_training(self):
        # Toggling eval/train must not unfreeze the already-stacked layers.
        self.net.stack_layer()
        self.net.eval()
        self._check_frozen(self.net.layers[2:])
        self.net.train()
        self._check_frozen(self.net.layers[2:])
        self._check_frozen(self.net.layers[:2], should_be_frozen=False)
|
1606882
|
import django.dispatch

# @@@ this is the exact same as in django.contrib.auth not sure why it's duped here
# NOTE(review): `providing_args` is purely documentary and was removed from
# django.dispatch.Signal in Django 4.0 -- drop the argument when upgrading.
user_logged_in = django.dispatch.Signal(providing_args=["request", "user"])
password_changed = django.dispatch.Signal(providing_args=["user",])
user_login_attempt = django.dispatch.Signal(providing_args=["username", "result"])
user_sign_up_attempt = django.dispatch.Signal(providing_args=["username", "email", "result"])
user_signed_up = django.dispatch.Signal(providing_args=["user"])
timezone_changed = django.dispatch.Signal(providing_args=["request", "from_timezone", "to_timezone"])
|
1606886
|
import ctypes
import enum
import errno
from dataclasses import dataclass
from functools import partial
from signal import Signals
import socket
from typing import List
# ioctl request direction bits (top byte of the request word) mapped to their
# symbolic names for pretty-printing; values match BSD/XNU's IOC_* constants.
IOC_REQUEST_PARAMS = {
    0x20000000: 'IOC_VOID',
    0x40000000: 'IOC_OUT',
    0x80000000: 'IOC_IN',
    0xc0000000: 'IOC_IN | IOC_OUT',
    0xe0000000: 'IOC_DIRMASK'
}
class BscOpenFlags(enum.Enum):
O_RDONLY = 0x0000
O_WRONLY = 0x0001
O_RDWR = 0x0002
O_ACCMODE = 0x0003
O_NONBLOCK = 0x0004
O_APPEND = 0x0008
O_SHLOCK = 0x0010
O_EXLOCK = 0x0020
O_ASYNC = 0x0040
O_NOFOLLOW = 0x0100
O_CREAT = 0x0200
O_TRUNC = 0x0400
O_EXCL = 0x0800
O_EVTONLY = 0x8000
O_SYMLINK = 0x200000
O_CLOEXEC = 0x1000000
S_IFMT = 0o170000
class StatFlags(enum.Flag):
S_IXOTH = 0o1
S_IWOTH = 0o2
S_IROTH = 0o4
S_IXGRP = 0o10
S_IWGRP = 0o20
S_IRGRP = 0o40
S_IXUSR = 0o100
S_IWUSR = 0o200
S_IRUSR = 0o400
S_ISTXT = 0o1000
S_ISGID = 0o2000
S_ISUID = 0o4000
S_IFIFO = 0o10000
S_IFCHR = 0o20000
S_IFDIR = 0o40000
S_IFBLK = 0o60000
S_IFREG = 0o100000
S_IFLNK = 0o120000
S_IFSOCK = 0o140000
class SocketMsgFlags(enum.Enum):
MSG_OOB = 0x1
MSG_PEEK = 0x2
MSG_DONTROUTE = 0x4
MSG_EOR = 0x8
MSG_TRUNC = 0x10
MSG_CTRUNC = 0x20
MSG_WAITALL = 0x40
MSG_DONTWAIT = 0x80
MSG_EOF = 0x100
MSG_WAITSTREAM = 0x200
MSG_FLUSH = 0x400
MSG_HOLD = 0x800
MSG_SEND = 0x1000
MSG_HAVEMORE = 0x2000
MSG_RCVMORE = 0x4000
MSG_COMPAT = 0x8000
MSG_NEEDSA = 0x10000
MSG_NBIO = 0x20000
MSG_SKIPCFIL = 0x40000
MSG_USEUPCALL = 0x80000000
class BscAccessFlags(enum.Enum):
F_OK = 0x0
X_OK = 0x1
W_OK = 0x2
R_OK = 0x4
class BscChangeableFlags(enum.Enum):
UF_NODUMP = 0x1
UF_IMMUTABLE = 0x2
UF_APPEND = 0x4
UF_OPAQUE = 0x8
UF_HIDDEN = 0x8000
SF_ARCHIVED = 0x10000
SF_IMMUTABLE = 0x20000
SF_APPEND = 0x40000
class SigprocmaskFlags(enum.Enum):
SIG_BLOCK = 1
SIG_UNBLOCK = 2
SIG_SETMASK = 3
class FcntlCmd(enum.Enum):
F_DUPFD = 0
F_GETFD = 1
F_SETFD = 2
F_GETFL = 3
F_SETFL = 4
F_GETOWN = 5
F_SETOWN = 6
F_GETLK = 7
F_SETLK = 8
F_SETLKW = 9
F_SETLKWTIMEOUT = 10
F_FLUSH_DATA = 40
F_CHKCLEAN = 41
F_PREALLOCATE = 42
F_SETSIZE = 43
F_RDADVISE = 44
F_RDAHEAD = 45
F_NOCACHE = 48
F_LOG2PHYS = 49
F_GETPATH = 50
F_FULLFSYNC = 51
F_PATHPKG_CHECK = 52
F_FREEZE_FS = 53
F_THAW_FS = 54
F_GLOBAL_NOCACHE = 55
F_OPENFROM = 56
F_UNLINKFROM = 57
F_CHECK_OPENEVT = 58
F_ADDSIGS = 59
F_MARKDEPENDENCY = 60
F_ADDFILESIGS = 61
F_NODIRECT = 62
F_GETPROTECTIONCLASS = 63
F_SETPROTECTIONCLASS = 64
F_LOG2PHYS_EXT = 65
F_GETLKPID = 66
F_DUPFD_CLOEXEC = 67
F_SETSTATICCONTENT = 68
F_MOVEDATAEXTENTS = 69
F_SETBACKINGSTORE = 70
F_GETPATH_MTMINFO = 71
F_GETCODEDIR = 72
F_SETNOSIGPIPE = 73
F_GETNOSIGPIPE = 74
F_TRANSCODEKEY = 75
F_SINGLE_WRITER = 76
F_GETPROTECTIONLEVEL = 77
F_FINDSIGS = 78
F_GETDEFAULTPROTLEVEL = 79
F_MAKECOMPRESSED = 80
F_SET_GREEDY_MODE = 81
F_SETIOTYPE = 82
F_ADDFILESIGS_FOR_DYLD_SIM = 83
F_RECYCLE = 84
F_BARRIERFSYNC = 85
F_OFD_SETLK = 90
F_OFD_SETLKW = 91
F_OFD_GETLK = 92
F_OFD_SETLKWTIMEOUT = 93
F_OFD_GETLKPID = 94
F_SETCONFINED = 95
F_GETCONFINED = 96
F_ADDFILESIGS_RETURN = 97
F_CHECK_LV = 98
F_PUNCHHOLE = 99
F_TRIM_ACTIVE_FILE = 100
F_SPECULATIVE_READ = 101
F_GETPATH_NOFIRMLINK = 102
F_ADDFILESIGS_INFO = 103
F_ADDFILESUPPL = 104
F_GETSIGSINFO = 105
class PriorityWhich(enum.Enum):
PRIO_PROCESS = 0
PRIO_PGRP = 1
PRIO_USER = 2
PRIO_DARWIN_THREAD = 3
PRIO_DARWIN_PROCESS = 4
PRIO_DARWIN_GPU = 5
PRIO_DARWIN_ROLE = 6
class SocketOptionName(enum.Enum):
SO_DEBUG = 0x1
SO_ACCEPTCONN = 0x2
SO_REUSEADDR = 0x4
SO_KEEPALIVE = 0x8
SO_DONTROUTE = 0x10
SO_BROADCAST = 0x20
SO_USELOOPBACK = 0x40
SO_LINGER = 0x80
SO_OOBINLINE = 0x100
SO_REUSEPORT = 0x200
SO_TIMESTAMP = 0x400
SO_TIMESTAMP_MONOTONIC = 0x800
SO_ACCEPTFILTER = 0x1000
SO_SNDBUF = 0x1001
SO_RCVBUF = 0x1002
SO_SNDLOWAT = 0x1003
SO_RCVLOWAT = 0x1004
SO_SNDTIMEO = 0x1005
SO_RCVTIMEO = 0x1006
SO_ERROR = 0x1007
SO_TYPE = 0x1008
SO_LABEL = 0x1010
SO_PEERLABEL = 0x1011
SO_NREAD = 0x1020
SO_NKE = 0x1021
SO_NOSIGPIPE = 0x1022
SO_NOADDRERR = 0x1023
SO_NWRITE = 0x1024
SO_REUSESHAREUID = 0x1025
SO_NOTIFYCONFLICT = 0x1026
SO_UPCALLCLOSEWAIT = 0x1027
SO_LINGER_SEC = 0x1080
SO_RESTRICTIONS = 0x1081
SO_RANDOMPORT = 0x1082
SO_NP_EXTENSIONS = 0x1083
SO_EXECPATH = 0x1085
SO_TRAFFIC_CLASS = 0x1086
SO_RECV_TRAFFIC_CLASS = 0x1087
SO_TRAFFIC_CLASS_DBG = 0x1088
SO_OPTION_UNUSED_0 = 0x1089
SO_PRIVILEGED_TRAFFIC_CLASS = 0x1090
SO_DEFUNCTIT = 0x1091
SO_DEFUNCTOK = 0x1100
SO_ISDEFUNCT = 0x1101
SO_OPPORTUNISTIC = 0x1102
SO_FLUSH = 0x1103
SO_RECV_ANYIF = 0x1104
SO_TRAFFIC_MGT_BACKGROUND = 0x1105
SO_FLOW_DIVERT_TOKEN = 0x1106
SO_DELEGATED = 0x1107
SO_DELEGATED_UUID = 0x1108
SO_NECP_ATTRIBUTES = 0x1109
SO_CFIL_SOCK_ID = 0x1110
SO_NECP_CLIENTUUID = 0x1111
SO_NUMRCVPKT = 0x1112
SO_AWDL_UNRESTRICTED = 0x1113
SO_EXTENDED_BK_IDLE = 0x1114
SO_MARK_CELLFALLBACK = 0x1115
SO_NET_SERVICE_TYPE = 0x1116
SO_QOSMARKING_POLICY_OVERRIDE = 0x1117
SO_INTCOPROC_ALLOW = 0x1118
SO_NETSVC_MARKING_LEVEL = 0x1119
SO_NECP_LISTENUUID = 0x1120
SO_MPKL_SEND_INFO = 0x1122
SO_STATISTICS_EVENT = 0x1123
SO_WANT_KEV_SOCKET_CLOSED = 0x1124
SO_DONTTRUNC = 0x2000
SO_WANTMORE = 0x4000
SO_WANTOOBFLAG = 0x8000
SO_NOWAKEFROMSLEEP = 0x10000
SO_NOAPNFALLBK = 0x20000
SO_TIMESTAMP_CONTINUOUS = 0x40000
def sockopt_format_level_and_option(level, option_name):
    """Map a (level, option) pair to readable names for SOL_SOCKET options;
    any other level is returned unchanged."""
    if level != socket.SOL_SOCKET:
        return level, option_name
    return 'SOL_SOCKET', SocketOptionName(option_name).name
class RusageWho(enum.Enum):
RUSAGE_CHILDREN = -1
RUSAGE_SELF = 0
class FlockOperation(enum.Enum):
LOCK_SH = 1
LOCK_EX = 2
LOCK_NB = 4
LOCK_UN = 8
class CsopsOps(enum.Enum):
CS_OPS_STATUS = 0
CS_OPS_MARKINVALID = 1
CS_OPS_MARKHARD = 2
CS_OPS_MARKKILL = 3
CS_OPS_PIDPATH = 4
CS_OPS_CDHASH = 5
CS_OPS_PIDOFFSET = 6
CS_OPS_ENTITLEMENTS_BLOB = 7
CS_OPS_MARKRESTRICT = 8
CS_OPS_SET_STATUS = 9
CS_OPS_BLOB = 10
CS_OPS_IDENTITY = 11
CS_OPS_CLEARINSTALLER = 12
CS_OPS_CLEARPLATFORM = 13
CS_OPS_TEAMID = 14
CS_OPS_CLEAR_LV = 15
CS_OPS_16 = 16
class ProcInfoCall(enum.Enum):
PROC_INFO_CALL_LISTPIDS = 0x1
PROC_INFO_CALL_PIDINFO = 0x2
PROC_INFO_CALL_PIDFDINFO = 0x3
PROC_INFO_CALL_KERNMSGBUF = 0x4
PROC_INFO_CALL_SETCONTROL = 0x5
PROC_INFO_CALL_PIDFILEPORTINFO = 0x6
PROC_INFO_CALL_TERMINATE = 0x7
PROC_INFO_CALL_DIRTYCONTROL = 0x8
PROC_INFO_CALL_PIDRUSAGE = 0x9
PROC_INFO_CALL_PIDORIGINATORINFO = 0xa
PROC_INFO_CALL_LISTCOALITIONS = 0xb
PROC_INFO_CALL_CANUSEFGHW = 0xc
PROC_INFO_CALL_PIDDYNKQUEUEINFO = 0xd
PROC_INFO_CALL_UDATA_INFO = 0xe
class FsSnapshotOp(enum.Enum):
SNAPSHOT_OP_CREATE = 0x01
SNAPSHOT_OP_DELETE = 0x02
SNAPSHOT_OP_RENAME = 0x03
SNAPSHOT_OP_MOUNT = 0x04
SNAPSHOT_OP_REVERT = 0x05
SNAPSHOT_OP_ROOT = 0x06
def serialize_open_flags(flags: int) -> List[BscOpenFlags]:
    """Decode a numeric open(2) flags word into a list of BscOpenFlags.

    The access mode comes first — O_RDWR wins over O_WRONLY, and O_RDONLY
    (value 0) is reported when neither bit is set — followed by whichever
    additional flag bits are present.
    """
    if flags & BscOpenFlags.O_RDWR.value:
        decoded = [BscOpenFlags.O_RDWR]
    elif flags & BscOpenFlags.O_WRONLY.value:
        decoded = [BscOpenFlags.O_WRONLY]
    else:
        decoded = [BscOpenFlags.O_RDONLY]
    extra_flags = (
        BscOpenFlags.O_CREAT, BscOpenFlags.O_APPEND, BscOpenFlags.O_TRUNC, BscOpenFlags.O_EXCL,
        BscOpenFlags.O_NONBLOCK, BscOpenFlags.O_SHLOCK, BscOpenFlags.O_EXLOCK, BscOpenFlags.O_NOFOLLOW,
        BscOpenFlags.O_SYMLINK, BscOpenFlags.O_EVTONLY, BscOpenFlags.O_CLOEXEC)
    decoded.extend(candidate for candidate in extra_flags if flags & candidate.value)
    return decoded
def serialize_stat_flags(flags: int) -> List[StatFlags]:
    """Decode a stat(2) st_mode word into a list of StatFlags.

    Flags overlapping the S_IFMT file-type field must match the masked type
    exactly (the type field is an enumeration, not a bitmask); all other
    flags are matched bitwise.
    """
    decoded = []
    file_type = flags & S_IFMT
    for candidate in StatFlags:
        if candidate.value & S_IFMT:
            if file_type == candidate.value:
                decoded.append(candidate)
        elif candidate.value & flags:
            decoded.append(candidate)
    return decoded
def serialize_result(end_event, success_name='', fmt=lambda x: x) -> str:
    """Render a syscall's return as an errno description or a success value.

    ``end_event.values[0]`` holds the errno (0/falsy on success) and
    ``values[1]`` the return value; ``fmt`` formats the latter when
    ``success_name`` is supplied, otherwise success renders as ''.
    """
    error_code = end_event.values[0]
    return_value = end_event.values[1]
    if error_code:
        symbolic = errno.errorcode.get(error_code)
        if symbolic is not None:
            return f'errno: {symbolic}({error_code})'
        return f'errno: {error_code}'
    return f'{success_name}: {fmt(return_value)}' if success_name else ''
def serialize_access_flags(flags: int) -> List[BscAccessFlags]:
    """Decode an access(2) mode word; an empty mask means F_OK (existence test)."""
    decoded = [candidate for candidate in BscAccessFlags if candidate.value & flags]
    return decoded or [BscAccessFlags.F_OK]
@dataclass
class BscOpen:
    """Trace record for the open / open_nocancel BSD syscall."""
    ktraces: List
    path: str
    flags: List  # decoded BscOpenFlags members (anything with a .name)
    result: str
    no_cancel: bool = False

    def __str__(self):
        suffix = '_nocancel' if self.no_cancel else ''
        joined_flags = ' | '.join(flag.name for flag in self.flags)
        return f'open{suffix}("{self.path}", {joined_flags}), {self.result}'
@dataclass
class BscOpenat:
    """Trace record for the openat / openat_nocancel BSD syscall."""
    ktraces: List
    dirfd: int
    path: str
    flags: List  # decoded BscOpenFlags members (anything with a .name)
    result: str
    no_cancel: bool = False

    def __str__(self):
        suffix = '_nocancel' if self.no_cancel else ''
        joined_flags = ' | '.join(flag.name for flag in self.flags)
        return f'openat{suffix}({self.dirfd}, "{self.path}", {joined_flags}), {self.result}'
@dataclass
class BscRead:
    """Trace record for the read / read_nocancel BSD syscall."""
    ktraces: List
    fd: int
    address: int  # user buffer address, rendered as hex
    size: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        suffix = '_nocancel' if self.no_cancel else ''
        return f'read{suffix}({self.fd}, {hex(self.address)}, {self.size}), {self.result}'
@dataclass
class BscWrite:
    """Trace record for the write / write_nocancel BSD syscall."""
    ktraces: List
    fd: int
    address: int  # user buffer address, rendered as hex
    size: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        suffix = '_nocancel' if self.no_cancel else ''
        return f'write{suffix}({self.fd}, {hex(self.address)}, {self.size}), {self.result}'
@dataclass
class BscPread:
ktraces: List
fd: int
address: int
size: int
offset: int
result: str
no_cancel: bool = False
def __str__(self):
no_cancel = '_nocancel' if self.no_cancel else ''
return f'pread{no_cancel}({self.fd}, {hex(self.address)}, {self.size}, {hex(self.offset)}), {self.result}'
@dataclass
class BscPwrite:
ktraces: List
fd: int
address: int
size: int
offset: int
result: str
no_cancel: bool = False
def __str__(self):
no_cancel = '_nocancel' if self.no_cancel else ''
return f'pwrite{no_cancel}({self.fd}, {hex(self.address)}, {self.size}, {hex(self.offset)}), {self.result}'
@dataclass
class BscSysFstat64:
    """Trace record for the fstat64 BSD syscall.

    The result suffix is omitted when empty (successful call with no
    formatted return value).
    """
    ktraces: List
    fd: int
    result: str

    def __str__(self):
        description = f'fstat64({self.fd})'
        return f'{description}, {self.result}' if self.result else description
@dataclass
class BscLstat64:
ktraces: List
path: str
result: str
def __str__(self):
rep = f'lstat64("{self.path}")'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscGetdirentries64:
ktraces: List
fd: int
buf: int
bufsize: int
position: int
result: str
def __str__(self):
return f'getdirentries64({self.fd}, {hex(self.buf)}, {self.bufsize}, {hex(self.position)}), {self.result}'
@dataclass
class BscStatfs64:
ktraces: List
path: str
buf: int
result: str
def __str__(self):
rep = f'statfs64("{self.path}", {hex(self.buf)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscFstatfs64:
ktraces: List
fd: int
buf: int
result: str
def __str__(self):
rep = f'fstatfs64({self.fd}, {hex(self.buf)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscGetfsstat64:
ktraces: List
buf: int
bufsize: int
flags: int
result: str
def __str__(self):
return f'getfsstat64({hex(self.buf)}, {self.bufsize}, {self.flags}), {self.result}'
@dataclass
class BscPthreadFchdir:
ktraces: List
fd: int
result: str
def __str__(self):
rep = f'pthread_fchdir({self.fd})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscAudit:
ktraces: List
record: int
length: int
result: str
def __str__(self):
rep = f'audit({hex(self.record)}, {self.length})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscAuditon:
ktraces: List
cmd: int
data: int
length: int
result: str
def __str__(self):
rep = f'auditon({self.cmd}, {hex(self.data)}, {self.length})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscGetauid:
ktraces: List
auid: int
result: str
def __str__(self):
rep = f'getauid({hex(self.auid)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscSetauid:
ktraces: List
auid: int
result: str
def __str__(self):
rep = f'setauid({hex(self.auid)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscBsdthreadCreate:
ktraces: List
pid: int
def __str__(self):
return 'thread_create()'
@dataclass
class BscKqueue:
ktraces: List
result: str
def __str__(self):
return f'kqueue(), {self.result}'
@dataclass
class BscKevent:
ktraces: List
kq: int
changelist: int
nchanges: int
eventlist: int
result: str
def __str__(self):
return f'kevent({self.kq}, {hex(self.changelist)}, {self.nchanges}, {hex(self.eventlist)}), {self.result}'
@dataclass
class BscLchown:
ktraces: List
path: str
owner: int
group: int
result: str
def __str__(self):
rep = f'lchown("{self.path}", {self.owner}, {self.group})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscBsdthreadRegister:
ktraces: List
threadstart: int
wqthread: int
pthsize: int
dummy_value: int
result: str
def __str__(self):
rep = f'thread_register({hex(self.threadstart)}, {hex(self.wqthread)}, {self.pthsize}, {hex(self.dummy_value)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscWorkqOpen:
ktraces: List
result: str
def __str__(self):
rep = 'workq_open()'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscWorkqKernreturn:
ktraces: List
options: int
item: int
affinity: int
prio: int
result: str
def __str__(self):
return f'workq_kernreturn({self.options}, {hex(self.item)}, {self.affinity}, {self.prio}), {self.result}'
@dataclass
class BscKevent64:
ktraces: List
kq: int
changelist: int
nchanges: int
eventlist: int
result: str
def __str__(self):
return f'kevent64({self.kq}, {hex(self.changelist)}, {self.nchanges}, {hex(self.eventlist)}), {self.result}'
@dataclass
class BscThreadSelfid:
ktraces: List
result: str
def __str__(self):
return f'thread_selfid(), {self.result}'
@dataclass
class BscKeventQos:
ktraces: List
kq: int
changelist: int
nchanges: int
eventlist: int
result: str
def __str__(self):
return f'kevent_qos({self.kq}, {hex(self.changelist)}, {self.nchanges}, {hex(self.eventlist)}), {self.result}'
@dataclass
class BscKeventId:
ktraces: List
kq: int
changelist: int
nchanges: int
eventlist: int
result: str
def __str__(self):
return f'kevent_id({self.kq}, {hex(self.changelist)}, {self.nchanges}, {hex(self.eventlist)}), {self.result}'
@dataclass
class BscMacSyscall:
ktraces: List
policy: int
call: int
arg: int
result: str
def __str__(self):
rep = f'mac_syscall({hex(self.policy)}, {self.call}, {hex(self.arg)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscPselect:
ktraces: List
nfds: int
readfds: int
writefds: int
errorfds: int
result: str
no_cancel: bool = False
def __str__(self):
no_cancel = '_nocancel' if self.no_cancel else ''
return (f'pselect{no_cancel}({self.nfds}, {hex(self.readfds)}, {hex(self.writefds)}, {hex(self.errorfds)}),'
f' {self.result}')
@dataclass
class BscFsgetpath:
ktraces: List
buf: int
bufsize: int
fsid: int
objid: int
path: str
result: str
def __str__(self):
rep = f'fsgetpath({hex(self.buf)}, {self.bufsize}, {hex(self.fsid)}, {self.objid}), {self.result}'
if self.path:
rep += f' path: "{self.path}"'
return rep
@dataclass
class BscSysFileportMakeport:
ktraces: List
fd: int
portnamep: int
result: str
def __str__(self):
rep = f'fileport_makeport({self.fd}, {hex(self.portnamep)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscSysFileportMakefd:
ktraces: List
port: int
result: str
def __str__(self):
return f'fileport_makefd({self.port}), {self.result}'
@dataclass
class BscAuditSessionPort:
ktraces: List
asid: int
portnamep: int
result: str
def __str__(self):
rep = f'audit_session_port({self.asid}, {hex(self.portnamep)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscPidSuspend:
ktraces: List
pid: int
result: str
def __str__(self):
rep = f'pid_suspend({self.pid})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscPidResume:
ktraces: List
pid: int
result: str
def __str__(self):
rep = f'pid_resume({self.pid})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscPidHibernate:
ktraces: List
pid: int
result: str
def __str__(self):
rep = f'pid_hibernate({self.pid})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscPidShutdownSockets:
ktraces: List
pid: int
level: int
result: str
def __str__(self):
rep = f'pid_shutdown_sockets({self.pid}, {self.level})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscSharedRegionMapAndSlideNp:
ktraces: List
fd: int
count: int
mappings: int
slide: int
result: str
def __str__(self):
rep = f'shared_region_map_and_slide_np({self.fd}, {self.count}, {hex(self.mappings)}, {self.slide})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscKasInfo:
ktraces: List
selector: int
value: int
size: int
result: str
def __str__(self):
rep = f'kas_info({self.selector}, {hex(self.value)}, {hex(self.size)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscMemorystatusControl:
ktraces: List
command: int
pid: int
flags: int
buffer: int
result: str
def __str__(self):
rep = f'memorystatus_control({self.command}, {self.pid}, {self.flags}, {hex(self.buffer)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscGuardedOpenNp:
ktraces: List
path: str
guard: int
guardflags: int
flags: List
result: str
def __str__(self):
flags = ' | '.join(map(lambda f: f.name, self.flags))
return f'guarded_open_np("{self.path}", {hex(self.guard)}, {self.guardflags}, {flags}), {self.result}'
@dataclass
class BscGuardedCloseNp:
ktraces: List
fd: int
guard: int
result: str
def __str__(self):
rep = f'guarded_close_np({self.fd}, {hex(self.guard)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscGuardedKqueueNp:
ktraces: List
guard: int
guardflags: int
result: str
def __str__(self):
rep = f'guarded_kqueue_np({hex(self.guard)}, {self.guardflags})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscChangeFdguardNp:
ktraces: List
fd: int
guard: int
guardflags: int
nguard: int
result: str
def __str__(self):
rep = f'change_fdguard_np({self.fd}, {hex(self.guard)}, {self.guardflags}, {hex(self.nguard)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscUsrctl:
ktraces: List
flags: int
result: str
def __str__(self):
rep = f'usrctl({self.flags})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscProcRlimitControl:
ktraces: List
pid: int
flavor: int
arg: int
result: str
def __str__(self):
rep = f'proc_rlimit_control({self.pid}, {self.flavor}, {hex(self.arg)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscConnectx:
ktraces: List
socket: int
endpoints: int
associd: int
flags: int
result: str
def __str__(self):
rep = f'connectx({self.socket}, {hex(self.endpoints)}, {self.associd}, {self.flags})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscDisconnectx:
ktraces: List
s: int
aid: int
cid: int
result: str
def __str__(self):
rep = f'disconnectx({self.s}, {self.aid}, {self.cid})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscPeeloff:
ktraces: List
s: int
aid: int
result: str
def __str__(self):
rep = f'peeloff({self.s}, {self.aid})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscSocketDelegate:
ktraces: List
domain: socket.AddressFamily
type: socket.SocketKind
protocol: int
epid: int
result: str
def __str__(self):
return f'socket_delegate({self.domain.name}, {self.type.name}, {self.protocol}, {self.epid}), {self.result}'
@dataclass
class BscTelemetry:
ktraces: List
cmd: int
deadline: int
interval: int
leeway: int
result: str
def __str__(self):
rep = f'telemetry({self.cmd}, {self.deadline}, {self.interval}, {self.leeway})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscProcUuidPolicy:
ktraces: List
operation: int
uuid: int
uuidlen: int
flags: int
result: str
def __str__(self):
rep = f'proc_uuid_policy({self.operation}, {self.uuid}, {self.uuidlen}, {self.flags})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscMemorystatusGetLevel:
ktraces: List
level: int
result: str
def __str__(self):
rep = f'memorystatus_get_level({hex(self.level)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscSystemOverride:
ktraces: List
timeout: int
flags: int
result: str
def __str__(self):
rep = f'system_override({self.timeout}, {self.flags})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscVfsPurge:
ktraces: List
result: str
def __str__(self):
rep = 'vfs_purge()'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscSfiCtl:
ktraces: List
operation: int
sfi_class: int
time: int
out_time: int
result: str
def __str__(self):
rep = f'sfi_ctl({self.operation}, {self.sfi_class}, {self.time}, {hex(self.out_time)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscSfiPidctl:
ktraces: List
operation: int
pid: int
sfi_flags: int
out_sfi_flags: int
result: str
def __str__(self):
rep = f'sfi_pidctl({self.operation}, {self.pid}, {self.sfi_flags}, {hex(self.out_sfi_flags)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscCoalition:
ktraces: List
operation: int
cid: int
flags: int
result: str
def __str__(self):
rep = f'coalition({self.operation}, {hex(self.cid)}, {self.flags})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscCoalitionInfo:
ktraces: List
flavor: int
cid: int
buffer: int
bufsize: int
result: str
def __str__(self):
rep = f'coalition_info({self.flavor}, {hex(self.cid)}, {hex(self.buffer)}, {hex(self.bufsize)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscNecpMatchPolicy:
ktraces: List
parameters: int
parameters_size: int
returned_result: int
result: str
def __str__(self):
rep = f'necp_match_policy({hex(self.parameters)}, {self.parameters_size}, {hex(self.returned_result)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscGetattrlistbulk:
ktraces: List
dirfd: int
alist: int
attributeBuffer: int
bufferSize: int
result: str
def __str__(self):
return (f'getattrlistbulk({self.dirfd}, {hex(self.alist)}, {hex(self.attributeBuffer)}, {self.bufferSize}),'
f' {self.result}')
@dataclass
class BscClonefileat:
ktraces: List
src_dirfd: int
src: str
dst_dirfd: int
dst: str
result: str
def __str__(self):
rep = f'clonefileat({self.src_dirfd}, "{self.src}", {self.dst_dirfd}, "{self.dst}")'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscRenameat:
ktraces: List
fromfd: int
from_: str
tofd: int
to: str
result: str
def __str__(self):
rep = f'renameat({self.fromfd}, "{self.from_}", {self.tofd}, "{self.to}")'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscFaccessat:
ktraces: List
fd: int
path: str
amode: List
flag: int
result: str
def __str__(self):
amode = ' | '.join(map(lambda f: f.name, self.amode))
rep = f'faccessat({self.fd}, "{self.path}", {amode}, {self.flag})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscFchmodat:
ktraces: List
fd: int
path: str
mode: List
flag: int
result: str
def __str__(self):
mode = ' | '.join(map(lambda f: f.name, self.mode))
rep = f'fchmodat({self.fd}, "{self.path}", {mode}, {self.flag})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscFchownat:
ktraces: List
fd: int
path: str
uid: int
gid: int
result: str
def __str__(self):
rep = f'fchownat({self.fd}, "{self.path}", {self.uid}, {self.gid})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscFstatat:
ktraces: List
fd: int
path: str
ub: int
flag: int
result: str
def __str__(self):
rep = f'fstatat({self.fd}, "{self.path}", {hex(self.ub)}, {self.flag})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscFstatat64:
ktraces: List
fd: int
path: str
ub: int
flag: int
result: str
def __str__(self):
rep = f'fstatat64({self.fd}, "{self.path}", {hex(self.ub)}, {self.flag})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscLinkat:
ktraces: List
fd1: int
path: str
fd2: int
link: str
result: str
def __str__(self):
rep = f'linkat({self.fd1}, "{self.path}", {self.fd2}, "{self.link}")'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscUnlinkat:
ktraces: List
fd: int
path: str
flag: int
result: str
def __str__(self):
rep = f'unlinkat({self.fd}, "{self.path}", {self.flag})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscReadlinkat:
ktraces: List
fd: int
path: str
buf: int
bufsize: int
result: str
def __str__(self):
return f'readlinkat({self.fd}, "{self.path}", {hex(self.buf)}, {self.bufsize}), {self.result}'
@dataclass
class BscSymlinkat:
ktraces: List
path1: str
fd: int
path2: str
result: str
def __str__(self):
rep = f'symlinkat("{self.path1}", {self.fd}, "{self.path2}")'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscMkdirat:
ktraces: List
fd: int
path: str
mode: List
result: str
def __str__(self):
mode = ' | '.join(map(lambda f: f.name, self.mode))
rep = f'mkdirat({self.fd}, "{self.path}", {mode})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscGetattrlistat:
ktraces: List
fd: int
path: str
alist: int
attributeBuffer: int
result: str
def __str__(self):
rep = f'getattrlistat({self.fd}, "{self.path}", {hex(self.alist)}, {hex(self.attributeBuffer)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscProcTraceLog:
ktraces: List
pid: int
uniqueid: int
result: str
def __str__(self):
rep = f'proc_trace_log({self.pid}, {self.uniqueid})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscBsdthreadCtl:
ktraces: List
cmd: int
arg1: int
arg2: int
arg3: int
result: str
def __str__(self):
rep = f'bsdthread_ctl({self.cmd}, {hex(self.arg1)}, {hex(self.arg2)}, {hex(self.arg3)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscOpenbyidNp:
ktraces: List
fsid: int
objid: int
oflags: List
result: str
def __str__(self):
oflags = ' | '.join(map(lambda f: f.name, self.oflags))
return f'openbyid_np({self.fsid}, {self.objid}, {oflags}), {self.result}'
@dataclass
class BscRecvmsgX:
ktraces: List
s: int
msgp: int
cnt: int
flags: int
result: str
def __str__(self):
return f'recvmsg_x({self.s}, {hex(self.msgp)}, {self.cnt}, {self.flags}), {self.result}'
@dataclass
class BscSendmsgX:
ktraces: List
s: int
msgp: int
cnt: int
flags: int
result: str
def __str__(self):
return f'sendmsg_x({self.s}, {hex(self.msgp)}, {self.cnt}, {self.flags}), {self.result}'
@dataclass
class BscThreadSelfusage:
ktraces: List
result: str
def __str__(self):
return f'thread_selfusage(), {self.result}'
@dataclass
class BscCsrctl:
ktraces: List
op: int
useraddr: int
usersize: int
result: str
def __str__(self):
rep = f'csrctl({self.op}, {hex(self.useraddr)}, {self.usersize})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscGuardedOpenDprotectedNp:
ktraces: List
path: str
guard: int
guardflags: int
flags: List
result: str
def __str__(self):
oflags = ' | '.join(map(lambda f: f.name, self.flags))
return (f'guarded_open_dprotected_np("{self.path}", {hex(self.guard)}, {self.guardflags}, {oflags})'
f', {self.result}')
@dataclass
class BscGuardedWriteNp:
ktraces: List
fd: int
guard: int
cbuf: int
nbyte: int
result: str
def __str__(self):
return f'guarded_write_np({self.fd}, {hex(self.guard)}, {hex(self.cbuf)}, {self.nbyte}), {self.result}'
@dataclass
class BscGuardedPwriteNp:
ktraces: List
fd: int
guard: int
buf: int
nbyte: int
result: str
def __str__(self):
return f'guarded_pwrite_np({self.fd}, {hex(self.guard)}, {hex(self.buf)}, {self.nbyte}), {self.result}'
@dataclass
class BscGuardedWritevNp:
ktraces: List
fd: int
guard: int
iovp: int
iovcnt: int
result: str
def __str__(self):
return f'guarded_writev_np({self.fd}, {hex(self.guard)}, {hex(self.iovp)}, {self.iovcnt}), {self.result}'
@dataclass
class BscRenameatxNp:
ktraces: List
fromfd: int
from_: str
tofd: int
to: str
result: str
def __str__(self):
rep = f'renameatx_np({self.fromfd}, "{self.from_}", {self.tofd}, "{self.to}")'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscMremapEncrypted:
ktraces: List
addr: int
len: int
cryptid: int
cputype: int
result: str
def __str__(self):
rep = f'mremap_encrypted({hex(self.addr)}, {self.len}, {self.cryptid}, {self.cputype})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscNetagentTrigger:
ktraces: List
agent_uuid: int
agent_uuidlen: int
result: str
def __str__(self):
rep = f'netagent_trigger({self.agent_uuid}, {self.agent_uuidlen})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscStackSnapshotWithConfig:
ktraces: List
stackshot_config_version: int
stackshot_config: int
stackshot_config_size: int
result: str
def __str__(self):
rep = (f'stack_snapshot_with_config({self.stackshot_config_version}, {hex(self.stackshot_config)}'
f', {self.stackshot_config_size})')
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscMicrostackshot:
ktraces: List
tracebuf: int
tracebuf_size: int
flags: int
result: str
def __str__(self):
return f'microstackshot({hex(self.tracebuf)}, {self.tracebuf_size}, {self.flags}), {self.result}'
@dataclass
class BscGrabPgoData:
ktraces: List
uuid: int
flags: int
buffer: int
size: int
result: str
def __str__(self):
return f'grab_pgo_data({hex(self.uuid)}, {self.flags}, {hex(self.buffer)}, {self.size}), {self.result}'
@dataclass
class BscPersona:
ktraces: List
operation: int
flags: int
buffer: int
size: int
result: str
def __str__(self):
rep = f'persona({self.operation}, {self.flags}, {hex(self.buffer)}, {hex(self.size)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscMachEventlinkSignal:
ktraces: List
eventlink_port: int
signal_count: int
result: str
def __str__(self):
rep = f'mach_eventlink_signal({self.eventlink_port}, {self.signal_count})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscMachEventlinkWaitUntil:
ktraces: List
eventlink_port: int
wait_count: int
deadline: int
clock_id: int
result: str
def __str__(self):
rep = (f'mach_eventlink_wait_until({self.eventlink_port}, {hex(self.wait_count)}, {self.deadline}'
f', {self.clock_id})')
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscMachEventlinkSignalWaitUntil:
ktraces: List
eventlink_port: int
wait_count: int
signal_count: int
deadline: int
result: str
def __str__(self):
rep = (f'mach_eventlink_signal_wait_until({self.eventlink_port}, {hex(self.wait_count)}, {self.signal_count}'
f', {self.deadline})')
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscWorkIntervalCtl:
ktraces: List
operation: int
work_interval_id: int
arg: int
len: int
result: str
def __str__(self):
rep = f'work_interval_ctl({self.operation}, {self.work_interval_id}, {hex(self.arg)}, {self.len})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscGetentropy:
ktraces: List
buffer: int
size: int
result: str
def __str__(self):
rep = f'getentropy({hex(self.buffer)}, {self.size})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscNecpOpen:
ktraces: List
flags: int
result: str
def __str__(self):
return f'necp_open({self.flags}), {self.result}'
@dataclass
class BscNecpClientAction:
ktraces: List
necp_fd: int
action: int
client_id: int
client_id_len: int
result: str
def __str__(self):
return (f'necp_client_action({self.necp_fd}, {self.action}, {hex(self.client_id)}, {self.client_id_len})'
f', {self.result}')
@dataclass
class BscNexusOpen:
ktraces: List
result: str
def __str__(self):
return f'nexus_open(), {self.result}'
@dataclass
class BscNexusRegister:
ktraces: List
result: str
def __str__(self):
rep = 'nexus_register()'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscNexusDeregister:
ktraces: List
result: str
def __str__(self):
rep = 'nexus_deregister()'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscNexusCreate:
ktraces: List
result: str
def __str__(self):
rep = 'nexus_create()'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscNexusDestroy:
ktraces: List
result: str
def __str__(self):
rep = 'nexus_destroy()'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscNexusGetOpt:
ktraces: List
result: str
def __str__(self):
rep = 'nexus_get_opt()'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscNexusSetOpt:
ktraces: List
result: str
def __str__(self):
rep = 'nexus_set_opt()'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscChannelOpen:
ktraces: List
result: str
def __str__(self):
rep = 'channel_open()'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscChannelGetInfo:
ktraces: List
result: str
def __str__(self):
rep = 'channel_get_info()'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscChannelSync:
ktraces: List
result: str
def __str__(self):
rep = 'channel_sync()'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscChannelGetOpt:
ktraces: List
result: str
def __str__(self):
rep = 'channel_get_opt()'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscChannelSetOpt:
ktraces: List
result: str
def __str__(self):
rep = 'channel_set_opt()'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscUlockWait:
ktraces: List
operation: int
addr: int
value: int
timeout: int
result: str
def __str__(self):
return f'ulock_wait({self.operation}, {hex(self.addr)}, {self.value}, {self.timeout}), {self.result}'
@dataclass
class BscUlockWake:
ktraces: List
operation: int
addr: int
wake_value: int
result: str
def __str__(self):
return f'ulock_wake({self.operation}, {hex(self.addr)}, {self.wake_value}), {self.result}'
@dataclass
class BscFclonefileat:
ktraces: List
src_fd: int
dst_dirfd: int
dst: str
flags: int
result: str
def __str__(self):
rep = f'fclonefileat({self.src_fd}, {self.dst_dirfd}, "{self.dst}", {self.flags})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscFsSnapshot:
ktraces: List
op: FsSnapshotOp
dirfd: int
name1: str
name2: str
result: str
def __str__(self):
rep = f'fs_snapshot({self.op.name}, {self.dirfd}, "{self.name1}", "{self.name2}")'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscTerminateWithPayload:
ktraces: List
pid: int
reason_namespace: int
reason_code: int
payload: int
result: str
def __str__(self):
rep = (f'terminate_with_payload({self.pid}, {self.reason_namespace}, {hex(self.reason_code)}'
f', {hex(self.payload)})')
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscAbortWithPayload:
ktraces: List
reason_namespace: int
reason_code: int
payload: int
payload_size: int
def __str__(self):
return (f'abort_with_payload({self.reason_namespace}, {hex(self.reason_code)}, {hex(self.payload)}'
f', {self.payload_size})')
@dataclass
class BscNecpSessionOpen:
ktraces: List
flags: int
result: str
def __str__(self):
return f'necp_session_open({self.flags}), {self.result}'
@dataclass
class BscNecpSessionAction:
ktraces: List
necp_fd: int
action: int
in_buffer: int
in_buffer_length: int
result: str
def __str__(self):
rep = f'necp_session_action({self.necp_fd}, {self.action}, {hex(self.in_buffer)}, {self.in_buffer_length})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscSetattrlistat:
ktraces: List
fd: int
path: str
alist: int
attributeBuffer: int
result: str
def __str__(self):
rep = f'setattrlistat({self.fd}, "{self.path}", {hex(self.alist)}, {hex(self.attributeBuffer)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscNetQosGuideline:
ktraces: List
param: int
param_len: int
result: str
def __str__(self):
return f'net_qos_guideline({hex(self.param)}, {self.param_len}), {self.result}'
@dataclass
class BscFmount:
ktraces: List
type: int
fd: int
flags: int
data: int
result: str
def __str__(self):
rep = f'fmount({hex(self.type)}, {self.fd}, {self.flags}, {hex(self.data)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscNtpAdjtime:
ktraces: List
tp: int
result: str
def __str__(self):
return f'ntp_adjtime({hex(self.tp)}), {self.result}'
@dataclass
class BscNtpGettime:
ktraces: List
ntvp: int
result: str
def __str__(self):
rep = f'ntp_gettime({hex(self.ntvp)})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscOsFaultWithPayload:
ktraces: List
reason_namespace: int
reason_code: int
payload: int
payload_size: int
result: str
def __str__(self):
rep = (f'os_fault_with_payload({self.reason_namespace}, {hex(self.reason_code)}, {hex(self.payload)}'
f', {self.payload_size})')
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscKqueueWorkloopCtl:
ktraces: List
cmd: int
options: int
addr: int
sz: int
result: str
def __str__(self):
rep = f'kqueue_workloop_ctl({self.cmd}, {self.options}, {hex(self.addr)}, {self.sz})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscMachBridgeRemoteTime:
    """Trace record for mach_bridge_remote_time(); str() renders the call."""
    ktraces: List
    local_timestamp: int
    result: str

    def __str__(self):
        call = f'mach_bridge_remote_time({self.local_timestamp})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscCoalitionLedger:
    """Trace record for coalition_ledger(); str() renders the call."""
    ktraces: List
    operation: int
    cid: int
    buffer: int
    bufsize: int
    result: str

    def __str__(self):
        call = f'coalition_ledger({self.operation}, {hex(self.cid)}, {hex(self.buffer)}, {hex(self.bufsize)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscLogData:
    """Trace record for log_data(); str() renders the call."""
    ktraces: List
    tag: int
    flags: int
    buffer: int
    size: int
    result: str

    def __str__(self):
        call = f'log_data({self.tag}, {self.flags}, {hex(self.buffer)}, {self.size})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscMemorystatusAvailableMemory:
    """Trace record for memorystatus_available_memory(); result is always shown."""
    ktraces: List
    result: str

    def __str__(self):
        return f'memorystatus_available_memory(), {self.result}'


@dataclass
class BscSharedRegionMapAndSlide2Np:
    """Trace record for shared_region_map_and_slide_2_np(); str() renders the call."""
    ktraces: List
    files_count: int
    shared_file_np: int
    mappings_count: int
    mappings: int
    result: str

    def __str__(self):
        call = (f'shared_region_map_and_slide_2_np({self.files_count}, {hex(self.shared_file_np)},'
                f' {hex(self.mappings_count)}, {hex(self.mappings)})')
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscPivotRoot:
    """Trace record for pivot_root(); str() renders the call."""
    ktraces: List
    new_rootfs_path_before: str
    old_rootfs_path_after: str
    result: str

    def __str__(self):
        call = f'pivot_root("{self.new_rootfs_path_before}", "{self.old_rootfs_path_after}")'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscTaskInspectForPid:
    """Trace record for task_inspect_for_pid(); str() renders the call."""
    ktraces: List
    target_tport: int
    pid: int
    t: int
    result: str

    def __str__(self):
        call = f'task_inspect_for_pid({self.target_tport}, {self.pid}, {self.t})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscTaskReadForPid:
    """Trace record for task_read_for_pid(); str() renders the call."""
    ktraces: List
    target_tport: int
    pid: int
    t: int
    result: str

    def __str__(self):
        call = f'task_read_for_pid({self.target_tport}, {self.pid}, {self.t})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSysPreadv:
    """Trace record for preadv()/preadv_nocancel(); result is always shown."""
    ktraces: List
    fd: int
    iovp: int
    iovcnt: int
    offset: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'preadv_nocancel' if self.no_cancel else 'preadv'
        return f'{name}({self.fd}, {hex(self.iovp)}, {self.iovcnt}, {self.offset}), {self.result}'


@dataclass
class BscSysPwritev:
    """Trace record for pwritev()/pwritev_nocancel(); result is always shown."""
    ktraces: List
    fd: int
    iovp: int
    iovcnt: int
    offset: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'pwritev_nocancel' if self.no_cancel else 'pwritev'
        return f'{name}({self.fd}, {hex(self.iovp)}, {self.iovcnt}, {self.offset}), {self.result}'


@dataclass
class BscUlockWait2:
    """Trace record for ulock_wait2(); result is always shown."""
    ktraces: List
    operation: int
    addr: int
    value: int
    timeout: int
    result: str

    def __str__(self):
        return f'ulock_wait2({self.operation}, {hex(self.addr)}, {self.value}, {self.timeout}), {self.result}'
@dataclass
class BscProcInfoExtendedId:
    """Trace record for proc_info_extended_id(); str() renders the call."""
    ktraces: List
    callnum: int
    pid: int
    flavor: int
    flags: int
    result: str

    def __str__(self):
        call = f'proc_info_extended_id({self.callnum}, {self.pid}, {self.flavor}, {self.flags})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSysClose:
    """Trace record for close()/close_nocancel(); str() renders the call."""
    ktraces: List
    fd: str
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'close_nocancel' if self.no_cancel else 'close'
        call = f'{name}({self.fd})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscLink:
    """Trace record for link(); str() renders the call."""
    ktraces: List
    oldpath: str
    newpath: str
    result: str

    def __str__(self):
        call = f'link("{self.oldpath}", "{self.newpath}")'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscUnlink:
    """Trace record for unlink(); str() renders the call."""
    ktraces: List
    pathname: str
    result: str

    def __str__(self):
        call = f'unlink("{self.pathname}")'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscChdir:
    """Trace record for chdir(); str() renders the call."""
    ktraces: List
    path: str
    result: str

    def __str__(self):
        call = f'chdir("{self.path}")'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscFchdir:
    """Trace record for fchdir(); str() renders the call."""
    ktraces: List
    fd: int
    result: str

    def __str__(self):
        call = f'fchdir({self.fd})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscMknod:
    """Trace record for mknod(); str() renders the call."""
    ktraces: List
    pathname: str
    mode: int
    dev: int
    result: str

    def __str__(self):
        call = f'mknod("{self.pathname}", {self.mode}, {self.dev})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscChmod:
    """Trace record for chmod(); mode flags are joined by their names."""
    ktraces: List
    pathname: str
    mode: List
    result: str

    def __str__(self):
        mode = ' | '.join(flag.name for flag in self.mode)
        call = f'chmod("{self.pathname}", {mode})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscChown:
    """Trace record for chown(); str() renders the call."""
    ktraces: List
    pathname: str
    owner: int
    group: int
    result: str

    def __str__(self):
        call = f'chown("{self.pathname}", {self.owner}, {self.group})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscGetpid:
    """Trace record for getpid(); str() renders the returned pid."""
    ktraces: List
    pid: int

    def __str__(self):
        return f'getpid(), pid: {self.pid}'


@dataclass
class BscSetuid:
    """Trace record for setuid(); str() renders the call."""
    ktraces: List
    uid: int
    result: str

    def __str__(self):
        call = f'setuid({self.uid})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscGetuid:
    """Trace record for getuid(); str() renders the returned uid."""
    ktraces: List
    uid: int

    def __str__(self):
        return f'getuid(), uid: {self.uid}'


@dataclass
class BscGeteuid:
    """Trace record for geteuid(); str() renders the returned uid."""
    ktraces: List
    uid: int

    def __str__(self):
        return f'geteuid(), uid: {self.uid}'


@dataclass
class BscWait4:
    """Trace record for wait4()/wait4_nocancel(); result is always shown."""
    ktraces: List
    pid: int
    status: int
    options: int
    rusage: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'wait4_nocancel' if self.no_cancel else 'wait4'
        return f'{name}({self.pid}, {hex(self.status)}, {self.options}, {hex(self.rusage)}), {self.result}'
@dataclass
class BscRecvmsg:
    """Trace record for recvmsg()/recvmsg_nocancel(); result is always shown."""
    ktraces: List
    socket: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'recvmsg_nocancel' if self.no_cancel else 'recvmsg'
        return f'{name}({self.socket}), {self.result}'


@dataclass
class BscSendmsg:
    """Trace record for sendmsg()/sendmsg_nocancel(); result is always shown."""
    ktraces: List
    socket: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'sendmsg_nocancel' if self.no_cancel else 'sendmsg'
        return f'{name}({self.socket}), {self.result}'


@dataclass
class BscRecvfrom:
    """Trace record for recvfrom()/recvfrom_nocancel(); flags render as names or '0'."""
    ktraces: List
    socket: int
    buffer: int
    length: int
    flags: List
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'recvfrom_nocancel' if self.no_cancel else 'recvfrom'
        flags = ' | '.join(flag.name for flag in self.flags) if self.flags else '0'
        return f'{name}({self.socket}, {hex(self.buffer)}, {self.length}, {flags}), {self.result}'


@dataclass
class BscAccept:
    """Trace record for accept()/accept_nocancel(); result is always shown."""
    ktraces: List
    socket: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'accept_nocancel' if self.no_cancel else 'accept'
        return f'{name}({self.socket}), {self.result}'


@dataclass
class BscGetpeername:
    """Trace record for getpeername(); str() renders the call."""
    ktraces: List
    socket: int
    address: int
    address_len: int
    result: str

    def __str__(self):
        call = f'getpeername({self.socket}, {hex(self.address)}, {hex(self.address_len)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscGetsockname:
    """Trace record for getsockname(); str() renders the call."""
    ktraces: List
    socket: int
    address: int
    address_len: int
    result: str

    def __str__(self):
        call = f'getsockname({self.socket}, {hex(self.address)}, {hex(self.address_len)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscAccess:
    """Trace record for access(); amode flags are joined by their names."""
    ktraces: List
    path: str
    amode: List
    result: str

    def __str__(self):
        amode = ' | '.join(flag.name for flag in self.amode)
        call = f'access("{self.path}", {amode})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscChflags:
    """Trace record for chflags(); flags are joined by their names."""
    ktraces: List
    path: str
    flags: List
    result: str

    def __str__(self):
        flags = ' | '.join(flag.name for flag in self.flags)
        call = f'chflags("{self.path}", {flags})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscFchflags:
    """Trace record for fchflags(); flags are joined by their names."""
    ktraces: List
    fd: int
    flags: List
    result: str

    def __str__(self):
        flags = ' | '.join(flag.name for flag in self.flags)
        call = f'fchflags({self.fd}, {flags})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSync:
    """Trace record for sync(); no arguments and no result."""
    ktraces: List

    def __str__(self):
        return 'sync()'


@dataclass
class BscKill:
    """Trace record for kill(); str() renders the call."""
    ktraces: List
    pid: int
    sig: int
    result: str

    def __str__(self):
        call = f'kill({self.pid}, {self.sig})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscGetppid:
    """Trace record for getppid(); str() renders the returned pid."""
    ktraces: List
    pid: int

    def __str__(self):
        return f'getppid(), pid: {self.pid}'


@dataclass
class BscSysDup:
    """Trace record for dup(); result is always shown."""
    ktraces: List
    fildes: int
    result: str

    def __str__(self):
        return f'dup({self.fildes}), {self.result}'


@dataclass
class BscPipe:
    """Trace record for pipe(); result is always shown."""
    ktraces: List
    result: str

    def __str__(self):
        return f'pipe(), {self.result}'


@dataclass
class BscGetegid:
    """Trace record for getegid(); str() renders the returned gid."""
    ktraces: List
    gid: int

    def __str__(self):
        return f'getegid(), gid: {self.gid}'
@dataclass
class BscSigaction:
    """Trace record for sigaction(); the signal renders by enum name."""
    ktraces: List
    sig: Signals
    act: int
    oact: int
    result: str

    def __str__(self):
        call = f'sigaction({self.sig.name}, {hex(self.act)}, {hex(self.oact)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscGetgid:
    """Trace record for getgid(); str() renders the returned gid."""
    ktraces: List
    gid: int

    def __str__(self):
        return f'getgid(), gid: {self.gid}'
@dataclass
class BscSigprocmap:
    """Trace record for sigprocmask(); `how` renders by enum name."""
    ktraces: List
    how: SigprocmaskFlags
    set: int
    oset: int
    result: str

    def __str__(self):
        call = f'sigprocmask({self.how.name}, {hex(self.set)}, {hex(self.oset)})'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscGetlogin:
    """Trace record for getlogin(); str() renders the buffer address."""
    ktraces: List
    address: int

    def __str__(self):
        return f'getlogin(), address: {hex(self.address)}'


@dataclass
class BscSetlogin:
    """Trace record for setlogin(); str() renders the call."""
    ktraces: List
    address: int
    result: str

    def __str__(self):
        call = f'setlogin({hex(self.address)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscAcct:
    """Trace record for acct(); str() renders the call."""
    ktraces: List
    file: str
    result: str

    def __str__(self):
        call = f'acct("{self.file}")'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSigpending:
    """Trace record for sigpending(); str() renders the call."""
    ktraces: List
    set: int
    result: str

    def __str__(self):
        call = f'sigpending({hex(self.set)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSigaltstack:
    """Trace record for sigaltstack(); str() renders the call."""
    ktraces: List
    ss_address: int
    oss_address: int
    result: str

    def __str__(self):
        call = f'sigaltstack({hex(self.ss_address)}, {hex(self.oss_address)})'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscIoctl:
    """Trace record for ioctl(); the request is also decoded as an _IOC() breakdown."""
    ktraces: List
    fildes: int
    request: int
    arg: int
    result: str

    def __str__(self):
        # Unpack the encoded request word: direction bits, group char, number, length.
        direction = IOC_REQUEST_PARAMS[self.request & 0xf0000000]
        group = chr((self.request >> 8) & 0xff)
        ioc = f"_IOC({direction}, '{group}', {self.request & 0xff}, {(self.request >> 16) & 0x1fff})"
        call = f'ioctl({self.fildes}, {hex(self.request)} /* {ioc} */, {hex(self.arg)})'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscReboot:
    """Trace record for reboot(); str() renders the call."""
    ktraces: List
    howto: int
    result: str

    def __str__(self):
        call = f'reboot({self.howto})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscRevoke:
    """Trace record for revoke(); str() renders the call."""
    ktraces: List
    path: str
    result: str

    def __str__(self):
        call = f'revoke("{self.path}")'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSymlink:
    """Trace record for symlink(); str() renders the call."""
    ktraces: List
    vnode1: int
    path2: str
    result: str

    def __str__(self):
        call = f'symlink({self.vnode1}, "{self.path2}")'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscReadlink:
    """Trace record for readlink(); result is always shown."""
    ktraces: List
    path: str
    buf: int
    bufsize: int
    result: str

    def __str__(self):
        return f'readlink("{self.path}", {hex(self.buf)}, {self.bufsize}), {self.result}'


@dataclass
class BscExecve:
    """Trace record for execve(); arguments are not captured."""
    ktraces: List

    def __str__(self):
        return 'execve()'


@dataclass
class BscUmask:
    """Trace record for umask(); str() renders the new and previous masks."""
    ktraces: List
    cmask: int
    prev_mask: int

    def __str__(self):
        return f'umask({self.cmask}), previous mask: {self.prev_mask}'


@dataclass
class BscChroot:
    """Trace record for chroot(); str() renders the call."""
    ktraces: List
    dirname: str
    result: str

    def __str__(self):
        call = f'chroot("{self.dirname}")'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscMsync:
    """Trace record for msync()/msync_nocancel(); str() renders the call."""
    ktraces: List
    addr: int
    len_: int
    flags: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'msync_nocancel' if self.no_cancel else 'msync'
        call = f'{name}({hex(self.addr)}, {self.len_}, {self.flags})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscVfork:
    """Trace record for vfork(); no arguments captured."""
    ktraces: List

    def __str__(self):
        return 'vfork()'


@dataclass
class BscMunmap:
    """Trace record for munmap(); str() renders the call."""
    ktraces: List
    addr: int
    len_: int
    result: str

    def __str__(self):
        call = f'munmap({hex(self.addr)}, {self.len_})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscMprotect:
    """Trace record for mprotect(); str() renders the call."""
    ktraces: List
    addr: int
    len_: int
    prot: int
    result: str

    def __str__(self):
        call = f'mprotect({hex(self.addr)}, {self.len_}, {self.prot})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscMadvise:
    """Trace record for madvise(); str() renders the call."""
    ktraces: List
    addr: int
    len_: int
    advice: int
    result: str

    def __str__(self):
        call = f'madvise({hex(self.addr)}, {self.len_}, {self.advice})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscMincore:
    """Trace record for mincore(); str() renders the call."""
    ktraces: List
    addr: int
    len_: int
    vec: int
    result: str

    def __str__(self):
        call = f'mincore({hex(self.addr)}, {self.len_}, {hex(self.vec)})'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscGetgroups:
    """Trace record for getgroups(); result is always shown."""
    ktraces: List
    gidsetsize: int
    grouplist: int
    result: str

    def __str__(self):
        return f'getgroups({self.gidsetsize}, {hex(self.grouplist)}), {self.result}'


@dataclass
class BscSetgroups:
    """Trace record for setgroups(); str() renders the call."""
    ktraces: List
    ngroups: int
    gidset: int
    result: str

    def __str__(self):
        call = f'setgroups({self.ngroups}, {hex(self.gidset)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscGetpgrp:
    """Trace record for getpgrp(); str() renders the returned pgid."""
    ktraces: List
    pgid: int

    def __str__(self):
        return f'getpgrp(), pgid: {self.pgid}'


@dataclass
class BscSetpgid:
    """Trace record for setpgid(); str() renders the call."""
    ktraces: List
    pid: int
    pgid: int
    result: str

    def __str__(self):
        call = f'setpgid({self.pid}, {self.pgid})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSetitimer:
    """Trace record for setitimer(); str() renders the call."""
    ktraces: List
    which: int
    value: int
    ovalue: int
    result: str

    def __str__(self):
        call = f'setitimer({self.which}, {hex(self.value)}, {hex(self.ovalue)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSwapon:
    """Trace record for swapon(); str() renders the call."""
    ktraces: List
    path: int
    swapflags: int
    result: str

    def __str__(self):
        call = f'swapon({self.path}, {self.swapflags})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscGetitimer:
    """Trace record for getitimer(); str() renders the call."""
    ktraces: List
    which: int
    value: int
    result: str

    def __str__(self):
        call = f'getitimer({self.which}, {hex(self.value)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSysGetdtablesize:
    """Trace record for getdtablesize(); str() renders the returned size."""
    ktraces: List
    table_size: int

    def __str__(self):
        return f'getdtablesize(), size: {self.table_size}'


@dataclass
class BscSysDup2:
    """Trace record for dup2(); str() renders the call."""
    ktraces: List
    fildes: int
    fildes2: int
    result: str

    def __str__(self):
        call = f'dup2({self.fildes}, {self.fildes2})'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscSysFcntl:
    """Trace record for fcntl()/fcntl_nocancel(); the command renders by enum name."""
    ktraces: List
    fildes: int
    cmd: FcntlCmd
    buf: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'fcntl_nocancel' if self.no_cancel else 'fcntl'
        return f'{name}({self.fildes}, {self.cmd.name}, {hex(self.buf)}), {self.result}'
@dataclass
class BscSelect:
    """Trace record for select()/select_nocancel(); result is always shown."""
    ktraces: List
    nfds: int
    readfds: int
    writefds: int
    errorfds: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'select_nocancel' if self.no_cancel else 'select'
        return (f'{name}({self.nfds}, {hex(self.readfds)}, {hex(self.writefds)}, {hex(self.errorfds)}),'
                f' {self.result}')


@dataclass
class BscFsync:
    """Trace record for fsync()/fsync_nocancel(); str() renders the call."""
    ktraces: List
    fildes: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'fsync_nocancel' if self.no_cancel else 'fsync'
        call = f'{name}({self.fildes})'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscSetpriority:
    """Trace record for setpriority(); `which` renders by enum name."""
    ktraces: List
    which: PriorityWhich
    who: int
    prio: int
    result: str

    def __str__(self):
        call = f'setpriority({self.which.name}, {self.who}, {self.prio})'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscSocket:
    """Trace record for socket(); domain/type render by enum name."""
    ktraces: List
    domain: socket.AddressFamily
    type: socket.SocketKind
    protocol: int
    result: str

    def __str__(self):
        return f'socket({self.domain.name}, {self.type.name}, {self.protocol}), {self.result}'


@dataclass
class BscConnect:
    """Trace record for connect()/connect_nocancel(); str() renders the call."""
    ktraces: List
    socket: int
    address: int
    address_len: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'connect_nocancel' if self.no_cancel else 'connect'
        call = f'{name}({self.socket}, {hex(self.address)}, {self.address_len})'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscGetpriority:
    """Trace record for getpriority(); `which` renders by enum name, result always shown."""
    ktraces: List
    which: PriorityWhich
    who: int
    result: str

    def __str__(self):
        return f'getpriority({self.which.name}, {self.who}), {self.result}'
@dataclass
class BscBind:
    """Trace record for bind(); str() renders the call."""
    ktraces: List
    socket: int
    address: int
    address_len: int
    result: str

    def __str__(self):
        call = f'bind({self.socket}, {hex(self.address)}, {self.address_len})'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscSetsockopt:
    """Trace record for setsockopt(); level/option are symbolized by a shared helper."""
    ktraces: List
    socket: int
    level: int
    option_name: int
    option_value: int
    result: str

    def __str__(self):
        level, option = sockopt_format_level_and_option(self.level, self.option_name)
        call = f'setsockopt({self.socket}, {level}, {option}, {hex(self.option_value)})'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscListen:
    """Trace record for listen(); str() renders the call."""
    ktraces: List
    socket: int
    backlog: int
    result: str

    def __str__(self):
        call = f'listen({self.socket}, {self.backlog})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSigsuspend:
    """Trace record for sigsuspend()/sigsuspend_nocancel(); str() renders the call."""
    ktraces: List
    sigmask: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'sigsuspend_nocancel' if self.no_cancel else 'sigsuspend'
        call = f'{name}({hex(self.sigmask)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscGettimeofday:
    """Trace record for gettimeofday(); str() renders the call."""
    ktraces: List
    tv: int
    tz: int
    result: str

    def __str__(self):
        call = f'gettimeofday({hex(self.tv)}, {hex(self.tz)})'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscGetrusage:
    """Trace record for getrusage(); `who` renders by enum name."""
    ktraces: List
    who: RusageWho
    r_usage: int
    result: str

    def __str__(self):
        # NOTE(review): r_usage is printed in decimal here, unlike the hex()
        # used for other pointer arguments — preserved as-is.
        call = f'getrusage({self.who.name}, {self.r_usage})'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscGetsockopt:
    """Trace record for getsockopt(); level/option are symbolized by a shared helper."""
    ktraces: List
    socket: int
    level: int
    option_name: int
    option_value: int
    result: str

    def __str__(self):
        level, option = sockopt_format_level_and_option(self.level, self.option_name)
        call = f'getsockopt({self.socket}, {level}, {option}, {hex(self.option_value)})'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscReadv:
    """Trace record for readv()/readv_nocancel(); result is always shown."""
    ktraces: List
    d: int
    iov: int
    iovcnt: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'readv_nocancel' if self.no_cancel else 'readv'
        return f'{name}({self.d}, {hex(self.iov)}, {self.iovcnt}), {self.result}'


@dataclass
class BscWritev:
    """Trace record for writev()/writev_nocancel(); result is always shown."""
    ktraces: List
    fildes: int
    iov: int
    iovcnt: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'writev_nocancel' if self.no_cancel else 'writev'
        return f'{name}({self.fildes}, {hex(self.iov)}, {self.iovcnt}), {self.result}'


@dataclass
class BscSettimeofday:
    """Trace record for settimeofday(); str() renders the call."""
    ktraces: List
    tp: int
    tzp: int
    result: str

    def __str__(self):
        call = f'settimeofday({hex(self.tp)}, {hex(self.tzp)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscFchown:
    """Trace record for fchown(); str() renders the call."""
    ktraces: List
    fildes: int
    owner: int
    group: int
    result: str

    def __str__(self):
        call = f'fchown({self.fildes}, {self.owner}, {self.group})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscFchmod:
    """Trace record for fchmod(); mode flags are joined by their names."""
    ktraces: List
    fildes: str
    mode: List
    result: str

    def __str__(self):
        mode = ' | '.join(flag.name for flag in self.mode)
        call = f'fchmod({self.fildes}, {mode})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSetreuid:
    """Trace record for setreuid(); str() renders the call."""
    ktraces: List
    ruid: int
    euid: int
    result: str

    def __str__(self):
        call = f'setreuid({self.ruid}, {self.euid})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSetregid:
    """Trace record for setregid(); str() renders the call."""
    ktraces: List
    rgid: int
    egid: int
    result: str

    def __str__(self):
        call = f'setregid({self.rgid}, {self.egid})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscRename:
    """Trace record for rename(); str() renders the call."""
    ktraces: List
    old: str
    new: str
    result: str

    def __str__(self):
        call = f'rename("{self.old}", "{self.new}")'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSysFlock:
    """Trace record for flock(); operation flags are joined by their names."""
    ktraces: List
    fd: int
    operation: List
    result: str

    def __str__(self):
        operation = ' | '.join(op.name for op in self.operation)
        call = f'flock({self.fd}, {operation})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscMkfifo:
    """Trace record for mkfifo(); mode flags are joined by their names."""
    ktraces: List
    path: str
    mode: List
    result: str

    def __str__(self):
        mode = ' | '.join(flag.name for flag in self.mode)
        call = f'mkfifo("{self.path}", {mode})'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscSendto:
    """Trace record for sendto()/sendto_nocancel(); result is always shown."""
    ktraces: List
    socket: int
    buffer: int
    length: int
    flags: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'sendto_nocancel' if self.no_cancel else 'sendto'
        return f'{name}({self.socket}, {hex(self.buffer)}, {self.length}, {self.flags}), {self.result}'


@dataclass
class BscShutdown:
    """Trace record for shutdown(); str() renders the call."""
    ktraces: List
    socket: int
    how: int
    result: str

    def __str__(self):
        call = f'shutdown({self.socket}, {self.how})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSocketpair:
    """Trace record for socketpair(); domain/type render by enum name."""
    ktraces: List
    domain: socket.AddressFamily
    type: socket.SocketKind
    protocol: int
    socket_vector: int
    result: str

    def __str__(self):
        call = f'socketpair({self.domain.name}, {self.type.name}, {self.protocol}, {hex(self.socket_vector)})'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscMkdir:
    """Trace record for mkdir(); mode flags are joined by their names."""
    ktraces: List
    path: str
    mode: List
    result: str

    def __str__(self):
        mode = ' | '.join(flag.name for flag in self.mode)
        call = f'mkdir("{self.path}", {mode})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscRmdir:
    """Trace record for rmdir(); str() renders the call."""
    ktraces: List
    path: str
    result: str

    def __str__(self):
        call = f'rmdir("{self.path}")'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscUtimes:
    """Trace record for utimes(); str() renders the call."""
    ktraces: List
    path: str
    times: int
    result: str

    def __str__(self):
        call = f'utimes("{self.path}", {hex(self.times)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscFutimes:
    """Trace record for futimes(); str() renders the call."""
    ktraces: List
    fildes: int
    times: int
    result: str

    def __str__(self):
        call = f'futimes({self.fildes}, {hex(self.times)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscAdjtime:
    """Trace record for adjtime(); str() renders the call."""
    ktraces: List
    delta: int
    olddelta: int
    result: str

    def __str__(self):
        call = f'adjtime({hex(self.delta)}, {hex(self.olddelta)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscGethostuuid:
    """Trace record for gethostuuid(); str() renders the call."""
    ktraces: List
    uuid: int
    timeout: int
    result: str

    def __str__(self):
        call = f'gethostuuid({hex(self.uuid)}, {hex(self.timeout)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscObsKillpg:
    """Trace record for the obsolete killpg(); str() renders the call."""
    ktraces: List
    pgrp: int
    sig: int
    result: str

    def __str__(self):
        call = f'killpg({self.pgrp}, {self.sig})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSetsid:
    """Trace record for setsid(); result is always shown."""
    ktraces: List
    result: str

    def __str__(self):
        return f'setsid(), {self.result}'


@dataclass
class BscGetpgid:
    """Trace record for getpgid(); result is always shown."""
    ktraces: List
    pid: int
    result: str

    def __str__(self):
        return f'getpgid({self.pid}), {self.result}'


@dataclass
class BscSetprivexec:
    """Trace record for setprivexec(); result is always shown."""
    ktraces: List
    flag: int
    result: str

    def __str__(self):
        return f'setprivexec({self.flag}), {self.result}'


@dataclass
class BscNfssvc:
    """Trace record for nfssvc(); str() renders the call."""
    ktraces: List
    flags: int
    argstructp: int
    result: str

    def __str__(self):
        call = f'nfssvc({self.flags}, {hex(self.argstructp)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscStatfs:
    """Trace record for statfs(); str() renders the call."""
    ktraces: List
    path: str
    buf: int
    result: str

    def __str__(self):
        call = f'statfs("{self.path}", {hex(self.buf)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscFstatfs:
    """Trace record for fstatfs(); str() renders the call."""
    ktraces: List
    fd: int
    buf: int
    result: str

    def __str__(self):
        call = f'fstatfs({self.fd}, {hex(self.buf)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscUnmount:
    """Trace record for unmount(); str() renders the call."""
    ktraces: List
    dir: str
    flags: int
    result: str

    def __str__(self):
        call = f'unmount("{self.dir}", {self.flags})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscGetfh:
    """Trace record for getfh(); str() renders the call."""
    ktraces: List
    path: str
    fhp: int
    result: str

    def __str__(self):
        call = f'getfh("{self.path}", {hex(self.fhp)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscQuotactl:
    """Trace record for quotactl(); str() renders the call."""
    ktraces: List
    path: str
    cmd: int
    id: int
    addr: int
    result: str

    def __str__(self):
        call = f'quotactl("{self.path}", {self.cmd}, {self.id}, {hex(self.addr)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscMount:
    """Trace record for mount(); str() renders the call."""
    ktraces: List
    source: str
    dest: str
    flags: int
    data: int
    result: str

    def __str__(self):
        call = f'mount("{self.source}", "{self.dest}", {self.flags}, {hex(self.data)})'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscCsops:
    """Trace record for csops(); the operation renders by enum name."""
    ktraces: List
    pid: int
    ops: CsopsOps
    useraddr: int
    usersize: int
    result: str

    def __str__(self):
        call = f'csops({self.pid}, {self.ops.name}, {hex(self.useraddr)}, {self.usersize})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscCsopsAudittoken:
    """Trace record for csops_audittoken(); the operation renders by enum name."""
    ktraces: List
    pid: int
    ops: CsopsOps
    useraddr: int
    usersize: int
    result: str

    def __str__(self):
        call = f'csops_audittoken({self.pid}, {self.ops.name}, {hex(self.useraddr)}, {self.usersize})'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscWaitid:
    """Trace record for waitid()/waitid_nocancel(); str() renders the call."""
    ktraces: List
    idtype: int
    id: int
    infop: int
    options: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        name = 'waitid_nocancel' if self.no_cancel else 'waitid'
        call = f'{name}({self.idtype}, {self.id}, {hex(self.infop)}, {self.options})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscKdebugTypefilter:
    """Trace record for kdebug_typefilter(); str() renders the call."""
    ktraces: List
    addr: int
    size: int
    result: str

    def __str__(self):
        call = f'kdebug_typefilter({hex(self.addr)}, {hex(self.size)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSetgid:
    """Trace record for setgid(); str() renders the call."""
    ktraces: List
    gid: int
    result: str

    def __str__(self):
        call = f'setgid({self.gid})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSetegid:
    """Trace record for setegid(); str() renders the call."""
    ktraces: List
    egid: int
    result: str

    def __str__(self):
        call = f'setegid({self.egid})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSeteuid:
    """Trace record for seteuid(); str() renders the call."""
    ktraces: List
    euid: int
    result: str

    def __str__(self):
        call = f'seteuid({self.euid})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscThreadSelfcounts:
    """Trace record for thread_selfcounts(); str() renders the call."""
    ktraces: List
    type: int
    buf: int
    nbytes: int
    result: str

    def __str__(self):
        call = f'thread_selfcounts({self.type}, {hex(self.buf)}, {self.nbytes})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscFdatasync:
    """Trace record for fdatasync(); str() renders the call."""
    ktraces: List
    fd: int
    result: str

    def __str__(self):
        call = f'fdatasync({self.fd})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscPathconf:
    """Trace record for pathconf(); result is always shown."""
    ktraces: List
    path: str
    name: int
    result: str

    def __str__(self):
        return f'pathconf("{self.path}", {self.name}), {self.result}'


@dataclass
class BscSysFpathconf:
    """Trace record for fpathconf(); result is always shown."""
    ktraces: List
    fildes: int
    name: int
    result: str

    def __str__(self):
        return f'fpathconf({self.fildes}, {self.name}), {self.result}'


@dataclass
class BscGetrlimit:
    """Trace record for getrlimit(); str() renders the call."""
    ktraces: List
    resource: int
    rlp: int
    result: str

    def __str__(self):
        call = f'getrlimit({self.resource}, {hex(self.rlp)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSetrlimit:
    """Trace record for setrlimit(); str() renders the call."""
    ktraces: List
    resource: int
    rlp: int
    result: str

    def __str__(self):
        call = f'setrlimit({self.resource}, {hex(self.rlp)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscGetdirentries:
    """Trace record for getdirentries(); result is always shown."""
    ktraces: List
    fd: int
    buf: int
    nbytes: int
    basep: int
    result: str

    def __str__(self):
        return f'getdirentries({self.fd}, {hex(self.buf)}, {self.nbytes}, {hex(self.basep)}), {self.result}'


@dataclass
class BscMmap:
    """Trace record for mmap(); result is always shown."""
    ktraces: List
    addr: int
    len: int
    prot: int
    flags: int
    result: str

    def __str__(self):
        return f'mmap({hex(self.addr)}, {self.len}, {self.prot}, {self.flags}), {self.result}'


@dataclass
class BscLseek:
    """Trace record for lseek(); result is always shown."""
    ktraces: List
    fildes: int
    offset: int
    whence: int
    result: str

    def __str__(self):
        return f'lseek({self.fildes}, {self.offset}, {self.whence}), {self.result}'


@dataclass
class BscTruncate:
    """Trace record for truncate(); str() renders the call."""
    ktraces: List
    path: str
    length: int
    result: str

    def __str__(self):
        call = f'truncate("{self.path}", {self.length})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscFtruncate:
    """Trace record for ftruncate(); str() renders the call."""
    ktraces: List
    fildes: int
    length: int
    result: str

    def __str__(self):
        call = f'ftruncate({self.fildes}, {self.length})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscSysctl:
    """Trace record for sysctl(); str() renders the call."""
    ktraces: List
    name: int
    namelen: int
    oldp: int
    oldlenp: int
    result: str

    def __str__(self):
        call = f'sysctl({hex(self.name)}, {self.namelen}, {hex(self.oldp)}, {hex(self.oldlenp)})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscMlock:
    """Trace record for mlock(); str() renders the call."""
    ktraces: List
    addr: int
    len: int
    result: str

    def __str__(self):
        call = f'mlock({hex(self.addr)}, {self.len})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscMunlock:
    """Trace record for munlock(); str() renders the call."""
    ktraces: List
    addr: int
    len: int
    result: str

    def __str__(self):
        call = f'munlock({hex(self.addr)}, {self.len})'
        return f'{call}, {self.result}' if self.result else call


@dataclass
class BscUndelete:
    """Trace record for undelete(); str() renders the call."""
    ktraces: List
    path: str
    result: str

    def __str__(self):
        call = f'undelete("{self.path}")'
        return f'{call}, {self.result}' if self.result else call
@dataclass
class BscOpenDprotectedNp:
ktraces: List
path: str
flags: List
class_: str
dpflags: str
result: str
def __str__(self):
flags = ' | '.join(map(lambda f: f.name, self.flags))
return f'open_dprotected_np("{self.path}", {flags}, {self.class_}, {self.dpflags}), {self.result}'
@dataclass
class BscGetattrlist:
ktraces: List
path: str
attr_list: int
attr_buf: int
attr_buf_size: int
result: str
def __str__(self):
rep = f'getattrlist("{self.path}", {hex(self.attr_list)}, {hex(self.attr_buf)}, {self.attr_buf_size})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscSetattrlist:
ktraces: List
path: str
attr_list: int
attr_buf: int
attr_buf_size: int
result: str
def __str__(self):
rep = f'setattrlist("{self.path}", {hex(self.attr_list)}, {hex(self.attr_buf)}, {self.attr_buf_size})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscGetdirentriesattr:
ktraces: List
fd: str
attr_list: int
attr_buf: int
attr_buf_size: int
result: str
def __str__(self):
return (f'getdirentriesattr({self.fd}, {hex(self.attr_list)}, {hex(self.attr_buf)}, {self.attr_buf_size})'
f', {self.result}')
@dataclass
class BscExchangedata:
ktraces: List
path1: str
path2: str
options: int
result: str
def __str__(self):
rep = f'exchangedata("{self.path1}", "{self.path2}", {self.options})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscSearchfs:
ktraces: List
path: str
search_block: int
num_matches: int
script_code: int
result: str
def __str__(self):
rep = f'searchfs("{self.path}", {hex(self.search_block)}, {hex(self.num_matches)}, {self.script_code})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscFgetattrlist:
ktraces: List
fd: int
attr_list: int
attr_buf: int
attr_buf_size: int
result: str
def __str__(self):
rep = f'fgetattrlist({self.fd}, {hex(self.attr_list)}, {hex(self.attr_buf)}, {self.attr_buf_size})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscFsetattrlist:
ktraces: List
fd: int
attr_list: int
attr_buf: int
attr_buf_size: int
result: str
def __str__(self):
rep = f'fsetattrlist({self.fd}, {hex(self.attr_list)}, {hex(self.attr_buf)}, {self.attr_buf_size})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscPoll:
ktraces: List
fds: int
nfds: int
timeout: int
result: str
no_cancel: bool = False
def __str__(self):
no_cancel = '_nocancel' if self.no_cancel else ''
return f'poll{no_cancel}({hex(self.fds)}, {self.nfds}, {self.timeout}), {self.result}'
@dataclass
class BscGetxattr:
ktraces: List
path: str
name: int
value: int
size: int
result: str
def __str__(self):
return f'getxattr("{self.path}", {hex(self.name)}, {hex(self.value)}, {self.size}), {self.result}'
@dataclass
class BscFgetxattr:
ktraces: List
fd: int
name: int
value: int
size: int
result: str
def __str__(self):
return f'fgetxattr({self.fd}, {hex(self.name)}, {hex(self.value)}, {self.size}), {self.result}'
@dataclass
class BscSetxattr:
ktraces: List
path: str
name: int
value: int
size: int
result: str
def __str__(self):
rep = f'setxattr("{self.path}", {hex(self.name)}, {hex(self.value)}, {self.size})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscFsetxattr:
ktraces: List
fd: int
name: int
value: int
size: int
result: str
def __str__(self):
rep = f'fsetxattr({self.fd}, {hex(self.name)}, {hex(self.value)}, {self.size})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscRemovexattr:
ktraces: List
path: str
name: int
options: int
result: str
def __str__(self):
rep = f'removexattr("{self.path}", {hex(self.name)}, {self.options})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscFremovexattr:
ktraces: List
fd: int
name: int
options: int
result: str
def __str__(self):
rep = f'fremovexattr({self.fd}, {hex(self.name)}, {self.options})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscListxattr:
ktraces: List
path: str
namebuf: int
size: int
options: int
result: str
def __str__(self):
return f'listxattr("{self.path}", {hex(self.namebuf)}, {self.size}, {self.options}), {self.result}'
@dataclass
class BscFlistxattr:
ktraces: List
fd: int
namebuf: int
size: int
options: int
result: str
def __str__(self):
return f'flistxattr({self.fd}, {hex(self.namebuf)}, {self.size}, {self.options}), {self.result}'
@dataclass
class BscFsctl:
ktraces: List
path: str
request: int
data: int
options: int
result: str
def __str__(self):
rep = f'fsctl("{self.path}", {self.request}, {hex(self.data)}, {self.options})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscInitgroups:
ktraces: List
name: int
basegid: int
result: str
def __str__(self):
rep = f'initgroups({hex(self.name)}, {self.basegid})'
if self.result:
rep += f', {self.result}'
return rep
@dataclass
class BscPosixSpawn:
    """posix_spawn(2) trace record; stdin/stdout/stderr hold redirected paths when present (not rendered)."""
    ktraces: List
    pid: int
    path: str
    file_actions: int
    attrp: int
    stdin: str
    stdout: str
    stderr: str
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'posix_spawn({hex(self.pid)}, "{self.path}", {hex(self.file_actions)}, {hex(self.attrp)}){tail}'
@dataclass
class BscFfsctl:
    """ffsctl(2) trace record; str() renders the ktrace one-liner."""
    ktraces: List
    fd: int
    request: int
    data: int
    options: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'ffsctl({self.fd}, {self.request}, {hex(self.data)}, {self.options}){tail}'
@dataclass
class BscNfsclnt:
    """nfsclnt(2) trace record; str() renders the ktrace one-liner."""
    ktraces: List
    flags: int
    argstructp: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'nfsclnt({self.flags}, {hex(self.argstructp)}){tail}'
@dataclass
class BscFhopen:
    """fhopen(2) trace record; `fhp` is the file-handle pointer, printed in hex."""
    ktraces: List
    fhp: int
    flags: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'fhopen({hex(self.fhp)}, {self.flags}){tail}'
@dataclass
class BscMinherit:
    """minherit(2) trace record; str() renders the ktrace one-liner."""
    ktraces: List
    addr: int
    len: int
    inherit: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'minherit({hex(self.addr)}, {self.len}, {self.inherit}){tail}'
@dataclass
class BscSemsys:
    """semsys(2) trace record; a2-a4 are the raw multiplexed syscall arguments."""
    ktraces: List
    which: int
    a2: int
    a3: int
    a4: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'semsys({self.which}, {self.a2}, {self.a3}, {self.a4}){tail}'
@dataclass
class BscMsgsys:
    """msgsys(2) trace record; a2-a4 are the raw multiplexed syscall arguments."""
    ktraces: List
    which: int
    a2: int
    a3: int
    a4: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'msgsys({self.which}, {self.a2}, {self.a3}, {self.a4}){tail}'
@dataclass
class BscShmsys:
    """shmsys(2) trace record; a2-a4 are the raw multiplexed syscall arguments."""
    ktraces: List
    which: int
    a2: int
    a3: int
    a4: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'shmsys({self.which}, {self.a2}, {self.a3}, {self.a4}){tail}'
@dataclass
class BscSemctl:
    """semctl(2) trace record; result (always shown) holds the serialized return."""
    ktraces: List
    semid: int
    semnum: int
    cmd: int
    semun: int
    result: str

    def __str__(self):
        call = f'semctl({self.semid}, {self.semnum}, {self.cmd}, {hex(self.semun)})'
        return f'{call}, {self.result}'
@dataclass
class BscSemget:
    """semget(2) trace record; result (always shown) holds the serialized return."""
    ktraces: List
    key: int
    nsems: int
    semflg: int
    result: str

    def __str__(self):
        call = f'semget({self.key}, {self.nsems}, {self.semflg})'
        return f'{call}, {self.result}'
@dataclass
class BscSemop:
    """semop(2) trace record; `sops` is the operations-array pointer, printed in hex."""
    ktraces: List
    semid: int
    sops: int
    nsops: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'semop({self.semid}, {hex(self.sops)}, {self.nsops}){tail}'
@dataclass
class BscMsgctl:
    """msgctl(2) trace record; result (always shown) holds the serialized return."""
    ktraces: List
    msqid: int
    cmd: int
    ds: int
    result: str

    def __str__(self):
        call = f'msgctl({self.msqid}, {self.cmd}, {self.ds})'
        return f'{call}, {self.result}'
@dataclass
class BscMsgget:
    """msgget(2) trace record; result (always shown) holds the serialized return."""
    ktraces: List
    key: int
    msgflg: int
    result: str

    def __str__(self):
        call = f'msgget({self.key}, {self.msgflg})'
        return f'{call}, {self.result}'
@dataclass
class BscMsgsnd:
    """msgsnd(2) trace record; no_cancel selects the _nocancel syscall variant name."""
    ktraces: List
    msqid: int
    msgp: int
    msgsz: int
    msgflg: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        variant = '_nocancel' if self.no_cancel else ''
        return f'msgsnd{variant}({self.msqid}, {hex(self.msgp)}, {self.msgsz}, {self.msgflg}), {self.result}'
@dataclass
class BscMsgrcv:
    """msgrcv(2) trace record; no_cancel selects the _nocancel syscall variant name.

    Fix: `no_cancel` now defaults to False, matching every sibling record
    (BscMsgsnd, BscSemWait, BscSemwaitSignal, ...) — previously it was the
    only such field without a default, forcing callers to pass it explicitly.
    Backward-compatible: existing positional/keyword callers are unaffected.
    """
    ktraces: List
    msqid: int
    msgp: int
    msgsz: int
    msgtyp: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        no_cancel = '_nocancel' if self.no_cancel else ''
        return f'msgrcv{no_cancel}({self.msqid}, {hex(self.msgp)}, {self.msgsz}, {self.msgtyp}), {self.result}'
@dataclass
class BscShmat:
    """shmat(2) trace record; result (always shown) holds the serialized return."""
    ktraces: List
    shmid: int
    shmaddr: int
    shmflg: int
    result: str

    def __str__(self):
        call = f'shmat({self.shmid}, {hex(self.shmaddr)}, {self.shmflg})'
        return f'{call}, {self.result}'
@dataclass
class BscShmctl:
    """shmctl(2) trace record; str() renders the ktrace one-liner."""
    ktraces: List
    shmid: int
    cmd: int
    buf: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'shmctl({self.shmid}, {self.cmd}, {hex(self.buf)}){tail}'
@dataclass
class BscShmdt:
    """shmdt(2) trace record; str() renders the ktrace one-liner."""
    ktraces: List
    shmaddr: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'shmdt({hex(self.shmaddr)}){tail}'
@dataclass
class BscShmget:
    """shmget(2) trace record; result (always shown) holds the serialized return."""
    ktraces: List
    key: int
    size: int
    shmflg: int
    result: str

    def __str__(self):
        call = f'shmget({self.key}, {self.size}, {self.shmflg})'
        return f'{call}, {self.result}'
@dataclass
class BscShmOpen:
    """shm_open(2) trace record; oflag/mode are lists of flag enum members (rendered as ' | ' chains);
    mode is only shown when O_CREAT is among the open flags, mirroring the syscall's optional third arg."""
    ktraces: List
    name: int
    oflag: List
    mode: List
    result: str

    def __str__(self):
        oflags = ' | '.join(map(lambda f: f.name, self.oflag))
        mode = (', ' + ' | '.join(map(lambda f: f.name, self.mode))) if BscOpenFlags.O_CREAT in self.oflag else ''
        return f'shm_open({hex(self.name)}, {oflags}{mode}), {self.result}'
@dataclass
class BscShmUnlink:
    """shm_unlink(2) trace record; `name` is a raw pointer, printed in hex."""
    ktraces: List
    name: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'shm_unlink({hex(self.name)}){tail}'
@dataclass
class BscSemOpen:
    """sem_open(2) trace record; oflag/mode are lists of flag enum members (rendered as ' | ' chains);
    mode is only shown when O_CREAT is among the open flags, mirroring the syscall's optional third arg."""
    ktraces: List
    name: int
    oflag: List
    mode: List
    result: str

    def __str__(self):
        oflags = ' | '.join(map(lambda f: f.name, self.oflag))
        mode = (', ' + ' | '.join(map(lambda f: f.name, self.mode))) if BscOpenFlags.O_CREAT in self.oflag else ''
        return f'sem_open({hex(self.name)}, {oflags}{mode}), {self.result}'
@dataclass
class BscSemClose:
    """sem_close(2) trace record.

    Fix: render the semaphore handle with hex() for consistency — every other
    sem_* record (BscSemWait, BscSemTrywait, BscSemPost) prints the same
    handle as a hex address; this one printed it in decimal.
    """
    ktraces: List
    sem: int
    result: str

    def __str__(self):
        rep = f'sem_close({hex(self.sem)})'
        if self.result:
            rep += f', {self.result}'
        return rep
@dataclass
class BscSemUnlink:
    """sem_unlink(2) trace record; `name` is a raw pointer, printed in hex."""
    ktraces: List
    name: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'sem_unlink({hex(self.name)}){tail}'
@dataclass
class BscSemWait:
    """sem_wait(2) trace record; no_cancel selects the _nocancel syscall variant name."""
    ktraces: List
    sem: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        variant = '_nocancel' if self.no_cancel else ''
        tail = f', {self.result}' if self.result else ''
        return f'sem_wait{variant}({hex(self.sem)}){tail}'
@dataclass
class BscSemTrywait:
    """sem_trywait(2) trace record; str() renders the ktrace one-liner."""
    ktraces: List
    sem: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'sem_trywait({hex(self.sem)}){tail}'
@dataclass
class BscSemPost:
    """sem_post(2) trace record; str() renders the ktrace one-liner."""
    ktraces: List
    sem: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'sem_post({hex(self.sem)}){tail}'
@dataclass
class BscSysctlbyname:
    """sysctlbyname(2) trace record; all four args are raw pointers, printed in hex."""
    ktraces: List
    name: int
    oldp: int
    oldlenp: int
    newp: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'sysctlbyname({hex(self.name)}, {hex(self.oldp)}, {hex(self.oldlenp)}, {hex(self.newp)}){tail}'
@dataclass
class BscAccessExtended:
    """access_extended(2) trace record; entries/results are raw pointers, printed in hex."""
    ktraces: List
    entries: int
    size: int
    results: int
    uid: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'access_extended({hex(self.entries)}, {self.size}, {hex(self.results)}, {self.uid}){tail}'
@dataclass
class BscGettid:
    """gettid(2) trace record; uidp/gidp are out-pointers, printed in hex."""
    ktraces: List
    uidp: int
    gidp: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'gettid({hex(self.uidp)}, {hex(self.gidp)}){tail}'
@dataclass
class BscSharedRegionCheckNp:
    """shared_region_check_np trace record; str() renders the ktrace one-liner."""
    ktraces: List
    startaddress: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'shared_region_check_np({hex(self.startaddress)}){tail}'
@dataclass
class BscPsynchMutexwait:
    """psynch_mutexwait trace record; `mutex` is a userspace address, printed in hex."""
    ktraces: List
    mutex: int
    mgen: int
    ugen: int
    tid: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'psynch_mutexwait({hex(self.mutex)}, {self.mgen}, {self.ugen}, {self.tid}){tail}'
@dataclass
class BscPsynchMutexdrop:
    """psynch_mutexdrop trace record; `mutex` is a userspace address, printed in hex."""
    ktraces: List
    mutex: int
    mgen: int
    ugen: int
    tid: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'psynch_mutexdrop({hex(self.mutex)}, {self.mgen}, {self.ugen}, {self.tid}){tail}'
@dataclass
class BscPsynchCvbroad:
    """psynch_cvbroad trace record; `cv` is a userspace address, printed in hex."""
    ktraces: List
    cv: int
    cvlsgen: int
    cvudgen: int
    flags: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'psynch_cvbroad({hex(self.cv)}, {self.cvlsgen}, {self.cvudgen}, {self.flags}){tail}'
@dataclass
class BscPsynchCvsignal:
    """psynch_cvsignal trace record; `cv` is a userspace address, printed in hex."""
    ktraces: List
    cv: int
    cvlsgen: int
    cvugen: int
    thread_port: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'psynch_cvsignal({hex(self.cv)}, {self.cvlsgen}, {self.cvugen}, {self.thread_port}){tail}'
@dataclass
class BscPsynchCvwait:
    """psynch_cvwait trace record; cv/mutex are userspace addresses, printed in hex."""
    ktraces: List
    cv: int
    cvlsgen: int
    cvugen: int
    mutex: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'psynch_cvwait({hex(self.cv)}, {self.cvlsgen}, {self.cvugen}, {hex(self.mutex)}){tail}'
@dataclass
class BscGetsid:
    """getsid(2) trace record; result (always shown) holds the serialized session id."""
    ktraces: List
    pid: int
    result: str

    def __str__(self):
        call = f'getsid({self.pid})'
        return f'{call}, {self.result}'
@dataclass
class BscPsynchCvclrprepost:
    """psynch_cvclrprepost trace record; `cv` is a userspace address, printed in hex."""
    ktraces: List
    cv: int
    cvgen: int
    cvugen: int
    cvsgen: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'psynch_cvclrprepost({hex(self.cv)}, {self.cvgen}, {self.cvugen}, {self.cvsgen}){tail}'
@dataclass
class BscIopolicysys:
    """iopolicysys trace record; result (always shown) holds the serialized return."""
    ktraces: List
    cmd: int
    arg: int
    result: str

    def __str__(self):
        call = f'iopolicysys({self.cmd}, {hex(self.arg)})'
        return f'{call}, {self.result}'
@dataclass
class BscProcessPolicy:
    """process_policy trace record; str() renders the ktrace one-liner."""
    ktraces: List
    scope: int
    action: int
    policy: int
    policy_subtype: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'process_policy({self.scope}, {self.action}, {self.policy}, {self.policy_subtype}){tail}'
@dataclass
class BscMlockall:
    """mlockall(2) trace record; str() renders the ktrace one-liner."""
    ktraces: List
    flags: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'mlockall({self.flags}){tail}'
@dataclass
class BscMunlockall:
    """munlockall(2) trace record; the syscall takes no arguments."""
    ktraces: List
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'munlockall(){tail}'
@dataclass
class BscIssetugid:
    """issetugid(2) trace record; result (always shown) holds the serialized return."""
    ktraces: List
    result: str

    def __str__(self):
        return f'issetugid(), {self.result}'
@dataclass
class BscPthreadSigmask:
    """pthread_sigmask trace record; set/oset are sigset pointers, printed in hex."""
    ktraces: List
    how: int
    set: int
    oset: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'pthread_sigmask({self.how}, {hex(self.set)}, {hex(self.oset)}){tail}'
@dataclass
class BscDisableThreadsignal:
    """disable_threadsignal trace record; str() renders the ktrace one-liner."""
    ktraces: List
    value: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'disable_threadsignal({self.value}){tail}'
@dataclass
class BscSemwaitSignal:
    """semwait_signal trace record; no_cancel selects the _nocancel syscall variant name."""
    ktraces: List
    cond_sem: int
    mutex_sem: int
    timeout: int
    relative: int
    result: str
    no_cancel: bool = False

    def __str__(self):
        variant = '_nocancel' if self.no_cancel else ''
        tail = f', {self.result}' if self.result else ''
        return f'semwait_signal{variant}({self.cond_sem}, {self.mutex_sem}, {self.timeout}, {self.relative}){tail}'
@dataclass
class BscProcInfo:
    """proc_info trace record; `callnum` is a ProcInfoCall enum member (its .name is rendered)."""
    ktraces: List
    callnum: ProcInfoCall
    pid: int
    flags: int
    ext_id: int
    result: str

    def __str__(self):
        rep = f'proc_info({self.callnum.name}, {self.pid}, {self.flags}, {self.ext_id})'
        if self.result:
            rep += f', {self.result}'
        return rep
@dataclass
class BscSendfile:
    """sendfile(2) trace record; `len` is a pointer to the length, printed in hex."""
    ktraces: List
    fd: int
    s: int
    offset: int
    len: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'sendfile({self.fd}, {self.s}, {self.offset}, {hex(self.len)}){tail}'
@dataclass
class BscStat64:
    """stat64(2) trace record; `buf` is the stat-struct pointer, printed in hex."""
    ktraces: List
    path: str
    buf: int
    result: str

    def __str__(self):
        tail = f', {self.result}' if self.result else ''
        return f'stat64("{self.path}", {hex(self.buf)}){tail}'
def handle_read(parser, events, no_cancel=False):
    """Build a BscRead from read(2) trace events; entry args are (fd, buf, size), exit event carries the count."""
    result = serialize_result(events[-1], 'count')
    args = events[0].values
    return BscRead(events, args[0], args[1], args[2], result, no_cancel)
def handle_write(parser, events, no_cancel=False):
    """Build a BscWrite from write(2) trace events; entry args are (fd, buf, size), exit event carries the count."""
    result = serialize_result(events[-1], 'count')
    args = events[0].values
    return BscWrite(events, args[0], args[1], args[2], result, no_cancel)
def handle_open(parser, events, no_cancel=False):
    """Build a BscOpen: resolve the opened path from the vnode trace, decode open flags, serialize the fd."""
    vnode = parser.parse_vnode(events)
    call_flags = serialize_open_flags(events[0].values[1])
    return BscOpen(events, vnode.path, call_flags, serialize_result(events[-1], 'fd'), no_cancel)
def handle_sys_close(parser, events, no_cancel=False):
    """Build a BscSysClose from close(2) trace events; entry arg 0 is the fd."""
    return BscSysClose(events, events[0].values[0], serialize_result(events[-1]), no_cancel)
def handle_link(parser, events):
    """Build a BscLink: the first vnode lookup resolves the existing path, a second pass
    over the remaining events resolves the new link path."""
    old_vnode = parser.parse_vnode(events)
    new_vnode = parser.parse_vnode([e for e in events if e not in old_vnode.ktraces])
    return BscLink(events, old_vnode.path, new_vnode.path, serialize_result(events[-1]))
def handle_unlink(parser, events):
    """Build a BscUnlink; target path comes from the vnode trace."""
    vnode = parser.parse_vnode(events)
    return BscUnlink(events, vnode.path, serialize_result(events[-1]))
def handle_chdir(parser, events):
    """Build a BscChdir; target path comes from the vnode trace."""
    vnode = parser.parse_vnode(events)
    return BscChdir(events, vnode.path, serialize_result(events[-1]))
def handle_fchdir(parser, events):
    """Build a BscFchdir from fchdir(2) trace events; entry arg 0 is the fd."""
    return BscFchdir(events, events[0].values[0], serialize_result(events[-1]))
def handle_mknod(parser, events):
    """Build a BscMknod; path from the vnode trace, mode/dev from entry args 1-2."""
    vnode = parser.parse_vnode(events)
    return BscMknod(events, vnode.path, events[0].values[1], events[0].values[2], serialize_result(events[-1]))
def handle_chmod(parser, events):
    """Build a BscChmod; path from the vnode trace, mode bits decoded from entry arg 1."""
    vnode = parser.parse_vnode(events)
    return BscChmod(events, vnode.path, serialize_stat_flags(events[0].values[1]), serialize_result(events[-1]))
def handle_chown(parser, events):
    """Build a BscChown; path from the vnode trace, owner/group from entry args 1-2."""
    vnode = parser.parse_vnode(events)
    return BscChown(events, vnode.path, events[0].values[1], events[0].values[2], serialize_result(events[-1]))
def handle_getpid(parser, events):
    """Build a BscGetpid; the pid is the second value of the exit event."""
    return BscGetpid(events, events[-1].values[1])
def handle_setuid(parser, events):
    """Build a BscSetuid from setuid(2) trace events; entry arg 0 is the uid."""
    return BscSetuid(events, events[0].values[0], serialize_result(events[-1]))
def handle_getuid(parser, events):
    """Build a BscGetuid; the uid is the second value of the exit event."""
    return BscGetuid(events, events[-1].values[1])
def handle_geteuid(parser, events):
    """Build a BscGeteuid; the euid is the second value of the exit event."""
    return BscGeteuid(events, events[-1].values[1])
def handle_wait4(parser, events, no_cancel=False):
    """Build a BscWait4 from wait4(2) trace events; exit event carries the reaped pid."""
    args = events[0].values
    return BscWait4(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'pid'), no_cancel)
def handle_recvmsg(parser, events, no_cancel=False):
    """Build a BscRecvmsg; entry arg 0 is the socket fd, exit event carries the byte count."""
    return BscRecvmsg(events, events[0].values[0], serialize_result(events[-1], 'count'), no_cancel)
def handle_sendmsg(parser, events, no_cancel=False):
    """Build a BscSendmsg; entry arg 0 is the socket fd, exit event carries the byte count."""
    return BscSendmsg(events, events[0].values[0], serialize_result(events[-1], 'count'), no_cancel)
def handle_recvfrom(parser, events, no_cancel=False):
    """Build a BscRecvfrom; entry arg 3 is a MSG_* bitmask, expanded into SocketMsgFlags members."""
    args = events[0].values
    flags = [flag for flag in SocketMsgFlags if flag.value & args[3]]
    return BscRecvfrom(events, args[0], args[1], args[2], flags, serialize_result(events[-1], 'count'), no_cancel)
def handle_accept(parser, events, no_cancel=False):
    """Build a BscAccept; entry arg 0 is the listening fd, exit event carries the accepted fd."""
    return BscAccept(events, events[0].values[0], serialize_result(events[-1], 'fd'), no_cancel)
def handle_getpeername(parser, events):
    """Build a BscGetpeername from getpeername(2) trace events."""
    args = events[0].values
    return BscGetpeername(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_getsockname(parser, events):
    """Build a BscGetsockname from getsockname(2) trace events."""
    args = events[0].values
    return BscGetsockname(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_access(parser, events):
    """Build a BscAccess; path from the vnode trace, access mode decoded from entry arg 1."""
    vnode = parser.parse_vnode(events)
    amode = serialize_access_flags(events[0].values[1])
    return BscAccess(events, vnode.path, amode, serialize_result(events[-1]))
def handle_chflags(parser, events):
    """Build a BscChflags; entry arg 1 is a bitmask expanded into BscChangeableFlags members."""
    vnode = parser.parse_vnode(events)
    flags = [flag for flag in BscChangeableFlags if flag.value & events[0].values[1]]
    return BscChflags(events, vnode.path, flags, serialize_result(events[-1]))
def handle_fchflags(parser, events):
    """Build a BscFchflags; entry arg 1 is a bitmask expanded into BscChangeableFlags members."""
    flags = [flag for flag in BscChangeableFlags if flag.value & events[0].values[1]]
    return BscFchflags(events, events[0].values[0], flags, serialize_result(events[-1]))
def handle_sync(parser, events):
    """Build a BscSync; sync(2) takes no arguments and returns nothing to serialize."""
    return BscSync(events)
def handle_kill(parser, events):
    """Build a BscKill from kill(2) trace events; entry args are (pid, sig)."""
    return BscKill(events, events[0].values[0], events[0].values[1], serialize_result(events[-1]))
def handle_getppid(parser, events):
    """Build a BscGetppid; the ppid is the second value of the exit event."""
    return BscGetppid(events, events[-1].values[1])
def handle_sys_dup(parser, events):
    """Build a BscSysDup; entry arg 0 is the source fd, exit event carries the new fd."""
    return BscSysDup(events, events[0].values[0], serialize_result(events[-1], 'fd'))
def handle_pipe(parser, events):
    """Build a BscPipe.

    pipe(2) returns two fds instead of a single value, so the generic
    serialize_result helper does not apply: exit value 0 is the errno
    (0 on success), and on success values 1 and 2 are the read/write fds.
    """
    error_code = events[-1].values[0]
    if error_code:
        if error_code in errno.errorcode:
            result = f'errno: {errno.errorcode[error_code]}({error_code})'
        else:
            result = f'errno: {error_code}'
    else:
        result = f'read_fd: {events[-1].values[1]}, write_fd: {events[-1].values[2]}'
    return BscPipe(events, result)
def handle_getegid(parser, events):
    """Build a BscGetegid; the egid is the second value of the exit event."""
    return BscGetegid(events, events[-1].values[1])
def handle_sigaction(parser, events):
    """Build a BscSigaction; entry arg 0 is mapped to a Signals enum member."""
    args = events[0].values
    return BscSigaction(events, Signals(args[0]), args[1], args[2], serialize_result(events[-1]))
def handle_getgid(parser, events):
    """Build a BscGetgid; the gid is the second value of the exit event."""
    return BscGetgid(events, events[-1].values[1])
def handle_sigprocmask(parser, events):
    """Build a BscSigprocmap record; entry arg 0 is mapped to a SigprocmaskFlags member."""
    args = events[0].values
    return BscSigprocmap(events, SigprocmaskFlags(args[0]), args[1], args[2], serialize_result(events[-1]))
def handle_getlogin(parser, events):
    """Build a BscGetlogin from entry arg 0 (the name buffer)."""
    return BscGetlogin(events, events[0].values[0])
def handle_setlogin(parser, events):
    """Build a BscSetlogin from setlogin(2) trace events."""
    return BscSetlogin(events, events[0].values[0], serialize_result(events[-1]))
def handle_acct(parser, events):
    """Build a BscAcct; accounting-file path comes from the vnode trace."""
    return BscAcct(events, parser.parse_vnode(events).path, serialize_result(events[-1]))
def handle_sigpending(parser, events):
    """Build a BscSigpending from sigpending(2) trace events."""
    return BscSigpending(events, events[0].values[0], serialize_result(events[-1]))
def handle_sigaltstack(parser, events):
    """Build a BscSigaltstack from sigaltstack(2) trace events."""
    return BscSigaltstack(events, events[0].values[0], events[0].values[1], serialize_result(events[-1]))
def handle_ioctl(parser, events):
    """Build a BscIoctl from ioctl(2) trace events."""
    args = events[0].values
    return BscIoctl(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_reboot(parser, events):
    """Build a BscReboot from reboot(2) trace events."""
    return BscReboot(events, events[0].values[0], serialize_result(events[-1]))
def handle_revoke(parser, events):
    """Build a BscRevoke; target path comes from the vnode trace."""
    return BscRevoke(events, parser.parse_vnode(events).path, serialize_result(events[-1]))
def handle_symlink(parser, events):
    """Build a BscSymlink; entry arg 0 is the link target, the vnode trace resolves the link path."""
    return BscSymlink(events, events[0].values[0], parser.parse_vnode(events).path, serialize_result(events[-1]))
def handle_readlink(parser, events):
    """Build a BscReadlink; link path from the vnode trace, exit event carries the byte count."""
    args = events[0].values
    return BscReadlink(events, parser.parse_vnode(events).path, args[1], args[2],
                       serialize_result(events[-1], 'count'))
def handle_execve(parser, events):
    """Build a BscExecve; no arguments are extracted from the trace here."""
    return BscExecve(events)
def handle_umask(parser, events):
    """Build a BscUmask; umask(2) cannot fail, so the previous mask is read directly from the exit event."""
    return BscUmask(events, events[0].values[0], events[-1].values[1])
def handle_chroot(parser, events):
    """Build a BscChroot; target path comes from the vnode trace."""
    return BscChroot(events, parser.parse_vnode(events).path, serialize_result(events[-1]))
def handle_msync(parser, events, no_cancel=False):
    """Build a BscMsync from msync(2) trace events."""
    args = events[0].values
    return BscMsync(events, args[0], args[1], args[2], serialize_result(events[-1]), no_cancel)
def handle_vfork(parser, events):
    """Build a BscVfork; no arguments are extracted from the trace here."""
    return BscVfork(events)
def handle_munmap(parser, events):
    """Build a BscMunmap from munmap(2) trace events."""
    args = events[0].values
    return BscMunmap(events, args[0], args[1], serialize_result(events[-1]))
def handle_mprotect(parser, events):
    """Build a BscMprotect from mprotect(2) trace events."""
    args = events[0].values
    return BscMprotect(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_madvise(parser, events):
    """Build a BscMadvise from madvise(2) trace events."""
    args = events[0].values
    return BscMadvise(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_mincore(parser, events):
    """Build a BscMincore from mincore(2) trace events."""
    args = events[0].values
    return BscMincore(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_getgroups(parser, events):
    """Build a BscGetgroups; exit event carries the group count."""
    args = events[0].values
    return BscGetgroups(events, args[0], args[1], serialize_result(events[-1], 'count'))
def handle_setgroups(parser, events):
    """Build a BscSetgroups from setgroups(2) trace events."""
    args = events[0].values
    return BscSetgroups(events, args[0], args[1], serialize_result(events[-1]))
def handle_getpgrp(parser, events):
    """Build a BscGetpgrp; the pgrp is the second value of the exit event."""
    return BscGetpgrp(events, events[-1].values[1])
def handle_setpgid(parser, events):
    """Build a BscSetpgid from setpgid(2) trace events; entry args are (pid, pgid)."""
    return BscSetpgid(events, events[0].values[0], events[0].values[1], serialize_result(events[-1]))
def handle_setitimer(parser, events):
    """Build a BscSetitimer from setitimer(2) trace events."""
    args = events[0].values
    return BscSetitimer(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_swapon(parser, events):
    """Build a BscSwapon from swapon(2) trace events."""
    args = events[0].values
    return BscSwapon(events, args[0], args[1], serialize_result(events[-1]))
def handle_getitimer(parser, events):
    """Build a BscGetitimer from getitimer(2) trace events."""
    args = events[0].values
    return BscGetitimer(events, args[0], args[1], serialize_result(events[-1]))
def handle_sys_getdtablesize(parser, events):
    """Build a BscSysGetdtablesize; the table size is the second value of the exit event."""
    return BscSysGetdtablesize(events, events[-1].values[1])
def handle_sys_dup2(parser, events):
    """Build a BscSysDup2 from dup2(2) trace events; entry args are (oldfd, newfd)."""
    args = events[0].values
    return BscSysDup2(events, args[0], args[1], serialize_result(events[-1]))
def handle_sys_fcntl(parser, events, no_cancel=False):
    """Build a BscSysFcntl; entry arg 1 is mapped to a FcntlCmd enum member."""
    args = events[0].values
    return BscSysFcntl(events, args[0], FcntlCmd(args[1]), args[2], serialize_result(events[-1], 'return'),
                       no_cancel)
def handle_select(parser, events, no_cancel=False):
    """Build a BscSelect; exit event carries the ready-descriptor count."""
    args = events[0].values
    return BscSelect(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'), no_cancel)
def handle_fsync(parser, events, no_cancel=False):
    """Build a BscFsync from fsync(2) trace events; entry arg 0 is the fd."""
    return BscFsync(events, events[0].values[0], serialize_result(events[-1]), no_cancel)
def handle_setpriority(parser, events):
    """Build a BscSetpriority; entry arg 0 is mapped to a PriorityWhich enum member."""
    args = events[0].values
    return BscSetpriority(events, PriorityWhich(args[0]), args[1], args[2], serialize_result(events[-1]))
def handle_socket(parser, events):
    """Build a BscSocket; domain/type are mapped to stdlib socket enums, exit event carries the fd."""
    args = events[0].values
    return BscSocket(events, socket.AddressFamily(args[0]), socket.SocketKind(args[1]), args[2],
                     serialize_result(events[-1], 'fd'))
def handle_connect(parser, events, no_cancel=False):
    """Build a BscConnect from connect(2) trace events."""
    args = events[0].values
    return BscConnect(events, args[0], args[1], args[2], serialize_result(events[-1]), no_cancel)
def handle_getpriority(parser, events):
    """Build a BscGetpriority; entry arg 0 is mapped to a PriorityWhich enum member."""
    args = events[0].values
    return BscGetpriority(events, PriorityWhich(args[0]), args[1], serialize_result(events[-1], 'priority'))
def handle_bind(parser, events):
    """Build a BscBind from bind(2) trace events."""
    args = events[0].values
    return BscBind(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_setsockopt(parser, events):
    """Build a BscSetsockopt from setsockopt(2) trace events."""
    args = events[0].values
    return BscSetsockopt(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_listen(parser, events):
    """Build a BscListen from listen(2) trace events."""
    args = events[0].values
    return BscListen(events, args[0], args[1], serialize_result(events[-1]))
def handle_sigsuspend(parser, events, no_cancel=False):
    """Build a BscSigsuspend from sigsuspend(2) trace events."""
    return BscSigsuspend(events, events[0].values[0], serialize_result(events[-1]), no_cancel)
def handle_gettimeofday(parser, events):
    """Build a BscGettimeofday from gettimeofday(2) trace events."""
    args = events[0].values
    return BscGettimeofday(events, args[0], args[1], serialize_result(events[-1]))
def handle_getrusage(parser, events):
    """Build a BscGetrusage; `who` is reinterpreted as signed int32 before the enum lookup
    because RUSAGE_CHILDREN is negative but ktrace args arrive unsigned."""
    args = events[0].values
    return BscGetrusage(events, RusageWho(ctypes.c_int32(args[0]).value), args[1], serialize_result(events[-1]))
def handle_getsockopt(parser, events):
    """Build a BscGetsockopt from getsockopt(2) trace events."""
    args = events[0].values
    return BscGetsockopt(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_readv(parser, events, no_cancel=False):
    """Build a BscReadv; exit event carries the byte count."""
    args = events[0].values
    return BscReadv(events, args[0], args[1], args[2], serialize_result(events[-1], 'count'), no_cancel)
def handle_writev(parser, events, no_cancel=False):
    """Build a BscWritev; exit event carries the byte count."""
    args = events[0].values
    return BscWritev(events, args[0], args[1], args[2], serialize_result(events[-1], 'count'), no_cancel)
def handle_settimeofday(parser, events):
    """Build a BscSettimeofday from settimeofday(2) trace events."""
    args = events[0].values
    return BscSettimeofday(events, args[0], args[1], serialize_result(events[-1]))
def handle_fchown(parser, events):
    """Build a BscFchown from fchown(2) trace events."""
    args = events[0].values
    return BscFchown(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_fchmod(parser, events):
    """Build a BscFchmod; mode bits are decoded from entry arg 1."""
    args = events[0].values
    return BscFchmod(events, args[0], serialize_stat_flags(args[1]), serialize_result(events[-1]))
def handle_setreuid(parser, events):
    """Build a BscSetreuid from setreuid(2) trace events."""
    args = events[0].values
    return BscSetreuid(events, args[0], args[1], serialize_result(events[-1]))
def handle_setregid(parser, events):
    """Build a BscSetregid from setregid(2) trace events."""
    args = events[0].values
    return BscSetregid(events, args[0], args[1], serialize_result(events[-1]))
def handle_rename(parser, events):
    """Build a BscRename: first vnode lookup resolves the old path, a second pass
    over the remaining events resolves the new path."""
    old_vnode = parser.parse_vnode(events)
    new_vnode = parser.parse_vnode([e for e in events if e not in old_vnode.ktraces])
    return BscRename(events, old_vnode.path, new_vnode.path, serialize_result(events[-1]))
def handle_sys_flock(parser, events):
    """Build a BscSysFlock; entry arg 1 is a bitmask expanded into FlockOperation members."""
    args = events[0].values
    operations = [op for op in list(FlockOperation) if args[1] & op.value]
    return BscSysFlock(events, args[0], operations, serialize_result(events[-1]))
def handle_mkfifo(parser, events):
    """Build a BscMkfifo; path from the vnode trace, mode bits decoded from entry arg 1."""
    args = events[0].values
    return BscMkfifo(events, parser.parse_vnode(events).path, serialize_stat_flags(args[1]),
                     serialize_result(events[-1]))
def handle_sendto(parser, events, no_cancel=False):
    """Build a BscSendto; exit event carries the byte count."""
    args = events[0].values
    return BscSendto(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'), no_cancel)
def handle_shutdown(parser, events):
    """Build a BscShutdown from shutdown(2) trace events."""
    args = events[0].values
    return BscShutdown(events, args[0], args[1], serialize_result(events[-1]))
def handle_socketpair(parser, events):
    """Build a BscSocketpair; domain/type are mapped to stdlib socket enums."""
    args = events[0].values
    return BscSocketpair(events, socket.AddressFamily(args[0]), socket.SocketKind(args[1]), args[2], args[3],
                         serialize_result(events[-1]))
def handle_mkdir(parser, events):
    """Build a BscMkdir; path from the vnode trace, mode bits decoded from entry arg 1."""
    args = events[0].values
    return BscMkdir(events, parser.parse_vnode(events).path, serialize_stat_flags(args[1]),
                    serialize_result(events[-1]))
def handle_rmdir(parser, events):
    """Build a BscRmdir; target path comes from the vnode trace."""
    return BscRmdir(events, parser.parse_vnode(events).path, serialize_result(events[-1]))
def handle_utimes(parser, events):
    """Build a BscUtimes; target path comes from the vnode trace."""
    args = events[0].values
    return BscUtimes(events, parser.parse_vnode(events).path, args[1], serialize_result(events[-1]))
def handle_futimes(parser, events):
    """Build a BscFutimes from futimes(2) trace events."""
    args = events[0].values
    return BscFutimes(events, args[0], args[1], serialize_result(events[-1]))
def handle_adjtime(parser, events):
    """Build a BscAdjtime from adjtime(2) trace events."""
    args = events[0].values
    return BscAdjtime(events, args[0], args[1], serialize_result(events[-1]))
def handle_gethostuuid(parser, events):
    """Build a BscGethostuuid from gethostuuid(2) trace events."""
    args = events[0].values
    return BscGethostuuid(events, args[0], args[1], serialize_result(events[-1]))
def handle_obs_killpg(parser, events):
    """Build a BscObsKillpg (obsolete killpg) from trace events; entry args are (pgrp, sig)."""
    return BscObsKillpg(events, events[0].values[0], events[0].values[1], serialize_result(events[-1]))
def handle_setsid(parser, events):
    """Build a BscSetsid; exit event carries the new session/group id."""
    return BscSetsid(events, serialize_result(events[-1], 'gid'))
def handle_getpgid(parser, events):
    """Build a BscGetpgid; exit event carries the process-group id."""
    return BscGetpgid(events, events[0].values[0], serialize_result(events[-1], 'gid'))
def handle_setprivexec(parser, events):
    """Build a BscSetprivexec; exit event carries the previous flag value."""
    return BscSetprivexec(events, events[0].values[0], serialize_result(events[-1], 'previous'))
def handle_pread(parser, events, no_cancel=False):
    """Build a BscPread; entry args are (fd, buf, size, offset), exit event carries the count."""
    result = serialize_result(events[-1], 'count')
    args = events[0].values
    return BscPread(events, args[0], args[1], args[2], args[3], result, no_cancel)
def handle_pwrite(parser, events, no_cancel=False):
    """Build a BscPwrite; entry args are (fd, buf, size, offset), exit event carries the count."""
    result = serialize_result(events[-1], 'count')
    args = events[0].values
    return BscPwrite(events, args[0], args[1], args[2], args[3], result, no_cancel)
def handle_nfssvc(parser, events):
    """Build a BscNfssvc from nfssvc(2) trace events."""
    args = events[0].values
    return BscNfssvc(events, args[0], args[1], serialize_result(events[-1]))
def handle_statfs(parser, events):
    """Build a BscStatfs; target path comes from the vnode trace."""
    args = events[0].values
    return BscStatfs(events, parser.parse_vnode(events).path, args[1], serialize_result(events[-1]))
def handle_fstatfs(parser, events):
    """Build a BscFstatfs from fstatfs(2) trace events."""
    args = events[0].values
    return BscFstatfs(events, args[0], args[1], serialize_result(events[-1]))
def handle_unmount(parser, events):
    """Build a BscUnmount; mount-point path comes from the vnode trace."""
    args = events[0].values
    return BscUnmount(events, parser.parse_vnode(events).path, args[1], serialize_result(events[-1]))
def handle_getfh(parser, events):
    """Build a BscGetfh; target path comes from the vnode trace."""
    args = events[0].values
    return BscGetfh(events, parser.parse_vnode(events).path, args[1], serialize_result(events[-1]))
def handle_quotactl(parser, events):
    """Build a BscQuotactl; target path comes from the vnode trace."""
    args = events[0].values
    return BscQuotactl(events, parser.parse_vnode(events).path, args[1], args[2], args[3],
                       serialize_result(events[-1]))
def handle_mount(parser, events):
    """Build a BscMount: first vnode lookup resolves the source path, a second pass
    over the remaining events resolves the mount-point path."""
    src_vnode = parser.parse_vnode(events)
    dst_vnode = parser.parse_vnode([e for e in events if e not in src_vnode.ktraces])
    args = events[0].values
    return BscMount(events, src_vnode.path, dst_vnode.path, args[2], args[3], serialize_result(events[-1]))
def handle_csops(parser, events):
    """Build a BscCsops; entry arg 1 is mapped to a CsopsOps enum member."""
    args = events[0].values
    return BscCsops(events, args[0], CsopsOps(args[1]), args[2], args[3], serialize_result(events[-1]))
def handle_csops_audittoken(parser, events):
    """Build a BscCsopsAudittoken; entry arg 1 is mapped to a CsopsOps enum member."""
    args = events[0].values
    return BscCsopsAudittoken(events, args[0], CsopsOps(args[1]), args[2], args[3], serialize_result(events[-1]))
def handle_waitid(parser, events, no_cancel=False):
    """Build a BscWaitid from waitid(2) trace events."""
    args = events[0].values
    return BscWaitid(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]), no_cancel)
def handle_kdebug_typefilter(parser, events):
    """Build a BscKdebugTypefilter from kdebug_typefilter trace events."""
    args = events[0].values
    return BscKdebugTypefilter(events, args[0], args[1], serialize_result(events[-1]))
def handle_setgid(parser, events):
    """Build a BscSetgid from setgid(2) trace events."""
    args = events[0].values
    return BscSetgid(events, args[0], serialize_result(events[-1]))
def handle_setegid(parser, events):
    """Build a BscSetegid from setegid(2) trace events."""
    args = events[0].values
    return BscSetegid(events, args[0], serialize_result(events[-1]))
def handle_seteuid(parser, events):
    """Build a BscSeteuid from seteuid(2) trace events."""
    args = events[0].values
    return BscSeteuid(events, args[0], serialize_result(events[-1]))
def handle_thread_selfcounts(parser, events):
    """Build a BscThreadSelfcounts from thread_selfcounts trace events."""
    args = events[0].values
    return BscThreadSelfcounts(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_fdatasync(parser, events):
    """Build a BscFdatasync from fdatasync(2) trace events."""
    args = events[0].values
    return BscFdatasync(events, args[0], serialize_result(events[-1]))
def handle_pathconf(parser, events):
    """Build a BscPathconf; target path comes from the vnode trace."""
    args = events[0].values
    return BscPathconf(events, parser.parse_vnode(events).path, args[1], serialize_result(events[-1], 'return'))
def handle_sys_fpathconf(parser, events):
    """Build a BscSysFpathconf from fpathconf(2) trace events."""
    args = events[0].values
    return BscSysFpathconf(events, args[0], args[1], serialize_result(events[-1], 'return'))
def handle_getrlimit(parser, events):
    """Build a BscGetrlimit from getrlimit(2) trace events."""
    args = events[0].values
    return BscGetrlimit(events, args[0], args[1], serialize_result(events[-1]))
def handle_setrlimit(parser, events):
    """Build a BscSetrlimit from setrlimit(2) trace events."""
    args = events[0].values
    return BscSetrlimit(events, args[0], args[1], serialize_result(events[-1]))
def handle_getdirentries(parser, events):
    """Build a BscGetdirentries; exit event carries the byte count."""
    args = events[0].values
    return BscGetdirentries(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'))
def handle_mmap(parser, events):
    """Build a BscMmap; the returned mapping address is formatted as hex via the result serializer."""
    args = events[0].values
    return BscMmap(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count', hex))
def handle_lseek(parser, events):
    """Build a BscLseek; offset and return value are reinterpreted as signed int64
    because ktrace args arrive unsigned but lseek offsets can be negative."""
    args = events[0].values
    return BscLseek(events, args[0], ctypes.c_int64(args[1]).value, args[2],
                    serialize_result(events[-1], 'count', lambda x: ctypes.c_int64(x).value))
def handle_truncate(parser, events):
    """Build a BscTruncate; target path comes from the vnode trace."""
    args = events[0].values
    return BscTruncate(events, parser.parse_vnode(events).path, args[1], serialize_result(events[-1]))
def handle_ftruncate(parser, events):
    """Build a BscFtruncate from ftruncate(2) trace events."""
    args = events[0].values
    return BscFtruncate(events, args[0], args[1], serialize_result(events[-1]))
def handle_sysctl(parser, events):
    """Build a BscSysctl from sysctl(2) trace events."""
    args = events[0].values
    return BscSysctl(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_mlock(parser, events):
    """Build a BscMlock from mlock(2) trace events."""
    args = events[0].values
    return BscMlock(events, args[0], args[1], serialize_result(events[-1]))
def handle_munlock(parser, events):
    """Build a BscMunlock from munlock(2) trace events."""
    args = events[0].values
    return BscMunlock(events, args[0], args[1], serialize_result(events[-1]))
def handle_undelete(parser, events):
    """Build a BscUndelete; target path comes from the vnode trace."""
    return BscUndelete(events, parser.parse_vnode(events).path, serialize_result(events[-1]))
def handle_open_dprotected_np(parser, events):
    """Build a BscOpenDprotectedNp; path from the vnode trace, open flags decoded,
    exit event carries the fd."""
    args = events[0].values
    return BscOpenDprotectedNp(events, parser.parse_vnode(events).path, serialize_open_flags(args[1]), args[2],
                               args[3], serialize_result(events[-1], 'fd'))
def handle_getattrlist(parser, events):
    """Build a BscGetattrlist; target path comes from the vnode trace."""
    args = events[0].values
    return BscGetattrlist(events, parser.parse_vnode(events).path, args[1], args[2], args[3],
                          serialize_result(events[-1]))
def handle_setattrlist(parser, events):
    """Build a BscSetattrlist; target path comes from the vnode trace."""
    args = events[0].values
    return BscSetattrlist(events, parser.parse_vnode(events).path, args[1], args[2], args[3],
                          serialize_result(events[-1]))
def handle_getdirentriesattr(parser, events):
    """Build a BscGetdirentriesattr; the exit value is labelled 'last entry'."""
    args = events[0].values
    return BscGetdirentriesattr(events, args[0], args[1], args[2], args[3],
                                serialize_result(events[-1], 'last entry'))
def handle_exchangedata(parser, events):
    """Build a BscExchangedata: two successive vnode lookups resolve both exchanged paths."""
    vnode1 = parser.parse_vnode(events)
    vnode2 = parser.parse_vnode([e for e in events if e not in vnode1.ktraces])
    args = events[0].values
    return BscExchangedata(events, vnode1.path, vnode2.path, args[2], serialize_result(events[-1]))
def handle_searchfs(parser, events):
    """Build a BscSearchfs; search root path comes from the vnode trace."""
    vnode = parser.parse_vnode(events)
    args = events[0].values
    return BscSearchfs(events, vnode.path, args[1], args[2], args[3], serialize_result(events[-1]))
def handle_fgetattrlist(parser, events):
    """Build a BscFgetattrlist from fgetattrlist(2) trace events."""
    args = events[0].values
    return BscFgetattrlist(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_fsetattrlist(parser, events):
    """Build a BscFsetattrlist from fsetattrlist(2) trace events."""
    args = events[0].values
    return BscFsetattrlist(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_poll(parser, events, no_cancel=False):
    """Build a BscPoll; exit event carries the ready-descriptor count."""
    args = events[0].values
    return BscPoll(events, args[0], args[1], args[2], serialize_result(events[-1], 'count'), no_cancel)
# Extended-attribute, fsctl and SysV IPC handlers.
def handle_getxattr(parser, events):
    """Decode a getxattr trace into a BscGetxattr record."""
    args = events[0].values
    return BscGetxattr(events, parser.parse_vnode(events).path, args[1], args[2], args[3],
                       serialize_result(events[-1], 'count'))
def handle_fgetxattr(parser, events):
    args = events[0].values
    return BscFgetxattr(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'))
def handle_setxattr(parser, events):
    args = events[0].values
    return BscSetxattr(events, parser.parse_vnode(events).path, args[1], args[2], args[3],
                       serialize_result(events[-1]))
def handle_fsetxattr(parser, events):
    args = events[0].values
    return BscFsetxattr(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_removexattr(parser, events):
    args = events[0].values
    return BscRemovexattr(events, parser.parse_vnode(events).path, args[1], args[2], serialize_result(events[-1]))
def handle_fremovexattr(parser, events):
    args = events[0].values
    return BscFremovexattr(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_listxattr(parser, events):
    args = events[0].values
    return BscListxattr(events, parser.parse_vnode(events).path, args[1], args[2], args[3],
                        serialize_result(events[-1], 'count'))
def handle_flistxattr(parser, events):
    args = events[0].values
    return BscFlistxattr(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'))
def handle_fsctl(parser, events):
    args = events[0].values
    return BscFsctl(events, parser.parse_vnode(events).path, args[1], args[2], args[3], serialize_result(events[-1]))
def handle_initgroups(parser, events):
    args = events[0].values
    return BscInitgroups(events, args[0], args[1], serialize_result(events[-1]))
def handle_posix_spawn(parser, events):
    """Decode posix_spawn; stdio paths are present only in the long vnode form."""
    vnodes = parser.parse_vnodes(events)
    # With 6+ vnodes the first three are the spawned process' stdio paths,
    # followed by the executable path; otherwise only the executable is traced.
    if len(vnodes) >= 6:
        stdin, stdout, stderr = vnodes[0].path, vnodes[1].path, vnodes[2].path
        path = vnodes[3].path
    else:
        stdin, stdout, stderr = None, None, None
        path = vnodes[0].path
    args = events[0].values
    return BscPosixSpawn(events, args[0], path, args[2], args[3], stdin, stdout, stderr,
                         serialize_result(events[-1]))
def handle_ffsctl(parser, events):
    args = events[0].values
    return BscFfsctl(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_nfsclnt(parser, events):
    args = events[0].values
    return BscNfsclnt(events, args[0], args[1], serialize_result(events[-1]))
def handle_fhopen(parser, events):
    args = events[0].values
    return BscFhopen(events, args[0], args[1], serialize_result(events[-1]))
def handle_minherit(parser, events):
    args = events[0].values
    return BscMinherit(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_semsys(parser, events):
    args = events[0].values
    return BscSemsys(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_msgsys(parser, events):
    args = events[0].values
    return BscMsgsys(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_shmsys(parser, events):
    args = events[0].values
    return BscShmsys(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
# SysV semaphore/message/shared-memory and POSIX shm/sem handlers.
def handle_semctl(parser, events):
    """Decode a semctl trace into a BscSemctl record."""
    args = events[0].values
    return BscSemctl(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'return'))
def handle_semget(parser, events):
    args = events[0].values
    return BscSemget(events, args[0], args[1], args[2], serialize_result(events[-1], 'id'))
def handle_semop(parser, events):
    args = events[0].values
    return BscSemop(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_msgctl(parser, events):
    args = events[0].values
    return BscMsgctl(events, args[0], args[1], args[2], serialize_result(events[-1], 'return'))
def handle_msgget(parser, events):
    args = events[0].values
    return BscMsgget(events, args[0], args[1], serialize_result(events[-1], 'id'))
def handle_msgsnd(parser, events, no_cancel=False):
    args = events[0].values
    return BscMsgsnd(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'), no_cancel)
def handle_msgrcv(parser, events, no_cancel=False):
    args = events[0].values
    return BscMsgrcv(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'), no_cancel)
def handle_shmat(parser, events):
    args = events[0].values
    return BscShmat(events, args[0], args[1], args[2], serialize_result(events[-1], 'address'))
def handle_shmctl(parser, events):
    args = events[0].values
    return BscShmctl(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_shmdt(parser, events):
    args = events[0].values
    return BscShmdt(events, args[0], serialize_result(events[-1]))
def handle_shmget(parser, events):
    args = events[0].values
    return BscShmget(events, args[0], args[1], args[2], serialize_result(events[-1], 'id'))
def handle_shm_open(parser, events):
    args = events[0].values
    oflags = serialize_open_flags(args[1])
    # The mode argument is only meaningful when O_CREAT was requested.
    sflags = serialize_stat_flags(args[2]) if BscOpenFlags.O_CREAT in oflags else []
    return BscShmOpen(events, args[0], oflags, sflags, serialize_result(events[-1], 'fd'))
def handle_shm_unlink(parser, events):
    return BscShmUnlink(events, events[0].values[0], serialize_result(events[-1]))
def handle_sem_open(parser, events):
    args = events[0].values
    oflags = serialize_open_flags(args[1])
    # As with shm_open: mode flags apply only together with O_CREAT.
    sflags = serialize_stat_flags(args[2]) if BscOpenFlags.O_CREAT in oflags else []
    return BscSemOpen(events, args[0], oflags, sflags, serialize_result(events[-1], 'fd'))
def handle_sem_close(parser, events):
    args = events[0].values
    return BscSemClose(events, args[0], serialize_result(events[-1]))
def handle_sem_unlink(parser, events):
    args = events[0].values
    return BscSemUnlink(events, args[0], serialize_result(events[-1]))
def handle_sem_wait(parser, events, no_cancel=False):
    args = events[0].values
    return BscSemWait(events, args[0], serialize_result(events[-1]), no_cancel)
def handle_sem_trywait(parser, events):
    args = events[0].values
    return BscSemTrywait(events, args[0], serialize_result(events[-1]))
def handle_sem_post(parser, events):
    args = events[0].values
    return BscSemPost(events, args[0], serialize_result(events[-1]))
# sysctl, psynch (pthread kernel support) and process-info handlers.
def handle_sys_sysctlbyname(parser, events):
    """Decode a sysctlbyname trace into a BscSysctlbyname record."""
    args = events[0].values
    return BscSysctlbyname(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_access_extended(parser, events):
    args = events[0].values
    return BscAccessExtended(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_gettid(parser, events):
    args = events[0].values
    return BscGettid(events, args[0], args[1], serialize_result(events[-1]))
def handle_shared_region_check_np(parser, events):
    args = events[0].values
    return BscSharedRegionCheckNp(events, args[0], serialize_result(events[-1]))
def handle_psynch_mutexwait(parser, events):
    args = events[0].values
    return BscPsynchMutexwait(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_psynch_mutexdrop(parser, events):
    args = events[0].values
    return BscPsynchMutexdrop(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_psynch_cvbroad(parser, events):
    args = events[0].values
    return BscPsynchCvbroad(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_psynch_cvsignal(parser, events):
    args = events[0].values
    return BscPsynchCvsignal(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_psynch_cvwait(parser, events):
    args = events[0].values
    return BscPsynchCvwait(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_getsid(parser, events):
    args = events[0].values
    return BscGetsid(events, args[0], serialize_result(events[-1], 'sid'))
def handle_psynch_cvclrprepost(parser, events):
    args = events[0].values
    return BscPsynchCvclrprepost(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_iopolicysys(parser, events):
    args = events[0].values
    return BscIopolicysys(events, args[0], args[1], serialize_result(events[-1], 'return'))
def handle_process_policy(parser, events):
    args = events[0].values
    return BscProcessPolicy(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_mlockall(parser, events):
    return BscMlockall(events, events[0].values[0], serialize_result(events[-1]))
def handle_munlockall(parser, events):
    return BscMunlockall(events, serialize_result(events[-1]))
def handle_issetugid(parser, events):
    return BscIssetugid(events, serialize_result(events[-1], 'return', bool))
def handle_pthread_sigmask(parser, events):
    args = events[0].values
    return BscPthreadSigmask(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_disable_threadsignal(parser, events):
    return BscDisableThreadsignal(events, events[0].values[0], serialize_result(events[-1]))
def handle_semwait_signal(parser, events, no_cancel=False):
    args = events[0].values
    return BscSemwaitSignal(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]), no_cancel)
def handle_proc_info(parser, events):
    # The first argument selects the proc_info sub-call; decode it via the enum.
    args = events[0].values
    return BscProcInfo(events, ProcInfoCall(args[0]), args[1], args[2], args[3], serialize_result(events[-1]))
def handle_sendfile(parser, events):
    args = events[0].values
    return BscSendfile(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
# 64-bit stat family, audit, kqueue/kevent and workqueue handlers.
def handle_stat64(parser, events):
    """Decode a stat64 trace into a BscStat64 record."""
    return BscStat64(events, parser.parse_vnode(events).path, events[0].values[1], serialize_result(events[-1]))
def handle_sys_fstat64(parser, events):
    return BscSysFstat64(events, events[0].values[0], serialize_result(events[-1]))
def handle_lstat64(parser, events):
    return BscLstat64(events, parser.parse_vnode(events).path, serialize_result(events[-1]))
def handle_getdirentries64(parser, events):
    args = events[0].values
    return BscGetdirentries64(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'))
def handle_statfs64(parser, events):
    args = events[0].values
    return BscStatfs64(events, parser.parse_vnode(events).path, args[1], serialize_result(events[-1]))
def handle_fstatfs64(parser, events):
    args = events[0].values
    return BscFstatfs64(events, args[0], args[1], serialize_result(events[-1]))
def handle_getfsstat64(parser, events):
    args = events[0].values
    return BscGetfsstat64(events, args[0], args[1], args[2], serialize_result(events[-1], 'count'))
def handle_pthread_fchdir(parser, events):
    args = events[0].values
    return BscPthreadFchdir(events, args[0], serialize_result(events[-1]))
def handle_audit(parser, events):
    args = events[0].values
    return BscAudit(events, args[0], args[1], serialize_result(events[-1]))
def handle_auditon(parser, events):
    args = events[0].values
    return BscAuditon(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_getauid(parser, events):
    args = events[0].values
    return BscGetauid(events, args[0], serialize_result(events[-1]))
def handle_setauid(parser, events):
    args = events[0].values
    return BscSetauid(events, args[0], serialize_result(events[-1]))
def handle_bsdthread_create(parser, events):
    # The interesting value (from the return event) is the 4th field.
    return BscBsdthreadCreate(events, events[-1].values[3])
def handle_kqueue(parser, events):
    return BscKqueue(events, serialize_result(events[-1], 'fd'))
def handle_kevent(parser, events):
    args = events[0].values
    return BscKevent(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'))
def handle_lchown(parser, events):
    args = events[0].values
    return BscLchown(events, parser.parse_vnode(events).path, args[1], args[2], serialize_result(events[-1]))
def handle_bsdthread_register(parser, events):
    args = events[0].values
    return BscBsdthreadRegister(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_workq_open(parser, events):
    return BscWorkqOpen(events, serialize_result(events[-1]))
def handle_workq_kernreturn(parser, events):
    args = events[0].values
    return BscWorkqKernreturn(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'return'))
def handle_kevent64(parser, events):
    args = events[0].values
    return BscKevent64(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'))
def handle_thread_selfid(parser, events):
    return BscThreadSelfid(events, serialize_result(events[-1], 'tid'))
def handle_kevent_qos(parser, events):
    args = events[0].values
    return BscKeventQos(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'))
def handle_kevent_id(parser, events):
    args = events[0].values
    return BscKeventId(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'))
def handle_mac_syscall(parser, events):
    args = events[0].values
    return BscMacSyscall(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_pselect(parser, events, no_cancel=False):
    args = events[0].values
    return BscPselect(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'), no_cancel)
# fileport, pid-control, guarded-fd and socket-extension handlers.
def handle_fsgetpath(parser, events):
    """Decode an fsgetpath trace into a BscFsgetpath record."""
    args = events[0].values
    return BscFsgetpath(events, args[0], args[1], args[2], args[3], parser.parse_vnode(events).path,
                        serialize_result(events[-1], 'count'))
def handle_sys_fileport_makeport(parser, events):
    args = events[0].values
    return BscSysFileportMakeport(events, args[0], args[1], serialize_result(events[-1]))
def handle_sys_fileport_makefd(parser, events):
    args = events[0].values
    return BscSysFileportMakefd(events, args[0], serialize_result(events[-1], 'fd'))
def handle_audit_session_port(parser, events):
    args = events[0].values
    return BscAuditSessionPort(events, args[0], args[1], serialize_result(events[-1]))
def handle_pid_suspend(parser, events):
    args = events[0].values
    return BscPidSuspend(events, args[0], serialize_result(events[-1]))
def handle_pid_resume(parser, events):
    args = events[0].values
    return BscPidResume(events, args[0], serialize_result(events[-1]))
def handle_pid_hibernate(parser, events):
    args = events[0].values
    return BscPidHibernate(events, args[0], serialize_result(events[-1]))
def handle_pid_shutdown_sockets(parser, events):
    args = events[0].values
    return BscPidShutdownSockets(events, args[0], args[1], serialize_result(events[-1]))
def handle_shared_region_map_and_slide_np(parser, events):
    args = events[0].values
    return BscSharedRegionMapAndSlideNp(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_kas_info(parser, events):
    args = events[0].values
    return BscKasInfo(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_memorystatus_control(parser, events):
    args = events[0].values
    return BscMemorystatusControl(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_guarded_open_np(parser, events):
    args = events[0].values
    return BscGuardedOpenNp(events, parser.parse_vnode(events).path, args[1], args[2], serialize_open_flags(args[3]),
                            serialize_result(events[-1], 'fd'))
def handle_guarded_close_np(parser, events):
    args = events[0].values
    return BscGuardedCloseNp(events, args[0], args[1], serialize_result(events[-1]))
def handle_guarded_kqueue_np(parser, events):
    args = events[0].values
    return BscGuardedKqueueNp(events, args[0], args[1], serialize_result(events[-1]))
def handle_change_fdguard_np(parser, events):
    args = events[0].values
    return BscChangeFdguardNp(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_usrctl(parser, events):
    args = events[0].values
    return BscUsrctl(events, args[0], serialize_result(events[-1]))
def handle_proc_rlimit_control(parser, events):
    args = events[0].values
    return BscProcRlimitControl(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_connectx(parser, events):
    args = events[0].values
    return BscConnectx(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_disconnectx(parser, events):
    args = events[0].values
    return BscDisconnectx(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_peeloff(parser, events):
    args = events[0].values
    return BscPeeloff(events, args[0], args[1], serialize_result(events[-1]))
def handle_socket_delegate(parser, events):
    # Decode the raw domain/type values through the socket module enums.
    args = events[0].values
    return BscSocketDelegate(events, socket.AddressFamily(args[0]), socket.SocketKind(args[1]), args[2], args[3],
                             serialize_result(events[-1], 'fd'))
# Telemetry/policy handlers and the *at() family (dirfd-relative syscalls).
def handle_telemetry(parser, events):
    """Decode a telemetry trace into a BscTelemetry record."""
    args = events[0].values
    return BscTelemetry(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_proc_uuid_policy(parser, events):
    args = events[0].values
    return BscProcUuidPolicy(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_memorystatus_get_level(parser, events):
    return BscMemorystatusGetLevel(events, events[0].values[0], serialize_result(events[-1]))
def handle_system_override(parser, events):
    args = events[0].values
    return BscSystemOverride(events, args[0], args[1], serialize_result(events[-1]))
def handle_vfs_purge(parser, events):
    return BscVfsPurge(events, serialize_result(events[-1]))
def handle_sfi_ctl(parser, events):
    args = events[0].values
    return BscSfiCtl(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_sfi_pidctl(parser, events):
    args = events[0].values
    return BscSfiPidctl(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_coalition(parser, events):
    args = events[0].values
    return BscCoalition(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_coalition_info(parser, events):
    args = events[0].values
    return BscCoalitionInfo(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_necp_match_policy(parser, events):
    args = events[0].values
    return BscNecpMatchPolicy(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_getattrlistbulk(parser, events):
    args = events[0].values
    return BscGetattrlistbulk(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'))
def handle_clonefileat(parser, events):
    """Decode clonefileat, which carries both a source and a destination path."""
    src = parser.parse_vnode(events)
    # Exclude the source vnode's events so the second parse yields the destination.
    dst = parser.parse_vnode([e for e in events if e not in src.ktraces])
    args = events[0].values
    return BscClonefileat(events, args[0], src.path, args[2], dst.path, serialize_result(events[-1]))
def handle_openat(parser, events, no_cancel=False):
    vnode = parser.parse_vnode(events)
    call_flags = serialize_open_flags(events[0].values[2])
    return BscOpenat(events, events[0].values[0], vnode.path, call_flags, serialize_result(events[-1], 'fd'),
                     no_cancel)
def handle_renameat(parser, events):
    nodes = parser.parse_vnodes(events)
    args = events[0].values
    return BscRenameat(events, args[0], nodes[0].path, args[2], nodes[1].path, serialize_result(events[-1]))
def handle_faccessat(parser, events):
    vnode = parser.parse_vnode(events)
    args = events[0].values
    amode = serialize_access_flags(args[2])
    return BscFaccessat(events, args[0], vnode.path, amode, args[3], serialize_result(events[-1]))
def handle_fchmodat(parser, events):
    vnode = parser.parse_vnode(events)
    args = events[0].values
    mode = serialize_stat_flags(args[2])
    return BscFchmodat(events, args[0], vnode.path, mode, args[3], serialize_result(events[-1]))
def handle_fchownat(parser, events):
    vnode = parser.parse_vnode(events)
    args = events[0].values
    return BscFchownat(events, args[0], vnode.path, args[2], args[3], serialize_result(events[-1]))
def handle_fstatat(parser, events):
    vnode = parser.parse_vnode(events)
    args = events[0].values
    return BscFstatat(events, args[0], vnode.path, args[2], args[3], serialize_result(events[-1]))
def handle_fstatat64(parser, events):
    vnode = parser.parse_vnode(events)
    args = events[0].values
    return BscFstatat64(events, args[0], vnode.path, args[2], args[3], serialize_result(events[-1]))
# Remaining *at() handlers plus openbyid/guarded-write handlers.
def handle_linkat(parser, events):
    """Decode linkat; falls back to empty paths when no vnodes were traced."""
    nodes = parser.parse_vnodes(events)
    path1, path2 = (nodes[0].path, nodes[1].path) if nodes else ('', '')
    args = events[0].values
    return BscLinkat(events, args[0], path1, args[2], path2, serialize_result(events[-1]))
def handle_unlinkat(parser, events):
    vnode = parser.parse_vnode(events)
    args = events[0].values
    return BscUnlinkat(events, args[0], vnode.path, args[2], serialize_result(events[-1]))
def handle_readlinkat(parser, events):
    vnode = parser.parse_vnode(events)
    args = events[0].values
    return BscReadlinkat(events, args[0], vnode.path, args[2], args[3], serialize_result(events[-1], 'count'))
def handle_symlinkat(parser, events):
    nodes = parser.parse_vnodes(events)
    # The link target (oldpath) is only traced when two vnodes are present;
    # the new path is always the last traced vnode.
    oldpath = nodes[0].path if len(nodes) > 1 else ''
    args = events[0].values
    return BscSymlinkat(events, oldpath, args[1], nodes[-1].path, serialize_result(events[-1]))
def handle_mkdirat(parser, events):
    vnode = parser.parse_vnode(events)
    args = events[0].values
    return BscMkdirat(events, args[0], vnode.path, serialize_stat_flags(args[2]), serialize_result(events[-1]))
def handle_getattrlistat(parser, events):
    vnode = parser.parse_vnode(events)
    args = events[0].values
    return BscGetattrlistat(events, args[0], vnode.path, args[2], args[3], serialize_result(events[-1]))
def handle_proc_trace_log(parser, events):
    args = events[0].values
    return BscProcTraceLog(events, args[0], args[1], serialize_result(events[-1]))
def handle_bsdthread_ctl(parser, events):
    args = events[0].values
    return BscBsdthreadCtl(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_openbyid_np(parser, events):
    args = events[0].values
    return BscOpenbyidNp(events, args[0], args[1], serialize_open_flags(args[2]),
                         serialize_result(events[-1], 'fd'))
def handle_recvmsg_x(parser, events):
    args = events[0].values
    return BscRecvmsgX(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'))
def handle_sendmsg_x(parser, events):
    args = events[0].values
    return BscSendmsgX(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'))
def handle_thread_selfusage(parser, events):
    return BscThreadSelfusage(events, serialize_result(events[-1], 'runtime'))
def handle_csrctl(parser, events):
    args = events[0].values
    return BscCsrctl(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_guarded_open_dprotected_np(parser, events):
    vnode = parser.parse_vnode(events)
    args = events[0].values
    return BscGuardedOpenDprotectedNp(events, vnode.path, args[1], args[2], serialize_open_flags(args[3]),
                                      serialize_result(events[-1], 'fd'))
def handle_guarded_write_np(parser, events):
    args = events[0].values
    return BscGuardedWriteNp(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'))
def handle_guarded_pwrite_np(parser, events):
    args = events[0].values
    return BscGuardedPwriteNp(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'))
def handle_guarded_writev_np(parser, events):
    args = events[0].values
    return BscGuardedWritevNp(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'))
# Snapshot/eventlink/NECP/nexus/channel handlers.
def handle_renameatx_np(parser, events):
    """Decode renameatx_np; falls back to empty paths when no vnodes were traced."""
    nodes = parser.parse_vnodes(events)
    path1, path2 = (nodes[0].path, nodes[1].path) if nodes else ('', '')
    args = events[0].values
    return BscRenameatxNp(events, args[0], path1, args[2], path2, serialize_result(events[-1]))
def handle_mremap_encrypted(parser, events):
    args = events[0].values
    return BscMremapEncrypted(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_netagent_trigger(parser, events):
    args = events[0].values
    return BscNetagentTrigger(events, args[0], args[1], serialize_result(events[-1]))
def handle_stack_snapshot_with_config(parser, events):
    args = events[0].values
    return BscStackSnapshotWithConfig(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_microstackshot(parser, events):
    args = events[0].values
    return BscMicrostackshot(events, args[0], args[1], args[2], serialize_result(events[-1], 'count'))
def handle_grab_pgo_data(parser, events):
    args = events[0].values
    return BscGrabPgoData(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'count'))
def handle_persona(parser, events):
    args = events[0].values
    return BscPersona(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_mach_eventlink_signal(parser, events):
    args = events[0].values
    return BscMachEventlinkSignal(events, args[0], args[1], serialize_result(events[-1]))
def handle_mach_eventlink_wait_until(parser, events):
    args = events[0].values
    return BscMachEventlinkWaitUntil(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_mach_eventlink_signal_wait_until(parser, events):
    args = events[0].values
    return BscMachEventlinkSignalWaitUntil(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_work_interval_ctl(parser, events):
    args = events[0].values
    return BscWorkIntervalCtl(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_getentropy(parser, events):
    args = events[0].values
    return BscGetentropy(events, args[0], args[1], serialize_result(events[-1]))
def handle_necp_open(parser, events):
    args = events[0].values
    return BscNecpOpen(events, args[0], serialize_result(events[-1], 'fd'))
def handle_necp_client_action(parser, events):
    args = events[0].values
    return BscNecpClientAction(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'return'))
# The nexus/channel syscalls only have their return value decoded.
def handle_nexus_open(parser, events):
    return BscNexusOpen(events, serialize_result(events[-1], 'fd'))
def handle_nexus_register(parser, events):
    return BscNexusRegister(events, serialize_result(events[-1]))
def handle_nexus_deregister(parser, events):
    return BscNexusDeregister(events, serialize_result(events[-1]))
def handle_nexus_create(parser, events):
    return BscNexusCreate(events, serialize_result(events[-1]))
def handle_nexus_destroy(parser, events):
    return BscNexusDestroy(events, serialize_result(events[-1]))
def handle_nexus_get_opt(parser, events):
    return BscNexusGetOpt(events, serialize_result(events[-1]))
def handle_nexus_set_opt(parser, events):
    return BscNexusSetOpt(events, serialize_result(events[-1]))
def handle_channel_open(parser, events):
    return BscChannelOpen(events, serialize_result(events[-1]))
def handle_channel_get_info(parser, events):
    return BscChannelGetInfo(events, serialize_result(events[-1]))
def handle_channel_sync(parser, events):
    return BscChannelSync(events, serialize_result(events[-1]))
def handle_channel_get_opt(parser, events):
    return BscChannelGetOpt(events, serialize_result(events[-1]))
def handle_channel_set_opt(parser, events):
    return BscChannelSetOpt(events, serialize_result(events[-1]))
# ulock, fs_snapshot, NECP-session and task-port handlers.
def handle_ulock_wait(parser, events):
    """Decode a ulock_wait trace into a BscUlockWait record."""
    args = events[0].values
    return BscUlockWait(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'return'))
def handle_ulock_wake(parser, events):
    args = events[0].values
    return BscUlockWake(events, args[0], args[1], args[2], serialize_result(events[-1], 'return'))
def handle_fclonefileat(parser, events):
    args = events[0].values
    return BscFclonefileat(events, args[0], args[1], parser.parse_vnode(events).path, args[3],
                           serialize_result(events[-1]))
def handle_fs_snapshot(parser, events):
    nodes = parser.parse_vnodes(events)
    # Some fs_snapshot operations carry a second name; it may be absent.
    name2 = nodes[1].path if len(nodes) > 1 else ''
    args = events[0].values
    return BscFsSnapshot(events, FsSnapshotOp(args[0]), args[1], nodes[0].path, name2, serialize_result(events[-1]))
def handle_terminate_with_payload(parser, events):
    args = events[0].values
    return BscTerminateWithPayload(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_abort_with_payload(parser, events):
    # abort_with_payload does not return, hence no serialize_result here.
    args = events[0].values
    return BscAbortWithPayload(events, args[0], args[1], args[2], args[3])
def handle_necp_session_open(parser, events):
    args = events[0].values
    return BscNecpSessionOpen(events, args[0], serialize_result(events[-1], 'fd'))
def handle_necp_session_action(parser, events):
    args = events[0].values
    return BscNecpSessionAction(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_setattrlistat(parser, events):
    args = events[0].values
    return BscSetattrlistat(events, args[0], parser.parse_vnode(events).path, args[2], args[3],
                            serialize_result(events[-1]))
def handle_net_qos_guideline(parser, events):
    args = events[0].values
    return BscNetQosGuideline(events, args[0], args[1], serialize_result(events[-1], 'background'))
def handle_fmount(parser, events):
    args = events[0].values
    return BscFmount(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_ntp_adjtime(parser, events):
    args = events[0].values
    return BscNtpAdjtime(events, args[0], serialize_result(events[-1], 'return'))
def handle_ntp_gettime(parser, events):
    args = events[0].values
    return BscNtpGettime(events, args[0], serialize_result(events[-1]))
def handle_os_fault_with_payload(parser, events):
    args = events[0].values
    return BscOsFaultWithPayload(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_kqueue_workloop_ctl(parser, events):
    args = events[0].values
    return BscKqueueWorkloopCtl(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_mach_bridge_remote_time(parser, events):
    args = events[0].values
    return BscMachBridgeRemoteTime(events, args[0], serialize_result(events[-1]))
def handle_coalition_ledger(parser, events):
    args = events[0].values
    return BscCoalitionLedger(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_log_data(parser, events):
    args = events[0].values
    return BscLogData(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_memorystatus_available_memory(parser, events):
    return BscMemorystatusAvailableMemory(events, serialize_result(events[-1], 'count'))
def handle_shared_region_map_and_slide_2_np(parser, events):
    args = events[0].values
    return BscSharedRegionMapAndSlide2Np(events, args[0], args[1], args[2], args[3], serialize_result(events[-1]))
def handle_pivot_root(parser, events):
    """Decode pivot_root; falls back to empty paths when no vnodes were traced."""
    nodes = parser.parse_vnodes(events)
    path1, path2 = (nodes[0].path, nodes[1].path) if nodes else ('', '')
    return BscPivotRoot(events, path1, path2, serialize_result(events[-1]))
def handle_task_inspect_for_pid(parser, events):
    args = events[0].values
    return BscTaskInspectForPid(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_task_read_for_pid(parser, events):
    args = events[0].values
    return BscTaskReadForPid(events, args[0], args[1], args[2], serialize_result(events[-1]))
def handle_sys_preadv(parser, events, no_cancel=False):
    """Decode a preadv trace: preadv(fd, iov, iovcnt, offset) -> byte count.

    The offset is reinterpreted as a signed 64-bit value because ktrace
    records the raw register content unsigned.
    """
    args = events[0].values
    # BUGFIX: the file offset is the 4th syscall argument (args[3]);
    # args[0] is the fd and was previously passed here by mistake.
    return BscSysPreadv(events, args[0], args[1], args[2], ctypes.c_int64(args[3]).value,
                        serialize_result(events[-1], 'count'), no_cancel)
def handle_sys_pwritev(parser, events, no_cancel=False):
    """Decode a pwritev trace: pwritev(fd, iov, iovcnt, offset) -> byte count.

    The offset is reinterpreted as a signed 64-bit value because ktrace
    records the raw register content unsigned.
    """
    args = events[0].values
    # BUGFIX: the file offset is the 4th syscall argument (args[3]);
    # args[0] is the fd and was previously passed here by mistake.
    return BscSysPwritev(events, args[0], args[1], args[2], ctypes.c_int64(args[3]).value,
                         serialize_result(events[-1], 'count'), no_cancel)
def handle_ulock_wait2(parser, events):
args = events[0].values
return BscUlockWait2(events, args[0], args[1], args[2], args[3], serialize_result(events[-1], 'return'))
def handle_proc_info_extended_id(parser, events):
    """Build a BscProcInfoExtendedId from a proc_info_extended_id() trace."""
    vals = events[0].values
    return BscProcInfoExtendedId(events, vals[0], vals[1], vals[2], vals[3],
                                 serialize_result(events[-1]))
# Dispatch table mapping a kdebug BSC_* event class name to the handler that
# builds the corresponding Bsc* syscall representation from its event list.
handlers = {
    'BSC_read': handle_read,
    'BSC_write': handle_write,
    'BSC_open': handle_open,
    'BSC_sys_close': handle_sys_close,
    'BSC_link': handle_link,
    'BSC_unlink': handle_unlink,
    'BSC_chdir': handle_chdir,
    'BSC_fchdir': handle_fchdir,
    'BSC_mknod': handle_mknod,
    'BSC_chmod': handle_chmod,
    'BSC_chown': handle_chown,
    'BSC_getpid': handle_getpid,
    'BSC_setuid': handle_setuid,
    'BSC_getuid': handle_getuid,
    'BSC_geteuid': handle_geteuid,
    'BSC_recvmsg': handle_recvmsg,
    'BSC_sendmsg': handle_sendmsg,
    'BSC_recvfrom': handle_recvfrom,
    'BSC_accept': handle_accept,
    'BSC_getpeername': handle_getpeername,
    'BSC_getsockname': handle_getsockname,
    'BSC_access': handle_access,
    'BSC_chflags': handle_chflags,
    'BSC_fchflags': handle_fchflags,
    'BSC_sync': handle_sync,
    'BSC_kill': handle_kill,
    'BSC_getppid': handle_getppid,
    'BSC_sys_dup': handle_sys_dup,
    'BSC_pipe': handle_pipe,
    'BSC_getegid': handle_getegid,
    'BSC_sigaction': handle_sigaction,
    'BSC_getgid': handle_getgid,
    'BSC_sigprocmask': handle_sigprocmask,
    'BSC_getlogin': handle_getlogin,
    'BSC_setlogin': handle_setlogin,
    'BSC_acct': handle_acct,
    'BSC_sigpending': handle_sigpending,
    'BSC_sigaltstack': handle_sigaltstack,
    'BSC_ioctl': handle_ioctl,
    'BSC_reboot': handle_reboot,
    'BSC_revoke': handle_revoke,
    'BSC_symlink': handle_symlink,
    'BSC_readlink': handle_readlink,
    'BSC_execve': handle_execve,
    'BSC_umask': handle_umask,
    'BSC_chroot': handle_chroot,
    'BSC_msync': handle_msync,
    'BSC_vfork': handle_vfork,
    'BSC_munmap': handle_munmap,
    'BSC_mprotect': handle_mprotect,
    'BSC_madvise': handle_madvise,
    'BSC_mincore': handle_mincore,
    'BSC_getgroups': handle_getgroups,
    'BSC_setgroups': handle_setgroups,
    'BSC_getpgrp': handle_getpgrp,
    'BSC_setpgid': handle_setpgid,
    'BSC_setitimer': handle_setitimer,
    'BSC_swapon': handle_swapon,
    'BSC_getitimer': handle_getitimer,
    'BSC_sys_getdtablesize': handle_sys_getdtablesize,
    'BSC_sys_dup2': handle_sys_dup2,
    'BSC_sys_fcntl': handle_sys_fcntl,
    'BSC_select': handle_select,
    'BSC_fsync': handle_fsync,
    'BSC_setpriority': handle_setpriority,
    'BSC_socket': handle_socket,
    'BSC_connect': handle_connect,
    'BSC_getpriority': handle_getpriority,
    'BSC_bind': handle_bind,
    'BSC_setsockopt': handle_setsockopt,
    'BSC_listen': handle_listen,
    'BSC_sigsuspend': handle_sigsuspend,
    'BSC_gettimeofday': handle_gettimeofday,
    'BSC_getrusage': handle_getrusage,
    'BSC_getsockopt': handle_getsockopt,
    'BSC_readv': handle_readv,
    'BSC_writev': handle_writev,
    'BSC_settimeofday': handle_settimeofday,
    'BSC_fchown': handle_fchown,
    'BSC_fchmod': handle_fchmod,
    'BSC_setreuid': handle_setreuid,
    'BSC_setregid': handle_setregid,
    'BSC_rename': handle_rename,
    'BSC_sys_flock': handle_sys_flock,
    'BSC_mkfifo': handle_mkfifo,
    'BSC_sendto': handle_sendto,
    'BSC_shutdown': handle_shutdown,
    'BSC_socketpair': handle_socketpair,
    'BSC_mkdir': handle_mkdir,
    'BSC_rmdir': handle_rmdir,
    'BSC_utimes': handle_utimes,
    'BSC_futimes': handle_futimes,
    'BSC_adjtime': handle_adjtime,
    'BSC_gethostuuid': handle_gethostuuid,
    'BSC_obs_killpg': handle_obs_killpg,
    'BSC_setsid': handle_setsid,
    'BSC_getpgid': handle_getpgid,
    'BSC_setprivexec': handle_setprivexec,
    'BSC_pread': handle_pread,
    'BSC_pwrite': handle_pwrite,
    'BSC_nfssvc': handle_nfssvc,
    'BSC_statfs': handle_statfs,
    'BSC_fstatfs': handle_fstatfs,
    'BSC_unmount': handle_unmount,
    'BSC_getfh': handle_getfh,
    'BSC_quotactl': handle_quotactl,
    'BSC_mount': handle_mount,
    'BSC_csops': handle_csops,
    'BSC_csops_audittoken': handle_csops_audittoken,
    'BSC_waitid': handle_waitid,
    'BSC_kdebug_typefilter': handle_kdebug_typefilter,
    'BSC_setgid': handle_setgid,
    'BSC_setegid': handle_setegid,
    'BSC_seteuid': handle_seteuid,
    'BSC_thread_selfcounts': handle_thread_selfcounts,
    'BSC_fdatasync': handle_fdatasync,
    'BSC_pathconf': handle_pathconf,
    'BSC_sys_fpathconf': handle_sys_fpathconf,
    'BSC_getrlimit': handle_getrlimit,
    'BSC_setrlimit': handle_setrlimit,
    'BSC_getdirentries': handle_getdirentries,
    'BSC_mmap': handle_mmap,
    'BSC_lseek': handle_lseek,
    'BSC_truncate': handle_truncate,
    'BSC_ftruncate': handle_ftruncate,
    'BSC_sysctl': handle_sysctl,
    'BSC_mlock': handle_mlock,
    'BSC_munlock': handle_munlock,
    'BSC_undelete': handle_undelete,
    'BSC_open_dprotected_np': handle_open_dprotected_np,
    'BSC_getattrlist': handle_getattrlist,
    'BSC_setattrlist': handle_setattrlist,
    'BSC_getdirentriesattr': handle_getdirentriesattr,
    'BSC_exchangedata': handle_exchangedata,
    'BSC_searchfs': handle_searchfs,
    'BSC_fgetattrlist': handle_fgetattrlist,
    'BSC_fsetattrlist': handle_fsetattrlist,
    'BSC_poll': handle_poll,
    'BSC_getxattr': handle_getxattr,
    'BSC_fgetxattr': handle_fgetxattr,
    'BSC_setxattr': handle_setxattr,
    'BSC_fsetxattr': handle_fsetxattr,
    'BSC_removexattr': handle_removexattr,
    'BSC_fremovexattr': handle_fremovexattr,
    'BSC_listxattr': handle_listxattr,
    'BSC_flistxattr': handle_flistxattr,
    'BSC_fsctl': handle_fsctl,
    'BSC_initgroups': handle_initgroups,
    'BSC_posix_spawn': handle_posix_spawn,
    'BSC_ffsctl': handle_ffsctl,
    'BSC_nfsclnt': handle_nfsclnt,
    'BSC_fhopen': handle_fhopen,
    'BSC_minherit': handle_minherit,
    'BSC_semsys': handle_semsys,
    'BSC_msgsys': handle_msgsys,
    'BSC_shmsys': handle_shmsys,
    'BSC_semctl': handle_semctl,
    'BSC_semget': handle_semget,
    'BSC_semop': handle_semop,
    'BSC_msgctl': handle_msgctl,
    'BSC_msgget': handle_msgget,
    'BSC_msgsnd': handle_msgsnd,
    'BSC_msgrcv': handle_msgrcv,
    'BSC_shmat': handle_shmat,
    'BSC_shmctl': handle_shmctl,
    'BSC_shmdt': handle_shmdt,
    'BSC_shmget': handle_shmget,
    'BSC_shm_open': handle_shm_open,
    'BSC_shm_unlink': handle_shm_unlink,
    'BSC_sem_open': handle_sem_open,
    'BSC_sem_close': handle_sem_close,
    'BSC_sem_unlink': handle_sem_unlink,
    'BSC_sem_wait': handle_sem_wait,
    'BSC_sem_trywait': handle_sem_trywait,
    'BSC_sem_post': handle_sem_post,
    'BSC_sys_sysctlbyname': handle_sys_sysctlbyname,
    'BSC_access_extended': handle_access_extended,
    'BSC_gettid': handle_gettid,
    'BSC_shared_region_check_np': handle_shared_region_check_np,
    'BSC_psynch_mutexwait': handle_psynch_mutexwait,
    'BSC_psynch_mutexdrop': handle_psynch_mutexdrop,
    'BSC_psynch_cvbroad': handle_psynch_cvbroad,
    'BSC_psynch_cvsignal': handle_psynch_cvsignal,
    'BSC_psynch_cvwait': handle_psynch_cvwait,
    'BSC_getsid': handle_getsid,
    'BSC_psynch_cvclrprepost': handle_psynch_cvclrprepost,
    'BSC_iopolicysys': handle_iopolicysys,
    'BSC_process_policy': handle_process_policy,
    'BSC_mlockall': handle_mlockall,
    'BSC_munlockall': handle_munlockall,
    'BSC_issetugid': handle_issetugid,
    'BSC_pthread_sigmask': handle_pthread_sigmask,
    'BSC_disable_threadsignal': handle_disable_threadsignal,
    'BSC_semwait_signal': handle_semwait_signal,
    'BSC_proc_info': handle_proc_info,
    'BSC_sendfile': handle_sendfile,
    'BSC_stat64': handle_stat64,
    'BSC_sys_fstat64': handle_sys_fstat64,
    'BSC_lstat64': handle_lstat64,
    'BSC_getdirentries64': handle_getdirentries64,
    'BSC_statfs64': handle_statfs64,
    'BSC_fstatfs64': handle_fstatfs64,
    'BSC_getfsstat64': handle_getfsstat64,
    'BSC_pthread_fchdir': handle_pthread_fchdir,
    'BSC_audit': handle_audit,
    'BSC_auditon': handle_auditon,
    'BSC_getauid': handle_getauid,
    'BSC_setauid': handle_setauid,
    'BSC_bsdthread_create': handle_bsdthread_create,
    'BSC_kqueue': handle_kqueue,
    'BSC_kevent': handle_kevent,
    'BSC_lchown': handle_lchown,
    'BSC_bsdthread_register': handle_bsdthread_register,
    'BSC_workq_open': handle_workq_open,
    'BSC_workq_kernreturn': handle_workq_kernreturn,
    'BSC_kevent64': handle_kevent64,
    'BSC_thread_selfid': handle_thread_selfid,
    'BSC_kevent_qos': handle_kevent_qos,
    'BSC_kevent_id': handle_kevent_id,
    'BSC_mac_syscall': handle_mac_syscall,
    'BSC_pselect': handle_pselect,
    # *_nocancel variants reuse the base handler with no_cancel=True.
    'BSC_pselect_nocancel': partial(handle_pselect, no_cancel=True),
    'BSC_read_nocancel': partial(handle_read, no_cancel=True),
    'BSC_write_nocancel': partial(handle_write, no_cancel=True),
    'BSC_open_nocancel': partial(handle_open, no_cancel=True),
    'BSC_sys_close_nocancel': partial(handle_sys_close, no_cancel=True),
    'BSC_wait4_nocancel': partial(handle_wait4, no_cancel=True),
    'BSC_recvmsg_nocancel': partial(handle_recvmsg, no_cancel=True),
    'BSC_sendmsg_nocancel': partial(handle_sendmsg, no_cancel=True),
    'BSC_recvfrom_nocancel': partial(handle_recvfrom, no_cancel=True),
    'BSC_accept_nocancel': partial(handle_accept, no_cancel=True),
    'BSC_msync_nocancel': partial(handle_msync, no_cancel=True),
    'BSC_sys_fcntl_nocancel': partial(handle_sys_fcntl, no_cancel=True),
    'BSC_select_nocancel': partial(handle_select, no_cancel=True),
    'BSC_fsync_nocancel': partial(handle_fsync, no_cancel=True),
    'BSC_connect_nocancel': partial(handle_connect, no_cancel=True),
    'BSC_sigsuspend_nocancel': partial(handle_sigsuspend, no_cancel=True),
    'BSC_readv_nocancel': partial(handle_readv, no_cancel=True),
    'BSC_writev_nocancel': partial(handle_writev, no_cancel=True),
    'BSC_sendto_nocancel': partial(handle_sendto, no_cancel=True),
    'BSC_pread_nocancel': partial(handle_pread, no_cancel=True),
    'BSC_pwrite_nocancel': partial(handle_pwrite, no_cancel=True),
    'BSC_waitid_nocancel': partial(handle_waitid, no_cancel=True),
    'BSC_poll_nocancel': partial(handle_poll, no_cancel=True),
    'BSC_msgsnd_nocancel': partial(handle_msgsnd, no_cancel=True),
    'BSC_msgrcv_nocancel': partial(handle_msgrcv, no_cancel=True),
    'BSC_sem_wait_nocancel': partial(handle_sem_wait, no_cancel=True),
    'BSC_semwait_signal_nocancel': partial(handle_semwait_signal, no_cancel=True),
    'BSC_fsgetpath': handle_fsgetpath,
    'BSC_sys_fileport_makeport': handle_sys_fileport_makeport,
    'BSC_sys_fileport_makefd': handle_sys_fileport_makefd,
    'BSC_audit_session_port': handle_audit_session_port,
    'BSC_pid_suspend': handle_pid_suspend,
    'BSC_pid_resume': handle_pid_resume,
    'BSC_pid_hibernate': handle_pid_hibernate,
    'BSC_pid_shutdown_sockets': handle_pid_shutdown_sockets,
    'BSC_shared_region_map_and_slide_np': handle_shared_region_map_and_slide_np,
    'BSC_kas_info': handle_kas_info,
    'BSC_memorystatus_control': handle_memorystatus_control,
    'BSC_guarded_open_np': handle_guarded_open_np,
    'BSC_guarded_close_np': handle_guarded_close_np,
    'BSC_guarded_kqueue_np': handle_guarded_kqueue_np,
    'BSC_change_fdguard_np': handle_change_fdguard_np,
    'BSC_usrctl': handle_usrctl,
    'BSC_proc_rlimit_control': handle_proc_rlimit_control,
    'BSC_connectx': handle_connectx,
    'BSC_disconnectx': handle_disconnectx,
    'BSC_peeloff': handle_peeloff,
    'BSC_socket_delegate': handle_socket_delegate,
    'BSC_telemetry': handle_telemetry,
    'BSC_proc_uuid_policy': handle_proc_uuid_policy,
    'BSC_memorystatus_get_level': handle_memorystatus_get_level,
    'BSC_system_override': handle_system_override,
    'BSC_vfs_purge': handle_vfs_purge,
    'BSC_sfi_ctl': handle_sfi_ctl,
    'BSC_sfi_pidctl': handle_sfi_pidctl,
    'BSC_coalition': handle_coalition,
    'BSC_coalition_info': handle_coalition_info,
    'BSC_necp_match_policy': handle_necp_match_policy,
    'BSC_getattrlistbulk': handle_getattrlistbulk,
    'BSC_clonefileat': handle_clonefileat,
    'BSC_openat': handle_openat,
    'BSC_openat_nocancel': partial(handle_openat, no_cancel=True),
    'BSC_renameat': handle_renameat,
    'BSC_faccessat': handle_faccessat,
    'BSC_fchmodat': handle_fchmodat,
    'BSC_fchownat': handle_fchownat,
    'BSC_fstatat': handle_fstatat,
    'BSC_fstatat64': handle_fstatat64,
    'BSC_linkat': handle_linkat,
    'BSC_unlinkat': handle_unlinkat,
    'BSC_readlinkat': handle_readlinkat,
    'BSC_symlinkat': handle_symlinkat,
    'BSC_mkdirat': handle_mkdirat,
    'BSC_getattrlistat': handle_getattrlistat,
    'BSC_proc_trace_log': handle_proc_trace_log,
    'BSC_bsdthread_ctl': handle_bsdthread_ctl,
    'BSC_openbyid_np': handle_openbyid_np,
    'BSC_recvmsg_x': handle_recvmsg_x,
    'BSC_sendmsg_x': handle_sendmsg_x,
    'BSC_thread_selfusage': handle_thread_selfusage,
    'BSC_csrctl': handle_csrctl,
    'BSC_guarded_open_dprotected_np': handle_guarded_open_dprotected_np,
    'BSC_guarded_write_np': handle_guarded_write_np,
    'BSC_guarded_pwrite_np': handle_guarded_pwrite_np,
    'BSC_guarded_writev_np': handle_guarded_writev_np,
    'BSC_renameatx_np': handle_renameatx_np,
    'BSC_mremap_encrypted': handle_mremap_encrypted,
    'BSC_netagent_trigger': handle_netagent_trigger,
    'BSC_stack_snapshot_with_config': handle_stack_snapshot_with_config,
    'BSC_microstackshot': handle_microstackshot,
    'BSC_grab_pgo_data': handle_grab_pgo_data,
    'BSC_persona': handle_persona,
    'BSC_mach_eventlink_signal': handle_mach_eventlink_signal,
    'BSC_mach_eventlink_wait_until': handle_mach_eventlink_wait_until,
    'BSC_mach_eventlink_signal_wait_until': handle_mach_eventlink_signal_wait_until,
    'BSC_work_interval_ctl': handle_work_interval_ctl,
    'BSC_getentropy': handle_getentropy,
    'BSC_necp_open': handle_necp_open,
    'BSC_necp_client_action': handle_necp_client_action,
    'BSC_nexus_open': handle_nexus_open,
    'BSC_nexus_register': handle_nexus_register,
    'BSC_nexus_deregister': handle_nexus_deregister,
    'BSC_nexus_create': handle_nexus_create,
    'BSC_nexus_destroy': handle_nexus_destroy,
    'BSC_nexus_get_opt': handle_nexus_get_opt,
    'BSC_nexus_set_opt': handle_nexus_set_opt,
    'BSC_channel_open': handle_channel_open,
    'BSC_channel_get_info': handle_channel_get_info,
    'BSC_channel_sync': handle_channel_sync,
    'BSC_channel_get_opt': handle_channel_get_opt,
    'BSC_channel_set_opt': handle_channel_set_opt,
    'BSC_ulock_wait': handle_ulock_wait,
    'BSC_ulock_wake': handle_ulock_wake,
    'BSC_fclonefileat': handle_fclonefileat,
    'BSC_fs_snapshot': handle_fs_snapshot,
    'BSC_terminate_with_payload': handle_terminate_with_payload,
    'BSC_abort_with_payload': handle_abort_with_payload,
    'BSC_necp_session_open': handle_necp_session_open,
    'BSC_necp_session_action': handle_necp_session_action,
    'BSC_setattrlistat': handle_setattrlistat,
    'BSC_net_qos_guideline': handle_net_qos_guideline,
    'BSC_fmount': handle_fmount,
    'BSC_ntp_adjtime': handle_ntp_adjtime,
    'BSC_ntp_gettime': handle_ntp_gettime,
    'BSC_os_fault_with_payload': handle_os_fault_with_payload,
    'BSC_kqueue_workloop_ctl': handle_kqueue_workloop_ctl,
    'BSC_mach_bridge_remote_time': handle_mach_bridge_remote_time,
    'BSC_coalition_ledger': handle_coalition_ledger,
    'BSC_log_data': handle_log_data,
    'BSC_memorystatus_available_memory': handle_memorystatus_available_memory,
    'BSC_shared_region_map_and_slide_2_np': handle_shared_region_map_and_slide_2_np,
    'BSC_pivot_root': handle_pivot_root,
    'BSC_task_inspect_for_pid': handle_task_inspect_for_pid,
    'BSC_task_read_for_pid': handle_task_read_for_pid,
    'BSC_sys_preadv': handle_sys_preadv,
    'BSC_sys_pwritev': handle_sys_pwritev,
    'BSC_sys_preadv_nocancel': partial(handle_sys_preadv, no_cancel=True),
    'BSC_sys_pwritev_nocancel': partial(handle_sys_pwritev, no_cancel=True),
    'BSC_ulock_wait2': handle_ulock_wait2,
    'BSC_proc_info_extended_id': handle_proc_info_extended_id,
}
|
1606893
|
import cv2

# Dump every frame of test.mp4 into ./frames/<n>.jpg.
VC = cv2.VideoCapture('test.mp4')
if VC.isOpened():
    rval, frame = VC.read()
else:
    rval = False
c = 1
while rval:
    # Write the frame we already hold, then advance. The original re-read
    # before writing, which skipped the first frame and passed frame=None
    # to imwrite once the final read failed.
    cv2.imwrite('./frames/%s.jpg' % c, frame)
    c += 1
    cv2.waitKey(1)
    rval, frame = VC.read()
VC.release()
|
1606903
|
from FormulaLab.search import FormulaSearch
# Public API of the FormulaLab package: only FormulaSearch is exported.
__all__ = ['FormulaSearch',
           ]
# Package version (keep in sync with the distribution metadata).
__version__ = '0.1.0'
|
1606921
|
import sys
import time
import tensorflow as tf
def print_time(s, start_time):
    """Take a start time, print elapsed duration, and return a new time."""
    elapsed = time.time() - start_time
    print("%s, time %ds, %s." % (s, elapsed, time.ctime()))
    # sys.stdout.flush()
    return time.time()
def print_out(s, f=None, new_line=True, skip_stdout=False):
    """Similar to print but with support to flush and output to a file.

    Args:
        s: str or bytes message (bytes are decoded as UTF-8).
        f: optional binary file object; the message is written to it UTF-8
            encoded, followed by b"\\n" when new_line is True.
        new_line: append a newline to both outputs when True.
        skip_stdout: suppress the stdout copy when True.
    """
    if isinstance(s, bytes):
        s = s.decode("utf-8")
    if f:
        f.write(s.encode("utf-8"))
        if new_line:
            f.write(b"\n")
    # stdout: the original encoded to UTF-8 and immediately decoded back,
    # a no-op round-trip that also crashed on surrogate code points.
    if not skip_stdout:
        print(s, end="", file=sys.stdout)
        if new_line:
            print()
    # sys.stdout.flush()
def add_summary(summary_writer, global_step, tag, value):
    """Add a new summary to the current summary_writer.
    Useful to log things that are not part of the training graph, e.g., tag=BLEU.
    """
    value_proto = tf.Summary.Value(tag=tag, simple_value=value)
    summary_writer.add_summary(tf.Summary(value=[value_proto]), global_step)
|
1606922
|
import os
import unittest
from programy.utils.parsing.linenumxml import LineNumberingParser
import xml.etree.ElementTree as ET # pylint: disable=wrong-import-order
from programy.parser.aiml_parser import AIMLParser
from programy.dialog.sentence import Sentence
from programy.parser.pattern.nodes.oneormore import PatternOneOrMoreWildCardNode
from programy.parser.pattern.nodes.root import PatternRootNode
from programy.parser.pattern.nodes.template import PatternTemplateNode
from programy.parser.pattern.nodes.that import PatternThatNode
from programy.parser.pattern.nodes.topic import PatternTopicNode
from programy.parser.pattern.nodes.word import PatternWordNode
from programytest.client import TestClient
from programy.parser.exceptions import ParserException
from programy.parser.exceptions import DuplicateGrammarException
class MockElement(ET.Element):
    """ET.Element with the line-number attributes LineNumberingParser adds,
    so tests can construct elements directly without parsing a file."""

    def __init__(self, tag, attrib=None):
        # None sentinel instead of the original shared mutable default ({}).
        ET.Element.__init__(self, tag, attrib if attrib is not None else {})
        self._start_line_number = 0
        self._end_line_number = 0
class AIMLParserTestClient(TestClient):
    """TestClient variant that installs the default stores after loading."""

    def __init__(self):
        super(AIMLParserTestClient, self).__init__()

    def load_storage(self):
        super(AIMLParserTestClient, self).load_storage()
        self.add_default_stores()
class AIMLParserTests(unittest.TestCase):
    def setUp(self):
        """Create a fresh client, client context and AIML parser for each test."""
        self._client = AIMLParserTestClient()
        self._client_context = self._client.create_client_context("testid")
        self.parser = self._client_context.brain.aiml_parser
def test__getstate__(self):
self.assertIsNotNone(self.parser.__getstate__()['_aiml_loader'])
self.assertIsNotNone(self.parser.__getstate__()['_template_parser'])
self.assertIsNotNone(self.parser.__getstate__()['_aiml_loader'])
self.assertIsNotNone(self.parser.__getstate__()['_num_categories'])
self.assertIsNone(self.parser.__getstate__().get('_brain', None))
self.assertIsNone(self.parser.__getstate__().get('_errors', None))
self.assertIsNone(self.parser.__getstate__().get('_duplicates', None))
def test__getstate__with_errors_and_duplicates(self):
self.parser._errors = []
self.parser._duplicates = []
self.assertIsNotNone(self.parser.__getstate__()['_aiml_loader'])
self.assertIsNotNone(self.parser.__getstate__()['_template_parser'])
self.assertIsNotNone(self.parser.__getstate__()['_aiml_loader'])
self.assertIsNotNone(self.parser.__getstate__()['_num_categories'])
self.assertIsNone(self.parser.__getstate__().get('_brain', None))
self.assertIsNone(self.parser.__getstate__().get('_errors', None))
self.assertIsNone(self.parser.__getstate__().get('_duplicates', None))
def test__getstate__without_errors_and_duplicates(self):
if '_errors' in self.parser.__dict__:
del self.parser.__dict__['_errors']
if '_duplicates' in self.parser.__dict__:
del self.parser.__dict__['_duplicates']
self.assertIsNotNone(self.parser.__getstate__()['_aiml_loader'])
self.assertIsNotNone(self.parser.__getstate__()['_template_parser'])
self.assertIsNotNone(self.parser.__getstate__()['_aiml_loader'])
self.assertIsNotNone(self.parser.__getstate__()['_num_categories'])
self.assertIsNone(self.parser.__getstate__().get('_brain', None))
self.assertIsNone(self.parser.__getstate__().get('_errors', None))
self.assertIsNone(self.parser.__getstate__().get('_duplicates', None))
def test_check_aiml_tag(self):
aiml = ET.fromstring( """<?xml version="1.0" encoding="ISO-8859-1"?>
<aiml version="1.01"
xmlns="http://alicebot.org/2001/AIML"
xmlns:aiml="http://alicebot.org/2001/AIML"
xmlns:html="http://www.w3.org/TR/REC-html40">
<category>
<pattern>*</pattern>
<template>RESPONSE</template>
</category>
</aiml>
""")
tag_name, namespace = AIMLParser.check_aiml_tag(aiml)
self.assertEquals("aiml", tag_name)
self.assertEquals("{http://alicebot.org/2001/AIML}", namespace)
def test_check_aiml_tag_no_aiml(self):
aiml = None
with self.assertRaises(ParserException):
tag_name, namespace = AIMLParser.check_aiml_tag(aiml)
def test_check_aiml_tag_no_namespace(self):
aiml = ET.fromstring( """<?xml version="1.0" encoding="ISO-8859-1"?>
<aiml version="1.01">
<category>
<pattern>*</pattern>
<template>RESPONSE</template>
</category>
</aiml>
""")
tag_name, namespace = AIMLParser.check_aiml_tag(aiml)
self.assertEquals("aiml", tag_name)
self.assertEquals(None, namespace)
def test_check_aiml_tag_not_aiml(self):
aiml = ET.fromstring( """<?xml version="1.0" encoding="ISO-8859-1"?>
<aipl version="1.01"
xmlns="http://alicebot.org/2001/AIML"
xmlns:aiml="http://alicebot.org/2001/AIML"
xmlns:html="http://www.w3.org/TR/REC-html40">
<category>
<pattern>*</pattern>
<template>RESPONSE</template>
</category>
</aipl>
""")
with self.assertRaises(ParserException):
_, _ = AIMLParser.check_aiml_tag(aiml)
def test_parse_from_file_valid(self):
filename = os.path.dirname(__file__)+ '/valid.aiml'
self.parser.parse_from_file(filename)
    def test_aiml_with_namespace(self):
        """A namespaced <aiml> doc builds the full * -> topic * -> that * -> template chain."""
        self.parser.parse_from_text(
            """<?xml version="1.0" encoding="ISO-8859-1"?>
            <aiml version="1.01"
                xmlns="http://alicebot.org/2001/AIML"
                xmlns:aiml="http://alicebot.org/2001/AIML"
                xmlns:html="http://www.w3.org/TR/REC-html40">
                <category>
                    <pattern>*</pattern>
                    <template>RESPONSE</template>
                </category>
            </aiml>
            """)
        # Root holds a single one-or-more (*) wildcard node.
        self.assertIsNotNone(self.parser.pattern_parser)
        self.assertIsNotNone(self.parser.pattern_parser.root)
        self.assertIsInstance(self.parser.pattern_parser.root, PatternRootNode)
        self.assertTrue(self.parser.pattern_parser.root.has_one_or_more())
        node = self.parser.pattern_parser.root.star
        self.assertIsNotNone(node)
        self.assertIsInstance(node, PatternOneOrMoreWildCardNode)
        self.assertEqual(node.wildcard, "*")
        # No <topic> in the category, so the topic branch defaults to '*'.
        topic = node.topic
        self.assertIsNotNone(topic)
        self.assertIsInstance(topic, PatternTopicNode)
        self.assertTrue(topic.has_one_or_more())
        self.assertIsInstance(topic.star, PatternOneOrMoreWildCardNode)
        self.assertEqual(topic.star.wildcard, "*")
        # No <that> either, so the that branch also defaults to '*'.
        that = topic.star.that
        self.assertIsNotNone(that)
        self.assertIsInstance(that, PatternThatNode)
        self.assertTrue(that.has_one_or_more())
        self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode)
        self.assertEqual(that.star.wildcard, "*")
        # Leaf: the template resolves to the category's response text.
        template = that.star.template
        self.assertIsNotNone(template)
        self.assertIsInstance(template, PatternTemplateNode)
        self.assertEqual(template.template.resolve(self._client_context), "RESPONSE")
    def test_base_aiml_topic_category_template(self):
        """A <topic name=...> wrapper inserts a topic word node into the graph."""
        self.parser.parse_from_text(
            """<?xml version="1.0" encoding="UTF-8"?>
            <aiml>
                <topic name="test">
                    <category>
                        <pattern>*</pattern>
                        <template>RESPONSE</template>
                    </category>
                </topic>
            </aiml>
            """)
        self.assertIsNotNone(self.parser.pattern_parser)
        self.assertIsNotNone(self.parser.pattern_parser.root)
        self.assertIsInstance(self.parser.pattern_parser.root, PatternRootNode)
        self.assertTrue(self.parser.pattern_parser.root.has_one_or_more())
        node = self.parser.pattern_parser.root.star
        self.assertIsNotNone(node)
        self.assertIsInstance(node, PatternOneOrMoreWildCardNode)
        self.assertEqual(node.wildcard, "*")
        # The topic branch is the literal word "test", not a wildcard.
        topic = node.topic
        self.assertIsNotNone(topic)
        self.assertIsInstance(topic, PatternTopicNode)
        self.assertEqual(len(topic.children), 1)
        self.assertIsNotNone(topic.children[0])
        self.assertIsInstance(topic.children[0], PatternWordNode)
        self.assertEqual(topic.children[0].word, "test")
        that = topic.children[0].that
        self.assertIsNotNone(that)
        self.assertIsInstance(that, PatternThatNode)
        self.assertTrue(that.has_one_or_more())
        self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode)
        self.assertEqual(that.star.wildcard, "*")
        template = that.star.template
        self.assertIsNotNone(template)
        self.assertIsInstance(template, PatternTemplateNode)
        self.assertEqual(template.template.resolve(self._client_context), "RESPONSE")
    def test_base_aiml_topic_category_template_multi_line(self):
        """A multi-line <template> body is collapsed to one whitespace-normalised line."""
        self.parser.parse_from_text(
            """<?xml version="1.0" encoding="UTF-8"?>
            <aiml>
                <topic name="test">
                    <category>
                        <pattern>*</pattern>
                        <template>
                        RESPONSE1,
                        RESPONSE2.
                        RESPONSE3
                        </template>
                    </category>
                </topic>
            </aiml>
            """)
        self.assertIsNotNone(self.parser.pattern_parser)
        self.assertIsNotNone(self.parser.pattern_parser.root)
        self.assertIsInstance(self.parser.pattern_parser.root, PatternRootNode)
        self.assertTrue(self.parser.pattern_parser.root.has_one_or_more())
        node = self.parser.pattern_parser.root.star
        self.assertIsNotNone(node)
        self.assertIsInstance(node, PatternOneOrMoreWildCardNode)
        self.assertEqual(node.wildcard, "*")
        topic = node.topic
        self.assertIsNotNone(topic)
        self.assertIsInstance(topic, PatternTopicNode)
        self.assertEqual(len(topic.children), 1)
        self.assertIsNotNone(topic.children[0])
        self.assertIsInstance(topic.children[0], PatternWordNode)
        self.assertEqual(topic.children[0].word, "test")
        that = topic.children[0].that
        self.assertIsNotNone(that)
        self.assertIsInstance(that, PatternThatNode)
        self.assertTrue(that.has_one_or_more())
        self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode)
        self.assertEqual(that.star.wildcard, "*")
        template = that.star.template
        self.assertIsNotNone(template)
        self.assertIsInstance(template, PatternTemplateNode)
        # The three lines resolve to a single space-joined string.
        self.assertEqual(template.template.resolve(self._client_context), "RESPONSE1, RESPONSE2. RESPONSE3")
    def test_base_aiml_category_template(self):
        """A bare category (no topic/that) defaults both branches to '*'."""
        self.parser.parse_from_text(
            """<?xml version="1.0" encoding="UTF-8"?>
            <aiml>
                <category>
                    <pattern>*</pattern>
                    <template>RESPONSE</template>
                </category>
            </aiml>
            """)
        self.assertIsNotNone(self.parser.pattern_parser)
        self.assertIsNotNone(self.parser.pattern_parser.root)
        self.assertIsInstance(self.parser.pattern_parser.root, PatternRootNode)
        self.assertTrue(self.parser.pattern_parser.root.has_one_or_more())
        node = self.parser.pattern_parser.root.star
        self.assertIsNotNone(node)
        self.assertIsInstance(node, PatternOneOrMoreWildCardNode)
        self.assertEqual(node.wildcard, "*")
        topic = node.topic
        self.assertIsNotNone(topic)
        self.assertIsInstance(topic, PatternTopicNode)
        self.assertTrue(topic.has_one_or_more())
        self.assertIsInstance(topic.star, PatternOneOrMoreWildCardNode)
        self.assertEqual(topic.star.wildcard, "*")
        that = topic.star.that
        self.assertIsNotNone(that)
        self.assertIsInstance(that, PatternThatNode)
        self.assertTrue(that.has_one_or_more())
        self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode)
        self.assertEqual(that.star.wildcard, "*")
        template = that.star.template
        self.assertIsNotNone(template)
        self.assertIsInstance(template, PatternTemplateNode)
        self.assertEqual(template.template.resolve(self._client_context), "RESPONSE")
    def test_base_aiml_category_template_that(self):
        """A <that> element becomes a word node on the that branch (topic stays '*')."""
        self.parser.parse_from_text(
            """<?xml version="1.0" encoding="UTF-8"?>
            <aiml>
                <category>
                    <pattern>*</pattern>
                    <that>something</that>
                    <template>RESPONSE</template>
                </category>
            </aiml>
            """)
        self.assertIsNotNone(self.parser.pattern_parser)
        self.assertIsNotNone(self.parser.pattern_parser.root)
        self.assertIsInstance(self.parser.pattern_parser.root, PatternRootNode)
        self.assertTrue(self.parser.pattern_parser.root.has_one_or_more())
        node = self.parser.pattern_parser.root.star
        self.assertIsNotNone(node)
        self.assertIsInstance(node, PatternOneOrMoreWildCardNode)
        self.assertEqual(node.wildcard, "*")
        topic = node.topic
        self.assertIsNotNone(topic)
        self.assertIsInstance(topic, PatternTopicNode)
        self.assertTrue(topic.has_one_or_more())
        self.assertIsInstance(topic.star, PatternOneOrMoreWildCardNode)
        self.assertEqual(topic.star.wildcard, "*")
        # that branch is the literal word "something".
        that = topic.star.that
        self.assertIsNotNone(that)
        self.assertIsInstance(that, PatternThatNode)
        self.assertEqual(len(that.children), 1)
        self.assertIsNotNone(that.children[0])
        self.assertIsInstance(that.children[0], PatternWordNode)
        self.assertEqual(that.children[0].word, "something")
        template = that.children[0].template
        self.assertIsNotNone(template)
        self.assertIsInstance(template, PatternTemplateNode)
        self.assertEqual(template.template.resolve(self._client_context), "RESPONSE")
    def test_base_aiml_category_template_topic(self):
        """A <topic> element inside a category becomes a word node on the topic branch."""
        self.parser.parse_from_text(
            """<?xml version="1.0" encoding="UTF-8"?>
            <aiml>
                <category>
                    <pattern>*</pattern>
                    <topic>something</topic>
                    <template>RESPONSE</template>
                </category>
            </aiml>
            """)
        self.assertIsNotNone(self.parser.pattern_parser)
        self.assertIsNotNone(self.parser.pattern_parser.root)
        self.assertIsInstance(self.parser.pattern_parser.root, PatternRootNode)
        self.assertTrue(self.parser.pattern_parser.root.has_one_or_more())
        node = self.parser.pattern_parser.root.star
        self.assertIsNotNone(node)
        self.assertIsInstance(node, PatternOneOrMoreWildCardNode)
        self.assertEqual(node.wildcard, "*")
        # topic branch is the literal word "something".
        topic = node.topic
        self.assertIsNotNone(topic)
        self.assertIsInstance(topic, PatternTopicNode)
        self.assertEqual(len(topic.children), 1)
        self.assertIsNotNone(topic.children[0])
        self.assertIsInstance(topic.children[0], PatternWordNode)
        self.assertEqual(topic.children[0].word, "something")
        that = topic.children[0].that
        self.assertIsNotNone(that)
        self.assertIsInstance(that, PatternThatNode)
        self.assertTrue(that.has_one_or_more())
        self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode)
        self.assertEqual(that.star.wildcard, "*")
        template = that.star.template
        self.assertIsNotNone(template)
        self.assertIsInstance(template, PatternTemplateNode)
        self.assertEqual(template.template.resolve(self._client_context), "RESPONSE")
    def test_base_aiml_category_template_topic_that(self):
        """Both <topic> and <that> become word nodes on their respective branches."""
        self.parser.parse_from_text(
            """<?xml version="1.0" encoding="UTF-8"?>
            <aiml>
                <category>
                    <pattern>*</pattern>
                    <that>something</that>
                    <topic>other</topic>
                    <template>RESPONSE</template>
                </category>
            </aiml>
            """)
        self.assertIsNotNone(self.parser.pattern_parser)
        self.assertIsNotNone(self.parser.pattern_parser.root)
        self.assertIsInstance(self.parser.pattern_parser.root, PatternRootNode)
        self.assertTrue(self.parser.pattern_parser.root.has_one_or_more())
        node = self.parser.pattern_parser.root.star
        self.assertIsNotNone(node)
        self.assertIsInstance(node, PatternOneOrMoreWildCardNode)
        self.assertEqual(node.wildcard, "*")
        topic = node.topic
        self.assertIsNotNone(topic)
        self.assertIsInstance(topic, PatternTopicNode)
        self.assertEqual(len(topic.children), 1)
        self.assertIsNotNone(topic.children[0])
        self.assertIsInstance(topic.children[0], PatternWordNode)
        self.assertEqual(topic.children[0].word, "other")
        that = topic.children[0].that
        self.assertIsNotNone(that)
        self.assertIsInstance(that, PatternThatNode)
        self.assertEqual(len(that.children), 1)
        self.assertIsNotNone(that.children[0])
        self.assertIsInstance(that.children[0], PatternWordNode)
        self.assertEqual(that.children[0].word, "something")
        template = that.children[0].template
        self.assertIsNotNone(template)
        self.assertIsInstance(template, PatternTemplateNode)
        self.assertEqual(template.template.resolve(self._client_context), "RESPONSE")
    def test_base_aiml_multiple_categories(self):
        """Two categories produce two word children under the root.

        NOTE(review): children[1] matching "Hello" and children[0] matching
        "Goodbye" implies the parser prepends new children — confirm against
        the pattern graph implementation.
        """
        self.parser.parse_from_text(
            """<?xml version="1.0" encoding="UTF-8"?>
            <aiml>
                <category>
                    <pattern>Hello</pattern>
                    <template>Hiya</template>
                </category>
                <category>
                    <pattern>Goodbye</pattern>
                    <template>See ya</template>
                </category>
            </aiml>
            """)
        self.assertIsNotNone(self.parser.pattern_parser)
        self.assertIsNotNone(self.parser.pattern_parser.root)
        self.assertIsInstance(self.parser.pattern_parser.root, PatternRootNode)
        self.assertEqual(2, len(self.parser.pattern_parser.root.children))
        node = self.parser.pattern_parser.root.children[1]
        self.assertIsNotNone(node)
        self.assertIsInstance(node, PatternWordNode)
        self.assertEqual(node.word, "Hello")
        topic = node.topic
        self.assertIsNotNone(topic)
        self.assertIsInstance(topic, PatternTopicNode)
        self.assertTrue(topic.has_one_or_more())
        self.assertIsInstance(topic.star, PatternOneOrMoreWildCardNode)
        self.assertEqual(topic.star.wildcard, "*")
        that = topic.star.that
        self.assertIsNotNone(that)
        self.assertIsInstance(that, PatternThatNode)
        self.assertTrue(that.has_one_or_more())
        self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode)
        self.assertEqual(that.star.wildcard, "*")
        node = self.parser.pattern_parser.root.children[0]
        self.assertIsNotNone(node)
        self.assertIsInstance(node, PatternWordNode)
        self.assertEqual(node.word, "Goodbye")
        topic = node.topic
        self.assertIsNotNone(topic)
        self.assertIsInstance(topic, PatternTopicNode)
        self.assertTrue(topic.has_one_or_more())
        self.assertIsInstance(topic.star, PatternOneOrMoreWildCardNode)
        self.assertEqual(topic.star.wildcard, "*")
        that = topic.star.that
        self.assertIsNotNone(that)
        self.assertIsInstance(that, PatternThatNode)
        self.assertTrue(that.has_one_or_more())
        self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode)
        self.assertEqual(that.star.wildcard, "*")
def test_base_aiml_multiple_categories_in_a_topic(self):
    """Categories inside a named <topic> get a topic word node instead of a wildcard."""
    self.parser.parse_from_text(
        """<?xml version="1.0" encoding="UTF-8"?>
<aiml>
<topic name="test">
<category>
<pattern>Hello</pattern>
<template>Hiya</template>
</category>
<category>
<pattern>Goodbye</pattern>
<template>See ya</template>
</category>
</topic>
</aiml>
""")
    self.assertIsNotNone(self.parser.pattern_parser.root)
    self.assertEqual(2, len(self.parser.pattern_parser.root.children))
    # NOTE(review): children appear in reverse parse order — index 1 is "Hello".
    node = self.parser.pattern_parser.root.children[1]
    self.assertIsNotNone(node)
    self.assertIsInstance(node, PatternWordNode)
    self.assertEqual(node.word, "Hello")
    # The enclosing <topic name="test"> becomes a single word child of the topic node.
    topic = node.topic
    self.assertIsNotNone(topic)
    self.assertIsInstance(topic, PatternTopicNode)
    self.assertEqual(len(topic.children), 1)
    self.assertIsNotNone(topic.children[0])
    self.assertIsInstance(topic.children[0], PatternWordNode)
    self.assertEqual(topic.children[0].word, "test")
    # No <that> in either category, so a "*" that-wildcard follows the topic word.
    that = topic.children[0].that
    self.assertIsNotNone(that)
    self.assertIsInstance(that, PatternThatNode)
    self.assertTrue(that.has_one_or_more())
    self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode)
    self.assertEqual(that.star.wildcard, "*")
    node = self.parser.pattern_parser.root.children[0]
    self.assertIsNotNone(node)
    self.assertIsInstance(node, PatternWordNode)
    self.assertEqual(node.word, "Goodbye")
    topic = node.topic
    self.assertIsNotNone(topic)
    self.assertIsInstance(topic, PatternTopicNode)
    self.assertEqual(len(topic.children), 1)
    self.assertIsNotNone(topic.children[0])
    self.assertIsInstance(topic.children[0], PatternWordNode)
    self.assertEqual(topic.children[0].word, "test")
    that = topic.children[0].that
    self.assertIsNotNone(that)
    self.assertIsInstance(that, PatternThatNode)
    self.assertTrue(that.has_one_or_more())
    self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode)
    self.assertEqual(that.star.wildcard, "*")
def test_base_aiml_multiple_categories_in_and_out_of_topic(self):
    """Mix of topic-scoped and top-level categories produces four root children.

    Top-level categories ("Welcome", "Interesting") get "*" topic wildcards;
    categories inside <topic name="test"> get a "test" topic word node.
    """
    self.parser.parse_from_text(
        """<?xml version="1.0" encoding="UTF-8"?>
<aiml>
<category>
<pattern>Welcome</pattern>
<template>Hello there</template>
</category>
<topic name="test">
<category>
<pattern>Hello</pattern>
<template>Hiya</template>
</category>
<category>
<pattern>Goodbye</pattern>
<template>See ya</template>
</category>
</topic>
<category>
<pattern>Interesting</pattern>
<template>Yes</template>
</category>
</aiml>
""")
    self.assertIsNotNone(self.parser.pattern_parser.root)
    self.assertEqual(4, len(self.parser.pattern_parser.root.children))
    # NOTE(review): children are in reverse parse order — last parsed
    # category ("Interesting") is at index 0, first ("Welcome") at index 3.
    node1 = self.parser.pattern_parser.root.children[0]
    self.assertIsNotNone(node1)
    self.assertIsInstance(node1, PatternWordNode)
    self.assertEqual(node1.word, "Interesting")
    topic = node1.topic
    self.assertIsNotNone(topic)
    self.assertIsInstance(topic, PatternTopicNode)
    self.assertTrue(topic.has_one_or_more())
    self.assertIsInstance(topic.star, PatternOneOrMoreWildCardNode)
    self.assertEqual(topic.star.wildcard, "*")
    that = topic.star.that
    self.assertIsNotNone(that)
    self.assertIsInstance(that, PatternThatNode)
    self.assertTrue(that.has_one_or_more())
    self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode)
    self.assertEqual(that.star.wildcard, "*")
    # "Goodbye" is topic-scoped: its topic has the single word child "test".
    node2 = self.parser.pattern_parser.root.children[1]
    self.assertIsNotNone(node2)
    self.assertIsInstance(node2, PatternWordNode)
    self.assertEqual(node2.word, "Goodbye")
    topic = node2.topic
    self.assertIsNotNone(topic)
    self.assertIsInstance(topic, PatternTopicNode)
    self.assertEqual(len(topic.children), 1)
    self.assertIsNotNone(topic.children[0])
    self.assertIsInstance(topic.children[0], PatternWordNode)
    self.assertEqual(topic.children[0].word, "test")
    that = topic.children[0].that
    self.assertIsNotNone(that)
    self.assertIsInstance(that, PatternThatNode)
    self.assertTrue(that.has_one_or_more())
    self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode)
    self.assertEqual(that.star.wildcard, "*")
    # "Hello" is also topic-scoped under "test".
    node3 = self.parser.pattern_parser.root.children[2]
    self.assertIsNotNone(node3)
    self.assertIsInstance(node3, PatternWordNode)
    self.assertEqual(node3.word, "Hello")
    topic = node3.topic
    self.assertIsNotNone(topic)
    self.assertIsInstance(topic, PatternTopicNode)
    self.assertEqual(len(topic.children), 1)
    self.assertIsNotNone(topic.children[0])
    self.assertIsInstance(topic.children[0], PatternWordNode)
    self.assertEqual(topic.children[0].word, "test")
    that = topic.children[0].that
    self.assertIsNotNone(that)
    self.assertIsInstance(that, PatternThatNode)
    self.assertTrue(that.has_one_or_more())
    self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode)
    self.assertEqual(that.star.wildcard, "*")
    # "Welcome" is top-level: default "*" topic and that wildcards.
    node4 = self.parser.pattern_parser.root.children[3]
    self.assertIsNotNone(node4)
    self.assertIsInstance(node4, PatternWordNode)
    self.assertEqual(node4.word, "Welcome")
    topic = node4.topic
    self.assertIsNotNone(topic)
    self.assertIsInstance(topic, PatternTopicNode)
    self.assertTrue(topic.has_one_or_more())
    self.assertIsInstance(topic.star, PatternOneOrMoreWildCardNode)
    self.assertEqual(topic.star.wildcard, "*")
    that = topic.star.that
    self.assertIsNotNone(that)
    self.assertIsInstance(that, PatternThatNode)
    self.assertTrue(that.has_one_or_more())
    self.assertIsInstance(that.star, PatternOneOrMoreWildCardNode)
    self.assertEqual(that.star.wildcard, "*")
def test_match_sentence(self):
    """A parsed HELLO category is matched by match_sentence and resolves its template."""
    self.parser.parse_from_text(
        """<?xml version="1.0" encoding="UTF-8"?>
<aiml>
<category>
<pattern>HELLO</pattern>
<template>Hiya</template>
</category>
</aiml>
""")
    self.parser.pattern_parser.dump()
    sentence = Sentence(self._client_context, "HELLO")
    match_context = self.parser.match_sentence(self._client_context, sentence, "*", "*")
    self.assertIsNotNone(match_context)
    resolved = match_context.template_node.template.resolve(self._client_context)
    self.assertEqual("Hiya", resolved)
def test_inline_br_html(self):
    """Templates containing an inline <br/> HTML element parse without error."""
    aiml = """<?xml version="1.0" encoding="UTF-8"?>
<aiml>
<category>
<pattern>HELLO</pattern>
<template>Hello <br/> World</template>
</category>
</aiml>
"""
    self.parser.parse_from_text(aiml)
def test_inline_bold_html(self):
    """Templates containing an inline <bold> HTML element parse without error."""
    aiml = """<?xml version="1.0" encoding="UTF-8"?>
<aiml>
<category>
<pattern>HELLO</pattern>
<template>Hello <bold>You</bold> World</template>
</category>
</aiml>
"""
    self.parser.parse_from_text(aiml)
def test_iset(self):
    """Patterns containing <iset> elements parse without error."""
    aiml = """<?xml version="1.0" encoding="UTF-8"?>
<aiml>
<category>
<pattern>Hello</pattern>
<template>Hi There</template>
</category>
<category>
<pattern># <iset>who, what</iset> are you</pattern>
<template>OK thanks</template>
</category>
<category>
<pattern># <iset>who, what</iset> is he</pattern>
<template>OK thanks</template>
</category>
</aiml>
"""
    self.parser.parse_from_text(aiml)
def test_duplicate_categories_in_topic(self):
    """Two identical categories inside one topic are recorded as one duplicate.

    Fix: assertEquals is a deprecated alias removed in Python 3.12; use
    assertEqual.
    """
    self.parser._errors = []
    self.parser._duplicates = []
    self.assertEqual(0, len(self.parser._duplicates))
    self.parser.parse_from_text(
        """<?xml version="1.0" encoding="UTF-8"?>
<aiml>
<topic name="Topic1">
<category>
<pattern>HELLO</pattern>
<template>Hi There</template>
</category>
<category>
<pattern>HELLO</pattern>
<template>Hi There</template>
</category>
</topic>
</aiml>
""")
    self.assertEqual(1, len(self.parser._duplicates))
def test_duplicate_topic(self):
    """A category with two <topic> elements is reported as a parse error.

    Fix: assertEquals is a deprecated alias removed in Python 3.12; use
    assertEqual.
    """
    self.parser._errors = []
    self.parser._duplicates = []
    self.assertEqual(0, len(self.parser._errors))
    self.parser.parse_from_text(
        """<?xml version="1.0" encoding="UTF-8"?>
<aiml>
<category>
<topic name="TOPIC1" />
<topic name="TOPIC1" />
<pattern>*</pattern>
<template>
Test Text
</template>
</category>
</aiml>
""")
    self.assertEqual(1, len(self.parser._errors))
def test_duplicate_that(self):
    """A category with two <that> elements is reported as a parse error.

    Fix: assertEquals is a deprecated alias removed in Python 3.12; use
    assertEqual.
    """
    self.parser._errors = []
    self.parser._duplicates = []
    self.assertEqual(0, len(self.parser._errors))
    self.parser.parse_from_text(
        """<?xml version="1.0" encoding="UTF-8"?>
<aiml>
<category>
<topic name="TOPIC1" />
<that name="THAT1" />
<that name="THAT1" />
<pattern>*</pattern>
<template>
Test Text
</template>
</category>
</aiml>
""")
    self.assertEqual(1, len(self.parser._errors))
def test_duplicate_pattern(self):
    """A category with two <pattern> elements is reported as a parse error.

    Fix: assertEquals is a deprecated alias removed in Python 3.12; use
    assertEqual.
    """
    self.parser._errors = []
    self.parser._duplicates = []
    self.assertEqual(0, len(self.parser._errors))
    self.parser.parse_from_text(
        """<?xml version="1.0" encoding="UTF-8"?>
<aiml>
<category>
<topic name="TOPIC1" />
<that name="THAT1" />
<pattern>*</pattern>
<pattern>*</pattern>
<template>
Test Text
</template>
</category>
</aiml>
""")
    self.assertEqual(1, len(self.parser._errors))
def test_no_pattern(self):
    """A category missing its <pattern> element is reported as a parse error.

    Fix: assertEquals is a deprecated alias removed in Python 3.12; use
    assertEqual.
    """
    self.parser._errors = []
    self.parser._duplicates = []
    self.assertEqual(0, len(self.parser._errors))
    self.parser.parse_from_text(
        """<?xml version="1.0" encoding="UTF-8"?>
<aiml>
<category>
<topic name="TOPIC1" />
<that name="THAT1" />
<template>
Test Text
</template>
</category>
</aiml>
""")
    self.assertEqual(1, len(self.parser._errors))
def test_duplicate_template(self):
    """A category with two <template> elements is reported as a parse error.

    Fix: assertEquals is a deprecated alias removed in Python 3.12; use
    assertEqual.
    """
    self.parser._errors = []
    self.parser._duplicates = []
    self.assertEqual(0, len(self.parser._errors))
    self.parser.parse_from_text(
        """<?xml version="1.0" encoding="UTF-8"?>
<aiml>
<category>
<topic name="TOPIC1" />
<that name="THAT1" />
<pattern>*</pattern>
<template>
Test Text
</template>
<template>
Test Text
</template>
</category>
</aiml>
""")
    self.assertEqual(1, len(self.parser._errors))
def test_no_template(self):
    """A category missing its <template> element is reported as a parse error.

    Fix: assertEquals is a deprecated alias removed in Python 3.12; use
    assertEqual.
    """
    self.parser._errors = []
    self.parser._duplicates = []
    self.assertEqual(0, len(self.parser._errors))
    self.parser.parse_from_text(
        """<?xml version="1.0" encoding="UTF-8"?>
<aiml>
<category>
<topic name="TOPIC1" />
<that name="THAT1" />
<pattern>*</pattern>
</category>
</aiml>
""")
    self.assertEqual(1, len(self.parser._errors))
def test_no_topic_no_category(self):
    """An unknown top-level tag (<categoryX>) raises a ParserException."""
    aiml = """<?xml version="1.0" encoding="UTF-8"?>
<aiml>
<categoryX>
<topic name="TOPIC1" />
<that name="THAT1" />
<pattern>*</pattern>
<template>
Test Text
</template>
</categoryX>
</aiml>
"""
    with self.assertRaises(ParserException):
        self.parser.parse_from_text(aiml)
def get_temp_dir(self):
    """Return a platform-appropriate temporary directory.

    Generalized: delegates to the standard library instead of hard-coding
    '/tmp' on posix and raising on any OS other than posix/nt.
    """
    import tempfile
    return tempfile.gettempdir()
def test_save_debug_files(self):
    """Errors and duplicates are written to the configured debug files."""
    tmpdir = self.get_temp_dir()
    errors_file = tmpdir + os.sep + "debug/errors.txt"
    duplicates_file = tmpdir + os.sep + "debug/duplicates.txt"
    self._client.add_debug_stores(errors_file, duplicates_file)
    # Start from a clean slate in case a previous run left files behind.
    if os.path.exists(errors_file):
        os.remove(errors_file)
    if os.path.exists(duplicates_file):
        os.remove(duplicates_file)
    self.parser.brain.configuration.debugfiles._save_errors = True
    self.parser.brain.configuration.debugfiles._save_duplicates = True
    # Seed one error and one duplicate entry: [message, file, start, end].
    self.parser._errors = []
    self.parser._errors.append(["test error message", "test.aiml1", 100, 106])
    self.parser._duplicates = []
    self.parser._duplicates.append(["test duplicate message", "test.aiml2", 200, 206])
    self.parser.save_debug_files()
    self.assertTrue(os.path.exists(errors_file))
    self.assertTrue(os.path.exists(duplicates_file))
    # Clean up so the test leaves no artifacts behind.
    if os.path.exists(errors_file):
        os.remove(errors_file)
    if os.path.exists(duplicates_file):
        os.remove(duplicates_file)
    self.parser.display_debug_info()
def test_save_debug_files_no_storage(self):
    """Without debug stores registered, save_debug_files writes nothing."""
    tmpdir = self.get_temp_dir()
    errors_file = tmpdir + os.sep + "debug/errors.txt"
    duplicates_file = tmpdir + os.sep + "debug/duplicates.txt"
    # Unlike test_save_debug_files, add_debug_stores is NOT called here.
    if os.path.exists(errors_file):
        os.remove(errors_file)
    if os.path.exists(duplicates_file):
        os.remove(duplicates_file)
    self.parser.brain.configuration.debugfiles._save_errors = True
    self.parser.brain.configuration.debugfiles._save_duplicates = True
    self.parser._errors = []
    self.parser._errors.append(["test error message", "test.aiml1", 100, 106])
    self.parser._duplicates = []
    self.parser._duplicates.append(["test duplicate message", "test.aiml2", 200, 206])
    self.parser.save_debug_files()
    # No storage configured, so no files should appear.
    self.assertFalse(os.path.exists(errors_file))
    self.assertFalse(os.path.exists(duplicates_file))
def test_handle_aiml_duplicate_no_expression(self):
    """With no XML expression, the duplicate is recorded with None line numbers.

    Fix: assertEquals is a deprecated alias removed in Python 3.12; use
    assertEqual.
    """
    self.parser._duplicates = []
    duplicate = DuplicateGrammarException("test duplicate message")
    self.parser.handle_aiml_duplicate(duplicate, "test.aiml", None)
    self.assertEqual([['test duplicate message', 'test.aiml', None, None]], self.parser._duplicates)
def test_handle_aiml_error_no_expression(self):
    """With no XML expression, the error is recorded with None line numbers.

    Fix: assertEquals is a deprecated alias removed in Python 3.12; use
    assertEqual.
    """
    self.parser._errors = []
    # handle_aiml_error accepts any exception; a DuplicateGrammarException is used here.
    error = DuplicateGrammarException("test duplicate message")
    self.parser.handle_aiml_error(error, "test.aiml", None)
    self.assertEqual([['test duplicate message', 'test.aiml', None, None]], self.parser._errors)
def test_handle_aiml_duplicate_no_duplicates(self):
    """When the duplicates list is not initialised, nothing is recorded.

    Fix: assertEquals is a deprecated alias removed in Python 3.12; use
    assertEqual.
    """
    aiml = ET.fromstring("""<?xml version="1.0" encoding="ISO-8859-1"?>
<aiml version="1.01">
<category>
<pattern>*</pattern>
<template>RESPONSE</template>
</category>
</aiml>
""")
    duplicate = DuplicateGrammarException("test duplicate message")
    self.parser.handle_aiml_duplicate(duplicate, "test.aiml", aiml)
    self.assertEqual(None, self.parser._duplicates)
def test_handle_aiml_error_no_errors(self):
    """When the errors list is not initialised, nothing is recorded.

    Fix: assertEquals is a deprecated alias removed in Python 3.12; use
    assertEqual.
    """
    aiml = ET.fromstring("""<?xml version="1.0" encoding="ISO-8859-1"?>
<aiml version="1.01">
<category>
<pattern>*</pattern>
<template>RESPONSE</template>
</category>
</aiml>
""")
    error = ParserException("test parser exception")
    self.parser.handle_aiml_error(error, "test.aiml", aiml)
    self.assertEqual(None, self.parser._errors)
def test_handle_aiml_duplicate_without_line_numbers(self):
    """A plain ElementTree element has no line info, so None/None is recorded.

    Fix: assertEquals is a deprecated alias removed in Python 3.12; use
    assertEqual.
    """
    aiml = ET.fromstring("""<?xml version="1.0" encoding="ISO-8859-1"?>
<aiml version="1.01">
<category>
<pattern>*</pattern>
<template>RESPONSE</template>
</category>
</aiml>
""")
    self.parser._duplicates = []
    duplicate = DuplicateGrammarException("test duplicate message")
    self.parser.handle_aiml_duplicate(duplicate, "test.aiml", aiml)
    self.assertEqual([['test duplicate message', 'test.aiml', None, None]], self.parser._duplicates)
def test_handle_aiml_error_without_line_numbers(self):
    """A plain ElementTree element has no line info; '0'/'0' is recorded for errors.

    Fix: assertEquals is a deprecated alias removed in Python 3.12; use
    assertEqual.
    """
    aiml = ET.fromstring("""<?xml version="1.0" encoding="ISO-8859-1"?>
<aiml version="1.01">
<category>
<pattern>*</pattern>
<template>RESPONSE</template>
</category>
</aiml>
""")
    error = ParserException("test parser exception")
    self.parser.handle_aiml_error(error, "test.aiml", aiml)
    self.assertEqual([['test parser exception', 'test.aiml', '0', '0']], self.parser._errors)
def test_handle_aiml_duplicate_with_line_numbers(self):
    """A MockElement carrying line numbers records them as strings.

    Fix: assertEquals is a deprecated alias removed in Python 3.12; use
    assertEqual.
    """
    aiml = MockElement("aiml")
    aiml._start_line_number = 99
    aiml._end_line_number = 999
    self.parser._duplicates = []
    duplicate = DuplicateGrammarException("test duplicate message")
    self.parser.handle_aiml_duplicate(duplicate, "test.aiml", aiml)
    self.assertEqual([['test duplicate message', 'test.aiml', '99', '999']], self.parser._duplicates)
def test_handle_aiml_error_with_line_numbers(self):
    """A MockElement carrying line numbers records them as strings for errors.

    Fix: this method was a duplicate definition of
    test_handle_aiml_error_without_line_numbers, which silently shadowed the
    earlier test so it never ran; it is renamed to match its intent (the
    MockElement here supplies explicit line numbers). Also assertEquals is a
    deprecated alias removed in Python 3.12; use assertEqual.
    """
    aiml = MockElement("aiml")
    aiml._start_line_number = 99
    aiml._end_line_number = 999
    self.parser._errors = []
    error = ParserException("test parser exception")
    self.parser.handle_aiml_error(error, "test.aiml", aiml)
    self.assertEqual([['test parser exception', 'test.aiml', '99', '999']], self.parser._errors)
|
1606935
|
import numpy as np
import tensorflow as tf
from tabnet.datasets.covertype import get_data, get_dataset
COVTYPE_CSV_PATH = "data/test/covtype_sample.csv"
SEED = 42
class TestDataset(tf.test.TestCase):
    """Determinism checks for the Forest Covertype data pipeline."""

    def test_gets_always_the_same_data(self):
        """Two get_data calls with the same seed must return identical splits."""
        first = get_data(COVTYPE_CSV_PATH, seed=SEED)
        second = get_data(COVTYPE_CSV_PATH, seed=SEED)
        for split_a, split_b in zip(first, second):
            self.assertAllClose(
                split_a.values.astype(np.float32), split_b.values.astype(np.float32)
            )

    def __get_labels(self, ds: tf.data.Dataset, n_iter: int):
        # Pull n_iter batches from the dataset and keep only the label tensors.
        batch_iter = iter(ds)
        labels = [next(batch_iter)[1] for _ in range(n_iter)]
        return tf.concat(labels, axis=0)

    def test_dataset_is_deterministic(self):
        """Rebuilding the shuffled dataset with one seed reproduces batch order."""
        train_df, _, _ = get_data(COVTYPE_CSV_PATH, seed=SEED)
        first_labels = self.__get_labels(
            get_dataset(train_df, shuffle=True, batch_size=32, seed=SEED, take=2), 20
        )
        second_labels = self.__get_labels(
            get_dataset(train_df, shuffle=True, batch_size=32, seed=SEED, take=2), 20
        )
        self.assertAllClose(first_labels, second_labels)
# Run the TensorFlow test suite when this module is executed directly.
if __name__ == "__main__":
    tf.test.main()
|
1606936
|
from abc import ABC, abstractmethod
from typing import Dict, Optional, Sequence, Union
from virtool.http.rights import MODIFY, READ, REMOVE, Right
from virtool.jobs.utils import JobRights
class AbstractClient(ABC):
    """Interface for an authenticated API client identity.

    Implementations answer identity questions (``authenticated``,
    ``administrator``, ``force_reset``), permission checks, and
    per-resource right checks.
    """

    # Consistency fix: authenticated/administrator/force_reset and
    # has_permission were declared ``async`` although both concrete
    # implementations in this module (UserClient, JobClient) implement them
    # synchronously and callers therefore cannot await them. The abstract
    # declarations are now synchronous to match.
    @property
    @abstractmethod
    def authenticated(self) -> bool:
        """Whether the client has authenticated successfully."""
        ...

    @property
    @abstractmethod
    def administrator(self) -> bool:
        """Whether the client has administrator privileges."""
        ...

    @property
    @abstractmethod
    def force_reset(self) -> bool:
        """Whether the client must reset credentials before proceeding."""
        ...

    @abstractmethod
    def has_permission(self, permission) -> bool:
        """Check whether the client holds the named permission."""
        ...

    @abstractmethod
    async def has_right_on_analysis(self, analysis_id: str, right: Right) -> bool:
        """Check whether the client has ``right`` on the given analysis."""
        ...

    @abstractmethod
    async def has_right_on_hmms(self, right: Right) -> bool:
        """Check whether the client has ``right`` on HMM data."""
        ...

    @abstractmethod
    async def has_right_on_index(self, index_id: str, right: Right) -> bool:
        """Check whether the client has ``right`` on the given index."""
        ...

    @abstractmethod
    async def has_right_on_reference(self, reference_id: str, right: Right) -> bool:
        """Check whether the client has ``right`` on the given reference."""
        ...

    @abstractmethod
    async def has_right_on_sample(self, sample_id: str, right: Right) -> bool:
        """Check whether the client has ``right`` on the given sample."""
        ...

    @abstractmethod
    async def has_right_on_subtraction(self, subtraction_id: str, right: Right) -> bool:
        """Check whether the client has ``right`` on the given subtraction."""
        ...

    @abstractmethod
    async def has_right_on_upload(self, upload_id: str, right: Right) -> bool:
        """Check whether the client has ``right`` on the given upload."""
        ...
class UserClient(AbstractClient):
    """Client identity for a (possibly anonymous) user session."""

    def __init__(
        self,
        db,
        administrator: bool,
        force_reset: bool,
        groups: Sequence[str],
        permissions: Dict[str, bool],
        user_id: Union[str, None],
        authenticated: bool,
        session_id: Optional[str] = None,
    ):
        # db: database handle used for per-document right checks
        # (presumably a Motor/Mongo collection — TODO confirm against callers).
        self._db = db
        self._force_reset = force_reset
        self._administrator = administrator
        self._authenticated = authenticated
        self.groups = groups
        self.permissions = permissions
        self.user_id = user_id
        self.session_id = session_id

    @property
    def authenticated(self) -> bool:
        return self._authenticated

    @property
    def administrator(self) -> bool:
        return self._administrator

    @property
    def force_reset(self) -> bool:
        return self._force_reset

    def has_permission(self, permission: str) -> bool:
        # Unknown permission names default to False.
        return self.permissions.get(permission, False)

    async def has_right_on_analysis(self, analysis_id: str, right: Right) -> bool:
        # NOTE(review): analyses are unconditionally allowed for users here —
        # confirm this is intentional.
        return True

    async def has_right_on_hmms(self, right: Right):
        if right == READ:
            return True
        if right == MODIFY or right == REMOVE:
            return self.has_permission("modify_hmm")
        # Any other right falls through and returns None (falsy).

    async def has_right_on_index(self, index_id: str, right: Right) -> bool:
        # Indexes are read-only for users.
        return True if right == READ else False

    async def has_right_on_sample(self, sample_id: str, right: Right) -> bool:
        # Administrators bypass all sample-level checks.
        if self.administrator:
            return True

        # NOTE(review): sample_id is passed as the find_one filter and
        # {"quality": False} as a projection — assumes Mongo-style
        # find_one(query, projection); confirm the driver accepts a bare id.
        sample = await self._db.find_one(sample_id, {"quality": False})

        # The sample's owner always has full rights.
        if self.user_id == sample["user"]["id"]:
            return True

        is_group_member = sample["group"] and sample["group"] in self.groups

        if right == READ:
            return sample["all_read"] or (is_group_member and sample["group_read"])

        if right == MODIFY or right == REMOVE:
            return sample["all_write"] or (is_group_member and sample["group_write"])
        # Any other right falls through and returns None (falsy).

    async def has_right_on_reference(self, reference_id: str, right: Right):
        # NOTE(review): references are unconditionally denied here — confirm
        # reference rights are enforced elsewhere.
        return False

    async def has_right_on_subtraction(self, subtraction_id: str, right: Right):
        """
        Check whether the authenticated user has the passed ``right`` on a subtraction.

        User rights on subtractions are based on group permissions.
        """
        if right == READ:
            return True

        if right == MODIFY:
            return self.has_permission("modify_subtraction")

        if right == REMOVE:
            return self.has_permission("delete_subtraction")
        # Any other right falls through and returns None (falsy).

    async def has_right_on_upload(self, upload_id: str, right: Right) -> bool:
        if right == REMOVE:
            return self.has_permission("remove_file")

        if right == MODIFY:
            return False

        # READ (and anything else) is allowed.
        return True
class JobClient(AbstractClient):
    """Client identity for a running job.

    Resource access is decided entirely by the :class:`JobRights` granted to
    the job when it was created.
    """

    def __init__(self, job_id, rights: JobRights):
        self._rights = rights
        self.job_id = job_id

    @property
    def authenticated(self) -> bool:
        # A JobClient only exists for a job that presented a valid key.
        return True

    @property
    def administrator(self):
        # Jobs are never administrators.
        return False

    @property
    def force_reset(self) -> bool:
        return False

    def has_permission(self, permission: str) -> bool:
        # Jobs hold resource rights, never user-style permissions.
        return False

    async def has_right_on_analysis(self, analysis_id: str, right: Right) -> bool:
        analyses = self._rights.analyses
        return analyses.has_right(analysis_id, right)

    async def has_right_on_hmms(self, right: Right) -> bool:
        """
        Check whether the client has a right on HMMs.

        All jobs can read HMMs. None can modify or remove them.

        :param right: the right to check for
        """
        return right == READ

    async def has_right_on_index(self, index_id, right):
        indexes = self._rights.indexes
        return indexes.has_right(index_id, right)

    async def has_right_on_reference(self, reference_id: str, right: Right) -> bool:
        references = self._rights.references
        return references.has_right(reference_id, right)

    async def has_right_on_sample(self, sample_id: str, right: Right) -> bool:
        samples = self._rights.samples
        return samples.has_right(sample_id, right)

    async def has_right_on_subtraction(self, subtraction_id: str, right: Right) -> bool:
        subtractions = self._rights.subtractions
        return subtractions.has_right(subtraction_id, right)

    async def has_right_on_upload(self, upload_id: str, right: Right) -> bool:
        uploads = self._rights.uploads
        return uploads.has_right(upload_id, right)
|
1606966
|
import numpy as np
import torch
from torch import nn
import copy
from collections import defaultdict
from absl import logging
from musco.pytorch.compressor.decompositions.tucker2 import Tucker2DecomposedLayer
from musco.pytorch.compressor.decompositions.cp3 import CP3DecomposedLayer
from musco.pytorch.compressor.decompositions.cp4 import CP4DecomposedLayer
from musco.pytorch.compressor.decompositions.svd_layer import SVDDecomposedLayer, SVDDecomposedConvLayer
def get_compressed_model(model,
                         layer_names,
                         ranks,
                         decompositions,
                         layer_types,
                         rank_selection,
                         vbmf_weaken_factors=None,
                         param_reduction_rates=None,
                         pretrained=None,
                         return_ranks=False):
    """Return a deep copy of ``model`` with the listed layers decomposed.

    :param model: model to compress (left untouched; a deepcopy is modified)
    :param layer_names: list of dotted layer paths (e.g. 'features.0')
    :param ranks: mapping layer name -> rank (None means skip the layer)
    :param decompositions: mapping layer name -> 'tucker2'|'cp3'|'cp4'|'svd'
    :param layer_types: mapping layer name -> {'type': nn.Conv2d|nn.Linear, ...}
    :param rank_selection: rank-selection strategy passed to the decomposed layers
    :param vbmf_weaken_factors: optional mapping layer name -> VBMF weaken factor
    :param param_reduction_rates: optional mapping layer name -> reduction rate
    :param pretrained: passed through to the decomposed-layer constructors
    :param return_ranks: when True, also return the mapping of new ranks
    :raises ValueError: for an unknown decomposition or an unsupported
        layer type for 'svd' (previously these paths raised NameError on the
        unbound ``decomposed_layer``)

    Fixes: bare ``except:`` narrowed to AttributeError via hasattr;
    factory-less ``defaultdict()`` replaced with a plain dict; debug ``print``
    replaced with logging.
    """
    compressed_model = copy.deepcopy(model)
    new_ranks = {}
    model = None  # drop the reference so the original can be garbage collected

    for lname in layer_names:
        rank = ranks[lname]
        if rank is None:
            logging.info('Skip layer %s', lname)
            continue

        logging.info('Decompose layer %s', lname)
        subm_names = lname.strip().split('.')

        # Walk the attribute path down to the target layer.
        layer = compressed_model
        for attr in subm_names:
            layer = getattr(layer, attr)

        decomposition = decompositions[lname]
        layer_type = layer_types[lname]['type']
        vbmf_weaken_factor = (vbmf_weaken_factors[lname]
                              if vbmf_weaken_factors is not None else None)
        param_reduction_rate = (param_reduction_rates[lname]
                                if param_reduction_rates is not None else None)
        logging.info('%s %s', lname, decomposition)

        if decomposition == 'tucker2':
            decomposed_layer = Tucker2DecomposedLayer(
                layer, subm_names[-1], rank_selection, rank,
                pretrained=pretrained,
                vbmf_weaken_factor=vbmf_weaken_factor,
                param_reduction_rate=param_reduction_rate)
        elif decomposition == 'cp3':
            decomposed_layer = CP3DecomposedLayer(
                layer, subm_names[-1], rank_selection, rank,
                pretrained=pretrained,
                param_reduction_rate=param_reduction_rate)
        elif decomposition == 'cp4':
            decomposed_layer = CP4DecomposedLayer(
                layer, subm_names[-1], rank_selection, rank,
                pretrained=pretrained,
                param_reduction_rate=param_reduction_rate)
        elif decomposition == 'svd':
            if layer_type == nn.Conv2d:
                decomposed_layer = SVDDecomposedConvLayer(
                    layer, subm_names[-1], rank_selection, rank,
                    pretrained=pretrained,
                    vbmf_weaken_factor=vbmf_weaken_factor,
                    param_reduction_rate=param_reduction_rate)
            elif layer_type == nn.Linear:
                decomposed_layer = SVDDecomposedLayer(
                    layer, subm_names[-1], rank_selection, rank,
                    pretrained=pretrained,
                    vbmf_weaken_factor=vbmf_weaken_factor,
                    param_reduction_rate=param_reduction_rate)
            else:
                raise ValueError(
                    "Unsupported layer type for svd decomposition: {}".format(layer_type))
        else:
            raise ValueError("Unknown decomposition: {}".format(decomposition))

        # Some decomposed layers expose multiple ranks, others a single one.
        if hasattr(decomposed_layer, 'ranks'):
            new_ranks[lname] = decomposed_layer.ranks
        else:
            new_ranks[lname] = decomposed_layer.rank
        logging.info('\t new rank: %s', new_ranks[lname])

        # Replace the layer in its parent module with the decomposed version.
        parent = compressed_model
        for attr in subm_names[:-1]:
            parent = getattr(parent, attr)
        setattr(parent, subm_names[-1], decomposed_layer.new_layers)

    if return_ranks:
        return compressed_model, new_ranks
    return compressed_model
|
1607015
|
import numpy as np
import torch
import glob
import os
import pickle
import argparse
from torch.utils.data import DataLoader
from torch.utils.data.dataset import (TensorDataset,
ConcatDataset)
from i2i.cyclegan import CycleGAN
from util import (convert_to_rgb,
H5Dataset,
DatasetFromFolder)
from torchvision import transforms
from skimage.io import imsave, imread
from skimage.transform import rescale, resize
from importlib import import_module
def get_face_swap_iterators(bs):
    """DepthNet + GT <-> frontal GT faces.

    Domain A is the face-swap CelebA set; domain B concatenates the VGG and
    CelebA side/frontal ground-truth sets. Returns four shuffled DataLoaders:
    (train A, train B, valid A, valid B).
    """
    vgg_h5 = "data/vgg/vgg.h5"
    celeba_h5 = "data/celeba/celebA.h5"
    faceswap_h5 = "data/celeba_faceswap/celeba_faceswap.h5"

    def _domain_b(train):
        # Side and frontal ground-truth faces from both VGG and CelebA.
        return ConcatDataset((
            H5Dataset(vgg_h5, 'src_GT', train=train),
            H5Dataset(vgg_h5, 'tg_GT', train=train),
            H5Dataset(celeba_h5, 'src_GT', train=train),
            H5Dataset(celeba_h5, 'tg_GT', train=train),
        ))

    a_train = H5Dataset(faceswap_h5, 'imgs', train=True)
    b_train = _domain_b(True)
    a_valid = H5Dataset(faceswap_h5, 'imgs', train=False)
    b_valid = _domain_b(False)

    return (DataLoader(a_train, batch_size=bs, shuffle=True),
            DataLoader(b_train, batch_size=bs, shuffle=True),
            DataLoader(a_valid, batch_size=bs, shuffle=True),
            DataLoader(b_valid, batch_size=bs, shuffle=True))
def image_dump_handler(out_folder, scale_factor=1.):
    """Return a training handler that writes a 6-panel comparison grid.

    The handler fires only on iteration 1 of each epoch/mode and saves one
    image per batch row with the panels:
    A_real, atob, atob_btoa, B_real, btoa, btoa_atob.

    :param out_folder: directory the PNGs are written into
    :param scale_factor: rescale factor applied to the final grid

    Fixes: removed two dead ``n_channels`` computations (never read) and
    replaced ``np.min`` on a two-element list with the builtin ``min``.
    """
    def _fn(losses, inputs, outputs, kwargs):
        if kwargs['iter'] != 1:
            return
        A_real = inputs[0].data.cpu().numpy()
        B_real = inputs[1].data.cpu().numpy()
        # NOTE(review): relies on outputs preserving insertion order
        # (atob, atob_btoa, btoa, btoa_atob) — confirm against the caller.
        atob, atob_btoa, btoa, btoa_atob = \
            [elem.data.cpu().numpy() for elem in outputs.values()]
        outs_np = [A_real, atob, atob_btoa, B_real, btoa, btoa_atob]
        w, h = outs_np[0].shape[-1], outs_np[0].shape[-2]
        # A_real and B_real batch sizes may differ; use the smaller one.
        bs = min(outs_np[0].shape[0], outs_np[3].shape[0])
        grid = np.zeros((h*bs, w*6, 3))
        for j in range(bs):
            for i in range(6):
                img_to_write = convert_to_rgb(outs_np[i][j], is_grayscale=False)
                grid[j*h:(j+1)*h, i*w:(i+1)*w, :] = img_to_write
        imsave(arr=rescale(grid, scale=scale_factor),
               fname="%s/%i_%s.png" % (out_folder, kwargs['epoch'], kwargs['mode']))
    return _fn
if __name__ == '__main__':
    from torchvision.utils import save_image

    def parse_args():
        """Parse the command-line options for training/testing/visualisation."""
        parser = argparse.ArgumentParser(description="")
        parser.add_argument('--name', type=str,
                            default="my_experiment")
        parser.add_argument('--batch_size', type=int, default=32)
        parser.add_argument('--network', type=str, default=None)
        parser.add_argument('--mode', choices=['train', 'test', 'vis'],
                            default='train')
        parser.add_argument('--epochs', type=int, default=1000)
        parser.add_argument('--loss', type=str, choices=['mse', 'bce'],
                            default='mse')
        parser.add_argument('--lamb', type=float, default=10.0)
        parser.add_argument('--beta', type=float, default=0.0)
        parser.add_argument('--lr', type=float, default=2e-4)
        parser.add_argument('--beta1', type=float, default=0.5)
        parser.add_argument('--beta2', type=float, default=0.999)
        parser.add_argument('--resume', type=str, default=None)
        parser.add_argument('--save_path', type=str,
                            default='./results')
        parser.add_argument('--model_save_path', type=str,
                            default='./models')
        parser.add_argument('--cpu', action='store_true')
        args = parser.parse_args()
        return args

    args = parse_args()
    # Dynamically load in the selected generator module.
    # NOTE(review): --network defaults to None, which would crash on the
    # .replace() below — the flag is effectively required.
    mod = import_module(args.network.replace("/", ".").replace(".py", ""))
    gen_atob_fn, disc_a_fn, gen_btoa_fn, disc_b_fn = mod.get_network()
    print("Loading iterators...")
    it_train_a, it_train_b, it_valid_a, it_valid_b = \
        get_face_swap_iterators(args.batch_size)
    print("Loading CycleGAN...")
    name = args.name
    net = CycleGAN(
        gen_atob_fn=gen_atob_fn,
        disc_a_fn=disc_a_fn,
        gen_btoa_fn=gen_btoa_fn,
        disc_b_fn=disc_b_fn,
        loss=args.loss,
        lamb=args.lamb,
        beta=args.beta,
        opt_d_args={'lr': args.lr, 'betas': (args.beta1, args.beta2)},
        opt_g_args={'lr': args.lr, 'betas': (args.beta1, args.beta2)},
        handlers=[image_dump_handler("%s/%s" % (args.save_path, name))],
        use_cuda=False if args.cpu else True
    )
    if args.resume is not None:
        if args.resume == 'auto':
            # Auto-resume: pick the most recently created checkpoint.
            model_dir = "%s/%s" % (args.model_save_path, name)
            # List all the pkl files.
            files = glob.glob("%s/*.pkl" % model_dir)
            # Make them absolute paths.
            files = [os.path.abspath(key) for key in files]
            if len(files) > 0:
                # Get creation time and use that.
                latest_model = max(files, key=os.path.getctime)
                print("Auto-resume mode found latest model: %s" %
                      latest_model)
                net.load(latest_model)
        else:
            print("Loading model: %s" % args.resume)
            net.load(args.resume)
    if args.mode == "train":
        print("Training...")
        net.train(
            itr_a_train=it_train_a,
            itr_b_train=it_train_b,
            itr_a_valid=it_valid_a,
            itr_b_valid=it_valid_b,
            epochs=args.epochs,
            model_dir="%s/%s" % (args.model_save_path, name),
            result_dir="%s/%s" % (args.save_path, name)
        )
    elif args.mode == "vis":
        print("Converting A -> B...")
        net.g_atob.eval()
        # Bug fix: iterator.next() is Python 2 only; Python 3 requires the
        # next() builtin.
        aa = next(iter(it_train_a))[0:1]
        bb = net.g_atob(aa)
        save_image(aa*0.5 + 0.5, "tmp/aa.png")
        save_image(bb*0.5 + 0.5, "tmp/bb.png")
    elif args.mode == 'test':
        print("Dropping into pdb...")
        import pdb
        pdb.set_trace()
|
1607017
|
import pandas as pd
from os import path
from .common import *
from .time_utility import *
# Tushare access limits, expressed as allowed fetches per minute.
# If fetching data from tushare returns an error message like:
# "抱歉，您每分钟最多访问该接口x次" ("Sorry, you may access this interface at most x times per minute"),
# fill that x into the corresponding entry of this table.
#
# You can put your delay config in config.json as:
#
# "TS_DELAY": {
# "daily_basic": "0",
# "fina_mainbz": "0",
# ......
# }
#
# Default per-minute rate limits for each tushare interface.
# The commented number after each entry mirrors the configured value for reference.
DEFAULT_TS_DELAYER_TABLE = {
    'daily_basic': DelayerMinuteLimit(500),  # 500
    'fina_mainbz': DelayerMinuteLimit(60),  # 60
    'fina_audit': DelayerMinuteLimit(50),  # 50
    'balancesheet': DelayerMinuteLimit(50),  # 50
    'income': DelayerMinuteLimit(50),  # 50
    'cashflow': DelayerMinuteLimit(50),  # 50
    'index_daily': DelayerMinuteLimit(500),  # 500
    'daily_index': DelayerMinuteLimit(500),  # 500
    'concept_detail': DelayerMinuteLimit(100),  # 100
    'namechange': DelayerMinuteLimit(100),  # 100
    'pledge_stat': DelayerMinuteLimit(1200),  # 1200
    'pledge_detail': DelayerMinuteLimit(1200),  # 1200
    'stk_holdernumber': DelayerMinuteLimit(10),  # 10
    'top10_holders': DelayerMinuteLimit(10),  # 10
    'top10_floatholders': DelayerMinuteLimit(10),  # 10
    'stk_holdertrade': DelayerMinuteLimit(300),  # 300
    'daily': DelayerMinuteLimit(1200),  # 1200
    'adj_factor': DelayerMinuteLimit(1200),  # 1200
    'repurchase': DelayerMinuteLimit(20),  # 20
    'share_float': DelayerMinuteLimit(20),  # 20
}
# Active table consulted by ts_delay(); may be replaced via set_delay_table().
# NOTE(review): this aliases DEFAULT_TS_DELAYER_TABLE rather than copying it.
delayer_table = DEFAULT_TS_DELAYER_TABLE
def set_delay_table(table: dict):
    """Override per-interface rate limits from a {interface: limit} mapping.

    Values are coerced with int(); on any failure the table is restored to
    the defaults and the error is printed.

    Bug fix: the previous version mutated ``delayer_table`` in place, and
    since that global initially aliases ``DEFAULT_TS_DELAYER_TABLE``, a
    failed call both corrupted the defaults and made the fallback assignment
    a no-op. Updates are now applied to a copy, and the failure path restores
    a fresh copy of the pristine defaults.
    """
    global delayer_table
    updated = dict(delayer_table)
    try:
        for k, v in table.items():
            updated[k] = DelayerMinuteLimit(int(v))
    except Exception as e:
        updated = dict(DEFAULT_TS_DELAYER_TABLE)
        print('Set delay table fail: ' + str(e))
        print('Use default delay table.')
    delayer_table = updated
def ts_delay(ts_interface: str):
    """Apply the configured per-minute rate limit for a tushare interface, if any."""
    limiter = delayer_table.get(ts_interface)
    if limiter is None:
        return
    limiter.delay()
# Project root directory: two levels up from this module's file.
root_path = path.dirname(path.dirname(path.abspath(__file__)))
def param_as_date_str(kwargs: dict, param: str) -> str:
    """Fetch kwargs[param] and render it as 'YYYYMMDD'.

    Returns '' when the key is missing, None, or not a date/datetime/str.
    String values are parsed with text2date first.
    """
    value = kwargs.get(param)
    if not isinstance(value, (datetime.datetime, datetime.date, str)):
        # Covers a missing key / None as well as unsupported types.
        return ''
    if isinstance(value, str):
        value = text2date(value)
    return value.strftime('%Y%m%d')
def pickup_since_until_as_date(kwargs: dict) -> (str, str):
    """Return ('YYYYMMDD', 'YYYYMMDD') for the 'since'/'until' kwargs ('' when absent).

    Bug fix: param_as_date_str expects the parameter *name*; the previous code
    passed the looked-up values, so the inner kwargs.get() always missed and
    ('', '') was returned regardless of input.
    """
    return (param_as_date_str(kwargs, 'since'),
            param_as_date_str(kwargs, 'until'))
# def ts_exchange_to_stock_exchange(exchange: str) -> str:
# return {
# 'SH': 'SSE',
# 'SZ': 'SZSE',
# }.get(exchange, exchange)
#
#
# def ts_code_to_stock_identity(ts_code: str) -> str:
# parts = ts_code.split('.')
# if len(parts) != 2:
# # Error
# return ts_code
# return parts[0] + '.' + ts_exchange_to_stock_exchange(parts[1])
# Suffix mapping between tushare ('ts') codes and SAS stock identities.
# NOTE: matching must use the full '.suffix' token — a bare endswith() lets
# 'SZSE' match the shorter 'SSE' entry, and str.replace() can touch the code
# part of the string.
TS_SAS_IDENTITY_SUFFIX_TABLE = [
    ('SH', 'SSE'),
    ('SZ', 'SZSE'),
    ('CSI', 'CSI'),
    ('CIC', 'CICC'),
    ('SI', 'SW'),
    ('MI', 'MSCI'),
    # 'OTH' not a valid exchange
]
# Baostock prefix vs SAS identity suffix.
BAO_SAS_IDENTITY_SUFFIX_TABLE = [
    ('sh', 'SSE'),
    ('sz', 'SZSE'),
]


def stock_identity_to_ts_code(stock_identity: str) -> str:
    """Convert 'code.SAS_SUFFIX' to 'code.TS_SUFFIX'; unknown suffixes pass through.

    Bug fix: anchor the match on the full '.suffix' and replace only the trailing
    suffix — previously '000001.SZSE' matched 'SSE' via endswith and str.replace
    mangled it into '000001.SZSH'.
    """
    for ts_suffix, sas_suffix in TS_SAS_IDENTITY_SUFFIX_TABLE:
        if stock_identity.endswith('.' + sas_suffix):
            return stock_identity[:-len(sas_suffix)] + ts_suffix
    return stock_identity


def ts_code_to_stock_identity(ts_code: str) -> str:
    """Convert 'code.TS_SUFFIX' to 'code.SAS_SUFFIX'; unknown suffixes pass through."""
    for ts_suffix, sas_suffix in TS_SAS_IDENTITY_SUFFIX_TABLE:
        if ts_code.endswith('.' + ts_suffix):
            return ts_code[:-len(ts_suffix)] + sas_suffix
    return ts_code


def bao_code_to_stock_identity(bao_code: str) -> str:
    """Convert a baostock code like 'sh.600000' to an identity like '600000.SSE'.

    Bug fix: require the full 'prefix.' token and strip only the leading prefix
    (str.replace removed *every* occurrence of 'prefix.' in the string).
    """
    for bao_prefix, sas_suffix in BAO_SAS_IDENTITY_SUFFIX_TABLE:
        if bao_code.startswith(bao_prefix + '.'):
            return bao_code[len(bao_prefix) + 1:] + '.' + sas_suffix
    return bao_code


def code_exchange_to_ts_code(code: str, exchange: str) -> str:
    """Join a bare code with its exchange, using the tushare suffix when known."""
    for ts_suffix, sas_suffix in TS_SAS_IDENTITY_SUFFIX_TABLE:
        if exchange == sas_suffix:
            return code + '.' + ts_suffix
    # Unknown exchange: keep it verbatim as the suffix.
    return code + '.' + exchange


def code_exchange_to_bao_code(code: str, exchange: str) -> str:
    """Join a bare code with its exchange, using the baostock prefix when known."""
    for bao_prefix, sas_suffix in BAO_SAS_IDENTITY_SUFFIX_TABLE:
        if exchange == sas_suffix:
            return bao_prefix + '.' + code
    # Unknown exchange: keep it verbatim as the prefix.
    return exchange + '.' + code
def pickup_ts_code(kwargs: dict) -> str:
    """Resolve a tushare code from kwargs.

    Prefers 'stock_identity'; otherwise combines 'code' + 'exchange'.
    Returns '' when neither form is usable.
    """
    identity = kwargs.get('stock_identity')
    if identity is not None:
        return stock_identity_to_ts_code(identity)
    code = kwargs.get('code')
    exchange = kwargs.get('exchange')
    # Both parts must be present strings (None fails isinstance too).
    if not (isinstance(code, str) and isinstance(exchange, str)):
        return ''
    return code_exchange_to_ts_code(code, exchange)
def pickup_bao_code(kwargs: dict) -> str:
    """Resolve a *baostock* code from kwargs.

    Prefers 'stock_identity'; otherwise combines 'code' + 'exchange'.
    Returns '' when neither form is usable.

    Bug fix: the 'stock_identity' path previously returned a tushare code
    (copy-paste from pickup_ts_code); it now produces a baostock code.
    """
    stock_identity = kwargs.get('stock_identity')
    if stock_identity is not None:
        # Identity format is 'code.EXCHANGE'; convert via the bao helper.
        code, _, exchange = stock_identity.partition('.')
        return code_exchange_to_bao_code(code, exchange)
    code = kwargs.get('code')
    exchange = kwargs.get('exchange')
    if not (isinstance(code, str) and isinstance(exchange, str)):
        return ''
    return code_exchange_to_bao_code(code, exchange)
def path_from_plugin_param(**kwargs) -> str:
    """Map the plugin 'uri' kwarg to its CSV fixture path under TestData/."""
    file_name = kwargs.get('uri').replace('.', '_')
    return '%s/TestData/%s.csv' % (root_path, file_name)
def check_execute_test_flag(**kwargs) -> pd.DataFrame or None:
    """Return the canned CSV fixture for this call when 'test_flag' is set.

    Returns None when testing is not requested, letting the caller fall
    through to the real data source.

    Bug fix: pd.DataFrame.from_csv was removed from pandas (1.0); use
    pd.read_csv with the same defaults (first column as index, parse dates).
    """
    if kwargs.get('test_flag', False):
        uri = path_from_plugin_param(**kwargs)
        return pd.read_csv(uri, index_col=0, parse_dates=True)
    return None
def check_execute_dump_flag(result: pd.DataFrame, **kwargs):
    """Persist `result` to the test CSV path when 'dump_flag' is truthy; else no-op."""
    if not kwargs.get('dump_flag', False):
        return
    result.to_csv(path_from_plugin_param(**kwargs))
def is_slice_update(ts_code: str, since: datetime.datetime, until: datetime.datetime) -> bool:
    """A slice update targets all stocks (no ts_code) for a concrete start time."""
    has_code = str_available(ts_code)
    return (not has_code) and isinstance(since, datetime.datetime)
def convert_ts_code_field(df: pd.DataFrame, ts_field: str = 'ts_code',
                          sas_field: str = 'stock_identity') -> pd.DataFrame:
    """Translate df[ts_field] tushare codes into stock identities under sas_field.

    The source column is dropped unless it is also the destination.
    Mutates and returns the same DataFrame.
    """
    df[sas_field] = df[ts_field].map(ts_code_to_stock_identity)
    if sas_field != ts_field:
        del df[ts_field]
    return df
def convert_ts_date_field(df: pd.DataFrame, ts_field: str,
                          sas_field: str = None) -> pd.DataFrame:
    """Parse df[ts_field] into datetimes, stored under sas_field.

    sas_field defaults to ts_field (in-place conversion); otherwise the source
    column is dropped. Mutates and returns the same DataFrame.
    """
    if sas_field is None:
        sas_field = ts_field
    df[sas_field] = pd.to_datetime(df[ts_field])
    if sas_field != ts_field:
        del df[ts_field]
    return df
|
1607021
|
import fcntl
import os
class Concurrency():
    """Cross-process mutual exclusion using an advisory flock on a /tmp file.

    One instance guards one named resource; the lock file is created on demand
    and never deleted (deleting it would break other waiters).
    """

    def __init__(self, log, resource):
        self.log = log
        self.lock_file = f"/tmp/{resource}_lock"
        self.lock_fh = None  # open file handle while the lock is (being) held

    def open_lock_file(self):
        # "a" creates the file if missing without truncating an existing one,
        # removing the TOCTOU race of the old exists()-then-open('r'/'w') logic.
        self.lock_fh = open(self.lock_file, "a")

    def close_lock_file(self):
        # Bug fix: guard against a missing handle and clear it after closing,
        # so repeated close/release calls are safe no-ops.
        if self.lock_fh is not None:
            self.lock_fh.close()
            self.lock_fh = None

    def obtain_lock(self):
        """Block until the exclusive lock on the resource is acquired."""
        self.open_lock_file()
        self.log.debug(f"Waiting for lock on {self.lock_file}")
        fcntl.flock(self.lock_fh, fcntl.LOCK_EX)
        self.log.debug(f"Obtained lock on {self.lock_file}")

    def release_lock(self):
        """Release the lock if held; harmless when no lock is held."""
        if self.lock_fh is not None:
            fcntl.flock(self.lock_fh, fcntl.LOCK_UN)
            self.log.debug(f"Released lock on {self.lock_file}")
            # Bug fix: previously the closed handle was left in lock_fh, so a
            # second release_lock() called flock() on a closed file and raised.
            self.close_lock_file()
        else:
            self.log.debug(f"File handle {self.lock_file} is None")
if __name__ == "__main__":
    # Manual smoke test: hold the "foo" lock for 5 s so a concurrently started
    # second invocation can be observed blocking inside obtain_lock().
    import logging
    import sys
    import time
    log = logging.getLogger()
    log.setLevel(logging.DEBUG)
    log.addHandler(logging.StreamHandler(sys.stdout))
    concurrency = Concurrency(log, "foo")
    concurrency.obtain_lock()
    time.sleep(5)
    concurrency.release_lock()
|
1607030
|
from . import id_software
class RavenBsp(id_software.IdTechBsp):
    """BSP variant identified by the b"RBSP" magic; parsing is inherited from
    IdTechBsp (only the magic differs here)."""
    file_magic = b"RBSP"
    # includes marker lump:
    # https://github.com/TTimo/GtkRadiant/blob/master/tools/urt/tools/quake3/q3map2/bspfile_rbsp.c#L308
    # sprintf( marker, "I LOVE MY Q3MAP2 %s on %s)", Q3MAP_VERSION, asctime( localtime( &t ) ) );
|
1607039
|
from .model import SMTilesMapProviderSetting, FastDFSTileProviderSetting, MongoDBTileProviderSetting, \
OTSTileProviderSetting, UGCV5TileProviderSetting, GeoPackageMapProviderSetting, MngServiceInfo, ProviderSetting
from iclientpy.dtojson import *
# Maps the server-side provider "type" string to a parser for the matching
# provider-setting class.
_provider_setting_parsers = {
    'com.supermap.services.providers.SMTilesMapProvider': parser(SMTilesMapProviderSetting),
    'com.supermap.services.providers.FastDFSTileProvider': parser(FastDFSTileProviderSetting),
    'com.supermap.services.providers.MongoDBTileProvider': parser(MongoDBTileProviderSetting),
    'com.supermap.services.providers.OTSTileProvider': parser(OTSTileProviderSetting),
    'com.supermap.services.providers.UGCV5TileProvider': parser(UGCV5TileProviderSetting),
    'com.supermap.services.providers.GeoPackageMapProvider': parser(GeoPackageMapProviderSetting)
}
# Dispatches to the right provider-setting parser based on the 'type' field.
provider_setting_parser_switcher = ByFieldValueParserSwitcher('type', _provider_setting_parsers)
# Deserializer for MngServiceInfo whose 'config' field is polymorphic over
# ProviderSetting subclasses.
mng_service_info_deserializer = deserializer(MngServiceInfo,
                                             {(ProviderSetting, 'config'): provider_setting_parser_switcher})
|
1607041
|
import numpy as np
from .rank import pagerank
from .sentence import sent_graph
from .word import word_graph
class KeywordSummarizer:
    """TextRank-based keyword extractor.

    Builds a word co-occurrence graph from tokenized sentences and ranks the
    vocabulary with PageRank.

    Parameters
    ----------
    sents : list of str or None
        Sentence list; when given, training runs immediately.
    tokenize : callable
        tokenize(str) -> list of str
    min_count : int
        Minimum word frequency kept in the graph.
    window : int
        Co-occurrence window size; -1 means two words co-occur whenever they
        appear in the same sentence.
    min_cooccurrence : int
        Minimum co-occurrence count for a graph edge.
    vocab_to_idx : dict or None
        Optional fixed vocabulary-to-index mapping.
    df : float
        PageRank damping factor.
    max_iter : int
        Number of PageRank iterations.
    verbose : bool
        Print training progress when True.
    """

    def __init__(self, sents=None, tokenize=None, min_count=2,
                 window=-1, min_cooccurrence=2, vocab_to_idx=None,
                 df=0.85, max_iter=30, verbose=False):
        self.tokenize = tokenize
        self.min_count = min_count
        self.window = window
        self.min_cooccurrence = min_cooccurrence
        self.vocab_to_idx = vocab_to_idx
        self.df = df
        self.max_iter = max_iter
        self.verbose = verbose
        if sents is not None:
            self.train_textrank(sents)

    def train_textrank(self, sents, bias=None):
        """Build the word graph from `sents` and compute PageRank word scores.

        Sets self.R (1d score array) and self.idx_to_vocab.
        """
        graph, self.idx_to_vocab = word_graph(
            sents, self.tokenize, self.min_count, self.window,
            self.min_cooccurrence, self.vocab_to_idx, self.verbose)
        self.R = pagerank(graph, self.df, self.max_iter, bias).reshape(-1)
        if self.verbose:
            print('trained TextRank. n words = {}'.format(self.R.shape[0]))

    def keywords(self, topk=30):
        """Return the `topk` highest-ranked (word, rank) pairs, best first.

        Raises RuntimeError when called before training.
        """
        if not hasattr(self, 'R'):
            raise RuntimeError('Train textrank first or use summarize function')
        order = self.R.argsort()[-topk:][::-1]
        return [(self.idx_to_vocab[i], self.R[i]) for i in order]

    def summarize(self, sents, topk=30):
        """Train on `sents` and return the top `topk` (word, rank) pairs."""
        self.train_textrank(sents)
        return self.keywords(topk)
class KeysentenceSummarizer:
    """TextRank-based key-sentence extractor.

    Builds a sentence-similarity graph and ranks sentences with PageRank.

    Parameters
    ----------
    sents : list of str or None
        Sentence list; when given, training runs immediately.
    tokenize : callable
        tokenize(str) -> list of str
    min_count : int
        Minimum word frequency used when building the sentence graph.
    min_sim : float
        Minimum inter-sentence similarity for a graph edge.
    similarity : str or None
        Similarity measure; available = ['cosine', 'textrank'].
    vocab_to_idx : dict or None
        Optional fixed vocabulary-to-index mapping.
    df : float
        PageRank damping factor.
    max_iter : int
        Number of PageRank iterations.
    verbose : bool
        Print training progress when True.
    """

    def __init__(self, sents=None, tokenize=None, min_count=2,
                 min_sim=0.3, similarity=None, vocab_to_idx=None,
                 df=0.85, max_iter=30, verbose=False):
        self.tokenize = tokenize
        self.min_count = min_count
        self.min_sim = min_sim
        self.similarity = similarity
        self.vocab_to_idx = vocab_to_idx
        self.df = df
        self.max_iter = max_iter
        self.verbose = verbose
        if sents is not None:
            self.train_textrank(sents)

    def train_textrank(self, sents, bias=None):
        """Build the sentence graph and compute PageRank sentence scores.

        `bias`, when given, is a (n_sents,) PageRank bias term. Sets self.R.
        """
        graph = sent_graph(sents, self.tokenize, self.min_count,
                           self.min_sim, self.similarity, self.vocab_to_idx, self.verbose)
        self.R = pagerank(graph, self.df, self.max_iter, bias).reshape(-1)
        if self.verbose:
            print('trained TextRank. n sentences = {}'.format(self.R.shape[0]))

    def summarize(self, sents, topk=30, bias=None):
        """Train on `sents` and return the `topk` key sentences, best first.

        Each result tuple is (sentence index, rank, sentence).
        `bias` must be None or a numpy array of shape (n_sents,); anything
        else raises ValueError before training starts.

        Usage
        -----
            >>> from textrank import KeysentenceSummarizer
            >>> summarizer = KeysentenceSummarizer(tokenize = tokenizer, min_sim = 0.5)
            >>> keysents = summarizer.summarize(texts, topk=30)
        """
        n_sents = len(sents)
        if isinstance(bias, np.ndarray):
            if bias.shape != (n_sents,):
                raise ValueError('The shape of bias must be (n_sents,) but {}'.format(bias.shape))
        elif bias is not None:
            raise ValueError('The type of bias must be None or numpy.ndarray but the type is {}'.format(type(bias)))
        self.train_textrank(sents, bias)
        best = self.R.argsort()[-topk:][::-1]
        return [(i, self.R[i], sents[i]) for i in best]
|
1607044
|
from opentera.forms.TeraForm import *
from modules.DatabaseModule.DBManagerTeraUserAccess import DBManagerTeraUserAccess
from flask_babel import gettext
class TeraServiceConfigForm:
    """Builders for service-configuration forms: a generic wrapper form and a
    per-service config editor form."""
    @staticmethod
    def get_service_config_form():
        """Build the generic service-config form (hidden ID fields plus the
        opaque config blob) and return it as a dict."""
        form = TeraForm("service_config")
        # Building lists
        #################
        # None to build!
        # Sections
        section = TeraFormSection("infos", gettext("Information"))
        form.add_section(section)
        # service_endpoint = db.Column(db.String, nullable=False)
        # service_clientendpoint = db.Column(db.String, nullable=False)
        # service_enabled = db.Column(db.Boolean, nullable=False, default=False)
        # Items
        section.add_item(TeraFormItem("id_service", gettext("Service ID"), "hidden", item_required=True))
        section.add_item(TeraFormItem("id_service_config", gettext("Service Config ID"), "hidden", item_required=True))
        # NOTE(review): presumably only one of user/device/participant is set
        # per config — confirm against callers.
        section.add_item(TeraFormItem("id_user", gettext("User ID"), "hidden", item_required=False))
        section.add_item(TeraFormItem("id_device", gettext("Device ID"), "hidden", item_required=False))
        section.add_item(TeraFormItem("id_participant", gettext("Participant ID"), "hidden", item_required=False))
        section.add_item(TeraFormItem("service_config_config", gettext("Service Config"), "hidden", item_required=True))
        form_dict = form.to_dict()
        # if service.has_config_schema():
        #     import json
        #     config_json = json.loads(service.service_config_schema)
        #     form_dict.update(config_json)
        return form_dict
    @staticmethod
    def get_service_config_config_form(user_access: DBManagerTeraUserAccess, service_key: str):
        """Build the editor form for a specific service's config; only
        'VideoRehabService' currently has dedicated fields."""
        form = TeraForm("service_config_config")
        if service_key == 'VideoRehabService':
            # Sections
            section1 = TeraFormSection("main_audio_video", gettext("Multimedia Configuration"))
            form.add_section(section1)
            # Items
            section1.add_item(TeraFormItem("camera", gettext("Camera"), "videoinputs", False))
            section1.add_item(TeraFormItem("mirror", gettext("Mirrored image"), "boolean", False, item_default=True))
            # URL field shown only for the OpenTeraCam camera choice.
            item = TeraFormItem("teracam_src", gettext("URL"), "text", True,
                                item_condition=TeraFormItemCondition("camera", "=", "OpenTeraCam"))
            section1.add_item(item)
            section1.add_item(TeraFormItem("camera_ptz", gettext("Pan-Tilt-Zoom Camera"), "boolean"))
            # All PTZ fields below are conditional on camera_ptz being enabled.
            item = TeraFormItem("camera_ptz_type", gettext("Control Type"), "array", True,
                                [TeraFormValue("0", gettext("Vivotek")),
                                 # TeraFormValue("1", gettext("ONVIF (Generic)"))
                                 ],
                                item_condition=TeraFormItemCondition("camera_ptz", "=", True))
            section1.add_item(item)
            item = TeraFormItem("camera_ptz_ip", gettext("Network Address"), "text", True,
                                item_condition=TeraFormItemCondition("camera_ptz", "=", True))
            section1.add_item(item)
            item = TeraFormItem("camera_ptz_port", gettext("Port"), "numeric", True,
                                item_condition=TeraFormItemCondition("camera_ptz", "=", True))
            section1.add_item(item)
            item = TeraFormItem("camera_ptz_username", gettext("Username"), "text", True,
                                item_condition=TeraFormItemCondition("camera_ptz", "=", True))
            section1.add_item(item)
            item = TeraFormItem("camera_ptz_password", gettext("Password"), "password", True,
                                item_condition=TeraFormItemCondition("camera_ptz", "=", True))
            section1.add_item(item)
            section1.add_item(TeraFormItem("audio", gettext("Microphone"), "audioinputs", False))
            section1.add_item(TeraFormItem("camera2", gettext("Secondary Camera"), "videoinputs"))
        return form.to_dict()
|
1607063
|
import os
import sys
import time
import glob
import shutil
import argparse
import cv2
import numpy as np
sys.path.insert(0, '..')
import plantid
def imread_ex(filename, flags=-1):
    """Read an image from a (possibly non-ASCII) path via np.fromfile + imdecode.

    Returns the decoded image, or None when reading/decoding fails
    (deliberate best-effort: callers skip unreadable files).
    """
    try:
        raw = np.fromfile(filename, dtype=np.uint8)
        return cv2.imdecode(raw, flags)
    except Exception:
        return None
def split_images_by_identify(src_dir, dst_dir):
    """Identify every image in src_dir and move confident hits into per-taxon
    subfolders of dst_dir, prefixing each filename with its confidence."""
    plant_identifier = plantid.PlantIdentifier()
    filenames = glob.glob(os.path.join(src_dir, '*'))
    start_time = time.time()
    for k, filename in enumerate(filenames):
        image = imread_ex(filename)
        # NOTE(review): image is None when reading fails; identify() is assumed
        # to report a nonzero status for that case — confirm.
        outputs = plant_identifier.identify(image, topk=1)
        if outputs['status'] == 0:
            chinese_name = outputs['results'][0]['chinese_name']
            latin_name = outputs['results'][0]['latin_name']
            confidence = outputs['results'][0]['probability']
            # Folder name combines Chinese and Latin names when both exist.
            if latin_name == '':
                taxon_name = chinese_name
            else:
                taxon_name = '{} {}'.format(chinese_name, latin_name)
            # Only move predictions above a minimal confidence threshold.
            if confidence > 0.1:
                dst_subdir = os.path.join(dst_dir, taxon_name)
                os.makedirs(dst_subdir, exist_ok=True)
                dst_filename = os.path.join(dst_subdir, '{:.3f}_{}'.format(confidence, os.path.basename(filename)))
                shutil.move(filename, dst_filename)
        print('[{}/{}] Time: {:.3f}s {}'.format(k+1, len(filenames), time.time() - start_time, filename))
        start_time = time.time()
def parse_arguments(argv):
    """Parse the --src_dir/--dst_dir command-line options from `argv`."""
    parser = argparse.ArgumentParser()
    for flag, default in (('--src_dir', 'E:/test_images'),
                          ('--dst_dir', 'E:/test_images_results')):
        parser.add_argument(flag, type=str, default=default)
    return parser.parse_args(argv)
if __name__ == '__main__':
    args = parse_arguments(sys.argv[1:])
    # Fail fast on a bad source directory; destination subfolders are created on demand.
    if not os.path.exists(args.src_dir):
        raise ValueError('src_dir does not exist!')
    split_images_by_identify(args.src_dir, dst_dir=args.dst_dir)
|
1607083
|
import subprocess
import sys
import os
import get_mjpeg
import make_movie
import shutil
import datetime
import util
# Module-level configuration: the frame working directory and the ffmpeg binary
# path come from the [treasurecolumn] section of the shared config file.
config = util.get_config()
tmp_dir = config.get('treasurecolumn', 'tmp_dir')
ffmpeg_location = config.get('treasurecolumn', 'ffmpeg_location')
def convert_mp4(input_file, output_file):
    """Re-encode input_file by exploding it into JPEG frames in tmp_dir and
    rebuilding a 30 fps movie at output_file."""
    # Start from a clean frame directory on every run.
    if os.path.exists(tmp_dir):
        shutil.rmtree(tmp_dir)
    os.mkdir(tmp_dir)
    cmd = [ffmpeg_location,
           '-i', input_file,
           '-threads', '2', '-y',
           '%s/%%05d.jpg' % tmp_dir
           ]
    subprocess.call(cmd)
    # 99999 frames = effectively "use everything ffmpeg produced".
    make_movie.make_movie(tmp_dir, output_file, 99999, 30)
def convert(input_url, output_file, max_frames, rate=-1):
    """Capture up to max_frames frames from an MJPEG stream and build a movie.

    rate == -1 derives the output frame rate from the measured capture rate,
    so playback runs at roughly real-time speed.
    """
    # Start from a clean frame directory on every run.
    if os.path.exists(tmp_dir):
        shutil.rmtree(tmp_dir)
    os.mkdir(tmp_dir)
    start = datetime.datetime.now()
    get_mjpeg.get(input_url, tmp_dir, max_frames)
    end = datetime.datetime.now()
    delta = end - start
    if rate == -1:
        # NOTE(review): int() truncation can yield rate == 0 for very slow
        # captures — confirm make_movie tolerates that.
        rate = int(max_frames / delta.total_seconds())
    make_movie.make_movie(tmp_dir, output_file, max_frames, rate)
if __name__ == '__main__':
    # Usage: <script> <input_url> <output_file> [max_frames=100]
    input_url = sys.argv[1]
    output_file = sys.argv[2]
    max_frames = int(sys.argv[3]) if len(sys.argv) >= 4 else 100
    convert(input_url, output_file, max_frames)
|
1607085
|
import glob
import os
from pprint import pformat
from shutil import rmtree, copy2
from pick_model_runs_fun import test_diff_list, load_packages_verbose, run_and_compare_h_cbc
from flopyparser.model import Model
"""This script does three things
1. reorganize the mf5 examples, so that each example has its own folder. Origional input files are placed in inputref.
2. Run the namefile directly from inputdirect folder, without interacting with flopy. Logs are saved to the
`report_direct` list.
Succesful models: etsdrt, l1b2k, testsfr2, ibs2k, l1a2k, twrihfb, bcf2ss, restest, tc2hufv4, twrip,
3. Load the namefile with flopy. Write the input files to inputflopyload, run mf to write input files to inputflopyload
and run input files. Logs are saved to the `report_inputflopyload` list.
Succesful models: etsdrt, l1b2k, testsfr2, ibs2k, l1a2k, twrihfb, bcf2ss, restest, str, twri
4. Create Python script from inputref folder with flopyparser. Run Python script to write input files to
inputmetascript
Source of test models:
- MF2005: https://water.usgs.gov/ogw/modflow/MODFLOW-2005_v1.12.00/MF2005.1_12u.zip
Currently all MF2005 examples pass step 2. The following examples fail at step 3:
MNW2-Fig28.nam Does not converge
l1b2k_bath.nam Error in the write routine. [DATA 22 lak1b_bath.txt]
testsfr2_tab.nam [DATA 55 ./../../tests/data/reference_model_runs_old/MF2005/testsfr2_tab/input/testsfr2_tab.tab] not found
Should it be an absolut path?
"""
# 1. Create folder structure
# Source (exe test-run output) and destination (reference test data) roots.
mf_exe_examplerun_dir = os.path.abspath("../../scratch/MF2005.1_12u/test-run")
test_example_dir = os.path.abspath("../../tests/reference_model_runs/MF2005")
basenamepaths = sorted(glob.glob(os.path.join(mf_exe_examplerun_dir, '*.nam')))
assert basenamepaths, "Folder empty"
# Rebuild the destination tree from scratch.
if os.path.exists(test_example_dir):
    rmtree(test_example_dir)
os.mkdir(test_example_dir)
for b in basenamepaths:
    # MF2005
    basename = os.path.basename(b)
    # Model name = namefile basename without its extension.
    modelname = ''.join(basename.split('.')[:-1])
    # copy model input from exe_folder
    test_model_dir = os.path.join(test_example_dir, modelname)
    test_model_inputref_dir = os.path.join(test_example_dir, modelname, 'inputref')
    test_paths = [test_model_dir, test_model_inputref_dir]
    for test_path in test_paths:
        if os.path.exists(test_path):
            rmtree(test_path)
        os.mkdir(test_path)
    # Reference inputs: every file sharing the model's basename.
    for file in glob.glob(os.path.join(mf_exe_examplerun_dir, modelname + '.*')):
        copy2(file, test_model_inputref_dir)
|
1607089
|
import os
import sys
from easydict import EasyDict
# Global project configuration namespace (attribute-style access via EasyDict).
CONF = EasyDict()
# path
CONF.PATH = EasyDict()
CONF.PATH.BASE = "/home/yuanzhihao/Projects/X-Trans2Cap/" # TODO: change this
CONF.PATH.CLUSTER = "/mntntfs/med_data1/yuanzhihao/X-Trans2Cap/" # TODO: change this
CONF.PATH.DATA = os.path.join(CONF.PATH.BASE, 'data')
CONF.PATH.SCANNET = os.path.join(CONF.PATH.DATA, "scannet")
CONF.PATH.LIB = os.path.join(CONF.PATH.BASE, "lib")
CONF.PATH.MODELS = os.path.join(CONF.PATH.BASE, "models")
CONF.PATH.UTILS = os.path.join(CONF.PATH.BASE, "utils")
# append to syspath
# NOTE: only the paths defined so far are added; entries assigned below are not.
for _, path in CONF.PATH.items():
    sys.path.append(path)
# scannet data
CONF.PATH.SCANNET_SCANS = os.path.join(CONF.PATH.SCANNET, "scans")
CONF.PATH.SCANNET_META = os.path.join(CONF.PATH.SCANNET, "meta_data")
CONF.PATH.SCANNET_DATA = os.path.join(CONF.PATH.SCANNET, "scannet_data")
# data
CONF.NYU40_LABELS = os.path.join(CONF.PATH.SCANNET_META, "nyu40_labels.csv")
# scannet
CONF.SCANNETV2_TRAIN = os.path.join(CONF.PATH.SCANNET_META, "scannetv2_train.txt")
CONF.SCANNETV2_VAL = os.path.join(CONF.PATH.SCANNET_META, "scannetv2_val.txt")
CONF.SCANNETV2_TEST = os.path.join(CONF.PATH.SCANNET_META, "scannetv2_test.txt")
CONF.SCANNETV2_LIST = os.path.join(CONF.PATH.SCANNET_META, "scannetv2.txt")
# output
CONF.PATH.OUTPUT = os.path.join(CONF.PATH.BASE, "outputs")
# train
CONF.TRAIN = EasyDict()
CONF.TRAIN.MAX_DES_LEN = 30
CONF.TRAIN.SEED = 42
CONF.TRAIN.OVERLAID_THRESHOLD = 0.5
CONF.TRAIN.MIN_IOU_THRESHOLD = 0.25
CONF.TRAIN.NUM_BINS = 6
# eval
CONF.EVAL = EasyDict()
CONF.EVAL.MIN_IOU_THRESHOLD = 0.5
# data path
CONF.SCANNET_V2_TSV = os.path.join(CONF.PATH.SCANNET_META, "scannetv2-labels.combined.tsv")
CONF.VOCAB = os.path.join(CONF.PATH.DATA, "{}_vocabulary.json") # dataset_name
CONF.GLOVE_PICKLE = os.path.join(CONF.PATH.DATA, "glove.p")
CONF.VOCAB_WEIGHTS = os.path.join(CONF.PATH.DATA, "{}_vocabulary_weights.json") # dataset_name
CONF.PATH.DATA_2D = os.path.join(CONF.PATH.DATA, "2d_feature_agg.npz") # processed 2D features
|
1607140
|
import numpy as np
import scipy
from ... import operators
from ... import utilits as ut
from . _ar_yule_walker import ar_yule_walker
__all__ = ['arma_hannan_rissanen']
#------------------------------------------------------------------
def arma_hannan_rissanen(x, poles_order=0, zeros_order=0, unbias = True):
    '''
    Hannan-Rissanen method for
    autoregressive - moving average
    (ARMA) model approximation.
    Parameters
    ---------------
    * x: 1d ndarray.
    * poles_order: int.
        the autoregressive model (pole model)
        order of the desired model.
    * zeros_order: int.
        the moving average model (zeros model)
        order of the desired model.
    * unbias: bool,
        if True, unbiased autocorrelation
        (sum(x(k)*x(n-k))/(N-n)) will be taken.
    Returns
    --------------
    * a: 1d ndarray,
        autoregressive coefficients of the ARMA model.
    * b: 1d ndarray,
        moving average coefficients of the ARMA model.
    * noise_variance: complex or float,
        variance of model residuals.
        NOTE(review): currently a placeholder value of 1 is returned
        (see `err` below) — the residual variance is not estimated.
    Notes:
    ------------
    * A simplified model is implemented here:
      the high-order AR model is taken equal to
      the desired one.
    Examples
    ------------
    References
    ------------
    [1] Brockwell, <NAME>., and <NAME>. 2016.
        Introduction to Time Series and Forecasting. Springer.
    See also
    -----------
    '''
    x = np.asarray(x)
    N = x.shape[0]
    # Step 1: long-AR estimate of the process via Yule-Walker.
    a,_ = ar_yule_walker(x,
                         poles_order,
                         unbias=unbias)
    r = operators.lags_matrix(x,
                              mode='full',
                              lags=poles_order+1,)
    r1 = r[zeros_order:,0] #x[poles_order+zeros_order]
    # for i in range(1):
    #------------
    # Step 2: AR residuals serve as a proxy for the unobserved noise sequence.
    resid = r[:,0] - r[:,1:].dot(-a[1:])
    rresid = operators.lags_matrix(resid,
                                   mode='full',
                                   lags=zeros_order+1,)
    # Regressor matrix: lagged x columns followed by lagged residual columns.
    rn = np.append(r[zeros_order:,1:],
                   rresid[2*zeros_order:,1:],axis=1)
    # res=np.dot(np.linalg.pinv(-rn),r1)
    # Step 3: joint least-squares for AR and MA coefficients.
    res = scipy.linalg.lstsq(rn,r1)[0]
    a = np.append([1],-res[:poles_order])
    #------------
    b = res[poles_order:]#np.append([0],res[poles_order:])
    err=1  # NOTE(review): placeholder noise variance, not estimated from residuals.
    return a,b,err
# def arma_hannan_rissanen_unbiased(x, poly_order=0, zero_order=0,
# unbias = True, n_psd = None):
# '''
# #FOR TEST!
# Hannan_Rissanen method for autoregressive - moving average
# (ARMA) model approximation with additinal unbias of coefficients.
# Parameters
# ---------------
# * x: 1d ndarray,
# inputs.
# * poly_order: int.
# the autoregressive model (pole model)
# order of the desired model.
# * zero_order: int.
# the moving average model (zeros model)
# order of the desired model.
# * n_psd: int or None.
# length of desired pseudospctrum,
# if None, n_psd = x.shape[0],
# if n_psd<0, than model coefficients (1,-a)
# and noise_variance (\sigma^2) will be returend.
# * unbias: bool,
# if True, unbiased autocorrleation
# (sum(x(k)*x(n-k))/(N-n)) will be taken.
# Returns
# --------------
# > if n_psd>0:
# * pseudo-spectrum,
# > else:
# * ar_cofs (a), ma_cofs (b) - 2 1d ndarray;
# * noise_variace - variance of model residulas.
# Notes:
# ------------
# * Here are implemented simplified model.
# High order AR model is taken equal to
# deisred one.
# Examples
# ------------
# References
# ------------
# [1] Brockwell, <NAME>., and <NAME>. 2016.
# Introduction to Time Series and Forecasting. Springer.
# See also
# -----------
# '''
# x = np.asarray(x)
# N = x.shape[0]
# a,b,_ = arma_hannan_rissanen(x,
# poly_order=poly_order,
# zero_order=zero_order,
# unbias = unbias,
# n_psd = -1)
# # unbias
# z = np.zeros(x.shape,dtype = x.dtype)
# for n in np.arange(np.max([poly_order, zero_order]), N):
# tmp_ar = np.dot(-a[1:], x[n - poly_order:n][::-1])
# tmp_ma = np.dot(b,x[n - zero_order:n][::-1])
# z[n] = x[n] - tmp_ar - tmp_ma
# mh = scipy.signal.lfilter([1], a, z)
# ah = scipy.signal.lfilter(np.r_[1, -b], [1], z)
# #i'm not sure here
# rm = matrix.lags_matrix(mh,
# mode='full',
# mcolumns=poly_order+1,)[2*poly_order:,:-1]
# ra = matrix.lags_matrix(ah,
# mode='full',
# mcolumns=zero_order+1,)[2*zero_order:,:-1]
# print(ra.shape,rm.shape)
# r1 = z[max(poly_order, zero_order):] #x[poly_order+zreo_order]
# rn = np.append(rm[max(zero_order - poly_order, 0):,:],
# ra[max(poly_order - zero_order, 0):,:],axis=1)
# res=np.dot(np.linalg.pinv(rn),r1)
# err = np.sum(np.square(r1- rn.dot(res)))/res.size
# a = np.append([1],-(-a[1:]+res[:poly_order]))
# b = b+res[poly_order:]
# if(n_psd<1):
# return a,b,err
# else:
# psd = ut.arma2psd(a,b,np.abs(err),n_psd)
# return psd
# def arma_hannan_rissanen(x, poly_order=0, zero_order=0,
# unbias = True, n_psd = None):
# '''
# Hannan_Rissanen method for autoregressive - moving average
# (ARMA) model approximation.
# Parameters
# ---------------
# * x: 1d ndarray,
# inputs.
# * poly_order: int.
# the autoregressive model (pole model)
# order of the desired model.
# * zero_order: int.
# the moving average model (zeros model)
# order of the desired model.
# * n_psd: int or None.
# length of desired pseudospctrum,
# if None, n_psd = x.shape[0],
# if n_psd<0, than model coefficients (1,-a)
# and noise_variance (\sigma^2) will be returend.
# * unbias: bool,
# if True, unbiased autocorrleation
# (sum(x(k)*x(n-k))/(N-n)) will be taken.
# Returns
# --------------
# > if n_psd>0:
# * pseudo-spectrum,
# > else:
# * ar_cofs (a), ma_cofs (b) - 2 1d ndarray;
# * noise_variace - variance of model residulas.
# Notes:
# ------------
# * Here are implemented simplified model.
# High order AR model is taken equal to
# deisred one.
# Examples
# ------------
# References
# ------------
# [1] Brockwell, <NAME>., and <NAME>. 2016.
# Introduction to Time Series and Forecasting. Springer.
# See also
# -----------
# '''
# x = np.asarray(x)
# N = x.shape[0]
# if n_psd == None: n_psd = N
# a,_ = spectrum.yule_walker(x,
# poly_order,
# n_psd=-1,
# unbias=unbias)
# a = -a[1:]
# r = matrix.lags_matrix(x,
# mode='full',
# mcolumns=poly_order+1,)
# resid = r[:,0] - r[:,1:].dot(a)
# rresid = matrix.lags_matrix(resid,
# mode='full',
# mcolumns=zero_order+1,)
# # Alternatively covar mode can be applied
# # r = matrix.lags_matrix(x,
# # mode='covar',
# # mcolumns=poly_order+1,)
# # resid = r[:,0] - r[:,1:].dot(a)
# # rresid = matrix.lags_matrix(resid,
# # mode='covar',
# # mcolumns=zero_order+1,)
# # rn = np.append(r[zero_order:,1:], rresid[:,1:],axis=1)
# r1 = r[zero_order:,0] #x[poly_order+zreo_order]
# rn = np.append(r[zero_order:,1:], rresid[2*zero_order:,1:],axis=1)
# res=np.dot(np.linalg.pinv(-rn),r1)
# a = np.append([1],res[:poly_order])
# b = res[poly_order:]
# err=1
# if(n_psd<1):
# return a,b,err
# else:
# psd = ut.arma2psd(a,b,np.abs(err),n_psd)
# return psd
|
1607152
|
import argparse
import os
import abc
import time
import common.evalutation.eval as ev
import rechun.eval.analysis as analysis
import rechun.eval.hook as hooks
import rechun.eval.evaldata as evdata
import rechun.directories as dirs
def main(dataset, to_eval, action_names):
    """Run the requested evaluation actions over every subject of a dataset.

    dataset must be 'brats' or 'isic'; to_eval selects the prediction sets to
    evaluate; action_names selects which EvalAction instances to build.
    """
    if dataset not in ('brats', 'isic'):
        raise ValueError('chose "brats" or "isic" as dataset')
    prefix = dataset
    # Dataset-specific data sources, ECE details and output directories.
    if prefix == 'brats':
        eval_data_list = evdata.get_brats_eval_data(to_eval)
        ece_details = 'foreground'
        base_dir = dirs.BRATS_EVAL_DIR
    else:
        eval_data_list = evdata.get_isic_eval_data(to_eval)
        ece_details = ''
        base_dir = dirs.ISIC_EVAL_DIR
    min_max_dir = os.path.join(base_dir, dirs.MINMAX_NAME)
    # NOTE(review): get_actions is defined elsewhere in this module.
    actions = get_actions(action_names, min_max_dir, base_dir, ece_details)
    for entry in eval_data_list:
        for action in actions:
            action.setup_eval(entry)
        for action in actions:
            action.start_eval()
        for i, sf in enumerate(entry.subject_files):
            print('[{}/{}] {}'.format(i + 1, len(entry.subject_files), sf.subject), end=' ', flush=True)
            # One shared loader per subject so actions reuse cached data.
            loader = analysis.Loader()
            start = time.time()
            for action in actions:
                action.eval_subject(sf, loader)
            print('({}s)'.format(time.time() - start))
        for action in actions:
            action.finish_eval()
def _make_dir_if_not_exists(directory):
if not os.path.isdir(directory):
os.makedirs(directory)
class EvalCase(abc.ABC):
    """Couples one metric with one reporting hook and accumulates results."""

    def __init__(self, metric, hook, id_='') -> None:
        super().__init__()
        self.result_history = {}  # metric name -> list of per-subject values
        self.metric = metric
        self.hook = hook
        self.id_ = id_

    def do_eval(self, to_eval, subject_name, id_):
        """Run the metric on `to_eval`, report via the hook, record the results."""
        results = {}
        self.metric(to_eval, results)
        self.hook.on_subject(results, subject_name, id_)
        for name, value in results.items():
            self.result_history.setdefault(name, []).append(value)
class EvalAction(abc.ABC):
    """Base class for one evaluation pass over a set of subject files.

    Subclasses implement _setup_eval() to configure load parameters, an
    optional prepare() transform, and the EvalCase list for one data entry.
    """

    def __init__(self) -> None:
        super().__init__()
        self.load_params = None  # Loader parameters, set per eval-data entry
        self.prepare = None      # optional callable applied to loaded data
        self.eval_cases = []     # EvalCase instances run for each subject
        self.id_ = ''

    @abc.abstractmethod
    def _setup_eval(self, eval_data: evdata.EvalData):
        pass

    def setup_eval(self, eval_data: evdata.EvalData):
        """Configure this action for one eval-data entry."""
        self._setup_eval(eval_data)

    def start_eval(self):
        """Announce the run and notify every case's hook that it starts."""
        labels = [case.id_ for case in self.eval_cases if case.id_ != '']
        print(self.id_ + ', '.join(labels))
        for case in self.eval_cases:
            case.hook.on_run_start(self.id_)

    def eval_subject(self, sf, loader):
        """Load (and optionally prepare) one subject's data, then run all cases."""
        data = loader.get_data(sf, self.load_params)
        if self.prepare:
            data = self.prepare(data)
        for case in self.eval_cases:
            case.do_eval(data, sf.subject, self.id_)

    def finish_eval(self):
        """Notify every case's hook that the run ended, passing its history."""
        for case in self.eval_cases:
            case.hook.on_run_end(case.result_history, self.id_)
class EceCalibrationAction(EvalAction):
    """Writes per-bin ECE calibration data (plus Dice) to CSV files."""
    def __init__(self, base_dir: str, details: str = '', rescale_confidence='subject', rescale_sigma='subject',
                 min_max_dir: str = None) -> None:
        super().__init__()
        # 'foreground' restricts the ECE computation to a (T2-derived) mask.
        self.need_mask = details == 'foreground'
        self.rescale_confidence = rescale_confidence
        self.rescale_sigma = rescale_sigma
        self.min_max_dir = min_max_dir
        self.out_dir = os.path.join(base_dir, dirs.CALIB_NAME)
        _make_dir_if_not_exists(self.out_dir)
    def _setup_eval(self, eval_data: evdata.EvalData):
        self.prepare, self.id_ = analysis.get_probability_preparation(eval_data,
                                                                      rescale_confidence=self.rescale_confidence,
                                                                      rescale_sigma=self.rescale_sigma,
                                                                      min_max_dir=self.min_max_dir)
        self.load_params = analysis.Loader.Params(eval_data.confidence_entry, need_t2_mask=self.need_mask)
        metric = ev.ComposeEvaluation([
            ev.EceBinaryNumpy(threshold_range=None, return_bins=True, with_mask=self.need_mask),
            ev.DiceNumpy()
        ])
        hook = hooks.ReducedComposeEvalHook([
            hooks.WriteBinsCsvHook(os.path.join(self.out_dir, dirs.CALIBRATION_PLACEHOLDER.format(self.id_)))
        ])
        self.eval_cases = [EvalCase(metric, hook)]
class EceAction(EvalAction):
    """Writes scalar ECE (optionally foreground-masked) plus Dice and confusion
    counts to a per-run CSV file."""
    def __init__(self, base_dir: str, details: str, rescale_confidence='subject', rescale_sigma='subject',
                 min_max_dir: str = None) -> None:
        super().__init__()
        self.rescale_confidence = rescale_confidence
        self.rescale_sigma = rescale_sigma
        self.min_max_dir = min_max_dir
        self.need_t2_mask = details == 'foreground'
        # Masked vs. unmasked ECE use distinct metrics and output directories.
        if details == 'foreground':
            self._m = [ev.EceBinaryNumpy(threshold_range=None, with_mask=True)]
            self.out_dir = os.path.join(base_dir, dirs.ECE_FOREGROUND_NAME)
        else:
            self._m = [ev.EceBinaryNumpy(threshold_range=None)]
            self.out_dir = os.path.join(base_dir, dirs.ECE_NAME)
        self.ece_entries = ['ece']
        _make_dir_if_not_exists(self.out_dir)
    def _setup_eval(self, eval_data: evdata.EvalData):
        self.prepare, self.id_ = analysis.get_probability_preparation(eval_data,
                                                                      rescale_confidence=self.rescale_confidence,
                                                                      rescale_sigma=self.rescale_sigma,
                                                                      min_max_dir=self.min_max_dir)
        self.load_params = analysis.Loader.Params(eval_data.confidence_entry, need_t2_mask=self.need_t2_mask)
        metric = ev.ComposeEvaluation([*self._m, ev.DiceNumpy(), ev.ConfusionMatrix()])
        hook = hooks.ReducedComposeEvalHook([
            hooks.WriteCsvHook(os.path.join(self.out_dir, dirs.ECE_PLACEHOLDER.format(self.id_)),
                               entries=(*self.ece_entries, 'dice', 'tp', 'tn', 'fp', 'fn', 'n'))
        ])
        self.eval_cases = [EvalCase(metric, hook)]
class CorrectionAction(EvalAction):
    """Evaluation action that runs uncertainty-based correction at several
    thresholds, writing one CSV per threshold."""

    def __init__(self, thresholds: list, base_dir: str, rescale_confidence='', rescale_sigma='global',
                 min_max_dir: str = None) -> None:
        super().__init__()
        self.thresholds = thresholds
        self.rescale_confidence = rescale_confidence
        self.rescale_sigma = rescale_sigma
        self.min_max_dir = min_max_dir
        self.out_dir = os.path.join(base_dir, dirs.UNCERTAINTY_NAME)
        _make_dir_if_not_exists(self.out_dir)

    def _setup_eval(self, eval_data: evdata.EvalData):
        """Prepare uncertainty loading and register one eval case per threshold."""
        self.prepare, self.id_ = analysis.get_uncertainty_preparation(
            eval_data,
            rescale_confidence=self.rescale_confidence,
            rescale_sigma=self.rescale_sigma,
            min_max_dir=self.min_max_dir)
        self.load_params = analysis.Loader.Params(eval_data.confidence_entry)
        cases = []
        for th in self.thresholds:
            # e.g. 0.05 -> '005' for the output file name
            th_tag = '{:.2f}'.format(th).replace('.', '')
            csv_path = os.path.join(self.out_dir, dirs.UNCERTAINTY_PLACEHOLDER.format(self.id_, th_tag))
            cases.append(EvalCase(ev.UncertaintyAndCorrectionEvalNumpy(th),
                                  hooks.WriteCsvHook(csv_path, None)))
        self.eval_cases = cases
class SaveMinMaxAction(EvalAction):
    """Evaluation action that records the min/max of the probability maps to a
    summary CSV, for later rescaling by the other actions."""

    def __init__(self, min_max_dir: str) -> None:
        super().__init__()
        self.min_max_dir = min_max_dir
        _make_dir_if_not_exists(min_max_dir)

    def _setup_eval(self, eval_data: evdata.EvalData):
        """Register min/max lambda metrics over the 'probabilities' entry."""
        self.prepare, self.id_ = analysis.get_confidence_entry_preparation(eval_data, 'probabilities')
        self.load_params = analysis.Loader.Params(eval_data.confidence_entry)
        minmax_metric = ev.ComposeEvaluation([
            ev.LambdaEvaluation(lambda x: x.min(), ('probabilities',), 'min'),
            ev.LambdaEvaluation(lambda x: x.max(), ('probabilities',), 'max')
        ])
        summary_hook = hooks.WriteSummaryCsvHook(
            os.path.join(self.min_max_dir, dirs.MINMAX_PLACEHOLDER.format(self.id_)),
            confidence_entry=eval_data.confidence_entry)
        self.eval_cases = [EvalCase(minmax_metric, summary_hook)]
def get_actions(action_names, min_max_dir, base_dir, ece_details):
    """Instantiate the evaluation actions named in *action_names*.

    Unknown names are silently skipped; the order of *action_names* is kept.
    """
    def _build(name):
        # One-line purpose: map an action name to a configured action instance.
        if name == 'minmax':
            return SaveMinMaxAction(min_max_dir)
        if name == 'ece_dice':
            return EceAction(base_dir, ece_details, rescale_confidence='subject',
                             rescale_sigma='global', min_max_dir=min_max_dir)
        if name == 'calib':
            return EceCalibrationAction(base_dir, ece_details, rescale_confidence='subject',
                                        rescale_sigma='global', min_max_dir=min_max_dir)
        if name == 'bnf_ue':
            return CorrectionAction([0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95],
                                    base_dir, rescale_confidence='subject',
                                    rescale_sigma='global', min_max_dir=min_max_dir)
        return None

    built = (_build(name) for name in action_names)
    return [action for action in built if action is not None]
if __name__ == '__main__':
    # CLI: every argument is optional; omitted ones fall back to the default
    # evaluation configuration below.
    parser = argparse.ArgumentParser()
    parser.add_argument('--ds', type=str, nargs='?', help='the dataset to evaluate the runs on')
    parser.add_argument('--ids', type=str, nargs='*', help='the ids of the runs to be evaluated')
    parser.add_argument('--act', type=str, nargs='*', help='the names of the evaluation configuration')
    args = parser.parse_args()

    ds = 'brats' if args.ds is None else args.ds
    to_evaluate = args.ids
    if to_evaluate is None:
        # default set of runs to evaluate
        to_evaluate = [
            'baseline',
            'baseline_mc',
            'center',
            'center_mc',
            'ensemble',
            'auxiliary_feat',
            'auxiliary_segm',
            'aleatoric',
        ]
    action_ids = args.act
    if action_ids is None:
        # default set of evaluation actions
        action_ids = ['minmax', 'ece_dice', 'calib', 'bnf_ue']

    print('\n**************************************')
    print('dataset: {}'.format(ds))
    print('to_evaluate: {}'.format(to_evaluate))
    print('eval_actions: {}'.format(action_ids))
    print('**************************************\n')
    main(ds, to_evaluate, action_ids)
|
1607187
|
class Solution:
    def numWays(self, n: int, k: int) -> int:
        """Count the ways to paint ``n`` fence posts with ``k`` colors so that
        no three consecutive posts share the same color (LeetCode 276).

        Uses O(1) space: only the counts for the last two post totals are kept.
        """
        if n == 0:
            return 0
        if n == 1:
            return k
        # totals for (i-1) posts and i posts, starting at i == 2
        prev_total, curr_total = k, k * k
        for _ in range(n - 2):
            # Post i+1 may differ from post i (k-1 choices on curr_total) or
            # match post i, in which case post i had to differ from i-1
            # ((k-1) choices on prev_total).
            curr_total, prev_total = (k - 1) * (curr_total + prev_total), curr_total
        return curr_total
|
1607189
|
import pathlib
from typing import Dict, Optional, Type, Union
from astro.constants import FileType as FileTypeConstants
from astro.files.types.base import FileType
from astro.files.types.csv import CSVFileType
from astro.files.types.json import JSONFileType
from astro.files.types.ndjson import NDJSONFileType
from astro.files.types.parquet import ParquetFileType
def create_file_type(
    path: str,
    filetype: Union[FileTypeConstants, None] = None,
    normalize_config: Optional[dict] = None,
):
    """Factory method to create FileType super objects based on the file extension in path or filetype specified.

    :param path: URI or local path to the file
    :param filetype: explicit file type; when falsy it is inferred from *path*
    :param normalize_config: forwarded to the concrete FileType constructor
    :raises ValueError: if the (explicit or inferred) filetype is unsupported
    """
    filetype_to_class: Dict[FileTypeConstants, Type[FileType]] = {
        FileTypeConstants.CSV: CSVFileType,
        FileTypeConstants.JSON: JSONFileType,
        FileTypeConstants.NDJSON: NDJSONFileType,
        FileTypeConstants.PARQUET: ParquetFileType,
    }
    if not filetype:
        filetype = get_filetype(path)
    try:
        return filetype_to_class[filetype](path=path, normalize_config=normalize_config)
    except KeyError as exc:
        # Join the enum members' *values*: joining the members themselves
        # (as the original did) raises TypeError because they are not str.
        supported = ", ".join(member.value for member in FileTypeConstants)
        raise ValueError(
            f"Non supported file type provided {filetype}, file_type should be among {supported}."
        ) from exc
def get_filetype(filepath: Union[str, pathlib.PosixPath]) -> FileTypeConstants:
    """
    Return a FileType given the filepath. Uses a naive strategy, using the file extension.
    :param filepath: URI or Path to a file
    :type filepath: str or pathlib.PosixPath
    :return: The filetype (e.g. csv, ndjson, json, parquet)
    :rtype: astro.constants.FileType
    :raises ValueError: if the extension is missing or unsupported
    """
    # Check against PurePath so Windows paths (pathlib.WindowsPath) are also
    # recognised; the original isinstance check against PosixPath would fall
    # through to str.split and crash on a Path object on Windows.
    if isinstance(filepath, pathlib.PurePath):
        extension = filepath.suffix[1:]
    else:
        extension = ""
        tokenized_path = filepath.split(".")
        if len(tokenized_path) > 1:
            extension = tokenized_path[-1]
    if extension == "":
        raise ValueError(
            f"Missing file extension, cannot automatically determine filetype from path '{filepath}'."
            f" Please pass the 'filetype' param with the explicit filetype (e.g. csv, ndjson, etc.)."
        )
    try:
        return FileTypeConstants(extension)
    except ValueError as exc:
        # Chain the original enum lookup failure for easier debugging.
        raise ValueError(f"Unsupported filetype '{extension}' from file '{filepath}'.") from exc
|
1607213
|
import os
import pandas as pd
class DatasetConverter(object):
    """Base class for dataset converters producing normalized output lines.

    Subclasses implement ``convert_lines`` and use ``construct_line`` to emit
    tuples of the form (dataset_name, item_id, phrase, meddra_code, sct_id,
    umls_cui).
    """

    def __init__(self, dataset_path, dataset_name=None):
        self.dataset_path = dataset_path
        # Default the dataset name to the upper-cased basename of the path.
        if dataset_name is not None:
            self.dataset_name = dataset_name
        else:
            self.dataset_name = os.path.basename(dataset_path).upper()

    def convert_lines(self):
        """Subclasses must return the converted lines."""
        raise NotImplementedError("not implemented.")

    def construct_line(self, item_id, phrase,
                       meddra_code='', sct_id='', umls_cui=''):
        """Build one normalized output tuple for this dataset."""
        return (self.dataset_name, item_id, phrase,
                meddra_code, sct_id, umls_cui)

    def _padded_print(self, message, padding):
        # Print the message prefixed with the given padding string.
        print("{}{}".format(padding, message))

    def print_stats(self, padding=''):
        # Default: no statistics, just a blank line.
        print("")
class CsvDatasetConverter(DatasetConverter):
    """Converter for delimiter-separated datasets loaded via pandas.

    Column positions select the item id, the input phrase and the target
    value; *target_term* names the keyword used for the target in
    ``construct_line`` (e.g. 'meddra_code').
    """

    def __init__(self, dataset_path, dataset_name=None,
                 item_id_col=0, input_col=1, target_col=2,
                 target_term='meddra_code',
                 sep=',', **read_kwargs):
        super(CsvDatasetConverter, self).__init__(dataset_path, dataset_name)
        self.input_col = input_col
        self.item_id_col = item_id_col
        self.target_col = target_col
        self.target_term = target_term
        self.sep = sep
        # Extra keyword arguments forwarded verbatim to pandas.read_csv.
        self.read_kwargs = read_kwargs

    def convert_lines(self):
        """Read the CSV and return one constructed line per row."""
        frame = pd.read_csv(self.dataset_path, sep=self.sep, dtype='str',
                            **self.read_kwargs)
        phrases = frame[frame.columns[self.input_col]].values
        targets = frame[frame.columns[self.target_col]].values
        if self.item_id_col is None:
            # No id column configured: fall back to the row index.
            ids = frame.index
        else:
            ids = frame[frame.columns[self.item_id_col]].values
        lines = []
        for item_id, phrase, target in zip(ids, phrases, targets):
            line_kwargs = {'item_id': item_id, 'phrase': phrase,
                           self.target_term: target}
            lines.append(self.construct_line(**line_kwargs))
        return lines
|
1607223
|
import sys
sys.path.insert(0,'..')
from algobpy.parse import parse_params
from pyteal import *
def dao_fund_lsig(ARG_DAO_APP_ID):
    """
    Represents DAO treasury (ALGO/ASA)

    :param ARG_DAO_APP_ID: application id of the DAO app this escrow is paired with
    :return: a PyTeal expression approving either a lone opt-in transaction or
        a 2-transaction group (DAO 'execute' call + payment/asset transfer)
    """
    # check no rekeying, close remainder to, asset close to for a txn
    def basic_checks(txn: Txn): return And(
        txn.rekey_to() == Global.zero_address(),
        txn.close_remainder_to() == Global.zero_address(),
        txn.asset_close_to() == Global.zero_address()
    )

    # verify funds are transfered only when paired with DAO app (during execute call)
    payment = And(
        # verify first transaction
        basic_checks(Gtxn[0]),
        Gtxn[0].type_enum() == TxnType.ApplicationCall,
        Gtxn[0].application_id() == Int(ARG_DAO_APP_ID),
        Gtxn[0].application_args[0] == Bytes("execute"),
        # verify second transaction (either payment in asa or ALGO)
        basic_checks(Gtxn[1]),
        Or(
            Gtxn[1].type_enum() == TxnType.AssetTransfer,
            Gtxn[1].type_enum() == TxnType.Payment,
        )
    )

    # Opt-in transaction is allowed
    opt_in = And(
        basic_checks(Txn),
        Txn.type_enum() == TxnType.AssetTransfer,
        Txn.asset_amount() == Int(0)
    )

    # Fixed the original's accidental double assignment ('program = program = ...').
    program = Cond(
        [Global.group_size() == Int(1), opt_in],
        [Global.group_size() == Int(2), payment]
    )

    return program
if __name__ == "__main__":
params = {
"ARG_DAO_APP_ID": 99
}
# Overwrite params if sys.argv[1] is passed
if(len(sys.argv) > 1):
params = parse_params(sys.argv[1], params)
print(compileTeal(dao_fund_lsig(params["ARG_DAO_APP_ID"]), Mode.Signature, version = 4))
|
1607251
|
import logging
import time
import psutil
from icrawl_plugin import IHostCrawler
from utils.features import DiskioFeature
logger = logging.getLogger('crawlutils')
class DiskioHostCrawler(IHostCrawler):
    '''
    Plugin for crawling disk I/O counters from host and
    computing the bytes/second rate for read and write operations
    '''

    def __init__(self):
        # key -> (raw counter list, capture timestamp); the previous sample
        # used to derive per-second rates on the next crawl
        self._cached_values = {}
        # device_name -> last computed rates; reused when an OS counter wraps
        self._previous_rates = {}

    def _cache_put_value(self, key, value):
        # Store the value together with its capture time so a rate can be
        # computed from the delta on the following crawl.
        self._cached_values[key] = (value, time.time())

    def _cache_get_value(self, key):
        # Returns (value, timestamp), or (None, None) when no sample exists yet.
        if key in self._cached_values:
            return self._cached_values[key]
        else:
            return None, None

    def _crawl_disk_io_counters(self):
        # Generator yielding (device_name, [read_count, write_count,
        # read_bytes, write_bytes]) for every disk psutil reports.
        try:
            disk_counters = psutil.disk_io_counters(perdisk=True)
            for device_name in disk_counters:
                counters = disk_counters[device_name]
                curr_counters = [
                    counters.read_count,
                    counters.write_count,
                    counters.read_bytes,
                    counters.write_bytes
                ]
                logger.debug(
                    u'Disk I/O counters - {0}: {1}'.format(device_name,
                                                           curr_counters))
                yield (device_name, curr_counters)
        except OSError as e:
            # Best effort: log and yield nothing if the platform call fails.
            logger.debug(
                u'Caught exception when crawling disk I/O counters: {0}'.
                format(e))

    def crawl(self, **kwargs):
        """Yield one (feature_key, DiskioFeature, 'diskio') tuple per disk,
        where the feature holds per-second rates derived from the counter
        deltas since the previous call. The first call yields zero rates."""
        logger.debug('Crawling %s' % self.get_feature())
        diskio_data = self._crawl_disk_io_counters()
        for device_name, curr_counters in diskio_data:
            logger.debug(u'Processing device {0}; counters = {1}'.
                         format(device_name, curr_counters))
            feature_key = '{0}-{1}'.format('diskio', device_name)
            cache_key = '{0}-{1}'.format('INVM', feature_key)
            (prev_counters, prev_time) = self._cache_get_value(cache_key)
            # Store the new sample before computing rates for this round.
            self._cache_put_value(cache_key, curr_counters)
            if prev_counters and prev_time:
                # Compute the rates (per second) for each attribute, namely:
                # read_op/s, write_op/s, read_bytes/s, and write_bytes/s
                time_diff = time.time() - prev_time
                rates = [
                    round(
                        (a - b) / time_diff,
                        2) for (
                        a,
                        b) in zip(
                        curr_counters,
                        prev_counters)]
                for i in range(len(rates)):
                    if rates[i] < 0:
                        # The corresponding OS counter has wrapped
                        # For now, let's return the previous measurement
                        # to avoid a huge drop on the metric graph
                        rates[i] = self._previous_rates[device_name][i]
                        logger.debug(
                            u'Counter "{0}" for device {1} has wrapped'.
                            format(i, device_name))
            else:
                # first measurement
                rates = [0] * 4
            # Remember this round's rates for the wrap-around fallback above.
            self._previous_rates[device_name] = rates
            logger.debug(
                u'Disk I/O counters rates- {0}: {1}'.format(device_name,
                                                            rates))
            diskio_feature_attributes = DiskioFeature._make(rates)
            yield(feature_key, diskio_feature_attributes, 'diskio')

    def get_feature(self):
        # Feature name under which this crawler's results are reported.
        return 'diskio'
|
1607253
|
from __future__ import absolute_import
import functools
__all__ = ["IdentityContext", "PermissionDenied"]
class PermissionContext(object):
    """A permission check usable as a decorator, a with-block or a boolean.

    ``checker`` is a zero-argument callable; when it returns falsy inside a
    with-block (or via ``check()``), ``exception`` is raised with
    ``exception_kwargs``.
    """

    def __init__(self, checker, exception=None, **exception_kwargs):
        self._check = checker
        self.in_context = False
        self.exception = exception if exception else PermissionDenied
        self.exception_kwargs = exception_kwargs

    def __call__(self, wrapped):
        # Decorator form: run the wrapped callable inside this context.
        @functools.wraps(wrapped)
        def guarded(*args, **kwargs):
            with self:
                return wrapped(*args, **kwargs)
        return guarded

    def __enter__(self):
        self.in_context = True
        self.check()
        return self

    def __exit__(self, exception_type, exception, traceback):
        self.in_context = False

    def __bool__(self):
        # Boolean form: truthiness of the checker's result, no exception.
        return bool(self._check())

    # Python 2 compatibility alias for __bool__.
    def __nonzero__(self):
        return self.__bool__()

    def check(self):
        """Run the checker; raise the configured exception on failure."""
        if not self._check():
            raise self.exception(**self.exception_kwargs)
        return True
class IdentityContext(object):
    """A context of identity, providing the enviroment to control access."""

    def __init__(self, acl, roles_loader=None):
        self.acl = acl
        self.set_roles_loader(roles_loader)

    def set_roles_loader(self, role_loader):
        """Set a callable object (such as a function) which could return a
        iteration to provide all roles of current context user.

        Example:

        >>> @context.set_roles_loader
        ... def load_roles():
        ...     user = request.context.current_user
        ...     for role in user.roles:
        ...         yield role
        """
        self.load_roles = role_loader

    def check_permission(self, operation, resource,
                         assertion_kwargs=None, **exception_kwargs):
        """Build a PermissionContext for ``operation`` on ``resource``.

        Extra keyword arguments are stored on the raised exception; an
        ``exception`` key overrides the default PermissionDenied class. The
        result works as a decorator, a with-block or a boolean.
        """
        exception = exception_kwargs.pop("exception", PermissionDenied)
        checker = functools.partial(
            self._docheck, operation=operation, resource=resource,
            **(assertion_kwargs or {}))
        return PermissionContext(checker, exception, **exception_kwargs)

    def has_permission(self, *args, **kwargs):
        """Boolean form of check_permission."""
        return bool(self.check_permission(*args, **kwargs))

    def has_roles(self, role_groups):
        """True if the current user owns every role of at least one group."""
        owned = frozenset(self.load_roles())
        return any(owned.issuperset(group) for group in role_groups)

    def _docheck(self, operation, resource, **assertion_kwargs):
        roles = list(self.load_roles())
        assert len(roles) == len(set(roles))  # duplicate role check
        return self.acl.is_any_allowed(roles, operation, resource,
                                       **assertion_kwargs)
class PermissionDenied(Exception):
    """The exception for denied access request.

    Keyword arguments passed at construction are exposed on ``kwargs``,
    always including the message under the 'message' key.
    """

    def __init__(self, message="", **kwargs):
        super(PermissionDenied, self).__init__(message)
        kwargs['message'] = message
        self.kwargs = kwargs
|
1607299
|
import numpy as np
from wtm_envs.mujoco import robot_env, utils
import mujoco_py
from queue import deque
from mujoco_py import modder
import matplotlib.pyplot as plt
from matplotlib.backends.backend_agg import FigureCanvasAgg
import platform
import os
def goal_distance(goal_a, goal_b):
    """Distance between two goal arrays of equal shape.

    If the last dimension is a multiple of 3, the goals are treated as a
    concatenation of xyz triples and the *maximum* of the per-triple
    Euclidean distances is returned; otherwise the plain Euclidean norm of
    the difference is returned.
    """
    assert goal_a.shape == goal_b.shape
    full_norm = np.linalg.norm(goal_a - goal_b, axis=-1)
    if goal_a.shape[-1] % 3 != 0:
        return full_norm
    n_triples = goal_a.shape[-1] // 3
    worst = np.zeros(full_norm.shape)
    for t in range(n_triples):
        lo = t * 3
        diff = goal_a[..., lo:lo + 3] - goal_b[..., lo:lo + 3]
        triple_dist = np.asarray(np.linalg.norm(diff, axis=-1))
        if len(worst.shape) == 0:
            # Scalar case (1-D goals): keep the larger of the two scalars.
            worst = np.max([float(triple_dist), float(worst)])
        else:
            worst = np.max([triple_dist, worst], axis=0)
    return worst
class PercDeque(deque):
    """Bounded deque that tracks the 25th/75th percentiles of its contents.

    Percentiles (``lower_perc`` / ``upper_perc``) are recomputed every
    ``perc_recomp`` appends to amortize the cost of the percentile call.
    """

    def __init__(self, maxlen, perc_recomp=100):
        self.ctr = 0
        self.upper_perc = None
        self.lower_perc = None
        self.perc_recomp = perc_recomp
        super(PercDeque, self).__init__(maxlen=maxlen)

    def append(self, vec):
        super(PercDeque, self).append(vec)
        # Wrap the counter once the deque is full so recomputation keeps firing.
        if self.ctr == self.maxlen:
            self.ctr = 0
        if self.ctr % self.perc_recomp == 0:
            snapshot = np.array(self)
            self.upper_perc = np.percentile(snapshot, 75, axis=0)
            self.lower_perc = np.percentile(snapshot, 25, axis=0)
        self.ctr += 1
class WTMEnv(robot_env.RobotEnv):
    # Shared Mujoco base environment for the wtm_envs tasks: adds percentile-
    # scaled observation noise, goal bookkeeping, and an optional matplotlib
    # graph rendered onto an in-scene texture.
    def __init__(
            self, model_path, n_substeps, initial_qpos, n_actions=4
    ):
        """Initializes a new environment.

        Args:
            model_path (string): path to the environments XML file
            n_substeps (int): number of substeps the simulation runs on every call to step
            initial_qpos (dict): a dictionary of joint names and values that define the initial configuration
            n_actions (int): dimensionality of the action space (default 4: xyz position delta + gripper)
        """
        self._viewers = {}
        # Rolling observation history; its 25th/75th percentiles scale the
        # noise injected by add_noise().
        self.obs_history = PercDeque(maxlen=5000)
        self.obs_noise_coefficient = 0.0
        self.plan_cache = {}
        self.goal_hierarchy = {}
        self.goal = []
        self.final_goal = []
        # axis_name -> y values, axis_name+'_x' -> x values (see add_graph_values)
        self.graph_values = {}
        super(WTMEnv, self).__init__(
            model_path=model_path, n_substeps=n_substeps, n_actions=n_actions,
            initial_qpos=initial_qpos)
        self.mod = modder.TextureModder(self.sim)
        # assert self.gripper_goal in ['gripper_above', 'gripper_random'], "gripper_none is not supported anymore"

    # GoalEnv methods
    # ----------------------------

    def compute_reward(self, achieved_goal, goal, info):
        # NOTE(review): self.reward_type is expected to be set by a subclass —
        # confirm against the concrete environments.
        if self.reward_type == 'sparse':
            # 0 on success, -1 otherwise
            success = self._is_success(achieved_goal, goal)
            return (success - 1).astype(np.float32)
        else:
            # Dense reward: negative goal distance.
            d = goal_distance(achieved_goal, goal)
            return -d

    # RobotEnv methods
    # ----------------------------

    def _step_callback(self):
        # Keep the gripper fingers closed on every step when the environment
        # was configured with block_gripper (attribute set by a subclass).
        if "block_gripper" in self.__dict__.keys():
            if self.block_gripper:
                self.sim.data.set_joint_qpos('robot0:l_gripper_finger_joint', 0.)
                self.sim.data.set_joint_qpos('robot0:r_gripper_finger_joint', 0.)
                self.sim.forward()

    def _goal2obs(self, goal):
        # Embed goal vector(s) into zero-padded observation vector(s) of the
        # environment's observation size; accepts a single goal or a batch.
        if len(goal.shape) == 1:
            goal_arr = np.array([goal])
        else:
            goal_arr = goal
        assert len(goal_arr.shape) == 2
        obs = []
        o_dims = self.observation_space.spaces['observation'].shape[0]
        o = np.zeros(o_dims, np.float32)
        for g in goal_arr:
            # The goal occupies the first self.goal_size observation entries
            # (goal_size is provided by a subclass).
            o[:self.goal_size] = g
            obs.append(o.copy())
        obs = np.array(obs)
        if len(goal.shape) == 1:
            return obs[0]
        else:
            return obs

    def _set_action(self, action):
        # action = [dx, dy, dz, gripper]; rotation of the end effector is fixed.
        assert action.shape == (4,)
        action = action.copy()  # ensure that we don't change the action outside of this scope
        pos_ctrl, gripper_ctrl = action[:3], action[3]

        pos_ctrl *= 0.05  # limit maximum change in position
        rot_ctrl = [1., 0., 1., 0.]  # fixed rotation of the end effector, expressed as a quaternion
        gripper_ctrl = np.array([gripper_ctrl, gripper_ctrl])
        assert gripper_ctrl.shape == (2,)
        if self.block_gripper:
            gripper_ctrl = np.zeros_like(gripper_ctrl)
        action = np.concatenate([pos_ctrl, rot_ctrl, gripper_ctrl])

        # Apply action to simulation.
        utils.ctrl_set_action(self.sim, action)
        utils.mocap_set_action(self.sim, action)
        self.step_ctr += 1

    def add_noise(self, vec, history, noise_coeff):
        # Add Gaussian noise whose scale is noise_coeff times the
        # interquartile range of the observation history.
        history.append(vec)
        range = history.upper_perc - history.lower_perc
        coeff_range = noise_coeff * range
        noise = np.random.normal(loc=np.zeros_like(coeff_range), scale=coeff_range)
        vec = vec.copy() + noise
        return vec

    def _get_viewer(self, mode='human'):
        # Lazily create and cache one viewer per render mode.
        viewer = self._viewers.get(mode)
        if viewer is None:
            if mode == 'human':
                viewer = mujoco_py.MjViewer(self.sim)
            elif mode == 'rgb_array' or mode == 'depth_array':
                viewer = mujoco_py.MjViewer(self.sim)
                # The following should work but it does not. Therefore, replaced by human rendering (with MjViewer, the line above) now.
                # viewer = mujoco_py.MjRenderContextOffscreen(self.sim, -1)
                # viewer = mujoco_py.MjRenderContext(self.sim, -1)
            self._viewers[mode] = viewer
            self._viewer_setup(mode=mode)
        return self._viewers[mode]

    def _viewer_setup(self, mode='human'):
        # Point the camera at the gripper; closer/steeper view for rgb_array.
        if mode == 'human':
            body_id = self.sim.model.body_name2id('robot0:gripper_link')
            lookat = self.sim.data.body_xpos[body_id]
            for idx, value in enumerate(lookat):
                self._viewers[mode].cam.lookat[idx] = value
            self._viewers[mode].cam.distance = 2.5
            self._viewers[mode].cam.azimuth = 132.
            self._viewers[mode].cam.elevation = -14.
        elif mode == 'rgb_array':
            body_id = self.sim.model.body_name2id('robot0:gripper_link')
            lookat = self.sim.data.body_xpos[body_id]
            for idx, value in enumerate(lookat):
                self._viewers[mode].cam.lookat[idx] = value
            self._viewers[mode].cam.distance = 1.
            self._viewers[mode].cam.azimuth = 180.
            self._viewers[mode].cam.elevation = -40.

    def _is_success(self, achieved_goal, desired_goal):
        # 1.0 when within distance_threshold of the goal (subclass attribute).
        d = goal_distance(achieved_goal, desired_goal)
        return (d < self.distance_threshold).astype(np.float32)

    def render(self, mode='human'):
        """Render the scene; returns an RGB array for mode='rgb_array'."""
        self._render_callback()
        if mode == 'rgb_array':
            self._get_viewer(mode).render()
            # window size used for old mujoco-py:
            width, height = 1920, 1180
            data = self._get_viewer(mode).read_pixels(width, height, depth=False)
            # original image is upside-down, so flip it
            return data[::-1, :, :]
        elif mode == 'human':
            self._get_viewer().render()
            # If graph data was registered, paint it onto the in-scene canvas.
            if bool(self.graph_values):
                body_names = [self.sim.model.body_id2name(x) for x in np.arange(self.sim.model.nbody)]
                if 'graph_body' in body_names:  # check if canvas in XML
                    self._get_viewer().vopt.geomgroup[3] = 1  # make canvas visible
                    self.mod.set_rgb("graph_geom", self.create_graph())

    def create_graph(self):
        """Render self.graph_values into an RGB image (one overlaid axis per
        series; '_x'-suffixed keys hold the x values)."""
        # create Graph
        fig = plt.figure(figsize=(6.4, 6.4))
        canvas = FigureCanvasAgg(fig)
        keys = self.graph_values.keys()
        keys = filter(lambda x: x[-2:] != '_x', keys)
        for i, key in enumerate(keys):
            # Only the first axis draws the frame; the rest are overlaid.
            frame_on = i==0
            ax = fig.add_subplot(111, label=str(i), frame_on=frame_on)
            ax.set_ylabel(str(key), color="C"+str(i))
            ax.set_xlabel('step', color="C"+str(i))
            if i % 2 != 0:
                # Alternate series get right/top ticks to avoid overlap.
                ax.yaxis.tick_right()
                ax.yaxis.set_label_position('right')
                ax.xaxis.tick_top()
                ax.xaxis.set_label_position('top')
            ax.tick_params(axis='y', colors="C"+str(i))
            ax.plot(self.graph_values[key+'_x'], self.graph_values[key], color="C"+str(i))
        plt.tight_layout()
        canvas.draw()
        # NOTE(review): tostring_rgb/np.fromstring are deprecated in newer
        # matplotlib/numpy — consider buffer_rgba/np.frombuffer when upgrading.
        buf = canvas.tostring_rgb()
        ncols, nrows = fig.canvas.get_width_height()
        plt.close(fig)
        return np.fromstring(buf, dtype=np.uint8).reshape(nrows, ncols, 3)

    def add_graph_values(self, axis_name, val, x, reset=False):
        """Append (x, val) to the series *axis_name*; reset=True clears it first."""
        if reset and axis_name in self.graph_values.keys():
            del self.graph_values[axis_name]
            del self.graph_values[axis_name+'_x']
        if axis_name in self.graph_values:
            self.graph_values[axis_name].append(val)
            self.graph_values[axis_name+'_x'].append(x)
        else:
            # First point of a new series; val is expected to be indexable here.
            self.graph_values[axis_name]=[val[0]]
            self.graph_values[axis_name+'_x'] = [x]
|
1607363
|
from typing import Dict, List, Any
import json
import e2e.Libs.Ristretto.Ristretto as Ristretto
from e2e.Libs.BLS import PrivateKey
from e2e.Classes.Transactions.Transactions import Data, Transactions
from e2e.Classes.Consensus.VerificationPacket import VerificationPacket
from e2e.Classes.Consensus.SendDifficulty import SendDifficulty
from e2e.Classes.Consensus.DataDifficulty import DataDifficulty
from e2e.Classes.Consensus.SpamFilter import SpamFilter
from e2e.Classes.Merit.Merit import Merit
from e2e.Vectors.Generation.PrototypeChain import PrototypeBlock
# Generates the HundredSix/BlockElements test vectors: one Block carrying a
# VerificationPacket, one carrying a SendDifficulty and one carrying a
# DataDifficulty, all mined with the same key.
merit: Merit = Merit()
blocks: List[Dict[str, Any]] = []
transactions: Transactions = Transactions()
dataFilter: SpamFilter = SpamFilter(5)

edPrivKey: Ristretto.SigningKey = Ristretto.SigningKey(b'\0' * 32)
edPubKey: bytes = edPrivKey.get_verifying_key()
blsPrivKey: PrivateKey = PrivateKey(0)

#Generate a Data to verify for the VerificationPacket Block.
data: Data = Data(bytes(32), edPubKey)
data.sign(edPrivKey)
data.beat(dataFilter)
transactions.add(data)

#Reuse this packet in the Block below (the original built an identical one twice).
packet: VerificationPacket = VerificationPacket(data.hash, [1])
blocks.append(
  PrototypeBlock(
    merit.blockchain.blocks[-1].header.time + 1200,
    packets=[packet],
    minerID=blsPrivKey
  ).finish(0, merit).toJSON()
)

#Generate the SendDifficulty Block.
blocks.append(
  PrototypeBlock(
    merit.blockchain.blocks[-1].header.time + 1200,
    elements=[SendDifficulty(0, 0, 1)],
    minerID=blsPrivKey
  ).finish(0, merit).toJSON()
)

#Generate the DataDifficulty Block.
blocks.append(
  PrototypeBlock(
    merit.blockchain.blocks[-1].header.time + 1200,
    elements=[DataDifficulty(0, 0, 1)],
    minerID=blsPrivKey
  ).finish(0, merit).toJSON()
)

with open("e2e/Vectors/Consensus/HundredSix/BlockElements.json", "w") as vectors:
  vectors.write(json.dumps({
    "blocks": blocks,
    "transactions": transactions.toJSON()
  }))
|
1607369
|
from lsassy.output import IOutput
class Output(IOutput):
    """
    Returns output in greppable format
    """
    def get_output(self):
        # De-duplicate credentials by collecting tab-separated lines in a set.
        unique_lines = set()
        for cred in self._credentials:
            fields = (cred["ssp"], cred["domain"], cred["username"],
                      cred["password"], cred["lmhash"], cred["nthash"],
                      cred["sha1"])
            unique_lines.add("{}\t{}\t{}\t{}\t{}\t{}\t{}".format(*fields))
        return "\n".join(unique_lines)
|
1607375
|
import numpy as np
import os
import pickle as p
import argparse
# CLI: --result_dir points at the directory holding the per-scan pickle files
# produced by the detection pipeline (one <scan>.pkl per scan).
parser = argparse.ArgumentParser()
parser.add_argument('--result_dir', type=str,required=True)
args = parser.parse_args()
def compute_iou(pred_box, ref_bbox):
    """Axis-aligned 3D IoU between each predicted box and the reference box.

    Boxes are rows of [cx, cy, cz, w, h, d] (center + full sizes);
    ``ref_bbox`` holds a single row, broadcast against all predictions.
    Returns an array of shape (N,).
    """
    n_pred = pred_box.shape[0]
    half_pred = pred_box[:, 3:6] / 2.0
    half_ref = ref_bbox[:, 3:6] / 2.0
    pred_lo = pred_box[:, 0:3] - half_pred
    pred_hi = pred_box[:, 0:3] + half_pred
    ref_lo = ref_bbox[:, 0:3] - half_ref
    ref_hi = ref_bbox[:, 0:3] + half_ref
    ref_lo_rep = np.repeat(ref_lo[:, :, np.newaxis], n_pred, axis=0)
    ref_hi_rep = np.repeat(ref_hi[:, :, np.newaxis], n_pred, axis=0)
    # Intersection corners: elementwise min of the upper corners and max of
    # the lower corners.
    hi = np.min(np.concatenate([pred_hi[:, :, np.newaxis], ref_hi_rep], axis=2), axis=2)
    lo = np.max(np.concatenate([pred_lo[:, :, np.newaxis], ref_lo_rep], axis=2), axis=2)
    extent = hi - lo
    extent[extent < 0] = 0
    inter = extent[:, 0] * extent[:, 1] * extent[:, 2]
    pred_vol = pred_box[:, 3] * pred_box[:, 4] * pred_box[:, 5]
    ref_vol = ref_bbox[:, 3] * ref_bbox[:, 4] * ref_bbox[:, 5]
    return inter / (pred_vol + np.repeat(ref_vol, n_pred, axis=0) - inter)
# Accumulates detection metrics (IoU success rates, recall@k, mean IoU) over
# every per-scan pickle file in result_dir, for both intact and partial boxes.
result_dir=args.result_dir
success_count_iou25=0
success_count_iou50=0
Rat2_count=0
Rat5_count=0
Rat10_count=0
Rat20_count=0
total_count=0
Max_IoU=0
success_count=0
iou_sum=0
par_success_count_iou25=0
par_success_count_iou50=0
par_Rat2_count=0
par_Rat5_count=0
par_Rat10_count=0
par_Rat20_count=0
par_Max_IoU=0
scan_list=os.listdir(result_dir)
for scan_file in scan_list:
    scan_output_file=os.path.join(result_dir,scan_file)
    scan=scan_file[:12]
    with open(scan_output_file,"rb") as f:
        output_content=p.load(f)
    object_id_list=list(output_content.keys())
    for object_id in object_id_list:
        for object_data in output_content[object_id]:
            prediction=object_data["pred_intact_box"].T
            gt=object_data["gt_intact_bbox"].T
            partial_pred=object_data["pred_partial_box"]
            partial_gt=object_data["gt_partial_bbox"].T
            bbox=prediction[:,0:6]
            partial_bbox=partial_pred[:,0:6]
            # Rank candidate boxes by confidence and keep the top 20.
            confidence=object_data["output"][:,6]
            sort_id=np.argsort(-confidence)
            topk_id=sort_id[:20]
            topk_bbox=bbox[topk_id]
            topk_partial_bbox=partial_bbox[topk_id]
            target_bbox=gt[np.newaxis,:]
            target_partial_bbox=partial_gt[np.newaxis,:]
            iou=compute_iou(topk_bbox,target_bbox)
            par_iou=compute_iou(topk_partial_bbox,target_partial_bbox)
            par_Max_IoU+=np.max(par_iou)
            Max_IoU+=np.max(iou)
            # Intact-box metrics: top-1 success at IoU 0.25/0.5, recall@k at IoU 0.5.
            if iou[0]>0.25:
                success_count_iou25+=1
            if iou[0]>0.5:
                success_count_iou50+=1
                iou_sum+=iou[0]
                success_count+=1
            if np.max(iou[0:2])>0.5:
                Rat2_count+=1
            if np.max(iou[0:5])>0.5:
                Rat5_count+=1
            if np.max(iou[0:10])>0.5:
                Rat10_count+=1
            if np.max(iou[0:20])>0.5:
                Rat20_count+=1
            # Partial-box metrics, same definitions.
            if par_iou[0]>0.25:
                par_success_count_iou25+=1
            if par_iou[0]>0.5:
                par_success_count_iou50+=1
            if np.max(par_iou[0:2])>0.5:
                par_Rat2_count+=1
            if np.max(par_iou[0:5])>0.5:
                par_Rat5_count+=1
            if np.max(par_iou[0:10])>0.5:
                par_Rat10_count+=1
            if np.max(par_iou[0:20])>0.5:
                par_Rat20_count+=1
            total_count+=1
print("---------------intact_bbox--------------------------")
print("IoU25_success_rate:",success_count_iou25/total_count)
print("IoU50_success_rate:", success_count_iou50 / total_count)
print("R@2:", Rat2_count / total_count)
print("R@5:", Rat5_count / total_count)
print("R@10:", Rat10_count / total_count)
print("R@20", Rat20_count / total_count)
print("Max_IoU",Max_IoU/total_count)
print("success mean IoU",iou_sum/success_count)
print("---------------partial_bbox--------------------------")
print("IoU25_success_rate:",par_success_count_iou25/total_count)
print("IoU50_success_rate:", par_success_count_iou50 / total_count)
# Bug fix: the partial-bbox R@2 line previously printed the intact Rat2_count.
print("R@2:", par_Rat2_count / total_count)
print("R@5:", par_Rat5_count / total_count)
print("R@10:", par_Rat10_count / total_count)
print("R@20", par_Rat20_count / total_count)
print("Max_IoU",par_Max_IoU/total_count)
|
1607378
|
from viznet import *
import matplotlib.pyplot as plt
def draw_caizi_nn(ax, num_node_visible, num_node_hidden):
    '''CaiZi R-Theta Network

    Draws the amplitude/sign two-branch network onto *ax* with
    num_node_visible input nodes and num_node_hidden hidden nodes.
    '''
    # brushes (the input brush was named 'input' before, shadowing the builtin)
    conv = NodeBrush('nn.convolution', ax)
    input_brush = NodeBrush('nn.input', ax)
    hidden = NodeBrush('nn.hidden', ax)
    output = NodeBrush('nn.output', ax)
    op = NodeBrush('basic', ax, size='small')
    de = EdgeBrush('-->', ax)
    ude = EdgeBrush('---', ax)

    # horizontal offsets of the amplitude branch and the sign branch
    offset_amplitude = -num_node_hidden / 2.
    offset_sign = num_node_hidden / 2. - 0.5

    # visible layers
    sigma = node_sequence(input_brush, num_node_visible, center=(0, 0))

    # layer heights (stacked bottom to top)
    da, db = 0.8, 1.0
    y1 = db + 0.5
    y2 = y1 + da
    y3 = y2 + db
    y4 = y3 + da
    y5 = y4 + da + 0.3
    y6 = y5 + da
    h1_text, h1p_text, h2_text = 'h^{(1)}', '$h^{\\prime(1)}$', '$h^{(2)}$'
    h1 = node_sequence(hidden, num_node_hidden,
                       center=(offset_amplitude, y1))
    h1p = hidden >> (offset_sign, y1)

    # nonlinear layers
    sigma1 = node_sequence(op, num_node_hidden,
                           center=(offset_amplitude, y2))
    cos = op >> (offset_sign, y2)

    # linear in amplitude
    h2 = hidden >> (offset_amplitude, y3)
    sigma2 = op >> (offset_amplitude, y4)

    # output
    sign_txt = r'\frac{\psi}{|\psi|}'
    times = op >> (-0.5, y5)
    psi = output >> (-0.5, y6)

    # node labels (raw string avoids the invalid '\s' escape sequence)
    for node_list, base_string in zip([sigma, h1], [r'\sigma^z', h1_text]):
        for i, node in enumerate(node_list):
            node.text('$%s_%d$' % (base_string, i))
    h1p.text(h1p_text)
    h2.text(h2_text)
    psi.text(r'$\psi$')
    times.text(r'$\times$')
    cos.text(r'$\cos$', position='left')
    sigma1[0].text(r'$\sigma$', position='left')
    cos.text(r'$%s$' % sign_txt, fontsize=10)
    sigma2.text(r'$|\psi|$')

    # connect them
    connecta2a(sigma, h1, de)
    connecta2a(sigma, [h1p], de)
    connect121(h1, sigma1, de)
    connecta2a(sigma1, [h2], de)
    de >> (h1p, cos)
    de >> (h2, sigma2)
    ude >> (sigma2, times)
    ude >> (cos, times)
    de >> (times, psi)
def draw_instance():
    """Render the CaiZi network diagram and save it to '_caizi.pdf'."""
    with DynamicShow((5, 5), '_caizi.pdf') as show:
        draw_caizi_nn(show.ax, 5, 4)


if __name__ == '__main__':
    draw_instance()
|
1607418
|
from miscellanies.torch.metric_logger import MetricLogger, SmoothedValue
import copy
from core.run.event_dispatcher.register import EventRegister
def _load_metric_definitions(metric_definitions, logger: MetricLogger):
    """Register each metric definition on *logger*, enforcing unique names.

    Each definition is a dict with a mandatory 'name' and optional
    'window_size' (default 20) and 'fmt' (default median/global-average).
    """
    seen_names = set()
    for definition in metric_definitions:
        name = definition['name']
        assert name not in seen_names
        seen_names.add(name)
        window_size = definition.get('window_size', 20)
        fmt_string = definition.get('fmt', "{median:.4f} ({global_avg:.4f})")
        logger.add_meter(name, SmoothedValue(window_size, fmt_string))
class LocalLoggerWrapper:
    """Console metric logger driven by epoch begin/end events.

    A fresh MetricLogger is created on every epoch begin and dropped on epoch
    end; metric meters registered through define_metrics() are re-applied to
    each new epoch's logger.
    """

    def __init__(self, print_freq, prefix, print_epoch_average, header):
        # print_freq: number of iterations between console prints
        # prefix: optional string prepended to every metric name (None = no prefix)
        # print_epoch_average: whether to print epoch-averaged stats on epoch end
        # header: format string for the per-epoch header, may contain '{epoch}'
        self.print_freq = print_freq
        self.prefix = prefix
        self.metric_definitions = []
        self.print_epoch_average = print_epoch_average
        self.header = header
        self.metric_logger = None  # live only between on_epoch_begin/on_epoch_end
        self.summary_metrics = None
        self.epoch_header = None  # initialized here so the attribute always exists

    def on_epoch_begin(self, epoch):
        """Start a fresh MetricLogger and header for the new epoch."""
        self.metric_logger = MetricLogger(delimiter=' ')
        self.epoch_header = self.header.format(epoch=epoch)
        self.summary_metrics = None
        _load_metric_definitions(self.metric_definitions, self.metric_logger)

    def on_epoch_end(self, epoch):
        """Print epoch averages / summary metrics and drop the per-epoch state."""
        if self.print_epoch_average:
            print("Averaged stats:", self.metric_logger)
        if self.summary_metrics is not None:
            print(f'Epoch [{epoch}] summary metrics:\n' + ('\n'.join("{}: {}".format(k, v) for k, v in self.summary_metrics.items())))
            self.summary_metrics = None
        self.metric_logger = None
        self.epoch_header = None

    def on_finished(self):
        """Print any summary metrics collected outside an epoch."""
        if self.summary_metrics is not None:
            print('summary metrics:\n' + ('\n'.join("{}: {}".format(k, v) for k, v in self.summary_metrics.items())))
            self.summary_metrics = None

    def log_every(self, iterable):
        """Wrap ``iterable`` so stats are printed every ``print_freq`` iterations."""
        return self.metric_logger.log_every(iterable, self.print_freq, self.epoch_header)

    def log(self, metrics, step):
        """Update the current epoch's meters with a dict of metric values.

        ``step`` is accepted for interface compatibility but unused here.
        """
        if self.prefix is not None:
            metrics = {self.prefix + k: v for k, v in metrics.items()}
        self.metric_logger.update(**metrics)

    def define_metrics(self, metric_definitions):
        """Register metric meters, applying the prefix to their names.

        Bug fix: the previous implementation used copy.copy() on the list,
        which shares the contained dicts and therefore mutated the caller's
        definitions in place. Each dict is now copied before renaming.
        """
        if self.prefix is not None:
            metric_definitions = [dict(md) for md in metric_definitions]
            for metric_definition in metric_definitions:
                metric_definition['name'] = self.prefix + metric_definition['name']
        self.metric_definitions.extend(metric_definitions)
        if self.metric_logger is not None:
            _load_metric_definitions(metric_definitions, self.metric_logger)

    def synchronize(self):
        """Synchronize meter state across distributed processes."""
        self.metric_logger.synchronize_between_processes()

    def log_summary(self, summary_metrics):
        """Accumulate metrics to be printed once at epoch end / run end."""
        if self.summary_metrics is None:
            self.summary_metrics = {}
        self.summary_metrics.update(summary_metrics)
def _local_logger_register_event_callback(logger, branch_event_register: EventRegister, global_event_register: EventRegister):
    """Hook ``logger`` into the event system.

    Epoch begin/end hooks go on the branch register; the finished hook goes
    on the global register so on_finished() fires once at the very end.
    """
    branch_event_register.register_epoch_begin_hook(logger)
    branch_event_register.register_epoch_end_hook(logger)
    global_event_register.register_finished_hook(logger)
def build_local_logger(logging_config, branch_event_register: EventRegister, global_event_register: EventRegister):
    """Build a LocalLoggerWrapper from ``logging_config`` and register its hooks.

    With no config, a silent default logger (interval 1, no prefix, no epoch
    averages) is returned.  Interval precedence: config['local']['interval']
    over config['interval'] over 1.
    """
    if logging_config is None:
        fallback = LocalLoggerWrapper(1, None, False, '')
        _local_logger_register_event_callback(fallback, branch_event_register, global_event_register)
        return fallback

    local_config = logging_config['local'] if 'local' in logging_config else None

    if local_config is not None and 'interval' in local_config:
        interval = local_config['interval']
    elif 'interval' in logging_config:
        interval = logging_config['interval']
    else:
        interval = 1

    prefix = logging_config['metric_prefix'] if 'metric_prefix' in logging_config else None

    print_epoch_average = False
    header = ''
    if local_config is not None:
        if 'epoch_summary' in local_config:
            print_epoch_average = local_config['epoch_summary']['enabled']
            # only mean-aggregation is supported for the epoch summary
            assert local_config['epoch_summary']['method'] == 'mean'
        if 'header' in local_config:
            header = local_config['header']

    wrapper = LocalLoggerWrapper(interval, prefix, print_epoch_average, header)
    _local_logger_register_event_callback(wrapper, branch_event_register, global_event_register)
    return wrapper
|
1607440
|
from .temporal import *
import numpy as np
# Default model/configuration parameters.
# NOTE(review): the semantics of most entries (degrees, kernel settings,
# normalization and tolerance values) are defined by the surrounding package
# and are not visible from this file — confirm against its documentation
# before relying on any interpretation of individual values.
defaults = dict(
    ydeg=15,
    udeg=2,
    r=20.0,
    dr=None,
    a=0.40,
    b=0.27,
    c=0.1,
    n=10.0,
    p=1.0,
    i=60.0,
    u=np.zeros(30),
    tau=None,
    temporal_kernel=Matern32Kernel,  # provided by `from .temporal import *`
    normalized=True,
    normalization_order=20,
    normalization_zmax=0.023,
    marginalize_over_inclination=True,
    baseline_mean=0.0,
    baseline_var=0.0,
    driver="numpy",
    eps=1e-8,
    epsy=1e-12,
    epsy15=1e-9,
    covpts=300,
    log_alpha_max=10,
    log_beta_max=10,
    abmin=1e-12,
    sigma_max=45.0,
    mx=300,
    my=150,
)
|
1607446
|
import numpy as np
import pytest
import math
from sklearn.base import clone
from sklearn.linear_model import Lasso, ElasticNet
import doubleml as dml
from ._utils import draw_smpls
from ._utils_plr_manual import fit_plr, boot_plr, tune_nuisance_plr
@pytest.fixture(scope='module',
                params=[Lasso(),
                        ElasticNet()])
def learner_g(request):
    """Learner used for the nuisance function g."""
    return request.param


@pytest.fixture(scope='module',
                params=[Lasso(),
                        ElasticNet()])
def learner_m(request):
    """Learner used for the nuisance function m."""
    return request.param


@pytest.fixture(scope='module',
                params=['partialling out'])
def score(request):
    """DoubleML score function under test."""
    return request.param


@pytest.fixture(scope='module',
                params=['dml2'])
def dml_procedure(request):
    """DML aggregation procedure under test."""
    return request.param


@pytest.fixture(scope='module',
                params=[True, False])
def tune_on_folds(request):
    """Whether hyperparameters are tuned separately on each fold."""
    return request.param
def get_par_grid(learner):
    """Return the hyperparameter tuning grid matching the learner's class."""
    if learner.__class__ == Lasso:
        return {'alpha': np.linspace(0.05, .95, 7)}
    # only two learner types are parameterized in this module
    assert learner.__class__ == ElasticNet
    return {'l1_ratio': [.1, .5, .7, .9, .95, .99, 1],
            'alpha': np.linspace(0.05, 1., 7)}
@pytest.fixture(scope="module")
def dml_plr_fixture(generate_data2, learner_g, learner_m, score, dml_procedure, tune_on_folds):
    """Fit a tuned DoubleMLPLR model and a manual reference implementation.

    Returns a dict with package and manual coefficients, standard errors and
    bootstrap results for the test functions below to compare.
    """
    par_grid = {'ml_g': get_par_grid(learner_g),
                'ml_m': get_par_grid(learner_m)}
    n_folds_tune = 4
    boot_methods = ['normal']
    n_folds = 2
    n_rep_boot = 502

    # collect data
    obj_dml_data = generate_data2

    # Set machine learning methods for m & g
    ml_g = clone(learner_g)
    ml_m = clone(learner_m)

    np.random.seed(3141)
    dml_plr_obj = dml.DoubleMLPLR(obj_dml_data,
                                  ml_g, ml_m,
                                  n_folds,
                                  score=score,
                                  dml_procedure=dml_procedure)
    # tune hyperparameters
    _ = dml_plr_obj.tune(par_grid, tune_on_folds=tune_on_folds, n_folds_tune=n_folds_tune)
    # fit with tuned parameters
    dml_plr_obj.fit()

    # re-seed with the same seed before the manual path so it draws matching splits
    np.random.seed(3141)
    y = obj_dml_data.y
    x = obj_dml_data.x
    d = obj_dml_data.d
    n_obs = len(y)
    all_smpls = draw_smpls(n_obs, n_folds)
    smpls = all_smpls[0]

    if tune_on_folds:
        g_params, m_params = tune_nuisance_plr(y, x, d,
                                               clone(learner_g), clone(learner_m), smpls, n_folds_tune,
                                               par_grid['ml_g'], par_grid['ml_m'])
    else:
        # tune once on the full sample, then reuse the parameters on every fold
        xx = [(np.arange(len(y)), np.array([]))]
        g_params, m_params = tune_nuisance_plr(y, x, d,
                                               clone(learner_g), clone(learner_m), xx, n_folds_tune,
                                               par_grid['ml_g'], par_grid['ml_m'])
        g_params = g_params * n_folds
        m_params = m_params * n_folds

    res_manual = fit_plr(y, x, d, clone(learner_g), clone(learner_m),
                         all_smpls, dml_procedure, score,
                         g_params=g_params, m_params=m_params)

    res_dict = {'coef': dml_plr_obj.coef,
                'coef_manual': res_manual['theta'],
                'se': dml_plr_obj.se,
                'se_manual': res_manual['se'],
                'boot_methods': boot_methods}

    for bootstrap in boot_methods:
        # identical seeds so manual and package bootstrap draws coincide
        np.random.seed(3141)
        boot_theta, boot_t_stat = boot_plr(y, d, res_manual['thetas'], res_manual['ses'],
                                           res_manual['all_g_hat'], res_manual['all_m_hat'],
                                           all_smpls, score, bootstrap, n_rep_boot)
        np.random.seed(3141)
        dml_plr_obj.bootstrap(method=bootstrap, n_rep_boot=n_rep_boot)
        res_dict['boot_coef' + bootstrap] = dml_plr_obj.boot_coef
        res_dict['boot_t_stat' + bootstrap] = dml_plr_obj.boot_t_stat
        res_dict['boot_coef' + bootstrap + '_manual'] = boot_theta
        res_dict['boot_t_stat' + bootstrap + '_manual'] = boot_t_stat

    return res_dict
@pytest.mark.ci
def test_dml_plr_coef(dml_plr_fixture):
    """The package coefficient must match the manual implementation."""
    actual = dml_plr_fixture['coef']
    expected = dml_plr_fixture['coef_manual']
    assert math.isclose(actual, expected, rel_tol=1e-9, abs_tol=1e-4)
@pytest.mark.ci
def test_dml_plr_se(dml_plr_fixture):
    """The package standard error must match the manual implementation."""
    actual = dml_plr_fixture['se']
    expected = dml_plr_fixture['se_manual']
    assert math.isclose(actual, expected, rel_tol=1e-9, abs_tol=1e-4)
@pytest.mark.ci
def test_dml_plr_boot(dml_plr_fixture):
    """Bootstrap coefficients and t-statistics must match the manual results."""
    for bootstrap in dml_plr_fixture['boot_methods']:
        for key in ('boot_coef', 'boot_t_stat'):
            package_res = dml_plr_fixture[key + bootstrap]
            manual_res = dml_plr_fixture[key + bootstrap + '_manual']
            assert np.allclose(package_res, manual_res, rtol=1e-9, atol=1e-4)
|
1607448
|
from sly import Lexer, Parser
from os import path
from defs import *
global curr_file, curr_text, error_occurred, curr_namespace, reserved_names
def syntax_error(line, msg=''):
    """Report a syntax error at ``line`` of the current file and mark the run failed."""
    global error_occurred
    error_occurred = True
    print()
    location = f"Syntax Error in file {curr_file} line {line}"
    if msg:
        print(location + ":")
        print(f"  {msg}")
    else:
        print(location)
def syntax_warning(line, is_error, msg=''):
    """Report a syntax warning; when ``is_error`` it also marks the run failed."""
    if is_error:
        global error_occurred
        error_occurred = True
    print()
    header = f"Syntax Warning in file {curr_file}"
    if line is not None:
        header += f" line {line}"
    if msg:
        print(header + ":")
        print(f"  {msg}")
    else:
        print(header)
class FJLexer(Lexer):
    """sly lexer for the FlipJump assembly language.

    Note: token names used below (DEF, REP, ...) are injected into the class
    namespace by sly's metaclass machinery, not regular Python bindings.
    """
    tokens = {NS, DEF, REP,
              WFLIP, SEGMENT, RESERVE,
              ID, DOT_ID, NUMBER, STRING,
              LE, GE, EQ, NEQ,
              SHL, SHR,
              NL, SC}

    literals = {'=', '+', '-', '*', '/', '%',
                '(', ')',
                '$',
                '^', '|', '&',
                '?', ':',
                '<', '>',
                '"',
                '#',
                '{', '}',
                "@", ","}

    # '//' line comments are skipped entirely
    ignore_ending_comment = r'//.*'

    # Tokens
    DOT_ID = dot_id_re
    ID = id_re
    NUMBER = number_re
    STRING = string_re

    # keywords are matched as IDs and remapped to their own token types
    ID[r'def'] = DEF
    ID[r'rep'] = REP
    ID[r'ns'] = NS
    ID[r'wflip'] = WFLIP
    ID[r'segment'] = SEGMENT
    ID[r'reserve'] = RESERVE

    global reserved_names
    reserved_names = {DEF, REP, NS, WFLIP, SEGMENT, RESERVE}

    LE = "<="
    GE = ">="
    EQ = "=="
    NEQ = "!="

    SHL = r'<<'
    SHR = r'>>'

    # Punctuations
    NL = r'[\r\n]'
    SC = r';'

    ignore = ' \t'

    def NUMBER(self, t):
        # parse char literal ('x'), hex (0x..), binary (0b..) or decimal integer
        n = t.value
        if len(n) >= 2:
            if n[0] == "'":
                t.value = handle_char(n[1:-1])[0]
            elif n[1] in 'xX':
                t.value = int(n, 16)
            elif n[1] in 'bB':
                t.value = int(n, 2)
            else:
                t.value = int(n)
        else:
            t.value = int(t.value)
        return t

    def STRING(self, t):
        # pack the string's characters into a single little-endian integer
        # (8 bits per character, escape sequences handled by handle_char)
        chars = []
        s = t.value[1:-1]
        i = 0
        while i < len(s):
            val, length = handle_char(s[i:])
            chars.append(val)
            i += length
        t.value = sum(val << (i*8) for i, val in enumerate(chars))
        return t

    def NL(self, t):
        # track line numbers for error reporting
        self.lineno += 1
        return t

    def error(self, t):
        # report the offending character, flag the run as failed, and resync
        global error_occurred
        error_occurred = True
        print()
        print(f"Lexing Error in file {curr_file} line {self.lineno}: {t.value[0]}")
        self.index += 1
class FJParser(Parser):
    """sly grammar for the FlipJump assembly language.

    Builds ``self.macros``: a mapping from (macro_name, arity) to
    [(params, local_params), statements, (file, line, namespace)].
    Expression rules build Expr trees; errors go through the module-level
    syntax_error/syntax_warning helpers.
    """
    tokens = FJLexer.tokens
    # TODO add Unary Minus (-), Unary Not (~). Maybe add logical or (||) and logical and (&&). Maybe handle power (**).
    precedence = (
        ('right', '?', ':'),
        ('left', '|'),
        ('left', '^'),
        ('nonassoc', '<', '>', LE, GE),
        ('left', EQ, NEQ),
        ('left', '&'),
        ('left', SHL, SHR),
        ('left', '+', '-'),
        ('left', '*', '/', '%'),
        ('right', '#'),
    )
    # debugfile = 'src/parser.out'

    def __init__(self, w, warning_as_errors, verbose=False):
        # w: machine word width, exposed to programs as the constant 'w'
        self.verbose = verbose
        self.defs = {'w': Expr(w)}
        self.warning_as_errors = warning_as_errors

        # [(params, quiet_params), statements, (curr_file, p.lineno, ns_name)]
        self.macros = {main_macro: [([], []), [], (None, None, '')]}

    def check_macro_name(self, name, line):
        # reject reserved keywords and duplicate (name, arity) declarations
        global reserved_names
        base_name = self.ns_to_base_name(name[0])
        if base_name in reserved_names:
            syntax_error(line, f'macro name can\'t be {name[0]} ({base_name} is a reserved name)!')
        if name in self.macros:
            _, _, (other_file, other_line, _) = self.macros[name]
            syntax_error(line, f'macro {name} is declared twice! '
                               f'also declared in file {other_file} (line {other_line}).')

    def check_params(self, ids, macro_name, line):
        # a parameter may neither shadow a constant nor repeat within the macro
        for param_id in ids:
            if param_id in self.defs:
                syntax_error(line, f'parameter {param_id} in macro {macro_name[0]}({macro_name[1]}) '
                                   f'is also defined as a constant variable (with value {self.defs[param_id]})')
        for i1 in range(len(ids)):
            for i2 in range(i1):
                if ids[i1] == ids[i2]:
                    syntax_error(line, f'parameter {ids[i1]} in macro {macro_name[0]}({macro_name[1]}) '
                                       f'is declared twice!')

    def check_label_usage(self, labels_used, labels_declared, params, externs, global_labels, line, macro_name):
        # NOTE(review): the second and third messages below look swapped —
        # the globals∩params case prints "extern labels ..." and the
        # externs∩params case prints "global labels ...". Strings are kept
        # byte-identical in this documentation pass; confirm intent upstream.
        if global_labels & externs:
            syntax_error(line, f"In macro {macro_name[0]}({macro_name[1]}):  "
                               f"extern labels can't be global labels: " + ', '.join(global_labels & externs))
        if global_labels & params:
            syntax_error(line, f"In macro {macro_name[0]}({macro_name[1]}):  "
                               f"extern labels can't be regular labels: " + ', '.join(global_labels & params))
        if externs & params:
            syntax_error(line, f"In macro {macro_name[0]}({macro_name[1]}):  "
                               f"global labels can't be regular labels: " + ', '.join(externs & params))

        # params.update([self.ns_full_name(p) for p in params])
        # externs = set([self.ns_full_name(p) for p in externs])
        # globals.update([self.ns_full_name(p) for p in globals])

        # warn on declared-but-unused parameters
        unused_labels = params - labels_used.union(self.ns_to_base_name(label) for label in labels_declared)
        if unused_labels:
            syntax_warning(line, self.warning_as_errors,
                           f"In macro {macro_name[0]}({macro_name[1]}):  "
                           f"unused labels: {', '.join(unused_labels)}.")

        # warn on labels declared that are neither extern nor parameters
        bad_declarations = labels_declared - set(self.ns_full_name(label) for label in externs.union(params))
        if bad_declarations:
            syntax_warning(line, self.warning_as_errors,
                           f"In macro {macro_name[0]}({macro_name[1]}):  "
                           f"Declared a not extern/parameter label: {', '.join(bad_declarations)}.")

        # warn on labels used without any visible declaration ('$' is builtin)
        bad_uses = labels_used - global_labels - params - set(labels_declared) - {'$'}
        if bad_uses:
            # print('\nused:', labels_used, 'globals:', globals, 'params:', params)
            syntax_warning(line, self.warning_as_errors,
                           f"In macro {macro_name[0]}({macro_name[1]}):  "
                           f"Used a not global/parameter/declared-extern label: {', '.join(bad_uses)}.")

    @staticmethod
    def ns_name():
        # current namespace path, dot-joined
        return '.'.join(curr_namespace)

    @staticmethod
    def ns_full_name(base_name):
        # fully-qualified name of base_name inside the current namespace
        return '.'.join(curr_namespace + [base_name])

    @staticmethod
    def dot_id_to_ns_full_name(p):
        # resolve a leading-dots identifier relative to the current namespace:
        # each extra dot climbs one namespace level
        base_name = p.DOT_ID
        without_dots = base_name.lstrip('.')
        if len(without_dots) == len(base_name):
            return base_name
        num_of_dots = len(base_name) - len(without_dots)
        if num_of_dots - 1 > len(curr_namespace):
            syntax_error(p.lineno, f'Used more leading dots than current namespace depth '
                                   f'({num_of_dots}-1 > {len(curr_namespace)})')
        return '.'.join(curr_namespace[:len(curr_namespace)-(num_of_dots-1)] + [without_dots])

    @staticmethod
    def ns_to_base_name(name):
        return name.split('.')[-1]

    def error(self, token):
        global error_occurred
        error_occurred = True
        print()
        print(f'Syntax Error in file {curr_file} line {token.lineno}, token=("{token.type}", {token.value})')

    @_('definable_line_statements')
    def program(self, p):
        # top-level statements become the body of the implicit main macro
        ops = p.definable_line_statements
        self.macros[main_macro][1] = ops

        # labels_used, labels_declared = all_used_labels(ops)
        # bad_uses = labels_used - set(labels_declared) - {'$'}
        # if bad_uses:
        #     syntax_warning(None, self.warning_as_errors,
        #                    f"Outside of macros:  "
        #                    f"Used a not declared label: {', '.join(bad_uses)}.")

    @_('definable_line_statements NL definable_line_statement')
    def definable_line_statements(self, p):
        if p.definable_line_statement:
            return p.definable_line_statements + p.definable_line_statement
        return p.definable_line_statements

    @_('definable_line_statement')
    def definable_line_statements(self, p):
        if p.definable_line_statement:
            return p.definable_line_statement
        return []

    @_('')
    def empty(self, p):
        return None

    @_('line_statement')
    def definable_line_statement(self, p):
        return p.line_statement

    @_('macro_def')
    def definable_line_statement(self, p):
        # macro definitions are stored in self.macros; they emit no ops inline
        return []

    @_('NS ID')
    def namespace(self, p):
        curr_namespace.append(p.ID)

    @_('namespace "{" NL definable_line_statements NL "}"')
    def definable_line_statement(self, p):
        curr_namespace.pop()
        return p.definable_line_statements

    @_('DEF ID macro_params "{" NL line_statements NL "}"')
    def macro_def(self, p):
        params, local_params, global_params, extern_params = p.macro_params
        name = (self.ns_full_name(p.ID), len(params))
        self.check_macro_name(name, p.lineno)
        self.check_params(params + local_params, name, p.lineno)
        ops = p.line_statements
        self.check_label_usage(*all_used_labels(ops), set(params + local_params), set(extern_params),
                               set(global_params), p.lineno, name)
        self.macros[name] = [(params, local_params), ops, (curr_file, p.lineno, self.ns_name())]
        return None

    @_('empty')
    def maybe_ids(self, p):
        return []

    @_('IDs')
    def maybe_ids(self, p):
        return p.IDs

    @_('empty')
    def maybe_local_ids(self, p):
        return []

    @_('"@" IDs')
    def maybe_local_ids(self, p):
        return p.IDs

    @_('empty')
    def maybe_extern_ids(self, p):
        return []

    @_('empty')
    def maybe_global_ids(self, p):
        return []

    @_('"<" ids')
    def maybe_global_ids(self, p):
        return p.ids

    @_('">" IDs')
    def maybe_extern_ids(self, p):
        return p.IDs

    @_('maybe_ids maybe_local_ids maybe_global_ids maybe_extern_ids')
    def macro_params(self, p):
        return p.maybe_ids, p.maybe_local_ids, p.maybe_global_ids, p.maybe_extern_ids

    @_('IDs "," ID')
    def IDs(self, p):
        return p.IDs + [p.ID]

    @_('ID')
    def IDs(self, p):
        return [p.ID]

    @_('line_statements NL line_statement')
    def line_statements(self, p):
        return p.line_statements + p.line_statement

    @_('line_statement')
    def line_statements(self, p):
        return p.line_statement

    # @_('empty')
    # def line_statements(self, p):
    #     return []

    @_('empty')
    def line_statement(self, p):
        return []

    @_('statement')
    def line_statement(self, p):
        if p.statement:
            return [p.statement]
        return []

    @_('label statement')
    def line_statement(self, p):
        if p.statement:
            return [p.label, p.statement]
        return [p.label]

    @_('label')
    def line_statement(self, p):
        return [p.label]

    @_('ID ":"')
    def label(self, p):
        return Op(OpType.Label, (self.ns_full_name(p.ID),), curr_file, p.lineno)

    @_('expr SC')
    def statement(self, p):
        # flip-only statement: jump defaults to the next address
        return Op(OpType.FlipJump, (p.expr, next_address()), curr_file, p.lineno)

    @_('expr SC expr')
    def statement(self, p):
        return Op(OpType.FlipJump, (p.expr0, p.expr1), curr_file, p.lineno)

    @_('SC expr')
    def statement(self, p):
        # jump-only statement: flip address defaults to 0
        return Op(OpType.FlipJump, (Expr(0), p.expr), curr_file, p.lineno)

    @_('SC')
    def statement(self, p):
        # bare ';' — no flip, fall through to the next address
        return Op(OpType.FlipJump, (Expr(0), next_address()), curr_file, p.lineno)

    @_('ID')
    def id(self, p):
        return p.ID, p.lineno

    @_('DOT_ID')
    def id(self, p):
        return self.dot_id_to_ns_full_name(p), p.lineno

    @_('ids "," id')
    def ids(self, p):
        return p.ids + [p.id[0]]

    @_('id')
    def ids(self, p):
        return [p.id[0]]

    @_('id')
    def statement(self, p):
        # macro invocation with no arguments
        macro_name, lineno = p.id
        return Op(OpType.Macro, ((macro_name, 0), ), curr_file, lineno)

    @_('id expressions')
    def statement(self, p):
        # macro invocation with arguments; arity is part of the macro key
        macro_name, lineno = p.id
        return Op(OpType.Macro, ((macro_name, len(p.expressions)), *p.expressions), curr_file, lineno)

    @_('WFLIP expr "," expr')
    def statement(self, p):
        return Op(OpType.WordFlip, (p.expr0, p.expr1, next_address()), curr_file, p.lineno)

    @_('WFLIP expr "," expr "," expr')
    def statement(self, p):
        return Op(OpType.WordFlip, (p.expr0, p.expr1, p.expr2), curr_file, p.lineno)

    @_('ID "=" expr')
    def statement(self, p):
        # constant definition: must evaluate immediately with the known defs
        name = self.ns_full_name(p.ID)
        if name in self.defs:
            syntax_error(p.lineno, f'Can\'t redeclare the variable "{name}".')
        if not p.expr.eval(self.defs, curr_file, p.lineno):
            self.defs[name] = p.expr
            return None
        syntax_error(p.lineno, f'Can\'t evaluate expression:  {str(p.expr)}.')

    @_('REP "(" expr "," ID ")" id')
    def statement(self, p):
        # rep(count, iter_var) macro_call — repeat a zero-arg macro invocation
        macro_name, lineno = p.id
        return Op(OpType.Rep,
                  (p.expr, p.ID, Op(OpType.Macro, ((macro_name, 0), ), curr_file, lineno)),
                  curr_file, p.lineno)

    @_('REP "(" expr "," ID ")" id expressions')
    def statement(self, p):
        exps = p.expressions
        macro_name, lineno = p.id
        return Op(OpType.Rep,
                  (p.expr, p.ID, Op(OpType.Macro, ((macro_name, len(exps)), *exps), curr_file, lineno)),
                  curr_file, p.lineno)

    @_('SEGMENT expr')
    def statement(self, p):
        return Op(OpType.Segment, (p.expr,), curr_file, p.lineno)

    @_('RESERVE expr')
    def statement(self, p):
        return Op(OpType.Reserve, (p.expr,), curr_file, p.lineno)

    @_('expressions "," expr')
    def expressions(self, p):
        return p.expressions + [p.expr]

    @_('expr')
    def expressions(self, p):
        return [p.expr]

    @_('_expr')
    def expr(self, p):
        return p._expr[0]

    # NOTE(review): in the arithmetic rules below, `a is int` compares the
    # operand against the `int` TYPE OBJECT itself, which is never True for
    # Expr operands — so the constant-folding branches never fire and every
    # result is built as a deferred Expr tree (later folded via Expr.eval).
    # Behavior is left unchanged here; confirm against the Expr/defs module
    # before changing these checks.
    @_('_expr "+" _expr')
    def _expr(self, p):
        a, b = p._expr0[0], p._expr1[0]
        if a is int and b is int:
            return Expr(a + b), p.lineno
        return Expr(('+', (a, b))), p.lineno

    @_('_expr "-" _expr')
    def _expr(self, p):
        a, b = p._expr0[0], p._expr1[0]
        if a is int and b is int:
            return Expr(a - b), p.lineno
        return Expr(('-', (a, b))), p.lineno

    @_('_expr "*" _expr')
    def _expr(self, p):
        a, b = p._expr0[0], p._expr1[0]
        if a is int and b is int:
            return Expr(a * b), p.lineno
        return Expr(('*', (a, b))), p.lineno

    @_('"#" _expr')
    def _expr(self, p):
        # '#' is bit-length
        a = p._expr[0]
        if a is int:
            return Expr(a.bit_length()), p.lineno
        return Expr(('#', (a,))), p.lineno

    @_('_expr "/" _expr')
    def _expr(self, p):
        a, b = p._expr0[0], p._expr1[0]
        if a is int and b is int:
            return Expr(a // b), p.lineno
        return Expr(('/', (a, b))), p.lineno

    @_('_expr "%" _expr')
    def _expr(self, p):
        a, b = p._expr0[0], p._expr1[0]
        if a is int and b is int:
            return Expr(a % b), p.lineno
        return Expr(('%', (a, b))), p.lineno

    @_('_expr SHL _expr')
    def _expr(self, p):
        a, b = p._expr0[0], p._expr1[0]
        if a is int and b is int:
            return Expr(a << b), p.lineno
        return Expr(('<<', (a, b))), p.lineno

    @_('_expr SHR _expr')
    def _expr(self, p):
        a, b = p._expr0[0], p._expr1[0]
        if a is int and b is int:
            return Expr(a >> b), p.lineno
        return Expr(('>>', (a, b))), p.lineno

    @_('_expr "^" _expr')
    def _expr(self, p):
        a, b = p._expr0[0], p._expr1[0]
        if a is int and b is int:
            return Expr(a ^ b), p.lineno
        return Expr(('^', (a, b))), p.lineno

    @_('_expr "|" _expr')
    def _expr(self, p):
        a, b = p._expr0[0], p._expr1[0]
        if a is int and b is int:
            return Expr(a | b), p.lineno
        return Expr(('|', (a, b))), p.lineno

    @_('_expr "&" _expr')
    def _expr(self, p):
        a, b = p._expr0[0], p._expr1[0]
        if a is int and b is int:
            return Expr(a & b), p.lineno
        return Expr(('&', (a, b))), p.lineno

    @_('_expr "?" _expr ":" _expr')
    def _expr(self, p):
        a, b, c = p._expr0[0], p._expr1[0], p._expr2[0]
        if a is int and b is int and c is int:
            return Expr(b if a else c), p.lineno
        return Expr(('?:', (a, b, c))), p.lineno

    @_('_expr "<" _expr')
    def _expr(self, p):
        a, b = p._expr0[0], p._expr1[0]
        if a is int and b is int:
            return Expr(1 if a < b else 0), p.lineno
        return Expr(('<', (a, b))), p.lineno

    @_('_expr ">" _expr')
    def _expr(self, p):
        a, b = p._expr0[0], p._expr1[0]
        if a is int and b is int:
            return Expr(1 if a > b else 0), p.lineno
        return Expr(('>', (a, b))), p.lineno

    @_('_expr LE _expr')
    def _expr(self, p):
        a, b = p._expr0[0], p._expr1[0]
        if a is int and b is int:
            return Expr(1 if a <= b else 0), p.lineno
        return Expr(('<=', (a, b))), p.lineno

    @_('_expr GE _expr')
    def _expr(self, p):
        a, b = p._expr0[0], p._expr1[0]
        if a is int and b is int:
            return Expr(1 if a >= b else 0), p.lineno
        return Expr(('>=', (a, b))), p.lineno

    @_('_expr EQ _expr')
    def _expr(self, p):
        a, b = p._expr0[0], p._expr1[0]
        if a is int and b is int:
            return Expr(1 if a == b else 0), p.lineno
        return Expr(('==', (a, b))), p.lineno

    @_('_expr NEQ _expr')
    def _expr(self, p):
        a, b = p._expr0[0], p._expr1[0]
        if a is int and b is int:
            return Expr(1 if a != b else 0), p.lineno
        return Expr(('!=', (a, b))), p.lineno

    @_('"(" _expr ")"')
    def _expr(self, p):
        return p._expr

    @_('NUMBER')
    def _expr(self, p):
        return Expr(p.NUMBER), p.lineno

    @_('STRING')
    def _expr(self, p):
        return Expr(p.STRING), p.lineno

    @_('"$"')
    def _expr(self, p):
        # '$' evaluates to the address of the next instruction
        return next_address(), p.lineno

    @_('id')
    def _expr(self, p):
        # constant lookup falls back to a symbolic (label) expression
        id_str, lineno = p.id
        if id_str in self.defs:
            return self.defs[id_str], lineno
        return Expr(id_str), lineno
def exit_if_errors():
    """Abort assembly with FJParsingException if any error was reported so far."""
    if error_occurred:
        raise FJParsingException(f'Errors found in file {curr_file}. Assembly stopped.')
def parse_macro_tree(input_files, w, warning_as_errors, verbose=False):
    """Lex and parse the given FlipJump source files into a macro dictionary.

    Parameters:
        input_files: iterable of source file paths, parsed in order.
        w: machine word width (exposed to programs as the 'w' constant).
        warning_as_errors: treat syntax warnings as errors.
        verbose: forwarded to FJParser.

    Returns the parser's macros mapping.
    Raises FJParsingException on a missing file or on any lex/parse error.
    """
    global curr_file, curr_text, error_occurred, curr_namespace
    error_occurred = False

    lexer = FJLexer()
    parser = FJParser(w, warning_as_errors, verbose=verbose)
    for curr_file in input_files:
        if not path.isfile(curr_file):
            raise FJParsingException(f"No such file {curr_file}.")
        # bug fix: the handle returned by open() was previously never closed;
        # a context manager releases it deterministically
        with open(curr_file, 'r') as source_file:
            curr_text = source_file.read()
        curr_namespace = []

        lex_res = lexer.tokenize(curr_text)
        exit_if_errors()

        parser.parse(lex_res)
        exit_if_errors()

    return parser.macros
|
1607470
|
from .start_encoding_trimming import StartEncodingTrimming
from .scheduling import Scheduling
from .tweaks import Tweaks
from .start_encoding_request import StartEncodingRequest
from .manifests import StartManifest, VodStartManifest, VodDashStartManifest, VodHlsStartManifest
|
1607488
|
import pycomicvine
import datetime
from pycomicvine.tests.utils import *
pycomicvine.api_key = "476302e62d7e8f8f140182e36aebff2fe935514b"
class TestLocationsList(ListResourceTestCase):
    """Tests for the pycomicvine Locations list resource."""

    def test_get_id_and_name(self):
        """Each element of Locations should expose 'id' and 'name' as a Location."""
        self.get_id_and_name_test(
            pycomicvine.Locations,
            pycomicvine.Location
        )
class TestLocationAttributes(SingularResourceTestCase):
    """Attribute-level tests for a single pycomicvine Location resource."""

    def setUp(self):
        self.get_random_instance(pycomicvine.Locations)

    def test_search(self):
        """A Location should be retrievable through the search API."""
        self.search_test(pycomicvine.Locations, pycomicvine.Location)

    def test_get_all_attributes(self):
        """Every documented Location attribute should have the expected type."""
        location = self.get_sample(pycomicvine.Location)
        # get_sample may yield nothing; in that case there is nothing to check
        # (idiom fix: `is None` instead of `!= None`)
        if location is None:
            return
        # table-driven check replaces 17 copy-pasted assertIsInstance calls
        expected_types = {
            'aliases': (type(None), list),
            'api_detail_url': (type(None), str),
            'count_of_issue_appearances': int,
            'date_added': datetime.datetime,
            'date_last_updated': datetime.datetime,
            'deck': (type(None), str),
            'description': (type(None), str),
            'first_appeared_in_issue': (type(None), pycomicvine.Issue),
            'id': int,
            'image': (type(None), dict),
            'issue_credits': pycomicvine.Issues,
            'movies': pycomicvine.Movies,
            'name': (type(None), str),
            'site_detail_url': (type(None), str),
            'start_year': (type(None), int),
            'story_arc_credits': pycomicvine.StoryArcs,
            'volume_credits': pycomicvine.Volumes,
        }
        for attribute_name, allowed_types in expected_types.items():
            self.assertIsInstance(
                getattr(location, attribute_name),
                allowed_types,
                msg="unexpected type for attribute %r" % attribute_name
            )
|
1607498
|
import unittest
import numpy as np
import torch
from pytorch_adapt.layers import MMDBatchedLoss, MMDLoss
from pytorch_adapt.layers.utils import get_kernel_scales
from .. import TEST_DEVICE
# from https://github.com/thuml/Xlearn/blob/master/pytorch/src/loss.py
def guassian_kernel(source, target, kernel_mul=2.0, kernel_num=5, fix_sigma=None):
    """Multi-bandwidth Gaussian (RBF) kernel matrix over the stacked rows
    of ``source`` and ``target``, averaged over ``kernel_num`` bandwidths.

    If ``fix_sigma`` is falsy, the base bandwidth is estimated as the mean
    pairwise squared distance.
    """
    num_rows = int(source.size()[0]) + int(target.size()[0])
    combined = torch.cat([source, target], dim=0)
    rows, feats = int(combined.size(0)), int(combined.size(1))
    left = combined.unsqueeze(0).expand(rows, rows, feats)
    right = combined.unsqueeze(1).expand(rows, rows, feats)
    sq_dists = ((left - right) ** 2).sum(2)
    if fix_sigma:
        base_bandwidth = fix_sigma
    else:
        base_bandwidth = torch.sum(sq_dists.data) / (num_rows ** 2 - num_rows)
    base_bandwidth /= kernel_mul ** (kernel_num // 2)
    # geometric ladder of bandwidths around the base value
    kernels = [
        torch.exp(-sq_dists / (base_bandwidth * (kernel_mul ** i)))
        for i in range(kernel_num)
    ]
    return sum(kernels) / len(kernels)
# from https://github.com/thuml/Xlearn/blob/master/pytorch/src/loss.py
def DAN(source, target, kernel_mul=2.0, kernel_num=5, fix_sigma=None):
    """Quadratic-time MMD estimate between equally sized source/target batches."""
    batch_size = int(source.size()[0])
    kernels = guassian_kernel(
        source,
        target,
        kernel_mul=kernel_mul,
        kernel_num=kernel_num,
        fix_sigma=fix_sigma,
    )
    # within-domain similarity: upper triangles of the source/source and
    # target/target blocks of the joint kernel matrix
    intra = 0
    for i in range(batch_size):
        for j in range(i + 1, batch_size):
            intra += kernels[i, j] + kernels[i + batch_size, j + batch_size]
    intra = intra / float(batch_size * (batch_size - 1) / 2)
    # cross-domain similarity: every source/target pair, both orientations
    cross = 0
    for i in range(batch_size):
        for j in range(batch_size):
            cross -= kernels[i, j + batch_size] + kernels[j, i + batch_size]
    cross = cross / float(batch_size * batch_size)
    return intra + cross
# modified version of above function
def DAN_diff_size(source, target, kernel_mul=2.0, kernel_num=5, fix_sigma=None):
    """Quadratic-time MMD estimate allowing different source/target batch sizes."""
    s_size = int(source.size()[0])
    t_size = int(target.size()[0])
    kernels = guassian_kernel(
        source,
        target,
        kernel_mul=kernel_mul,
        kernel_num=kernel_num,
        fix_sigma=fix_sigma,
    )
    # source/source term (upper triangle of the top-left block)
    ss_term = 0
    for i in range(s_size):
        for j in range(i + 1, s_size):
            ss_term += kernels[i, j]
    ss_term = ss_term / float(s_size * (s_size - 1) / 2)
    # target/target term (upper triangle, offset into the joint matrix)
    tt_term = 0
    for i in range(t_size):
        for j in range(i + 1, t_size):
            tt_term += kernels[s_size + i, s_size + j]
    tt_term = tt_term / float(t_size * (t_size - 1) / 2)
    # cross term, counted twice with a negative sign
    st_term = 0
    for i in range(s_size):
        for j in range(t_size):
            st_term -= kernels[i, s_size + j]
    st_term = 2 * st_term / float(s_size * t_size)
    return ss_term + tt_term + st_term
# from https://github.com/thuml/Xlearn/blob/master/pytorch/src/loss.py
def DAN_Linear(source, target, kernel_mul=2.0, kernel_num=5, fix_sigma=None):
    """Linear-time MMD estimate over disjoint (2i, 2i+1) sample pairs.

    Follows https://arxiv.org/pdf/1502.02791.pdf and
    https://jmlr.csail.mit.edu/papers/volume13/gretton12a/gretton12a.pdf
    """
    batch_size = int(source.size()[0])
    kernels = guassian_kernel(
        source,
        target,
        kernel_mul=kernel_mul,
        kernel_num=kernel_num,
        fix_sigma=fix_sigma,
    )
    # Linear version: each disjoint pair contributes two within-domain terms
    # and two cross-domain terms
    loss = 0
    for pair in range(0, batch_size // 2):
        s1, s2 = (2 * pair), (2 * pair) + 1
        t1, t2 = s1 + batch_size, s2 + batch_size
        loss += kernels[s1, s2] + kernels[t1, t2]
        loss -= kernels[s1, t2] + kernels[s2, t1]
    return 2 * loss / float(batch_size)
def get_bandwidth(s, original_bandwidth):
    """Return the given bandwidth, or the median squared pairwise distance of ``s``."""
    if original_bandwidth is not None:
        return original_bandwidth
    return torch.median(torch.cdist(s, s) ** 2)
class TestMMDLossWithOriginal(unittest.TestCase):
    """Checks MMDLoss/MMDBatchedLoss against the reference Xlearn implementations above."""

    def test_mmd_loss_with_original(self):
        torch.manual_seed(49)
        for kernel_num in [1, 5]:
            half = kernel_num // 2
            kernel_scales = get_kernel_scales(
                low=-half, high=half, num_kernels=kernel_num
            )
            # equal and unequal source/target batch sizes
            for s_size, t_size in [(128, 128), (128, 70), (70, 128)]:
                s = torch.randn(s_size, 32, device=TEST_DEVICE)
                t = torch.randn(t_size, 32, device=TEST_DEVICE) + 0.5
                same_size = s_size == t_size
                for mmd_type in ["linear", "quadratic"]:
                    for original_bandwidth in [None, 0.5, 1]:
                        loss_fn = MMDLoss(
                            kernel_scales=kernel_scales,
                            mmd_type=mmd_type,
                            bandwidth=original_bandwidth,
                        )
                        # the linear estimator requires equal batch sizes
                        if not same_size and mmd_type == "linear":
                            with self.assertRaises(ValueError):
                                loss = loss_fn(s, t)
                            continue
                        loss = loss_fn(s, t)
                        # pick the matching reference implementation
                        if same_size:
                            correct_fn = {"linear": DAN_Linear, "quadratic": DAN}[
                                mmd_type
                            ]
                        else:
                            correct_fn = DAN_diff_size
                        correct = correct_fn(
                            s,
                            t,
                            kernel_num=kernel_num,
                            fix_sigma=get_bandwidth(s, original_bandwidth),
                        )
                        self.assertTrue(
                            np.isclose(loss.item(), correct.item(), rtol=1e-4)
                        )
                        # the batched variant only supports the quadratic estimator
                        if mmd_type == "quadratic":
                            for batch_size in [2, 10, 31, 32, 128, 199]:
                                loss_fn = MMDBatchedLoss(
                                    kernel_scales=kernel_scales,
                                    mmd_type=mmd_type,
                                    bandwidth=original_bandwidth,
                                    batch_size=batch_size,
                                )
                                loss = loss_fn(s, t)
                                # looser tolerance when the bandwidth is estimated
                                rtol = 1e-4 if original_bandwidth is not None else 1e-2
                                self.assertTrue(
                                    np.isclose(loss.item(), correct.item(), rtol=rtol)
                                )
|
1607532
|
import hashlib
from pathlib import Path
try:
import importlib.resources as resources
except ImportError:
# python < 3.7
import importlib_resources as resources # type: ignore[no-redef]
from docutils import nodes
from docutils.parsers.rst import directives
from sphinx.application import Sphinx
from sphinx.environment import BuildEnvironment
from sphinx.transforms import SphinxTransform
from sphinx.util.docutils import SphinxDirective
from . import compiled as static_module
from .article_info import setup_article_info
from .badges_buttons import setup_badges_and_buttons
from .cards import setup_cards
from .dropdown import setup_dropdown
from .grids import setup_grids
from .icons import setup_icons
from .shared import PassthroughTextElement, create_component
from .tabs import setup_tabs
def setup_extension(app: Sphinx) -> None:
    """Set up the sphinx extension."""
    # asset copying happens once per build; link updates after env changes
    app.connect("builder-inited", update_css_js)
    app.connect("env-updated", update_css_links)

    # we override container html visitors, to stop the default behaviour
    # of adding the `container` class to all nodes.container
    app.add_node(
        nodes.container, override=True, html=(visit_container, depart_container)
    )

    # PassthroughTextElement renders nothing of its own in any builder
    app.add_node(
        PassthroughTextElement,
        html=(visit_depart_null, visit_depart_null),
        latex=(visit_depart_null, visit_depart_null),
        text=(visit_depart_null, visit_depart_null),
        man=(visit_depart_null, visit_depart_null),
        texinfo=(visit_depart_null, visit_depart_null),
    )

    app.add_directive(
        "div", Div, override=True
    )  # override sphinx-panels implementation

    app.add_transform(AddFirstTitleCss)

    # per-component registration
    setup_badges_and_buttons(app)
    setup_cards(app)
    setup_grids(app)
    setup_dropdown(app)
    setup_icons(app)
    setup_tabs(app)
    setup_article_info(app)
def update_css_js(app: Sphinx):
    """Copy the extension's CSS and JS into the build's static directory.

    Runs on ``builder-inited``.  The CSS file name embeds an MD5 digest of
    its content, so a content change yields a new file name; in that case
    stale ``*.css`` files are removed and ``sphinx_design_css_changed`` is
    flagged so ``update_css_links`` can mark every document outdated.
    """
    # reset changed identifier
    app.env.sphinx_design_css_changed = False
    # setup up new static path in output dir
    static_path = (Path(app.outdir) / "_sphinx_design_static").absolute()
    static_existed = static_path.exists()
    static_path.mkdir(exist_ok=True)
    app.config.html_static_path.append(str(static_path))
    # Copy JS to the build directory.
    js_path = static_path / "design-tabs.js"
    app.add_js_file(js_path.name)
    if not js_path.exists():
        content = resources.read_text(static_module, "sd_tabs.js")
        # FIX: write with an explicit encoding (was platform-default),
        # matching the CSS write below
        js_path.write_text(content, encoding="utf8")
    # Read the css content and hash it
    content = resources.read_text(static_module, "style.min.css")
    # FIX: renamed from ``hash`` to avoid shadowing the builtin
    digest = hashlib.md5(content.encode("utf8")).hexdigest()
    # Write the css file
    css_path = static_path / f"design-style.{digest}.min.css"
    app.add_css_file(css_path.name)
    if css_path.exists():
        return
    if static_existed:
        # content changed since a previous build: drop stale stylesheets and
        # signal that all HTML files need their links re-written
        app.env.sphinx_design_css_changed = True
        for path in static_path.glob("*.css"):
            path.unlink()
    css_path.write_text(content, encoding="utf8")
def update_css_links(app: Sphinx, env: BuildEnvironment):
    """Mark every document outdated when the hashed CSS file name changed.

    Returning all docnames from ``env-updated`` forces a re-write of each
    HTML file so it links the new stylesheet.
    """
    if not env.sphinx_design_css_changed:
        return None
    return list(env.all_docs)
def visit_container(self, node: nodes.Node):
    """Open a ``<div>`` for a container node, omitting the ``container``
    CSS class for nodes flagged ``is_div`` (those plain divs must not pick
    up theme styling for containers)."""
    attrs = {}
    if "style" in node:
        attrs["style"] = node["style"]
    classes = "docutils" if node.get("is_div", False) else "docutils container"
    self.body.append(self.starttag(node, "div", CLASS=classes, **attrs))
def depart_container(self, node: nodes.Node):
    # close the <div> opened in visit_container
    self.body.append("</div>\n")
def visit_depart_null(self, node: nodes.Element) -> None:
    """Shared no-op visit/depart handler; the node contributes no output
    of its own."""
class Div(SphinxDirective):
    """Same as the ``container`` directive, but does not add the ``container`` class in HTML outputs,
    which can interfere with Bootstrap CSS.
    """

    optional_arguments = 1  # css classes
    final_argument_whitespace = True
    option_spec = {"style": directives.unchanged, "name": directives.unchanged}
    has_content = True

    def run(self):
        """Build a ``div`` component node and nested-parse the content into it."""
        classes = []
        if self.arguments:
            # only class_option can raise here, so guard just that call
            try:
                classes = directives.class_option(self.arguments[0])
            except ValueError:
                raise self.error(
                    'Invalid class attribute value for "%s" directive: "%s".'
                    % (self.name, self.arguments[0])
                )
        node = create_component("div", rawtext="\n".join(self.content), classes=classes)
        if "style" in self.options:
            node["style"] = self.options["style"]
        self.set_source_info(node)
        self.add_name(node)
        if self.content:
            self.state.nested_parse(self.content, self.content_offset, node)
        return [node]
class AddFirstTitleCss(SphinxTransform):
    """Add the ``sd-d-none`` CSS class to the first section's title.

    Applies only when the document's docinfo carries an ``sd_hide_title``
    field, letting authors hide the rendered top title.
    """
    default_priority = 699  # priority main
    def apply(self):
        # Look for an ``sd_hide_title`` field; only the first docinfo node
        # is inspected (note the unconditional break below).
        hide = False
        for docinfo in self.document.traverse(nodes.docinfo):
            for name in docinfo.traverse(nodes.field_name):
                if name.astext() == "sd_hide_title":
                    hide = True
                    break
            break
        if not hide:
            return
        # Tag the first section whose first child is a title, then stop.
        for section in self.document.traverse(nodes.section):
            if isinstance(section.children[0], nodes.title):
                # NOTE(review): ``in`` on a docutils Element tests attribute
                # presence; "classes" appears to always exist on titles, so
                # the else branch looks defensive — confirm.
                if "classes" in section.children[0]:
                    section.children[0]["classes"].append("sd-d-none")
                else:
                    section.children[0]["classes"] = ["sd-d-none"]
                break
|
1607535
|
from typing import Dict, List, Optional
from rotkehlchen.fval import FVal
def assert_serialized_lists_equal(
        a: List,
        b: List,
        max_length_to_check: Optional[int] = None,
        ignore_keys: Optional[List] = None,
        length_list_keymap: Optional[Dict] = None,
        max_diff: str = "1e-6",
) -> None:
    """Compare two lists of serialized dicts entry by entry.

    Args:
        a, b: the lists to compare.
        max_length_to_check: if given, only that many leading entries are
            compared and the lists may differ in length.
        ignore_keys: dict keys to skip when comparing entries.
        length_list_keymap: mapping of key -> max list length to check,
            forwarded to the per-entry dict comparison.
        max_diff: maximum allowed difference when comparing numbers.
    """
    # BUGFIX: these two messages were missing the f-prefix and printed the
    # literal text "{type(a)}" instead of the actual types.
    assert isinstance(a, list), f"Expected 2 lists. Comparing {type(a)} to {type(b)}"
    assert isinstance(b, list), f"Expected 2 lists. Comparing {type(a)} to {type(b)}"
    if not max_length_to_check:
        assert len(a) == len(b), f"Lists don't have the same key length {len(a)} != {len(b)}"
    for idx, a_entry in enumerate(a):
        if max_length_to_check and idx + 1 > max_length_to_check:
            break
        try:
            # fast path: equal entries need no detailed comparison
            if a_entry == b[idx]:
                continue
        except NotImplementedError:
            pass
        assert_serialized_dicts_equal(
            a=a_entry,
            b=b[idx],
            ignore_keys=ignore_keys,
            length_list_keymap=length_list_keymap,
            max_diff=max_diff,
        )
def assert_serialized_dicts_equal(
        a: Dict,
        b: Dict,
        ignore_keys: Optional[List] = None,
        length_list_keymap: Optional[Dict] = None,
        max_diff: str = "1e-6",
        same_key_length=True,
) -> None:
    """Compares serialized dicts so that serialized numbers can be compared for equality.

    Args:
        a, b: the dicts to compare.
        ignore_keys: keys to skip entirely.
        length_list_keymap: mapping of key -> max list length to check for
            list-valued entries.
        max_diff: maximum allowed difference when comparing numbers.
        same_key_length: if True, both dicts must have the same key count.
    """
    if same_key_length:
        assert len(a) == len(b), f"Dicts don't have the same key length {len(a)} != {len(b)}"
    for a_key, a_val in a.items():
        if ignore_keys and a_key in ignore_keys:
            continue
        if isinstance(a_val, FVal):
            try:
                compare_val = FVal(b[a_key])
            except ValueError:
                raise AssertionError(
                    f'Could not turn {a_key} amount {b[a_key]} into an FVal',
                ) from None
            msg = f"{a_key} amount doesn't match. {compare_val} != {a_val}"
            assert compare_val.is_close(a_val, max_diff=max_diff), msg
        elif isinstance(b[a_key], FVal):
            try:
                compare_val = FVal(a_val)
            except ValueError:
                raise AssertionError(
                    f'Could not turn {a_key} value {a[a_key]} into an FVal',
                ) from None
            msg = f"{a_key} doesn't match. {compare_val} != {b[a_key]}"
            assert compare_val.is_close(b[a_key], max_diff=max_diff), msg
        elif isinstance(a_val, str) and isinstance(b[a_key], str):
            if a_val == b[a_key]:
                continue
            if '%' in a_val:
                # percentage strings cannot become numbers; fail directly
                raise AssertionError(f'{a_val} != {b[a_key]}')
            # if strings are not equal, try to turn them to Fvals
            try:
                afval = FVal(a_val)
            except ValueError:
                raise AssertionError(
                    f'After string comparison failure could not turn {a_val} to a number '
                    f'to compare with {b[a_key]}',
                ) from None
            try:
                bfval = FVal(b[a_key])
            except ValueError:
                raise AssertionError(
                    # BUGFIX: this message previously said it compared b's
                    # value with itself instead of with a's value
                    f'After string comparison failure could not turn {b[a_key]} to a number '
                    f'to compare with {a_val}',
                ) from None
            msg = f"{a_key} doesn't match. {afval} != {bfval}"
            assert afval.is_close(bfval, max_diff=max_diff), msg
        elif isinstance(a_val, dict) and isinstance(b[a_key], dict):
            assert_serialized_dicts_equal(
                a=a_val,
                b=b[a_key],
                ignore_keys=ignore_keys,
                length_list_keymap=length_list_keymap,
                max_diff=max_diff, same_key_length=same_key_length,
            )
        elif isinstance(a_val, list):
            max_length_to_check = None
            if length_list_keymap and a_key in length_list_keymap:
                max_length_to_check = length_list_keymap[a_key]
            assert_serialized_lists_equal(
                a=a_val,
                b=b[a_key],
                max_length_to_check=max_length_to_check,
                ignore_keys=ignore_keys,
                length_list_keymap=length_list_keymap,
                # BUGFIX: max_diff was not forwarded before, so nested lists
                # were always compared with the default tolerance
                max_diff=max_diff,
            )
        else:
            assert a_val == b[a_key], f"{a_key} doesn't match. {a_val} != {b[a_key]}"
|
1607536
|
import json
from google.protobuf import json_format
import tinkoff_voicekit_client.speech_utils.apis.tinkoff.cloud.longrunning.v1.longrunning_pb2 as pb_operations
def get_proto_operation_request(request: dict):
    """Convert a JSON-style dict into a ``GetOperationRequest`` proto message."""
    return json_format.Parse(json.dumps(request), pb_operations.GetOperationRequest())
def get_proto_delete_operation_request(operation_filter: dict):
    """Build a ``DeleteOperationRequest`` proto from a JSON-style filter dict."""
    delete_request = pb_operations.DeleteOperationRequest()
    delete_request.filter.CopyFrom(
        json_format.Parse(json.dumps(operation_filter), pb_operations.OperationFilter())
    )
    return delete_request
def get_proto_list_operations_request(request: dict):
    """Convert a JSON-style dict into a ``ListOperationsRequest`` proto message."""
    return json_format.Parse(json.dumps(request), pb_operations.ListOperationsRequest())
def get_proto_cancel_operation_request(operation_filter: dict):
    """Build a ``CancelOperationRequest`` proto from a JSON-style filter dict."""
    cancel_request = pb_operations.CancelOperationRequest()
    cancel_request.filter.CopyFrom(
        json_format.Parse(json.dumps(operation_filter), pb_operations.OperationFilter())
    )
    return cancel_request
def get_proto_watch_operations_request(request: dict):
    """Convert a JSON-style dict into a ``WatchOperationsRequest`` proto message."""
    return json_format.Parse(json.dumps(request), pb_operations.WatchOperationsRequest())
def get_proto_wait_operation_request(request: dict):
    """Convert a JSON-style dict into a ``WaitOperationRequest`` proto message."""
    return json_format.Parse(json.dumps(request), pb_operations.WaitOperationRequest())
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.