hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2d15090890b5f8c08420208655d93187f3798213 | 17,364 | py | Python | python/craftassist/mc_memory.py | anoushkt/craftassist | c200af65e52e800f0f0cc540fe836b644383349d | [
"MIT"
] | 1 | 2021-04-13T06:01:03.000Z | 2021-04-13T06:01:03.000Z | python/craftassist/mc_memory.py | anoushkt/craftassist | c200af65e52e800f0f0cc540fe836b644383349d | [
"MIT"
] | null | null | null | python/craftassist/mc_memory.py | anoushkt/craftassist | c200af65e52e800f0f0cc540fe836b644383349d | [
"MIT"
] | null | null | null | """
Copyright (c) Facebook, Inc. and its affiliates.
"""
import os
import random
import sys
from typing import Optional, List
from build_utils import npy_to_blocks_list
import minecraft_specs
import dance
PERCEPTION_RANGE = 64
BASE_AGENT_ROOT = os.path.join(os.path.dirname(__file__), "..")
sys.path.append(BASE_AGENT_ROOT)
from base_agent.util import XYZ, Block
from base_agent.sql_memory import AgentMemory
from base_agent.memory_nodes import ( # noqa
TaskNode,
PlayerNode,
MemoryNode,
ChatNode,
TimeNode,
LocationNode,
SetNode,
ReferenceObjectNode,
)
from mc_memory_nodes import ( # noqa
DanceNode,
VoxelObjectNode,
BlockObjectNode,
BlockTypeNode,
MobNode,
ItemStackNode,
MobTypeNode,
InstSegNode,
SchematicNode,
NODELIST,
)
from word_maps import SPAWN_OBJECTS
BASE_AGENT_ROOT = os.path.join(os.path.dirname(__file__), "..")
SCHEMAS = [
os.path.join(os.path.join(BASE_AGENT_ROOT, "base_agent"), "base_memory_schema.sql"),
os.path.join(os.path.dirname(__file__), "mc_memory_schema.sql"),
]
SCHEMA = os.path.join(os.path.dirname(__file__), "memory_schema.sql")
THROTTLING_TICK_UPPER_LIMIT = 64
THROTTLING_TICK_LOWER_LIMIT = 4
# TODO "snapshot" memory type (giving a what mob/object/player looked like at a fixed timestamp)
# TODO when a memory is removed, its last state should be snapshotted to prevent tag weirdness
class MCAgentMemory(AgentMemory):
    """Minecraft-specific agent memory: a sqlite-backed store of reference
    objects (voxels, block objects, mobs, item stacks), schematics, block
    types, mob types and dances, layered on the generic AgentMemory."""

    def __init__(
        self,
        db_file=":memory:",
        db_log_path=None,
        schema_paths=SCHEMAS,
        load_minecraft_specs=True,
        load_block_types=True,
        load_mob_types=True,
        preception_range=PERCEPTION_RANGE,  # NOTE(review): likely a typo for "perception_range"; kept for caller compatibility
    ):
        # Base class opens the sqlite db, applies the schemas and registers
        # the memory-node classes in NODELIST.
        super(MCAgentMemory, self).__init__(
            db_file=db_file, schema_paths=schema_paths, db_log_path=db_log_path, nodelist=NODELIST
        )
        self.banned_default_behaviors = []  # FIXME: move into triple store?
        self._safe_pickle_saved_attrs = {}
        # Pre-populate memory with premade schematics, block types and mob
        # types (each loader is individually switchable for tests).
        self._load_schematics(load_minecraft_specs)
        self._load_block_types(load_block_types)
        self._load_mob_types(load_mob_types)
        self.dances = {}
        dance.add_default_dances(self)
        self.perception_range = preception_range
########################
### ReferenceObjects ###
########################
def get_entity_by_eid(self, eid) -> Optional["ReferenceObjectNode"]:
r = self._db_read_one("SELECT uuid FROM ReferenceObjects WHERE eid=?", eid)
if r:
return self.get_mem_by_id(r[0])
else:
return None
###############
### Voxels ###
###############
# count updates are done by hand to not need to count all voxels every time
# use these functions, don't add/delete/modify voxels with raw sql
def update_voxel_count(self, memid, dn):
c = self._db_read_one("SELECT voxel_count FROM ReferenceObjects WHERE uuid=?", memid)
if c:
count = c[0] + dn
self._db_write("UPDATE ReferenceObjects SET voxel_count=? WHERE uuid=?", count, memid)
return count
else:
return None
    def update_voxel_mean(self, memid, count, loc):
        """ update the x, y, z entries in ReferenceObjects
        to account for the removal or addition of a block.
        count should be the number of voxels *after* addition if >0
        and -count the number *after* removal if count < 0
        count should not be 0- handle that outside
        """
        old_loc = self._db_read_one("SELECT x, y, z FROM ReferenceObjects WHERE uuid=?", memid)
        # TODO warn/error if no such memory?
        assert count != 0
        if old_loc:
            # Incremental mean update: new_mean = a * old_mean + b * loc.
            # Addition (count = n after adding):   a = (n-1)/n, b = 1/n.
            # Removal (count = -n, n remaining):   a = (n+1)/n, b = -1/n,
            # i.e. new = ((n+1)*old - loc) / n, undoing loc's contribution.
            b = 1 / count
            if count > 0:
                a = (count - 1) / count
            else:
                a = (1 - count) / (-count)
            new_loc = (
                old_loc[0] * a + loc[0] * b,
                old_loc[1] * a + loc[1] * b,
                old_loc[2] * a + loc[2] * b,
            )
            self._db_write(
                "UPDATE ReferenceObjects SET x=?, y=?, z=? WHERE uuid=?", *new_loc, memid
            )
            return new_loc
    def remove_voxel(self, x, y, z, ref_type):
        """Delete the voxel at (x, y, z) with the given ref_type, keeping the
        owning reference object's cached count and mean location consistent."""
        memids = self._db_read_one(
            "SELECT uuid FROM VoxelObjects WHERE x=? and y=? and z=? and ref_type=?",
            x,
            y,
            z,
            ref_type,
        )
        if not memids:
            # TODO error/warning?
            return
        memid = memids[0]
        # Decrement the cached count; if voxels remain, shift the mean to
        # remove this voxel's contribution (c is the count *after* removal).
        c = self.update_voxel_count(memid, -1)
        if c > 0:
            self.update_voxel_mean(memid, c, (x, y, z))
        self._db_write(
            "DELETE FROM VoxelObjects WHERE x=? AND y=? AND z=? and ref_type=?", x, y, z, ref_type
        )
        if c == 0:
            # if not self.memory.check_memid_exists(memid, "VoxelObjects"):
            # object is gone now. TODO be more careful here... maybe want to keep some records?
            self.remove_memid_triple(memid, role="both")
    # this only upserts to the same ref_type- if the voxel is occupied by
    # a different ref_type it will insert a new ref object even if update is True
    def upsert_block(
        self,
        block: Block,
        memid: str,
        ref_type: str,
        player_placed: bool = False,
        agent_placed: bool = False,
        update: bool = True,  # if update is set to False, forces a write
    ):
        """Insert or update one voxel of ``block`` = ((x, y, z), (bid, meta))
        as belonging to the reference object ``memid``, maintaining the
        object's cached voxel count and mean location."""
        ((x, y, z), (b, m)) = block
        old_memid = self._db_read_one(
            "SELECT uuid FROM VoxelObjects WHERE x=? AND y=? AND z=? and ref_type=?",
            x,
            y,
            z,
            ref_type,
        )
        # add to voxel count
        new_count = self.update_voxel_count(memid, 1)
        assert new_count
        self.update_voxel_mean(memid, new_count, (x, y, z))
        if old_memid and update:
            # NOTE(review): old_memid is a row tuple from _db_read_one, so
            # `old_memid != memid` (tuple vs str) looks always-true; verify
            # whether the UPDATE branch below is ever reached.
            if old_memid != memid:
                # Voxel belonged to a different object: detach it first, then
                # insert a fresh row for this object.
                self.remove_voxel(x, y, z, ref_type)
                cmd = "INSERT INTO VoxelObjects (uuid, bid, meta, updated, player_placed, agent_placed, ref_type, x, y, z) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
            else:
                cmd = "UPDATE VoxelObjects SET uuid=?, bid=?, meta=?, updated=?, player_placed=?, agent_placed=? WHERE ref_type=? AND x=? AND y=? AND z=?"
        else:
            cmd = "INSERT INTO VoxelObjects (uuid, bid, meta, updated, player_placed, agent_placed, ref_type, x, y, z) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
        self._db_write(
            cmd, memid, b, m, self.get_time(), player_placed, agent_placed, ref_type, x, y, z
        )
    ######################
    ###   BlockObjects ###
    ######################
    # rename this... "object" is bad name
    def get_object_by_id(self, memid: str, table="BlockObjects") -> "VoxelObjectNode":
        """Wrap a stored memid in the node class matching its table."""
        if table == "BlockObjects":
            return BlockObjectNode(self, memid)
        elif table == "InstSeg":
            return InstSegNode(self, memid)
        else:
            raise ValueError("Bad table={}".format(table))

    # and rename this
    def get_object_info_by_xyz(self, xyz: XYZ, ref_type: str, just_memid=True):
        """Return the memids (default) or (memid, bid, meta) rows of voxel
        objects of ``ref_type`` occupying coordinate ``xyz``."""
        r = self._db_read(
            "SELECT DISTINCT(uuid), bid, meta FROM VoxelObjects WHERE x=? AND y=? AND z=? and ref_type=?",
            *xyz,
            ref_type
        )
        if just_memid:
            return [memid for (memid, bid, meta) in r]
        else:
            return r

    # WARNING: these do not search archived/snapshotted block objects
    # TODO replace all these all through the codebase with generic counterparts
    def get_block_object_ids_by_xyz(self, xyz: XYZ) -> List[str]:
        """Memids of BlockObjects with a voxel at ``xyz``."""
        return self.get_object_info_by_xyz(xyz, "BlockObjects")

    def get_block_object_by_xyz(self, xyz: XYZ) -> Optional["VoxelObjectNode"]:
        """First BlockObject node occupying ``xyz``, or None."""
        memids = self.get_block_object_ids_by_xyz(xyz)
        if len(memids) == 0:
            return None
        return self.get_block_object_by_id(memids[0])

    def get_block_object_by_id(self, memid: str) -> "VoxelObjectNode":
        """BlockObject node for a known memid."""
        return self.get_object_by_id(memid, "BlockObjects")

    def tag_block_object_from_schematic(self, block_object_memid: str, schematic_memid: str):
        # Record provenance: this block object was built from that schematic.
        self.add_triple(subj=block_object_memid, pred_text="_from_schematic", obj=schematic_memid)
    #####################
    ### InstSegObject ###
    #####################
    def get_instseg_object_ids_by_xyz(self, xyz: XYZ) -> List[str]:
        """Ids of instance-segmentation objects with a voxel at ``xyz``.

        NOTE(review): unlike get_object_info_by_xyz, the raw rows from
        _db_read are returned, so this yields 1-tuples rather than bare
        memid strings as the annotation suggests — verify against callers.
        """
        r = self._db_read(
            'SELECT DISTINCT(uuid) FROM VoxelObjects WHERE ref_type="inst_seg" AND x=? AND y=? AND z=?',
            *xyz
        )
        return r
    ####################
    ###  Schematics  ###
    ####################
    def get_schematic_by_id(self, memid: str) -> "SchematicNode":
        """Wrap a schematic memid in its node class."""
        return SchematicNode(self, memid)

    def get_schematic_by_property_name(self, name, table_name) -> Optional["SchematicNode"]:
        """Find a schematic via a type table (e.g. "BlockTypes", "MobTypes"):
        first map ``name`` to type_names whose triples carry it as a
        has_name/has_tag, then pick (uniformly at random) a schematic tagged
        with one of those type_names.  Returns None if nothing matches."""
        # Step 1: property name -> matching type names in table_name.
        r = self._db_read(
            """
                SELECT {}.type_name
                FROM {} INNER JOIN Triples as T ON T.subj={}.uuid
                WHERE (T.pred_text="has_name" OR T.pred_text="has_tag") AND T.obj_text=?""".format(
                table_name, table_name, table_name
            ),
            name,
        )
        if not r:
            return None
        result = []  # noqa
        for e in r:
            schematic_name = e[0]
            # Step 2: type name -> schematics carrying that name/tag.
            schematics = self._db_read(
                """
                    SELECT Schematics.uuid
                    FROM Schematics INNER JOIN Triples as T ON T.subj=Schematics.uuid
                    WHERE (T.pred_text="has_name" OR T.pred_text="has_tag") AND T.obj_text=?""",
                schematic_name,
            )
            if schematics:
                result.extend(schematics)
        if result:
            return self.get_schematic_by_id(random.choice(result)[0])
        else:
            return None

    def get_mob_schematic_by_name(self, name: str) -> Optional["SchematicNode"]:
        """Schematic for a spawnable mob, looked up through the MobTypes table."""
        return self.get_schematic_by_property_name(name, "MobTypes")
    # TODO call this in get_schematic_by_property_name
    def get_schematic_by_name(self, name: str) -> Optional["SchematicNode"]:
        """Schematic whose triples carry ``name`` as a has_name/has_tag;
        falls back to a BlockTypes property lookup when no direct match."""
        r = self._db_read(
            """
                SELECT Schematics.uuid
                FROM Schematics INNER JOIN Triples as T ON T.subj=Schematics.uuid
                WHERE (T.pred_text="has_name" OR T.pred_text="has_tag") AND T.obj_text=?""",
            name,
        )
        if r:  # if multiple exist, then randomly select one
            return self.get_schematic_by_id(random.choice(r)[0])
        # if no schematic with exact matched name exists, search for a schematic
        # with matched property name instead
        else:
            return self.get_schematic_by_property_name(name, "BlockTypes")
    def convert_block_object_to_schematic(self, block_object_memid: str) -> "SchematicNode":
        """Snapshot a built BlockObject as a reusable SchematicNode.

        Conversion is memoized through a "_source_block_object" triple: if the
        block object was converted before, the existing schematic is returned
        instead of creating a new one.
        """
        r = self._db_read_one(
            'SELECT subj FROM Triples WHERE pred_text="_source_block_object" AND obj=?',
            block_object_memid,
        )
        if r:
            # previously converted; return old schematic
            return self.get_schematic_by_id(r[0])
        else:
            # get up to date BlockObject
            block_object = self.get_block_object_by_id(block_object_memid)
            # create schematic
            memid = SchematicNode.create(self, list(block_object.blocks.items()))
            # add triple linking the object to the schematic
            self.add_triple(subj=memid, pred_text="_source_block_object", obj=block_object.memid)
            return self.get_schematic_by_id(memid)
def _load_schematics(self, load_minecraft_specs=True):
if load_minecraft_specs:
for premem in minecraft_specs.get_schematics():
npy = premem["schematic"]
memid = SchematicNode.create(self, npy_to_blocks_list(npy))
if premem.get("name"):
for n in premem["name"]:
self.add_triple(subj=memid, pred_text="has_name", obj_text=n)
self.add_triple(subj=memid, pred_text="has_tag", obj_text=n)
if premem.get("tags"):
for t in premem["tags"]:
self.add_triple(subj=memid, pred_text="has_tag", obj_text=t)
# load single blocks as schematics
bid_to_name = minecraft_specs.get_block_data()["bid_to_name"]
for (d, m), name in bid_to_name.items():
if d >= 256:
continue
memid = SchematicNode.create(self, [((0, 0, 0), (d, m))])
self.add_triple(subj=memid, pred_text="has_name", obj_text=name)
if "block" in name:
self.add_triple(
subj=memid, pred_text="has_name", obj_text=name.strip("block").strip()
)
# tag single blocks with 'block'
self.add_triple(subj=memid, pred_text="has_name", obj_text="block")
def _load_block_types(
self,
load_block_types=True,
load_color=True,
load_block_property=True,
simple_color=False,
load_material=True,
):
if not load_block_types:
return
bid_to_name = minecraft_specs.get_block_data()["bid_to_name"]
color_data = minecraft_specs.get_colour_data()
if simple_color:
name_to_colors = color_data["name_to_simple_colors"]
else:
name_to_colors = color_data["name_to_colors"]
block_property_data = minecraft_specs.get_block_property_data()
block_name_to_properties = block_property_data["name_to_properties"]
for (b, m), type_name in bid_to_name.items():
if b >= 256:
continue
memid = BlockTypeNode.create(self, type_name, (b, m))
self.add_triple(subj=memid, pred_text="has_name", obj_text=type_name)
if "block" in type_name:
self.add_triple(
subj=memid, pred_text="has_name", obj_text=type_name.strip("block").strip()
)
if load_color:
if name_to_colors.get(type_name) is not None:
for color in name_to_colors[type_name]:
self.add_triple(subj=memid, pred_text="has_colour", obj_text=color)
if load_block_property:
if block_name_to_properties.get(type_name) is not None:
for property in block_name_to_properties[type_name]:
self.add_triple(subj_text=memid, pred_text="has_name", obj_text=property)
def _load_mob_types(self, load_mob_types=True):
if not load_mob_types:
return
mob_property_data = minecraft_specs.get_mob_property_data()
mob_name_to_properties = mob_property_data["name_to_properties"]
for (name, m) in SPAWN_OBJECTS.items():
type_name = "spawn " + name
# load single mob as schematics
memid = SchematicNode.create(self, [((0, 0, 0), (383, m))])
self.add_triple(subj=memid, pred_text="has_name", obj_text=type_name)
if "block" in type_name:
self.add_triple(
subj=memid, pred_text="has_name", obj_text=type_name.strip("block").strip()
)
# then load properties
memid = MobTypeNode.create(self, type_name, (383, m))
self.add_triple(subj=memid, pred_text="has_name", obj_text=type_name)
if mob_name_to_properties.get(type_name) is not None:
for property in mob_name_to_properties[type_name]:
self.add_triple(subj=memid, pred_text="has_name", obj_text=property)
##############
### Mobs ###
##############
def set_mob_position(self, mob) -> "MobNode":
r = self._db_read_one("SELECT uuid FROM ReferenceObjects WHERE eid=?", mob.entityId)
if r:
self._db_write(
"UPDATE ReferenceObjects SET x=?, y=?, z=?, yaw=?, pitch=? WHERE eid=?",
mob.pos.x,
mob.pos.y,
mob.pos.z,
mob.look.yaw,
mob.look.pitch,
mob.entityId,
)
(memid,) = r
else:
memid = MobNode.create(self, mob)
return self.get_mem_by_id(memid)
####################
### ItemStacks ###
####################
def set_item_stack_position(self, item_stack) -> "ItemStackNode":
r = self._db_read_one("SELECT uuid FROM ReferenceObjects WHERE eid=?", item_stack.entityId)
if r:
self._db_write(
"UPDATE ReferenceObjects SET x=?, y=?, z=? WHERE eid=?",
item_stack.pos.x,
item_stack.pos.y,
item_stack.pos.z,
item_stack.entityId,
)
(memid,) = r
else:
memid = ItemStackNode.create(self, item_stack)
return self.get_mem_by_id(memid)
###############
### Dances ##
###############
def add_dance(self, dance_fn, name=None, tags=[]):
# a dance is movement determined as a sequence of steps, rather than by its destination
return DanceNode.create(self, dance_fn, name=name, tags=tags)
| 37.023454 | 161 | 0.580396 |
a6d1e27896fa24a1e2aab87d859a10cb3c0ad47d | 229 | py | Python | examples/include_no_requirements.py | f0k/tqdm | ebda7120efef500f6a7c8dea80e5ea6f47090803 | [
"MIT"
] | 1 | 2019-02-01T04:33:53.000Z | 2019-02-01T04:33:53.000Z | examples/include_no_requirements.py | f0k/tqdm | ebda7120efef500f6a7c8dea80e5ea6f47090803 | [
"MIT"
] | 6 | 2019-09-10T14:12:32.000Z | 2020-02-26T15:50:56.000Z | examples/include_no_requirements.py | f0k/tqdm | ebda7120efef500f6a7c8dea80e5ea6f47090803 | [
"MIT"
] | 1 | 2020-06-27T07:34:32.000Z | 2020-06-27T07:34:32.000Z | # How to import tqdm without enforcing it as a dependency
try:
from tqdm import tqdm
except ImportError:
def tqdm(*args, **kwargs):
if args:
return args[0]
return kwargs.get('iterable', None)
| 22.9 | 57 | 0.633188 |
2c2f7f5e0dd541f194455f77238fcea00268fdcb | 297 | py | Python | dump/callback_compile.py | conductiveIT/pymunk-1 | 61de8b2e652503356ac14a2d648cc11aa6a8070f | [
"MIT"
] | 670 | 2015-01-01T19:10:15.000Z | 2022-03-29T23:05:47.000Z | dump/callback_compile.py | reter695/pymunk | 9e9d3bf14cd57f61006588b1c56fefc21b453733 | [
"MIT"
] | 122 | 2015-01-02T19:06:19.000Z | 2022-03-20T19:44:25.000Z | dump/callback_compile.py | reter695/pymunk | 9e9d3bf14cd57f61006588b1c56fefc21b453733 | [
"MIT"
] | 222 | 2015-01-28T03:34:52.000Z | 2022-03-27T06:44:52.000Z | import os
os.system("gcc -mrtd -O3 -shared -fno-omit-frame-pointer -std=gnu99 -fPIC -ffast-math -m32 -c callback.c")
#os.system("gcc -O1 -std=gnu99 -fPIC -ffast-math -c callback.c")
#os.system("gcc -dynamiclib ex.o -o libex.dylib")
os.system("gcc -shared -mrtd callback.o -o callback.dll")
| 42.428571 | 107 | 0.690236 |
d8a747e460706c434dfedfb56f1adc27134824c4 | 4,678 | py | Python | models/Mobile_Net_V3_SSD.py | chuliuT/MobileNet_V3_SSD.pytorch | f28e38fd197e8eff72346dc70f2b8430111edac1 | [
"MIT"
] | 4 | 2020-04-01T02:17:04.000Z | 2022-01-16T07:17:28.000Z | models/Mobile_Net_V3_SSD.py | chuliuT/MobileNet_V3_SSD.pytorch | f28e38fd197e8eff72346dc70f2b8430111edac1 | [
"MIT"
] | null | null | null | models/Mobile_Net_V3_SSD.py | chuliuT/MobileNet_V3_SSD.pytorch | f28e38fd197e8eff72346dc70f2b8430111edac1 | [
"MIT"
] | 5 | 2020-01-07T03:52:46.000Z | 2020-05-17T12:36:11.000Z | import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from layers import *
from data import coco,voc
import torchvision.transforms as transforms
import torchvision.models as models
import torch.backends.cudnn as cudnn
import os
from models.mobile_net_v3 import mobilenetv3
class MobileNetV3(nn.Module):
    """SSD object detector built on a MobileNetV3 backbone.

    Args:
        phase: "train" or "test"; in "test" mode the forward pass decodes
            and NMS-filters detections, otherwise raw head outputs are
            returned for the loss.
        size: input image size (e.g. 300).
        head: (loc_layers, conf_layers) tuple as built by ``multibox``.
        num_classes: number of classes incl. background (21 selects the
            VOC config, anything else COCO).
    """

    def __init__(self, phase, size, head, num_classes):
        super(MobileNetV3, self).__init__()
        self.phase = phase
        self.num_classes = num_classes
        # Pick the dataset config: voc when num_classes == 21, else coco.
        self.cfg = (coco, voc)[num_classes == 21]
        self.priorbox = PriorBox(self.cfg)
        # NOTE(review): Variable(..., volatile=True) is deprecated in modern
        # PyTorch (use torch.no_grad()); kept as-is pending a torch-version
        # check.
        self.priors = Variable(self.priorbox.forward(), volatile=True)
        self.size = size
        self.base = mobilenetv3()
        # Per-feature-map localization and classification heads.
        self.loc = nn.ModuleList(head[0])
        self.conf = nn.ModuleList(head[1])
        if self.phase == 'test':
            self.softmax = nn.Softmax(dim=-1)
            self.detect = Detect(num_classes, 0, 200, 0.01, 0.45)

    def forward(self, x):
        loc = list()
        conf = list()
        # Six feature maps from the backbone, shallowest to deepest.
        f1, f2, f3, f4, f5, f6 = self.base(x)
        sources = [f1, f2, f3, f4, f5, f6]
        # apply multibox head to source layers
        for (x, l, c) in zip(sources, self.loc, self.conf):
            loc.append(l(x).permute(0, 2, 3, 1).contiguous())
            conf.append(c(x).permute(0, 2, 3, 1).contiguous())
        # Flatten per-map predictions and concatenate across maps.
        loc = torch.cat([o.view(o.size(0), -1) for o in loc], 1)
        conf = torch.cat([o.view(o.size(0), -1) for o in conf], 1)
        if self.phase == "test":
            output = self.detect(
                loc.view(loc.size(0), -1, 4),  # loc preds
                self.softmax(conf.view(conf.size(0), -1,
                                       self.num_classes)),  # conf preds
                self.priors.type(type(x.data))  # default boxes
            )
        else:
            output = (
                loc.view(loc.size(0), -1, 4),
                conf.view(conf.size(0), -1, self.num_classes),
            )
        return output

    def load_weights(self, base_file):
        """Load a saved state dict from a .pth/.pkl file into this model."""
        other, ext = os.path.splitext(base_file)
        # BUGFIX: the original test was `ext == '.pkl' or '.pth'`, which is
        # always truthy because the non-empty string '.pth' is itself true;
        # check membership so unsupported extensions are actually rejected.
        if ext in ('.pkl', '.pth'):
            print('Loading weights into state dict...')
            self.load_state_dict(torch.load(base_file))
            print('Finished!')
        else:
            print('Sorry only .pth and .pkl files supported.')
def multibox(num_classes=21):
    """Build the SSD prediction heads for the six MobileNetV3 feature maps.

    Args:
        num_classes: number of classes including background.

    Returns:
        (loc_layers, conf_layers): two parallel lists of 3x3 Conv2d layers;
        for each feature map, the loc head predicts ``anchors * 4`` box
        offsets and the conf head ``anchors * num_classes`` class scores.
    """
    anchor_num = [4, 6, 6, 6, 4, 4]  # number of boxes per feature map location
    # Output channel widths of the six backbone feature maps, in order.
    in_channels = [40, 112, 160, 160, 160, 160]
    loc_layers = []
    conf_layers = []
    # One loc/conf pair per feature map (replaces six copy-pasted blocks).
    for channels, anchors in zip(in_channels, anchor_num):
        loc_layers += [nn.Conv2d(channels, anchors * 4, kernel_size=3, padding=1)]
        conf_layers += [nn.Conv2d(channels, anchors * num_classes, kernel_size=3, padding=1)]
    return (loc_layers, conf_layers)
def build_net(phase, size=300, num_classes=21):
    """Construct a MobileNetV3-SSD network for the 'train' or 'test' phase;
    any other phase prints an error and returns None."""
    if phase not in ("test", "train"):
        print("Error: Phase not recognized")
        return
    head = multibox(num_classes)
    return MobileNetV3(phase, size, head, num_classes)
if __name__ == '__main__':
    # Smoke test: time repeated forward passes of the detector on one random
    # 300x300 RGB input, printing the per-iteration detection time.
    x = torch.randn(1, 3, 300, 300)
    net = build_net('test')
    net.eval()
    from utils.timer import Timer
    _t = {'im_detect': Timer()}
    for i in range(300):
        _t['im_detect'].tic()
        net.forward(x)
        detect_time = _t['im_detect'].toc()
        print(detect_time)
| 39.310924 | 90 | 0.53292 |
28789848c68e98effdcbe2d8048d8d093e895ba4 | 833 | py | Python | rpi/cartographer.py | RimorRes/MDRS_Rover | a46f9a2482febd588d621a51a784bee648198c4d | [
"MIT"
] | 7 | 2021-09-18T11:18:53.000Z | 2022-02-17T21:57:58.000Z | rpi/cartographer.py | RimorRes/MDRS_Rover | a46f9a2482febd588d621a51a784bee648198c4d | [
"MIT"
] | 8 | 2021-10-29T19:27:00.000Z | 2022-02-04T15:32:03.000Z | rpi/cartographer.py | RimorRes/MDRS_Rover | a46f9a2482febd588d621a51a784bee648198c4d | [
"MIT"
] | 3 | 2021-09-24T13:56:33.000Z | 2021-11-27T08:54:10.000Z | import csv
from datetime import datetime
def create_csv_file(file_path):
    """Create (or truncate) the CSV output file for recorded sensor data and
    write the header row.

    args:
        file_path: Path object (or str) of the file to create
    """
    headers = (
        'Time',
        'Temperature (C)',
        'Pressure (hPa)',
        'Humidity (%)',
        'CPU Temp (C)',
    )
    with open(file_path, 'w', newline='') as out:
        csv.writer(out).writerow(headers)
def store_to_csv(file_path, values):
    """Append one timestamped row of readings to the CSV file.

    args:
        file_path: Path object (or str) of an existing CSV file
        values: list of readings, written after the current timestamp
    """
    with open(file_path, 'a', newline='') as f:
        writer = csv.writer(f)
        # Prepend the capture time so every row is timestamped.
        writer.writerow([datetime.now()] + values)
        # (Removed the redundant f.close(): the `with` block already closes
        # the file on exit.)
| 20.317073 | 50 | 0.492197 |
1bbe9e1e355c98df6bf2773fc93b77ff0859c23a | 1,820 | py | Python | ATLASEventService.py | nikmagini/pilot | 1c84fcf6f7e43b669d2357326cdbe06382ac829f | [
"Apache-2.0"
] | 13 | 2015-02-19T17:17:10.000Z | 2021-12-22T06:48:02.000Z | ATLASEventService.py | nikmagini/pilot | 1c84fcf6f7e43b669d2357326cdbe06382ac829f | [
"Apache-2.0"
] | 85 | 2015-01-06T15:01:51.000Z | 2018-11-29T09:03:35.000Z | ATLASEventService.py | nikmagini/pilot | 1c84fcf6f7e43b669d2357326cdbe06382ac829f | [
"Apache-2.0"
] | 22 | 2015-06-09T12:08:29.000Z | 2018-11-20T10:07:01.000Z | # Class definition:
# ATLASEventService
# This class is inheriting from the main Event Service class
# Instances are generated with ExperimentFactory via pUtil::getEventService()
# Implemented as a singleton class
# http://stackoverflow.com/questions/42558/python-and-the-singleton-pattern
# PanDA Pilot modules
from EventService import EventService
from PilotErrors import PilotErrors
from pUtil import tolog # Dump to pilot log
from pUtil import readpar # Used to read values from the schedconfig DB (queuedata)
# Standard python modules
import os
import re
import commands
class ATLASEventService(EventService):
    """ATLAS-specific Event Service, implemented as a singleton."""

    # private data members
    __experiment = "ATLAS"             # String defining the experiment
    __instance = None                  # Boolean used by subclasses to become a Singleton
    __error = PilotErrors()            # PilotErrors object

    def __init__(self):
        """ Default initialization """
        # e.g. self.__errorLabel = errorLabel
        pass

    def __new__(cls, *args, **kwargs):
        """ Override the __new__ method to make the class a singleton """
        if not cls.__instance:
            # BUGFIX: object.__new__ must not be passed the constructor
            # arguments — doing so raises TypeError on Python 3 (and was
            # already deprecated on late Python 2).
            cls.__instance = super(ATLASEventService, cls).__new__(cls)
        return cls.__instance

    def getEventService(self):
        """ Return a string with the experiment name """
        return self.__experiment

    def processEvents(self):
        """ Process events from Event Server """
        # In development: the idea is that the pilot will not process events from staged-in input files,
        # but download events from an Event Server and then process them.
        # This method is used to process events downloaded from an Event Server
        # NOTE(review): placeholder implementation — sleeps 4 s and does no work.
        import time
        time.sleep(4)
| 33.090909 | 104 | 0.671978 |
1bb0ef5f4e273ec1887e2fa6cff9bfe68c20699e | 1,908 | py | Python | oneflow/python/test/ops/test_TestDataTypeAttr.py | Gavin-Hoang/oneflow | 38d9e15d22c6c62827456b097a1d50adda67ca9b | [
"Apache-2.0"
] | 1 | 2020-12-04T03:06:16.000Z | 2020-12-04T03:06:16.000Z | oneflow/python/test/ops/test_TestDataTypeAttr.py | weibao918/oneflow | 320038ff5efd948516f7259442190f9b31f75027 | [
"Apache-2.0"
] | null | null | null | oneflow/python/test/ops/test_TestDataTypeAttr.py | weibao918/oneflow | 320038ff5efd948516f7259442190f9b31f75027 | [
"Apache-2.0"
] | null | null | null | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from collections import OrderedDict
import numpy as np
import oneflow as flow
import oneflow.typing as oft
from test_util import GenArgList, type_name_to_flow_type, type_name_to_np_type
def TestDataTypeAttr(input, output_type):
    """Build and run the custom "TestDataTypeAttr" user op on ``input``,
    asking it to produce a blob of dtype ``output_type``; returns the op's
    single output blob."""
    # output_type must be one of oneflow's registered dtypes.
    assert output_type in flow.dtypes()
    return (
        flow.user_op_builder("TestDataTypeAttr")
        .Op("TestDataTypeAttr")
        .Input("in", [input])
        .Output("out")
        .Attr("output_type", output_type)
        .Build()
        .InferAndTryRun()
        .RemoteBlobList()[0]
    )
def RunTest(data_type):
    """Run the TestDataTypeAttr op once for the named dtype and check that
    the produced numpy array really has the requested dtype.

    ``data_type`` is a type-name string key of type_name_to_flow_type /
    type_name_to_np_type (e.g. "float32", "int8").
    """
    # Fresh session per dtype so job definitions don't accumulate.
    flow.clear_default_session()
    func_config = flow.FunctionConfig()
    func_config.default_data_type(flow.float)

    @flow.global_function(func_config)
    def TestDataTypeAttrJob(input: oft.Numpy.Placeholder((10, 10), dtype=flow.float)):
        return TestDataTypeAttr(input, type_name_to_flow_type[data_type])

    input = np.random.random_sample((10, 10)).astype(np.float32)
    output = TestDataTypeAttrJob(input).get().numpy()
    # The op must cast its float input to the requested output dtype.
    assert output.dtype == type_name_to_np_type[data_type]
def test_data_type_attr(test_case):
    """Exercise the op's output_type attr across the supported dtypes."""
    # TODO: fix bugs in ForeignOutputKernel with "float16" and "char" dtype, do not test these two dtypes here
    supported = ("float32", "double", "int8", "int32", "int64", "uint8")
    for dtype_name in supported:
        RunTest(dtype_name)
| 34.071429 | 110 | 0.730608 |
40169fb8a9b0c522b6a29a423abbcd8318c57d72 | 21,094 | py | Python | tornado/options.py | takluyver/tornado | 34903f9e1a99441b2729bbe6f1d65d46cf352ea7 | [
"Apache-2.0"
] | 1 | 2016-01-04T07:40:28.000Z | 2016-01-04T07:40:28.000Z | tornado/options.py | takluyver/tornado | 34903f9e1a99441b2729bbe6f1d65d46cf352ea7 | [
"Apache-2.0"
] | 1 | 2017-09-19T09:52:54.000Z | 2017-09-19T09:52:54.000Z | tornado/options.py | takluyver/tornado | 34903f9e1a99441b2729bbe6f1d65d46cf352ea7 | [
"Apache-2.0"
] | 1 | 2016-09-12T09:32:32.000Z | 2016-09-12T09:32:32.000Z | #!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""A command line parsing module that lets modules define their own options.
Each module defines its own options which are added to the global
option namespace, e.g.::
from tornado.options import define, options
define("mysql_host", default="127.0.0.1:3306", help="Main user DB")
define("memcache_hosts", default="127.0.0.1:11011", multiple=True,
help="Main user memcache servers")
def connect():
db = database.Connection(options.mysql_host)
...
The ``main()`` method of your application does not need to be aware of all of
the options used throughout your program; they are all automatically loaded
when the modules are loaded. However, all modules that define options
must have been imported before the command line is parsed.
Your ``main()`` method can parse the command line or parse a config file with
either::
tornado.options.parse_command_line()
# or
tornado.options.parse_config_file("/etc/server.conf")
.. note:
When using tornado.options.parse_command_line or
tornado.options.parse_config_file, the only options that are set are
ones that were previously defined with tornado.options.define.
Command line formats are what you would expect (``--myoption=myvalue``).
Config files are just Python files. Global names become options, e.g.::
myoption = "myvalue"
myotheroption = "myothervalue"
We support `datetimes <datetime.datetime>`, `timedeltas
<datetime.timedelta>`, ints, and floats (just pass a ``type`` kwarg to
`define`). We also accept multi-value options. See the documentation for
`define()` below.
`tornado.options.options` is a singleton instance of `OptionParser`, and
the top-level functions in this module (`define`, `parse_command_line`, etc)
simply call methods on it. You may create additional `OptionParser`
instances to define isolated sets of options, such as for subcommands.
.. note::
By default, several options are defined that will configure the
standard `logging` module when `parse_command_line` or `parse_config_file`
are called. If you want Tornado to leave the logging configuration
alone so you can manage it yourself, either pass ``--logging=none``
on the command line or do the following to disable it in code::
from tornado.options import options, parse_command_line
options.logging = None
parse_command_line()
.. versionchanged:: 4.3
Dashes and underscores are fully interchangeable in option names;
options can be defined, set, and read with any mix of the two.
Dashes are typical for command-line usage while config files require
underscores.
"""
from __future__ import absolute_import, division, print_function, with_statement
import datetime
import numbers
import re
import sys
import os
import textwrap
from tornado.escape import _unicode, native_str
from tornado.log import define_logging_options
from tornado import stack_context
from tornado.util import basestring_type, exec_in
class Error(Exception):
    """Exception raised by errors in the options module."""
class OptionParser(object):
"""A collection of options, a dictionary with object-like access.
Normally accessed via static functions in the `tornado.options` module,
which reference a global instance.
"""
    def __init__(self):
        # we have to use self.__dict__ because we override setattr.
        self.__dict__['_options'] = {}       # normalized name -> _Option
        self.__dict__['_parse_callbacks'] = []
        # Every parser gets a built-in --help option whose callback prints
        # usage information.
        self.define("help", type=bool, help="show this help information",
                    callback=self._help_callback)
    def _normalize_name(self, name):
        # Options are stored under their dashed form; dashes and underscores
        # are interchangeable from the caller's point of view.
        return name.replace('_', '-')

    def __getattr__(self, name):
        """Attribute access returns the parsed value of a defined option."""
        name = self._normalize_name(name)
        if isinstance(self._options.get(name), _Option):
            return self._options[name].value()
        raise AttributeError("Unrecognized option %r" % name)

    def __setattr__(self, name, value):
        """Attribute assignment sets the value of a defined option."""
        name = self._normalize_name(name)
        if isinstance(self._options.get(name), _Option):
            return self._options[name].set(value)
        raise AttributeError("Unrecognized option %r" % name)

    def __iter__(self):
        # Iterate over option names as originally defined (not normalized).
        return (opt.name for opt in self._options.values())

    def __contains__(self, name):
        name = self._normalize_name(name)
        return name in self._options

    def __getitem__(self, name):
        # Dict-style access delegates to attribute-style access.
        return self.__getattr__(name)

    def __setitem__(self, name, value):
        return self.__setattr__(name, value)
def items(self):
"""A sequence of (name, value) pairs.
.. versionadded:: 3.1
"""
return [(opt.name, opt.value()) for name, opt in self._options.items()]
def groups(self):
"""The set of option-groups created by ``define``.
.. versionadded:: 3.1
"""
return set(opt.group_name for opt in self._options.values())
def group_dict(self, group):
"""The names and values of options in a group.
Useful for copying options into Application settings::
from tornado.options import define, parse_command_line, options
define('template_path', group='application')
define('static_path', group='application')
parse_command_line()
application = Application(
handlers, **options.group_dict('application'))
.. versionadded:: 3.1
"""
return dict(
(opt.name, opt.value()) for name, opt in self._options.items()
if not group or group == opt.group_name)
def as_dict(self):
"""The names and values of all options.
.. versionadded:: 3.1
"""
return dict(
(opt.name, opt.value()) for name, opt in self._options.items())
def define(self, name, default=None, type=None, help=None, metavar=None,
multiple=False, group=None, callback=None):
"""Defines a new command line option.
If ``type`` is given (one of str, float, int, datetime, or timedelta)
or can be inferred from the ``default``, we parse the command line
arguments based on the given type. If ``multiple`` is True, we accept
comma-separated values, and the option value is always a list.
For multi-value integers, we also accept the syntax ``x:y``, which
turns into ``range(x, y)`` - very useful for long integer ranges.
``help`` and ``metavar`` are used to construct the
automatically generated command line help string. The help
message is formatted like::
--name=METAVAR help string
``group`` is used to group the defined options in logical
groups. By default, command line options are grouped by the
file in which they are defined.
Command line option names must be unique globally. They can be parsed
from the command line with `parse_command_line` or parsed from a
config file with `parse_config_file`.
If a ``callback`` is given, it will be run with the new value whenever
the option is changed. This can be used to combine command-line
and file-based options::
define("config", type=str, help="path to config file",
callback=lambda path: parse_config_file(path, final=False))
With this definition, options in the file specified by ``--config`` will
override options set earlier on the command line, but can be overridden
by later flags.
"""
if name in self._options:
raise Error("Option %r already defined in %s" %
(name, self._options[name].file_name))
frame = sys._getframe(0)
options_file = frame.f_code.co_filename
# Can be called directly, or through top level define() fn, in which
# case, step up above that frame to look for real caller.
if (frame.f_back.f_code.co_filename == options_file and
frame.f_back.f_code.co_name == 'define'):
frame = frame.f_back
file_name = frame.f_back.f_code.co_filename
if file_name == options_file:
file_name = ""
if type is None:
if not multiple and default is not None:
type = default.__class__
else:
type = str
if group:
group_name = group
else:
group_name = file_name
normalized = self._normalize_name(name)
option = _Option(name, file_name=file_name,
default=default, type=type, help=help,
metavar=metavar, multiple=multiple,
group_name=group_name,
callback=callback)
self._options[normalized] = option
def parse_command_line(self, args=None, final=True):
"""Parses all options given on the command line (defaults to
`sys.argv`).
Note that ``args[0]`` is ignored since it is the program name
in `sys.argv`.
We return a list of all arguments that are not parsed as options.
If ``final`` is ``False``, parse callbacks will not be run.
This is useful for applications that wish to combine configurations
from multiple sources.
"""
if args is None:
args = sys.argv
remaining = []
for i in range(1, len(args)):
# All things after the last option are command line arguments
if not args[i].startswith("-"):
remaining = args[i:]
break
if args[i] == "--":
remaining = args[i + 1:]
break
arg = args[i].lstrip("-")
name, equals, value = arg.partition("=")
name = self._normalize_name(name)
if name not in self._options:
self.print_help()
raise Error('Unrecognized command line option: %r' % name)
option = self._options[name]
if not equals:
if option.type == bool:
value = "true"
else:
raise Error('Option %r requires a value' % name)
option.parse(value)
if final:
self.run_parse_callbacks()
return remaining
def parse_config_file(self, path, final=True):
"""Parses and loads the Python config file at the given path.
If ``final`` is ``False``, parse callbacks will not be run.
This is useful for applications that wish to combine configurations
from multiple sources.
.. versionchanged:: 4.1
Config files are now always interpreted as utf-8 instead of
the system default encoding.
"""
config = {}
with open(path, 'rb') as f:
exec_in(native_str(f.read()), config, config)
for name in config:
normalized = self._normalize_name(name)
if normalized in self._options:
self._options[normalized].set(config[name])
if final:
self.run_parse_callbacks()
def print_help(self, file=None):
"""Prints all the command line options to stderr (or another file)."""
if file is None:
file = sys.stderr
print("Usage: %s [OPTIONS]" % sys.argv[0], file=file)
print("\nOptions:\n", file=file)
by_group = {}
for option in self._options.values():
by_group.setdefault(option.group_name, []).append(option)
for filename, o in sorted(by_group.items()):
if filename:
print("\n%s options:\n" % os.path.normpath(filename), file=file)
o.sort(key=lambda option: option.name)
for option in o:
# Always print names with dashes in a CLI context.
prefix = self._normalize_name(option.name)
if option.metavar:
prefix += "=" + option.metavar
description = option.help or ""
if option.default is not None and option.default != '':
description += " (default %s)" % option.default
lines = textwrap.wrap(description, 79 - 35)
if len(prefix) > 30 or len(lines) == 0:
lines.insert(0, '')
print(" --%-30s %s" % (prefix, lines[0]), file=file)
for line in lines[1:]:
print("%-34s %s" % (' ', line), file=file)
print(file=file)
def _help_callback(self, value):
if value:
self.print_help()
sys.exit(0)
def add_parse_callback(self, callback):
"""Adds a parse callback, to be invoked when option parsing is done."""
self._parse_callbacks.append(stack_context.wrap(callback))
def run_parse_callbacks(self):
for callback in self._parse_callbacks:
callback()
def mockable(self):
"""Returns a wrapper around self that is compatible with
`mock.patch <unittest.mock.patch>`.
The `mock.patch <unittest.mock.patch>` function (included in
the standard library `unittest.mock` package since Python 3.3,
or in the third-party ``mock`` package for older versions of
Python) is incompatible with objects like ``options`` that
override ``__getattr__`` and ``__setattr__``. This function
returns an object that can be used with `mock.patch.object
<unittest.mock.patch.object>` to modify option values::
with mock.patch.object(options.mockable(), 'name', value):
assert options.name == value
"""
return _Mockable(self)
class _Mockable(object):
"""`mock.patch` compatible wrapper for `OptionParser`.
As of ``mock`` version 1.0.1, when an object uses ``__getattr__``
hooks instead of ``__dict__``, ``patch.__exit__`` tries to delete
the attribute it set instead of setting a new one (assuming that
the object does not catpure ``__setattr__``, so the patch
created a new attribute in ``__dict__``).
_Mockable's getattr and setattr pass through to the underlying
OptionParser, and delattr undoes the effect of a previous setattr.
"""
def __init__(self, options):
# Modify __dict__ directly to bypass __setattr__
self.__dict__['_options'] = options
self.__dict__['_originals'] = {}
def __getattr__(self, name):
return getattr(self._options, name)
def __setattr__(self, name, value):
assert name not in self._originals, "don't reuse mockable objects"
self._originals[name] = getattr(self._options, name)
setattr(self._options, name, value)
def __delattr__(self, name):
setattr(self._options, name, self._originals.pop(name))
class _Option(object):
UNSET = object()
def __init__(self, name, default=None, type=basestring_type, help=None,
metavar=None, multiple=False, file_name=None, group_name=None,
callback=None):
if default is None and multiple:
default = []
self.name = name
self.type = type
self.help = help
self.metavar = metavar
self.multiple = multiple
self.file_name = file_name
self.group_name = group_name
self.callback = callback
self.default = default
self._value = _Option.UNSET
def value(self):
return self.default if self._value is _Option.UNSET else self._value
def parse(self, value):
_parse = {
datetime.datetime: self._parse_datetime,
datetime.timedelta: self._parse_timedelta,
bool: self._parse_bool,
basestring_type: self._parse_string,
}.get(self.type, self.type)
if self.multiple:
self._value = []
for part in value.split(","):
if issubclass(self.type, numbers.Integral):
# allow ranges of the form X:Y (inclusive at both ends)
lo, _, hi = part.partition(":")
lo = _parse(lo)
hi = _parse(hi) if hi else lo
self._value.extend(range(lo, hi + 1))
else:
self._value.append(_parse(part))
else:
self._value = _parse(value)
if self.callback is not None:
self.callback(self._value)
return self.value()
def set(self, value):
if self.multiple:
if not isinstance(value, list):
raise Error("Option %r is required to be a list of %s" %
(self.name, self.type.__name__))
for item in value:
if item is not None and not isinstance(item, self.type):
raise Error("Option %r is required to be a list of %s" %
(self.name, self.type.__name__))
else:
if value is not None and not isinstance(value, self.type):
raise Error("Option %r is required to be a %s (%s given)" %
(self.name, self.type.__name__, type(value)))
self._value = value
if self.callback is not None:
self.callback(self._value)
# Supported date/time formats in our options
_DATETIME_FORMATS = [
"%a %b %d %H:%M:%S %Y",
"%Y-%m-%d %H:%M:%S",
"%Y-%m-%d %H:%M",
"%Y-%m-%dT%H:%M",
"%Y%m%d %H:%M:%S",
"%Y%m%d %H:%M",
"%Y-%m-%d",
"%Y%m%d",
"%H:%M:%S",
"%H:%M",
]
def _parse_datetime(self, value):
for format in self._DATETIME_FORMATS:
try:
return datetime.datetime.strptime(value, format)
except ValueError:
pass
raise Error('Unrecognized date/time format: %r' % value)
_TIMEDELTA_ABBREV_DICT = {
'h': 'hours',
'm': 'minutes',
'min': 'minutes',
's': 'seconds',
'sec': 'seconds',
'ms': 'milliseconds',
'us': 'microseconds',
'd': 'days',
'w': 'weeks',
}
_FLOAT_PATTERN = r'[-+]?(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][-+]?\d+)?'
_TIMEDELTA_PATTERN = re.compile(
r'\s*(%s)\s*(\w*)\s*' % _FLOAT_PATTERN, re.IGNORECASE)
def _parse_timedelta(self, value):
try:
sum = datetime.timedelta()
start = 0
while start < len(value):
m = self._TIMEDELTA_PATTERN.match(value, start)
if not m:
raise Exception()
num = float(m.group(1))
units = m.group(2) or 'seconds'
units = self._TIMEDELTA_ABBREV_DICT.get(units, units)
sum += datetime.timedelta(**{units: num})
start = m.end()
return sum
except Exception:
raise
def _parse_bool(self, value):
return value.lower() not in ("false", "0", "f")
def _parse_string(self, value):
return _unicode(value)
options = OptionParser()
"""Global options object.
All defined options are available as attributes on this object.
"""
def define(name, default=None, type=None, help=None, metavar=None,
multiple=False, group=None, callback=None):
"""Defines an option in the global namespace.
See `OptionParser.define`.
"""
return options.define(name, default=default, type=type, help=help,
metavar=metavar, multiple=multiple, group=group,
callback=callback)
def parse_command_line(args=None, final=True):
"""Parses global options from the command line.
See `OptionParser.parse_command_line`.
"""
return options.parse_command_line(args, final=final)
def parse_config_file(path, final=True):
"""Parses global options from a config file.
See `OptionParser.parse_config_file`.
"""
return options.parse_config_file(path, final=final)
def print_help(file=None):
"""Prints all the command line options to stderr (or another file).
See `OptionParser.print_help`.
"""
return options.print_help(file)
def add_parse_callback(callback):
"""Adds a parse callback, to be invoked when option parsing is done.
See `OptionParser.add_parse_callback`
"""
options.add_parse_callback(callback)
# Default options
define_logging_options(options)
| 35.692047 | 80 | 0.610126 |
f4c0187f5eb9f98c127b8aab39fcfbe3028dd27d | 2,770 | py | Python | position.py | jorenham/webcam-vr | 4f442b7ac1441cf794decdf243361d93b7315d68 | [
"Apache-2.0"
] | 1 | 2018-04-13T20:28:37.000Z | 2018-04-13T20:28:37.000Z | position.py | jorenham/webcam-vr | 4f442b7ac1441cf794decdf243361d93b7315d68 | [
"Apache-2.0"
] | null | null | null | position.py | jorenham/webcam-vr | 4f442b7ac1441cf794decdf243361d93b7315d68 | [
"Apache-2.0"
] | null | null | null | import numpy as np
from imutils.video import VideoStream
from imutils import face_utils
import argparse
import imutils
import time
import dlib
import cv2
SHAPE_PREDICTOR = 'shape_predictor_5_face_landmarks.dat'
def get_stream():
# initialize the video stream and allow the cammera sensor to warmup
print("[INFO] camera sensor warming up...")
vs = VideoStream().start()
time.sleep(2.0)
return vs
def get_frame(stream: VideoStream, width):
# grab the frame from the threaded video stream, resize it to
# have a maximum width
frame = stream.read()
return imutils.resize(frame, width=width)
def get_eye_position(frame):
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
dims = np.array([frame.shape[1], frame.shape[0]])
# detect faces in the grayscale frame
detector = dlib.get_frontal_face_detector()
predictor = dlib.shape_predictor(SHAPE_PREDICTOR)
rects = detector(gray, 0)
# nothing detected
if not rects:
return None
# only use first detected first face
rect = rects[0]
# determine the facial landmarks for the face region, then
# convert the facial landmark (x, y)-coordinates to a NumPy
# array
shape = predictor(gray, rect)
shape = face_utils.shape_to_np(shape)
# use the mean of the first and second two points to find the pupillary
# distance in relative coords
pupils = np.vstack((
(shape[0, :] + shape[1, :]) / 2,
(shape[2, :] + shape[3, :]) / 2,
))
pupils /= dims
pupillary_distance = np.abs(np.diff(pupils))
# find x, y position of eye center
position = (pupils[0, :] + pupils[1, :]) / 2
# append z [0, 1] coordinate based on pd
position = np.append(position, pupillary_distance[1])
return position
def show_frame_with_position(position, frame):
# draw virtual position
position_pixels = (
int((1 - position[0]) * frame.shape[1]),
int((1 - position[1]) * frame.shape[0])
)
size = int((frame.shape[1] / 10) * position[2])
color = (0, 255, 0)
cv2.line(frame, (int(frame.shape[1]/2), int(frame.shape[0]/2)), position_pixels, color)
cv2.circle(frame, position_pixels, size, color, -1)
# show the frame
cv2.imshow("Frame", frame)
if __name__ == '__main__':
vs = get_stream()
try:
prev = None
while True:
f = get_frame(stream=vs, width=1200)
pos = get_eye_position(f)
# use previous value if no face is detected
if pos is None and prev is not None:
pos = prev
show_frame_with_position(pos, f)
except KeyboardInterrupt:
pass
finally:
# do a bit of cleanup
cv2.destroyAllWindows()
vs.stop()
| 26.893204 | 91 | 0.638989 |
8db95063425054bf0e797ecd38120d2e4a65de73 | 1,433 | py | Python | tests/middleware/test_https_redirect.py | deliseev/starlette | 4fe6319211a8766b72d2a603f2ada3faba922914 | [
"BSD-3-Clause"
] | 6,974 | 2018-06-25T13:56:49.000Z | 2022-03-31T21:33:04.000Z | tests/middleware/test_https_redirect.py | deliseev/starlette | 4fe6319211a8766b72d2a603f2ada3faba922914 | [
"BSD-3-Clause"
] | 1,221 | 2018-06-25T15:31:07.000Z | 2022-03-31T09:14:59.000Z | tests/middleware/test_https_redirect.py | deliseev/starlette | 4fe6319211a8766b72d2a603f2ada3faba922914 | [
"BSD-3-Clause"
] | 810 | 2018-06-25T16:07:52.000Z | 2022-03-30T16:34:12.000Z | from starlette.applications import Starlette
from starlette.middleware.httpsredirect import HTTPSRedirectMiddleware
from starlette.responses import PlainTextResponse
def test_https_redirect_middleware(test_client_factory):
app = Starlette()
app.add_middleware(HTTPSRedirectMiddleware)
@app.route("/")
def homepage(request):
return PlainTextResponse("OK", status_code=200)
client = test_client_factory(app, base_url="https://testserver")
response = client.get("/")
assert response.status_code == 200
client = test_client_factory(app)
response = client.get("/", allow_redirects=False)
assert response.status_code == 307
assert response.headers["location"] == "https://testserver/"
client = test_client_factory(app, base_url="http://testserver:80")
response = client.get("/", allow_redirects=False)
assert response.status_code == 307
assert response.headers["location"] == "https://testserver/"
client = test_client_factory(app, base_url="http://testserver:443")
response = client.get("/", allow_redirects=False)
assert response.status_code == 307
assert response.headers["location"] == "https://testserver/"
client = test_client_factory(app, base_url="http://testserver:123")
response = client.get("/", allow_redirects=False)
assert response.status_code == 307
assert response.headers["location"] == "https://testserver:123/"
| 37.710526 | 71 | 0.72575 |
32c5f582df711ef69200a11c6f62889cd36e5d75 | 49,584 | py | Python | neutron/tests/unit/db/test_l3_dvr_db.py | knodir/neutron | ac4e28478ac8a8a0c9f5c5785f6a6bcf532c66b8 | [
"Apache-2.0"
] | null | null | null | neutron/tests/unit/db/test_l3_dvr_db.py | knodir/neutron | ac4e28478ac8a8a0c9f5c5785f6a6bcf532c66b8 | [
"Apache-2.0"
] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z | neutron/tests/unit/db/test_l3_dvr_db.py | knodir/neutron | ac4e28478ac8a8a0c9f5c5785f6a6bcf532c66b8 | [
"Apache-2.0"
] | 2 | 2020-03-15T01:24:15.000Z | 2020-07-22T20:34:26.000Z | # Copyright (c) 2014 OpenStack Foundation, all rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from neutron_lib.api.definitions import portbindings
from neutron_lib.callbacks import events
from neutron_lib.callbacks import registry
from neutron_lib.callbacks import resources
from neutron_lib import constants as const
from neutron_lib import context
from neutron_lib import exceptions
from neutron_lib.exceptions import l3 as l3_exc
from neutron_lib.plugins import constants as plugin_constants
from neutron_lib.plugins import directory
from neutron_lib.plugins import utils as plugin_utils
from oslo_utils import uuidutils
from neutron.db import agents_db
from neutron.db import common_db_mixin
from neutron.db import l3_dvr_db
from neutron.db import l3_dvrscheduler_db
from neutron.db.models import l3 as l3_models
from neutron.db import models_v2
from neutron.objects import agent as agent_obj
from neutron.objects import l3agent as rb_obj
from neutron.objects import router as router_obj
from neutron.tests.unit.db import test_db_base_plugin_v2
from neutron.tests.unit.extensions import test_l3
_uuid = uuidutils.generate_uuid
class FakeL3Plugin(test_l3.TestL3PluginBaseAttributes,
common_db_mixin.CommonDbMixin,
l3_dvr_db.L3_NAT_with_dvr_db_mixin,
l3_dvrscheduler_db.L3_DVRsch_db_mixin,
agents_db.AgentDbMixin):
pass
class L3DvrTestCase(test_db_base_plugin_v2.NeutronDbPluginV2TestCase):
def setUp(self):
super(L3DvrTestCase, self).setUp(plugin='ml2')
self.core_plugin = directory.get_plugin()
self.ctx = context.get_admin_context()
self.mixin = FakeL3Plugin()
directory.add_plugin(plugin_constants.L3, self.mixin)
def _create_router(self, router):
with self.ctx.session.begin(subtransactions=True):
return self.mixin._create_router_db(self.ctx, router, 'foo_tenant')
def create_port(self, net_id, port_info):
with self.ctx.session.begin(subtransactions=True):
return self._create_port(self.fmt, net_id, **port_info)
def _test__create_router_db(self, expected=False, distributed=None):
router = {'name': 'foo_router', 'admin_state_up': True}
if distributed is not None:
router['distributed'] = distributed
result = self._create_router(router)
self.assertEqual(expected, result.extra_attributes['distributed'])
def test_create_router_db_default(self):
self._test__create_router_db(expected=False)
def test_create_router_db_centralized(self):
self._test__create_router_db(expected=False, distributed=False)
def test_create_router_db_distributed(self):
self._test__create_router_db(expected=True, distributed=True)
def test__validate_router_migration_on_router_update(self):
router = {
'name': 'foo_router',
'admin_state_up': True,
'distributed': True
}
router_db = self._create_router(router)
self.assertFalse(self.mixin._validate_router_migration(
self.ctx, router_db, {'name': 'foo_router_2'}))
def test__validate_router_migration_raise_error(self):
router = {
'name': 'foo_router',
'admin_state_up': True,
'distributed': True
}
router_db = self._create_router(router)
self.assertRaises(exceptions.BadRequest,
self.mixin._validate_router_migration,
self.ctx, router_db, {'distributed': False})
def test_upgrade_active_router_to_distributed_validation_failure(self):
router = {'name': 'foo_router', 'admin_state_up': True}
router_db = self._create_router(router)
update = {'distributed': True}
self.assertRaises(exceptions.BadRequest,
self.mixin._validate_router_migration,
self.ctx, router_db, update)
def test_update_router_db_centralized_to_distributed(self):
router = {'name': 'foo_router', 'admin_state_up': True}
agent = {'id': _uuid()}
distributed = {'distributed': True}
router_db = self._create_router(router)
router_id = router_db['id']
self.assertFalse(router_db.extra_attributes.distributed)
self.mixin._get_router = mock.Mock(return_value=router_db)
self.mixin._validate_router_migration = mock.Mock()
self.mixin._migrate_router_ports = mock.Mock()
self.mixin.list_l3_agents_hosting_router = mock.Mock(
return_value={'agents': [agent]})
self.mixin._unbind_router = mock.Mock()
router_db = self.mixin._update_router_db(
self.ctx, router_id, distributed)
# Assert that the DB value has changed
self.assertTrue(router_db.extra_attributes.distributed)
self.assertEqual(1,
self.mixin._migrate_router_ports.call_count)
def _test_get_device_owner(self, is_distributed=False,
expected=const.DEVICE_OWNER_ROUTER_INTF,
pass_router_id=True):
router = {
'name': 'foo_router',
'admin_state_up': True,
'distributed': is_distributed
}
router_db = self._create_router(router)
router_pass = router_db['id'] if pass_router_id else router_db
with mock.patch.object(self.mixin, '_get_router') as f:
f.return_value = router_db
result = self.mixin._get_device_owner(self.ctx, router_pass)
self.assertEqual(expected, result)
def test_get_device_owner_by_router_id(self):
self._test_get_device_owner()
def test__get_device_owner_centralized(self):
self._test_get_device_owner(pass_router_id=False)
def test__get_device_owner_distributed(self):
self._test_get_device_owner(
is_distributed=True,
expected=const.DEVICE_OWNER_DVR_INTERFACE,
pass_router_id=False)
def _test__is_distributed_router(self, router, expected):
result = l3_dvr_db.is_distributed_router(router)
self.assertEqual(expected, result)
def test__is_distributed_router_by_db_object(self):
router = {'name': 'foo_router', 'admin_state_up': True}
router_db = self._create_router(router)
self.mixin._get_device_owner(mock.ANY, router_db)
def test__is_distributed_router_default(self):
router = {'id': 'foo_router_id'}
self._test__is_distributed_router(router, False)
def test__is_distributed_router_centralized(self):
router = {'id': 'foo_router_id', 'distributed': False}
self._test__is_distributed_router(router, False)
def test__is_distributed_router_distributed(self):
router = {'id': 'foo_router_id', 'distributed': True}
self._test__is_distributed_router(router, True)
def test__get_agent_gw_ports_exist_for_network(self):
plugin = mock.Mock()
directory.add_plugin(plugin_constants.CORE, plugin)
plugin.get_ports.return_value = []
self.mixin._get_agent_gw_ports_exist_for_network(
self.ctx, 'network_id', 'host', 'agent_id')
plugin.get_ports.assert_called_with(self.ctx, {
'network_id': ['network_id'],
'device_id': ['agent_id'],
'device_owner': [const.DEVICE_OWNER_AGENT_GW]})
def _test_prepare_direct_delete_dvr_internal_ports(self, port):
plugin = mock.Mock()
directory.add_plugin(plugin_constants.CORE, plugin)
plugin.get_port.return_value = port
self.mixin._router_exists = mock.Mock(return_value=True)
self.assertRaises(exceptions.ServicePortInUse,
self.mixin.prevent_l3_port_deletion,
self.ctx,
port['id'])
def test_prevent_delete_floatingip_agent_gateway_port(self):
port = {
'id': 'my_port_id',
'fixed_ips': mock.ANY,
'device_id': 'r_id',
'device_owner': const.DEVICE_OWNER_AGENT_GW
}
self._test_prepare_direct_delete_dvr_internal_ports(port)
def test_prevent_delete_csnat_port(self):
port = {
'id': 'my_port_id',
'fixed_ips': mock.ANY,
'device_id': 'r_id',
'device_owner': const.DEVICE_OWNER_ROUTER_SNAT
}
self._test_prepare_direct_delete_dvr_internal_ports(port)
def test__create_gw_port_with_no_gateway(self):
router = {
'name': 'foo_router',
'admin_state_up': True,
'distributed': True,
}
router_db = self._create_router(router)
router_id = router_db['id']
self.assertTrue(router_db.extra_attributes.distributed)
with mock.patch.object(l3_dvr_db.l3_db.L3_NAT_db_mixin,
'_create_gw_port'),\
mock.patch.object(
self.mixin,
'_create_snat_intf_ports_if_not_exists') as cs:
self.mixin._create_gw_port(
self.ctx, router_id, router_db, mock.ANY,
mock.ANY)
self.assertFalse(cs.call_count)
def test_build_routers_list_with_gw_port_mismatch(self):
routers = [{'gw_port_id': 'foo_gw_port_id', 'id': 'foo_router_id'}]
gw_ports = {}
routers = self.mixin._build_routers_list(self.ctx, routers, gw_ports)
self.assertIsNone(routers[0].get('gw_port'))
def _helper_delete_floatingip_agent_gateway_port(self, port_host):
ports = [{
'id': 'my_port_id',
portbindings.HOST_ID: 'foo_host',
'network_id': 'ext_network_id',
'device_owner': const.DEVICE_OWNER_ROUTER_GW
},
{
'id': 'my_new_port_id',
portbindings.HOST_ID: 'my_foo_host',
'network_id': 'ext_network_id',
'device_owner': const.DEVICE_OWNER_ROUTER_GW
}]
plugin = mock.Mock()
directory.add_plugin(plugin_constants.CORE, plugin)
plugin.get_ports.return_value = ports
self.mixin.delete_floatingip_agent_gateway_port(
self.ctx, port_host, 'ext_network_id')
plugin.get_ports.assert_called_with(self.ctx, filters={
'network_id': ['ext_network_id'],
'device_owner': [const.DEVICE_OWNER_AGENT_GW]})
if port_host:
plugin.ipam.delete_port.assert_called_once_with(
self.ctx, 'my_port_id')
else:
plugin.ipam.delete_port.assert_called_with(
self.ctx, 'my_new_port_id')
def test_delete_floatingip_agent_gateway_port_without_host_id(self):
self._helper_delete_floatingip_agent_gateway_port(None)
def test_delete_floatingip_agent_gateway_port_with_host_id(self):
self._helper_delete_floatingip_agent_gateway_port(
'foo_host')
def _setup_delete_current_gw_port_deletes_dvr_internal_ports(
self, port=None, gw_port=True, new_network_id='ext_net_id_2'):
router_db = {
'name': 'foo_router',
'admin_state_up': True,
'distributed': True
}
router = self._create_router(router_db)
if gw_port:
with self.subnet(cidr='10.10.10.0/24') as subnet:
port_dict = {
'device_id': router.id,
'device_owner': const.DEVICE_OWNER_ROUTER_GW,
'admin_state_up': True,
'fixed_ips': [{'subnet_id': subnet['subnet']['id'],
'ip_address': '10.10.10.100'}]
}
net_id = subnet['subnet']['network_id']
port_res = self.create_port(net_id, port_dict)
port_res_dict = self.deserialize(self.fmt, port_res)
with self.ctx.session.begin(subtransactions=True):
port_db = self.ctx.session.query(models_v2.Port).filter_by(
id=port_res_dict['port']['id']).one()
router.gw_port = port_db
router_port = l3_models.RouterPort(
router_id=router.id,
port_id=port_db.id,
port_type=const.DEVICE_OWNER_ROUTER_GW
)
self.ctx.session.add(router)
self.ctx.session.add(router_port)
else:
net_id = None
plugin = mock.Mock()
directory.add_plugin(plugin_constants.CORE, plugin)
with mock.patch.object(l3_dvr_db.l3_db.L3_NAT_db_mixin,
'router_gw_port_has_floating_ips',
return_value=False),\
mock.patch.object(
self.mixin,
'_get_router') as grtr,\
mock.patch.object(
self.mixin,
'delete_csnat_router_interface_ports') as del_csnat_port,\
mock.patch.object(
self.mixin,
'delete_floatingip_agent_gateway_port') as del_agent_gw_port,\
mock.patch.object(
self.mixin.l3_rpc_notifier,
'delete_fipnamespace_for_ext_net') as del_fip:
plugin.get_ports.return_value = port
grtr.return_value = router
self.mixin._delete_current_gw_port(
self.ctx, router['id'], router, new_network_id)
return router, plugin, net_id, del_csnat_port,\
del_agent_gw_port, del_fip
def test_delete_current_gw_port_deletes_fip_agent_gw_port_and_fipnamespace(
self):
rtr, plugin, ext_net_id, d_csnat_port, d_agent_gw_port, del_fip = (
self._setup_delete_current_gw_port_deletes_dvr_internal_ports())
self.assertFalse(d_csnat_port.called)
self.assertTrue(d_agent_gw_port.called)
d_agent_gw_port.assert_called_once_with(mock.ANY, None, ext_net_id)
del_fip.assert_called_once_with(self.ctx, ext_net_id)
def test_delete_current_gw_port_never_calls_delete_fip_agent_gw_port(self):
port = [{
'id': 'my_port_id',
'network_id': 'ext_net_id',
'device_owner': const.DEVICE_OWNER_ROUTER_GW
},
{
'id': 'my_new_port_id',
'network_id': 'ext_net_id',
'device_owner': const.DEVICE_OWNER_ROUTER_GW
}]
rtr, plugin, ext_net_id, d_csnat_port, d_agent_gw_port, del_fip = (
self._setup_delete_current_gw_port_deletes_dvr_internal_ports(
port=port))
self.assertFalse(d_csnat_port.called)
self.assertFalse(d_agent_gw_port.called)
self.assertFalse(del_fip.called)
self.assertIsNotNone(ext_net_id)
def test_delete_current_gw_port_never_calls_delete_fipnamespace(self):
rtr, plugin, ext_net_id, d_csnat_port, d_agent_gw_port, del_fip = (
self._setup_delete_current_gw_port_deletes_dvr_internal_ports(
gw_port=False))
self.assertFalse(d_csnat_port.called)
self.assertFalse(d_agent_gw_port.called)
self.assertFalse(del_fip.called)
self.assertIsNone(ext_net_id)
def test_delete_current_gw_port_deletes_csnat_port(self):
rtr, plugin, ext_net_id, d_csnat_port, d_agent_gw_port, del_fip = (
self._setup_delete_current_gw_port_deletes_dvr_internal_ports(
new_network_id=None))
self.assertTrue(d_csnat_port.called)
self.assertTrue(d_agent_gw_port.called)
d_csnat_port.assert_called_once_with(mock.ANY, rtr)
d_agent_gw_port.assert_called_once_with(mock.ANY, None, ext_net_id)
del_fip.assert_called_once_with(mock.ANY, ext_net_id)
def _floatingip_on_port_test_setup(self, hostid):
router = {'id': 'foo_router_id', 'distributed': True}
floatingip = {
'id': _uuid(),
'port_id': _uuid(),
'router_id': 'foo_router_id',
}
if hostid is not None:
floatingip['host'] = hostid
else:
hostid = 'not_my_host_id'
routers = {
'foo_router_id': router
}
fipagent = {
'id': _uuid()
}
# NOTE: mock.patch is not needed here since self.mixin is created fresh
# for each test. It doesn't work with some methods since the mixin is
# tested in isolation (e.g. _get_agent_by_type_and_host).
self.mixin._get_dvr_service_port_hostid = mock.Mock(
return_value=hostid)
self.mixin._get_agent_by_type_and_host = mock.Mock(
return_value=fipagent)
self.mixin._get_fip_agent_gw_ports = mock.Mock(
return_value='fip_interface')
agent = mock.Mock()
agent.id = fipagent['id']
self.mixin._process_floating_ips_dvr(self.ctx, routers, [floatingip],
hostid, agent)
return (router, floatingip)
    def test_floatingip_on_port_no_host_key(self):
        """A FIP without a host key is not attached to the router."""
        router, fip = self._floatingip_on_port_test_setup(None)
        self.assertNotIn(const.FLOATINGIP_KEY, router)
    def test_floatingip_on_port_with_host(self):
        """A FIP with a matching host is attached to the router under FLOATINGIP_KEY."""
        router, fip = self._floatingip_on_port_test_setup(_uuid())
        self.assertIn(const.FLOATINGIP_KEY, router)
        self.assertIn(fip, router[const.FLOATINGIP_KEY])
    def _setup_test_create_floatingip(
            self, fip, floatingip_db, router_db):
        """Fire a FLOATING_IP AFTER_UPDATE notification with everything mocked.

        Returns the mock for create_fip_agent_gw_port_if_not_exists so
        callers can check whether the agent gateway port creation was
        triggered for the given router type.
        """
        port = {
            'id': '1234',
            portbindings.HOST_ID: 'myhost',
            'network_id': 'external_net'
        }
        with mock.patch.object(self.mixin, 'get_router') as grtr,\
                mock.patch.object(self.mixin,
                                  '_get_dvr_service_port_hostid') as vmp,\
                mock.patch.object(
                    self.mixin,
                    '_get_dvr_migrating_service_port_hostid'
                ) as mvmp,\
                mock.patch.object(
                    self.mixin,
                    'create_fip_agent_gw_port_if_not_exists') as c_fip,\
                mock.patch.object(l3_dvr_db.l3_db.L3_NAT_db_mixin,
                                  '_update_fip_assoc'):
            grtr.return_value = router_db
            vmp.return_value = 'my-host'
            mvmp.return_value = 'my-future-host'
            registry.notify(resources.FLOATING_IP, events.AFTER_UPDATE, self,
                            context=mock.Mock(), router_id=router_db['id'],
                            fixed_port_id=port['id'], floating_ip_id=fip['id'],
                            floating_network_id=fip['floating_network_id'],
                            fixed_ip_address='1.2.3.4')
            return c_fip
    def test_create_floatingip_agent_gw_port_with_dvr_router(self):
        """A FIP update on a distributed router triggers agent gateway port creation."""
        floatingip = {
            'id': _uuid(),
            'router_id': 'foo_router_id'
        }
        router = {'id': 'foo_router_id', 'distributed': True}
        fip = {
            'id': _uuid(),
            'floating_network_id': _uuid(),
            'port_id': _uuid()
        }
        create_fip = (
            self._setup_test_create_floatingip(
                fip, floatingip, router))
        self.assertTrue(create_fip.called)
    def test_create_fip_agent_gw_port_if_not_exists_with_l3_agent(self):
        """The agent gw port is only created for agents in 'dvr' mode, not 'dvr_no_external'."""
        fport_db = {'id': _uuid()}
        self.mixin._get_agent_gw_ports_exist_for_network = mock.Mock(
            return_value=fport_db)
        # An agent running in dvr_no_external mode must not get a port.
        fipagent = agent_obj.Agent(
            self.ctx,
            id=_uuid(),
            binary='foo-agent',
            host='host',
            agent_type='L3 agent',
            topic='foo_topic',
            configurations={"agent_mode": "dvr_no_external"})
        self.mixin._get_agent_by_type_and_host = mock.Mock(
            return_value=fipagent)
        fport = self.mixin.create_fip_agent_gw_port_if_not_exists(
            self.ctx,
            'network_id',
            'host')
        self.assertIsNone(fport)
        # The same call for an agent in full dvr mode returns the port.
        fipagent = agent_obj.Agent(
            self.ctx,
            id=_uuid(),
            binary='foo-agent',
            host='host',
            agent_type='L3 agent',
            topic='foo_topic',
            configurations={"agent_mode": "dvr"})
        self.mixin._get_agent_by_type_and_host = mock.Mock(
            return_value=fipagent)
        fport = self.mixin.create_fip_agent_gw_port_if_not_exists(
            self.ctx,
            'network_id',
            'host')
        self.assertIsNotNone(fport)
    def test_create_floatingip_agent_gw_port_with_non_dvr_router(self):
        """A FIP update on a centralized router must not create an agent gateway port."""
        floatingip = {
            'id': _uuid(),
            'router_id': 'foo_router_id'
        }
        router = {'id': 'foo_router_id', 'distributed': False}
        fip = {
            'id': _uuid(),
            'floating_network_id': _uuid(),
            'port_id': _uuid()
        }
        create_fip = (
            self._setup_test_create_floatingip(
                fip, floatingip, router))
        self.assertFalse(create_fip.called)
    def test_update_router_gw_info_external_network_change(self):
        """Switching the gateway to another external net keeps exactly one csnat port."""
        router_dict = {'name': 'test_router', 'admin_state_up': True,
                       'distributed': True}
        router = self._create_router(router_dict)
        with self.network() as net_ext_1,\
                self.network() as net_ext_2,\
                self.subnet() as subnet:
            ext_net_1_id = net_ext_1['network']['id']
            self.core_plugin.update_network(
                self.ctx, ext_net_1_id,
                {'network': {'router:external': True}})
            self.mixin.update_router(
                self.ctx, router['id'],
                {'router': {'external_gateway_info':
                            {'network_id': ext_net_1_id}}})
            self.mixin.add_router_interface(self.ctx, router['id'],
                {'subnet_id': subnet['subnet']['id']})
            # Re-point the gateway at a second external network.
            ext_net_2_id = net_ext_2['network']['id']
            self.core_plugin.update_network(
                self.ctx, ext_net_2_id,
                {'network': {'router:external': True}})
            self.mixin.update_router(
                self.ctx, router['id'],
                {'router': {'external_gateway_info':
                            {'network_id': ext_net_2_id}}})
            csnat_filters = {'device_owner': [const.DEVICE_OWNER_ROUTER_SNAT]}
            csnat_ports = self.core_plugin.get_ports(
                self.ctx, filters=csnat_filters)
            self.assertEqual(1, len(csnat_ports))
    def _test_csnat_ports_removal(self, ha=False):
        """Verify csnat ports disappear when a DVR router is migrated to legacy/HA mode."""
        router_dict = {'name': 'test_router', 'admin_state_up': True,
                       'distributed': True}
        router = self._create_router(router_dict)
        with self.network() as net_ext,\
                self.subnet() as subnet:
            ext_net_id = net_ext['network']['id']
            self.core_plugin.update_network(
                self.ctx, ext_net_id,
                {'network': {'router:external': True}})
            self.mixin.update_router(
                self.ctx, router['id'],
                {'router': {'external_gateway_info':
                            {'network_id': ext_net_id}}})
            self.mixin.add_router_interface(self.ctx, router['id'],
                {'subnet_id': subnet['subnet']['id']})
            csnat_filters = {'device_owner':
                             [const.DEVICE_OWNER_ROUTER_SNAT]}
            csnat_ports = self.core_plugin.get_ports(
                self.ctx, filters=csnat_filters)
            self.assertEqual(1, len(csnat_ports))
            # A router must be admin-down before it can be migrated.
            self.mixin.update_router(
                self.ctx, router['id'],
                {'router': {'admin_state_up': False}})
            self.mixin.update_router(
                self.ctx, router['id'],
                {'router': {'distributed': False, 'ha': ha}})
            csnat_ports = self.core_plugin.get_ports(
                self.ctx, filters=csnat_filters)
            self.assertEqual(0, len(csnat_ports))
    def test_distributed_to_centralized_csnat_ports_removal(self):
        """DVR -> centralized migration removes csnat ports."""
        self._test_csnat_ports_removal()
    def test_distributed_to_ha_csnat_ports_removal(self):
        """DVR -> HA migration removes csnat ports."""
        self._test_csnat_ports_removal(ha=True)
    def test_update_router_gw_info_csnat_ports_add(self):
        """Setting a gateway after interfaces exist creates matching csnat ports.

        Two IPv6 subnets share one port (per address scope behavior), the
        IPv4 subnet gets its own, so two DVR and two csnat ports are
        expected; removing the v4 interface drops one of each.
        """
        router_dict = {'name': 'test_router',
                       'admin_state_up': True,
                       'distributed': True}
        router = self._create_router(router_dict)
        with self.network() as net_ext,\
                self.network() as net_int,\
                self.subnet(
                    network=net_int,
                    cidr='2001:db8:1::/64',
                    gateway_ip='2001:db8:1::1',
                    ip_version=const.IP_VERSION_6) as v6_subnet1,\
                self.subnet(
                    network=net_int,
                    cidr='2001:db8:2::/64',
                    gateway_ip='2001:db8:2::1',
                    ip_version=const.IP_VERSION_6) as v6_subnet2,\
                self.subnet(
                    network=net_int,
                    cidr='10.10.10.0/24') as v4_subnet:
            self.core_plugin.update_network(
                self.ctx, net_ext['network']['id'],
                {'network': {'router:external': True}})
            # Add router interface, then set router gateway
            self.mixin.add_router_interface(self.ctx, router['id'],
                {'subnet_id': v6_subnet1['subnet']['id']})
            self.mixin.add_router_interface(self.ctx, router['id'],
                {'subnet_id': v6_subnet2['subnet']['id']})
            self.mixin.add_router_interface(self.ctx, router['id'],
                {'subnet_id': v4_subnet['subnet']['id']})
            dvr_filters = {'device_owner':
                           [const.DEVICE_OWNER_DVR_INTERFACE]}
            dvr_ports = self.core_plugin.get_ports(
                self.ctx, filters=dvr_filters)
            # One for IPv4, one for two IPv6 subnets
            self.assertEqual(2, len(dvr_ports))
            self.mixin.update_router(
                self.ctx, router['id'],
                {'router': {'external_gateway_info':
                            {'network_id': net_ext['network']['id']}}})
            csnat_filters = {'device_owner':
                             [const.DEVICE_OWNER_ROUTER_SNAT]}
            csnat_ports = self.core_plugin.get_ports(
                self.ctx, filters=csnat_filters)
            # One for IPv4, one for two IPv6 subnets
            self.assertEqual(2, len(csnat_ports))
            # Remove v4 subnet interface from router
            self.mixin.remove_router_interface(
                self.ctx, router['id'],
                {'subnet_id': v4_subnet['subnet']['id']})
            dvr_ports = self.core_plugin.get_ports(
                self.ctx, filters=dvr_filters)
            self.assertEqual(1, len(dvr_ports))
            csnat_ports = self.core_plugin.get_ports(
                self.ctx, filters=csnat_filters)
            self.assertEqual(1, len(csnat_ports))
            # Remaining csnat port carries both IPv6 fixed IPs.
            self.assertEqual(2, len(csnat_ports[0]['fixed_ips']))
    def _test_update_router_interface_port_ip_not_allowed(self, device_owner):
        """Updating fixed IPs of a DVR-owned port must be rejected with BadRequest."""
        router, subnet_v4, subnet_v6 = self._setup_router_with_v4_and_v6()
        device_filter = {'device_owner': [device_owner]}
        ports = self.core_plugin.get_ports(self.ctx, filters=device_filter)
        self.assertRaises(
            exceptions.BadRequest,
            self.core_plugin.update_port,
            self.ctx, ports[0]['id'],
            {'port': {'fixed_ips': [
                {'ip_address': "20.0.0.100",
                 'subnet_id': subnet_v4['subnet']['id']},
                {'ip_address': "20.0.0.101",
                 'subnet_id': subnet_v4['subnet']['id']}]}})
    def test_update_router_centralized_snat_port_ip_not_allowed(self):
        """Fixed-IP updates on csnat ports are rejected."""
        self._test_update_router_interface_port_ip_not_allowed(
            const.DEVICE_OWNER_ROUTER_SNAT)
    def test_update_router_interface_distributed_port_ip_not_allowed(self):
        """Fixed-IP updates on DVR interface ports are rejected."""
        self._test_update_router_interface_port_ip_not_allowed(
            const.DEVICE_OWNER_DVR_INTERFACE)
    def test_remove_router_interface_csnat_ports_removal(self):
        """Removing one of two router interfaces removes the paired csnat port only."""
        router_dict = {'name': 'test_router', 'admin_state_up': True,
                       'distributed': True}
        router = self._create_router(router_dict)
        with self.network() as net_ext,\
                self.subnet() as subnet1,\
                self.subnet(cidr='20.0.0.0/24') as subnet2:
            ext_net_id = net_ext['network']['id']
            self.core_plugin.update_network(
                self.ctx, ext_net_id,
                {'network': {'router:external': True}})
            self.mixin.update_router(
                self.ctx, router['id'],
                {'router': {'external_gateway_info':
                            {'network_id': ext_net_id}}})
            self.mixin.add_router_interface(self.ctx, router['id'],
                {'subnet_id': subnet1['subnet']['id']})
            self.mixin.add_router_interface(self.ctx, router['id'],
                {'subnet_id': subnet2['subnet']['id']})
            csnat_filters = {'device_owner':
                             [const.DEVICE_OWNER_ROUTER_SNAT]}
            csnat_ports = self.core_plugin.get_ports(
                self.ctx, filters=csnat_filters)
            self.assertEqual(2, len(csnat_ports))
            dvr_filters = {'device_owner':
                           [const.DEVICE_OWNER_DVR_INTERFACE]}
            dvr_ports = self.core_plugin.get_ports(
                self.ctx, filters=dvr_filters)
            self.assertEqual(2, len(dvr_ports))
            self.mixin.remove_router_interface(
                self.ctx, router['id'], {'port_id': dvr_ports[0]['id']})
            csnat_ports = self.core_plugin.get_ports(
                self.ctx, filters=csnat_filters)
            self.assertEqual(1, len(csnat_ports))
            # The surviving csnat port belongs to the remaining DVR interface.
            self.assertEqual(dvr_ports[1]['fixed_ips'][0]['subnet_id'],
                             csnat_ports[0]['fixed_ips'][0]['subnet_id'])
            dvr_ports = self.core_plugin.get_ports(
                self.ctx, filters=dvr_filters)
            self.assertEqual(1, len(dvr_ports))
    def _setup_router_with_v4_and_v6(self):
        """Create a gatewayed DVR router with one IPv4 and one IPv6 interface.

        Returns (router, subnet_v4, subnet_v6) for assertions in callers.
        """
        router_dict = {'name': 'test_router', 'admin_state_up': True,
                       'distributed': True}
        router = self._create_router(router_dict)
        with self.network() as net_ext, self.network() as net_int:
            ext_net_id = net_ext['network']['id']
            self.core_plugin.update_network(
                self.ctx, ext_net_id,
                {'network': {'router:external': True}})
            self.mixin.update_router(
                self.ctx, router['id'],
                {'router': {'external_gateway_info':
                            {'network_id': ext_net_id}}})
            with self.subnet(
                network=net_int, cidr='20.0.0.0/24') as subnet_v4,\
                self.subnet(network=net_int, cidr='fe80::/64',
                            gateway_ip='fe80::1', ip_version=const.IP_VERSION_6
                            ) as subnet_v6:
                self.mixin.add_router_interface(self.ctx, router['id'],
                    {'subnet_id': subnet_v4['subnet']['id']})
                self.mixin.add_router_interface(self.ctx, router['id'],
                    {'subnet_id': subnet_v6['subnet']['id']})
                return router, subnet_v4, subnet_v6
    def test_undo_router_interface_change_on_csnat_error(self):
        """csnat failure reverts the interface add (revert succeeds)."""
        self._test_undo_router_interface_change_on_csnat_error(False)
    def test_undo_router_interface_change_on_csnat_error_revert_failure(self):
        """csnat failure still raises the original error when the revert itself fails."""
        self._test_undo_router_interface_change_on_csnat_error(True)
    def _test_undo_router_interface_change_on_csnat_error(self, fail_revert):
        """Simulate a csnat port update failure and verify interface rollback."""
        router, subnet_v4, subnet_v6 = self._setup_router_with_v4_and_v6()
        net = {'network': {'id': subnet_v6['subnet']['network_id'],
                           'tenant_id': subnet_v6['subnet']['tenant_id']}}
        orig_update = self.mixin._core_plugin.update_port

        def update_port(*args, **kwargs):
            # 1st port update is the interface, 2nd is csnat, 3rd is revert
            # we want to simulate errors after the 1st
            update_port.calls += 1
            if update_port.calls == 2:
                raise RuntimeError('csnat update failure')
            if update_port.calls == 3 and fail_revert:
                # this is to ensure that if the revert fails, the original
                # exception is raised (not this ValueError)
                raise ValueError('failure from revert')
            return orig_update(*args, **kwargs)
        update_port.calls = 0
        self.mixin._core_plugin.update_port = update_port

        with self.subnet(network=net, cidr='fe81::/64',
                         gateway_ip='fe81::1', ip_version=const.IP_VERSION_6
                         ) as subnet2_v6:
            self.mixin.add_router_interface(self.ctx, router['id'],
                {'subnet_id': subnet2_v6['subnet']['id']})
            if fail_revert:
                # a revert failure will mean the interface is still added
                # so we can't re-add it
                return
            # starting over should work if first interface was cleaned up
            self.mixin.add_router_interface(self.ctx, router['id'],
                {'subnet_id': subnet2_v6['subnet']['id']})
    def test_remove_router_interface_csnat_ports_removal_with_ipv6(self):
        """Removing the v4 interface leaves only the v6 csnat and DVR ports."""
        router, subnet_v4, subnet_v6 = self._setup_router_with_v4_and_v6()
        csnat_filters = {'device_owner':
                         [const.DEVICE_OWNER_ROUTER_SNAT]}
        csnat_ports = self.core_plugin.get_ports(
            self.ctx, filters=csnat_filters)
        self.assertEqual(2, len(csnat_ports))
        dvr_filters = {'device_owner':
                       [const.DEVICE_OWNER_DVR_INTERFACE]}
        dvr_ports = self.core_plugin.get_ports(
            self.ctx, filters=dvr_filters)
        self.assertEqual(2, len(dvr_ports))
        self.mixin.remove_router_interface(
            self.ctx, router['id'],
            {'subnet_id': subnet_v4['subnet']['id']})
        csnat_ports = self.core_plugin.get_ports(
            self.ctx, filters=csnat_filters)
        self.assertEqual(1, len(csnat_ports))
        self.assertEqual(
            subnet_v6['subnet']['id'],
            csnat_ports[0]['fixed_ips'][0]['subnet_id'])
        dvr_ports = self.core_plugin.get_ports(
            self.ctx, filters=dvr_filters)
        self.assertEqual(1, len(dvr_ports))
    def test__validate_router_migration_notify_advanced_services(self):
        """Migration validation sends a before_update notification for services to veto."""
        router = {'name': 'foo_router', 'admin_state_up': False}
        router_db = self._create_router(router)
        with mock.patch.object(l3_dvr_db.registry, 'notify') as mock_notify:
            self.mixin._validate_router_migration(
                self.ctx, router_db, {'distributed': True})
            kwargs = {'context': self.ctx, 'router': router_db}
            mock_notify.assert_called_once_with(
                'router', 'before_update', self.mixin, **kwargs)
    def test_validate_add_router_interface_by_subnet_notify_advanced_services(
            self):
        """Adding an interface by subnet notifies _notify_attaching_interface once."""
        router = {'name': 'foo_router', 'admin_state_up': False}
        router_db = self._create_router(router)
        with self.network() as net, \
                self.subnet(network={'network': net['network']}) as sub, \
                mock.patch.object(
                    self.mixin,
                    '_notify_attaching_interface') as mock_notify:
            interface_info = {'subnet_id': sub['subnet']['id']}
            self.mixin.add_router_interface(self.ctx, router_db.id,
                                            interface_info)
            mock_notify.assert_called_once_with(self.ctx, router_db=router_db,
                                                port=mock.ANY,
                                                interface_info=interface_info)
    def test_validate_add_router_interface_by_port_notify_advanced_services(
            self):
        """Adding an interface by port notifies _notify_attaching_interface once."""
        router = {'name': 'foo_router', 'admin_state_up': False}
        router_db = self._create_router(router)
        with self.network() as net, \
                self.subnet(network={'network': net['network']}) as sub, \
                self.port(subnet=sub) as port, \
                mock.patch.object(
                    self.mixin,
                    '_notify_attaching_interface') as mock_notify:
            interface_info = {'port_id': port['port']['id']}
            self.mixin.add_router_interface(self.ctx, router_db.id,
                                            interface_info)
            mock_notify.assert_called_once_with(self.ctx, router_db=router_db,
                                                port=mock.ANY,
                                                interface_info=interface_info)
    def test__generate_arp_table_and_notify_agent(self):
        """Each router hosting a matching port is notified with the same ARP entry."""
        fixed_ip = {
            'ip_address': '1.2.3.4',
            'subnet_id': _uuid()}
        mac_address = "00:11:22:33:44:55"
        expected_arp_table = {
            'ip_address': fixed_ip['ip_address'],
            'subnet_id': fixed_ip['subnet_id'],
            'mac_address': mac_address}
        notifier = mock.Mock()
        ports = [{'id': _uuid(), 'device_id': 'router_1'},
                 {'id': _uuid(), 'device_id': 'router_2'}]
        with mock.patch.object(self.core_plugin, "get_ports",
                               return_value=ports):
            self.mixin._generate_arp_table_and_notify_agent(
                self.ctx, fixed_ip, mac_address, notifier)
        notifier.assert_has_calls([
            mock.call(self.ctx, "router_1", expected_arp_table),
            mock.call(self.ctx, "router_2", expected_arp_table)])
    def _test_update_arp_entry_for_dvr_service_port(
            self, device_owner, action):
        """Exercise add/del ARP notifications for a service port with three fixed IPs."""
        router_dict = {'name': 'test_router', 'admin_state_up': True,
                       'distributed': True}
        router = self._create_router(router_dict)
        plugin = mock.Mock()
        directory.add_plugin(plugin_constants.CORE, plugin)
        l3_notify = self.mixin.l3_rpc_notifier = mock.Mock()
        port = {
            'id': 'my_port_id',
            'fixed_ips': [
                {'subnet_id': '51edc9e0-24f9-47f2-8e1e-2a41cb691323',
                 'ip_address': '10.0.0.11'},
                {'subnet_id': '2b7c8a07-6f8e-4937-8701-f1d5da1a807c',
                 'ip_address': '10.0.0.21'},
                {'subnet_id': '48534187-f077-4e81-93ff-81ec4cc0ad3b',
                 'ip_address': 'fd45:1515:7e0:0:f816:3eff:fe1a:1111'}],
            'mac_address': 'my_mac',
            'device_owner': device_owner
        }
        dvr_port = {
            'id': 'dvr_port_id',
            'fixed_ips': mock.ANY,
            'device_owner': const.DEVICE_OWNER_DVR_INTERFACE,
            'device_id': router['id']
        }
        plugin.get_ports.return_value = [dvr_port]
        # One notification per fixed IP is expected in either direction.
        if action == 'add':
            self.mixin.update_arp_entry_for_dvr_service_port(
                self.ctx, port)
            self.assertEqual(3, l3_notify.add_arp_entry.call_count)
        elif action == 'del':
            self.mixin.delete_arp_entry_for_dvr_service_port(
                self.ctx, port)
            self.assertEqual(3, l3_notify.del_arp_entry.call_count)
    def test_update_arp_entry_for_dvr_service_port_added(self):
        """ARP entries are added for a new loadbalancer service port."""
        action = 'add'
        device_owner = const.DEVICE_OWNER_LOADBALANCER
        self._test_update_arp_entry_for_dvr_service_port(device_owner, action)
    def test_update_arp_entry_for_dvr_service_port_deleted(self):
        """ARP entries are deleted for a removed loadbalancer service port."""
        action = 'del'
        device_owner = const.DEVICE_OWNER_LOADBALANCER
        self._test_update_arp_entry_for_dvr_service_port(device_owner, action)
    def test_add_router_interface_csnat_ports_failure(self):
        """A csnat port creation failure aborts the interface add, leaving only the gw port."""
        router_dict = {'name': 'test_router', 'admin_state_up': True,
                       'distributed': True}
        router = self._create_router(router_dict)
        with self.network() as net_ext,\
                self.subnet() as subnet:
            ext_net_id = net_ext['network']['id']
            self.core_plugin.update_network(
                self.ctx, ext_net_id,
                {'network': {'router:external': True}})
            self.mixin.update_router(
                self.ctx, router['id'],
                {'router': {'external_gateway_info':
                            {'network_id': ext_net_id}}})
            with mock.patch.object(
                self.mixin, '_add_csnat_router_interface_port') as f:
                f.side_effect = RuntimeError()
                self.assertRaises(
                    l3_exc.RouterInterfaceAttachmentConflict,
                    self.mixin.add_router_interface,
                    self.ctx, router['id'],
                    {'subnet_id': subnet['subnet']['id']})
                filters = {
                    'device_id': [router['id']],
                }
                router_ports = self.core_plugin.get_ports(self.ctx, filters)
                self.assertEqual(1, len(router_ports))
                self.assertEqual(const.DEVICE_OWNER_ROUTER_GW,
                                 router_ports[0]['device_owner'])
    def test_csnat_port_not_created_on_RouterPort_update_exception(self):
        """A RouterPort create failure leaves no orphaned csnat port behind."""
        router_dict = {'name': 'test_router', 'admin_state_up': True,
                       'distributed': True}
        router = self._create_router(router_dict)
        with self.network() as net_ext,\
                self.subnet() as subnet:
            ext_net_id = net_ext['network']['id']
            self.core_plugin.update_network(
                self.ctx, ext_net_id,
                {'network': {'router:external': True}})
            self.mixin.update_router(
                self.ctx, router['id'],
                {'router': {'external_gateway_info':
                            {'network_id': ext_net_id}}})
            net_id = subnet['subnet']['network_id']
            with mock.patch.object(
                router_obj.RouterPort, 'create') as rtrport_update:
                rtrport_update.side_effect = Exception()
                self.assertRaises(
                    l3_exc.RouterInterfaceAttachmentConflict,
                    self.mixin.add_router_interface,
                    self.ctx, router['id'],
                    {'subnet_id': subnet['subnet']['id']})
                filters = {
                    'network_id': [net_id],
                    'device_owner': [const.DEVICE_OWNER_ROUTER_SNAT]
                }
                router_ports = self.core_plugin.get_ports(self.ctx, filters)
                self.assertEqual(0, len(router_ports))
    def test_add_router_interface_by_port_failure(self):
        """A failure during the second port update rolls back the port's device fields."""
        router_dict = {'name': 'test_router',
                       'admin_state_up': True,
                       'distributed': True}
        router = self._create_router(router_dict)
        with self.subnet(cidr='10.10.10.0/24') as subnet:
            port_dict = {
                'device_id': '',
                'device_owner': '',
                'admin_state_up': True,
                'fixed_ips': [{'subnet_id': subnet['subnet']['id'],
                               'ip_address': '10.10.10.100'}]
            }
            net_id = subnet['subnet']['network_id']
            port_res = self.create_port(net_id, port_dict)
            port = self.deserialize(self.fmt, port_res)
            self.assertIn('port', port, message='Create port failed.')

            orig_update_port = self.mixin._core_plugin.update_port
            call_info = {'count': 0}

            def _fake_update_port(*args, **kwargs):
                call_info['count'] += 1
                if call_info['count'] == 2:
                    raise RuntimeError()
                else:
                    return orig_update_port(*args, **kwargs)

            # NOTE(trananhkma): expect that update_port() only raises an error
            # at the 2nd function call (Update owner after actual process
            # again in order).
            with mock.patch.object(self.mixin._core_plugin, 'update_port',
                                   side_effect=_fake_update_port):
                self.assertRaises(
                    RuntimeError,
                    self.mixin.add_router_interface,
                    self.ctx, router['id'], {'port_id': port['port']['id']})
            # expire since we are re-using the session which might have stale
            # ports in it
            self.ctx.session.expire_all()
            port_info = self.core_plugin.get_port(self.ctx, port['port']['id'])
            self.assertEqual(port_dict['device_id'], port_info['device_id'])
            self.assertEqual(port_dict['device_owner'],
                             port_info['device_owner'])
    def test__get_sync_routers_check_gw_port_host(self):
        """gw_port_host is None without an agent binding and the agent host with one."""
        router_dict = {'name': 'test_router', 'admin_state_up': True,
                       'distributed': True}
        router = self._create_router(router_dict)
        with self.network() as public,\
                self.subnet() as subnet:
            ext_net_1_id = public['network']['id']
            self.core_plugin.update_network(
                self.ctx, ext_net_1_id,
                {'network': {'router:external': True}})
            self.mixin.update_router(
                self.ctx, router['id'],
                {'router': {'external_gateway_info':
                            {'network_id': ext_net_1_id}}})
            self.mixin.add_router_interface(self.ctx, router['id'],
                {'subnet_id': subnet['subnet']['id']})

            routers = self.mixin._get_sync_routers(self.ctx,
                                                   router_ids=[router['id']])
            self.assertIsNone(routers[0]['gw_port_host'])

            agent = mock.Mock()
            agent.host = "fake-host"
            bind = mock.Mock()
            bind.l3_agent_id = "fake-id"
            with mock.patch.object(
                    rb_obj.RouterL3AgentBinding, 'get_objects',
                    return_value=[bind]), mock.patch.object(
                    agent_obj.Agent, 'get_object',
                    return_value=agent):
                routers = self.mixin._get_sync_routers(
                    self.ctx, router_ids=[router['id']])
                self.assertEqual("fake-host", routers[0]['gw_port_host'])
    def test_is_router_distributed(self):
        """is_router_distributed reflects the router's 'distributed' flag."""
        router_id = 'router_id'
        with mock.patch.object(self.mixin, 'get_router') as \
                mock_get_router:
            mock_get_router.return_value = {'distributed': True}
            self.assertTrue(
                self.mixin.is_router_distributed(self.ctx, router_id))
    @mock.patch.object(plugin_utils, 'can_port_be_bound_to_virtual_bridge',
                       return_value=True)
    def test__get_assoc_data_valid_vnic_type(self, *args):
        """A bindable VNIC type yields the (port, ip, router) association tuple."""
        with mock.patch.object(self.mixin, '_internal_fip_assoc_data') as \
                mock_fip_assoc_data, \
                mock.patch.object(self.mixin, '_get_router_for_floatingip') \
                as mock_router_fip, \
                mock.patch.object(self.mixin, 'is_router_distributed',
                                  return_value=True):
            port = {portbindings.VNIC_TYPE: portbindings.VNIC_NORMAL}
            mock_fip_assoc_data.return_value = (port, 'subnet_id', 'ip_addr')
            mock_router_fip.return_value = 'router_id'
            fip = {'port_id': 'port_id'}
            self.assertEqual(
                ('port_id', 'ip_addr', 'router_id'),
                self.mixin._get_assoc_data(self.ctx, fip, mock.Mock()))
    @mock.patch.object(plugin_utils, 'can_port_be_bound_to_virtual_bridge',
                       return_value=False)
    def test__get_assoc_data_invalid_vnic_type(self, *args):
        """An unbindable VNIC type on a distributed router raises BadRequest."""
        with mock.patch.object(self.mixin, '_internal_fip_assoc_data') as \
                mock_fip_assoc_data, \
                mock.patch.object(self.mixin, '_get_router_for_floatingip') \
                as mock_router_fip, \
                mock.patch.object(self.mixin, 'is_router_distributed',
                                  return_value=True):
            port = {portbindings.VNIC_TYPE: portbindings.VNIC_NORMAL}
            mock_fip_assoc_data.return_value = (port, 'subnet_id', 'ip_addr')
            mock_router_fip.return_value = 'router_id'
            self.assertRaises(
                exceptions.BadRequest,
                self.mixin._get_assoc_data, self.ctx, mock.ANY, mock.Mock())
| 44.589928 | 79 | 0.58892 |
87cf3345eec16cea8cce3c58fb75d517ca7c520d | 5,574 | py | Python | contrib/seeds/makeseeds.py | Blockchain-Solutions-BCS/bsmcoin-core | 4c63fc8cc57caf23629cf5078719841225a04537 | [
"MIT"
] | null | null | null | contrib/seeds/makeseeds.py | Blockchain-Solutions-BCS/bsmcoin-core | 4c63fc8cc57caf23629cf5078719841225a04537 | [
"MIT"
] | null | null | null | contrib/seeds/makeseeds.py | Blockchain-Solutions-BCS/bsmcoin-core | 4c63fc8cc57caf23629cf5078719841225a04537 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#
# Generate seeds.txt from Pieter's DNS seeder
#
# Maximum number of seed entries to emit.
NSEEDS=512
# At most this many seeds may come from one autonomous system (IPv4 only).
MAX_SEEDS_PER_ASN=2
# Nodes must report at least this block height to be considered synced.
MIN_BLOCKS = 337600
# These are hosts that have been observed to be behaving strangely (e.g.
# aggressively connecting to every node).
SUSPICIOUS_HOSTS = set([
    "130.211.129.106", "178.63.107.226",
    "83.81.130.26", "88.198.17.7", "148.251.238.178", "176.9.46.6",
    "54.173.72.127", "54.174.10.182", "54.183.64.54", "54.194.231.211",
    "54.66.214.167", "54.66.220.137", "54.67.33.14", "54.77.251.214",
    "54.94.195.96", "54.94.200.247"
])
import re
import sys
import dns.resolver
import collections
# Address/agent formats accepted from the seeder dump.
PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$")
PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$")
# Only a known set of reasonably recent Satoshi client versions is accepted.
PATTERN_AGENT = re.compile(r"^(\/Satoshi:0\.8\.6\/|\/Satoshi:0\.9\.(2|3|4|5)\/|\/Satoshi:0\.10\.\d{1,2}\/|\/Satoshi:0\.11\.\d{1,2}\/)$")

def parseline(line):
    """Parse one whitespace-separated line of seeder dump output.

    Returns a dict with the node's network type ('ipv4'/'ipv6'/'onion'),
    address, port, uptime, last-success timestamp, protocol version,
    user agent, service bits, block height and a sort key — or None if
    the line is malformed, the address is invalid, or the seeder marked
    the entry as bad.
    """
    sline = line.split()
    if len(sline) < 11:
        return None
    m = PATTERN_IPV4.match(sline[0])
    sortkey = None
    ip = None
    if m is None:
        m = PATTERN_IPV6.match(sline[0])
        if m is None:
            m = PATTERN_ONION.match(sline[0])
            if m is None:
                return None
            else:
                net = 'onion'
                ipstr = sortkey = m.group(1)
                port = int(m.group(2))
        else:
            net = 'ipv6'
            if m.group(1) in ['::']: # Not interested in localhost
                return None
            ipstr = m.group(1)
            sortkey = ipstr # XXX parse IPv6 into number, could use name_to_ipv6 from generate-seeds
            port = int(m.group(2))
    else:
        # Do IPv4 sanity check
        ip = 0
        for i in range(0,4):
            if int(m.group(i+2)) < 0 or int(m.group(i+2)) > 255:
                return None
            ip = ip + (int(m.group(i+2)) << (8*(3-i)))
        if ip == 0:
            return None
        net = 'ipv4'
        sortkey = ip
        ipstr = m.group(1)
        port = int(m.group(6))
    # Skip bad results.
    # BUG FIX: the split fields are strings, so the previous comparison
    # `sline[1] == 0` was always False and flagged-bad entries were never
    # skipped. Convert to int before comparing.
    if int(sline[1]) == 0:
        return None
    # Extract uptime %.
    uptime30 = float(sline[7][:-1])
    # Extract Unix timestamp of last success.
    lastsuccess = int(sline[2])
    # Extract protocol version.
    version = int(sline[10])
    # Extract user agent (strip surrounding quotes).
    agent = sline[11][1:-1]
    # Extract service flags.
    service = int(sline[9], 16)
    # Extract blocks.
    blocks = int(sline[8])
    # Construct result.
    return {
        'net': net,
        'ip': ipstr,
        'port': port,
        'ipnum': ip,
        'uptime': uptime30,
        'lastsuccess': lastsuccess,
        'version': version,
        'agent': agent,
        'service': service,
        'blocks': blocks,
        'sortkey': sortkey,
    }
def filtermultiport(ips):
    '''Filter out hosts with more nodes per IP'''
    # Count how many entries share each host key, then keep only the
    # entries whose host appears exactly once.
    occurrences = collections.Counter(entry['sortkey'] for entry in ips)
    return [entry for entry in ips if occurrences[entry['sortkey']] == 1]
# Based on Greg Maxwell's seed_filter.py
def filterbyasn(ips, max_per_asn, max_total):
    """Limit IPv4 seeds per autonomous system and in total.

    ASNs are resolved via DNS lookups against origin.asn.cymru.com, so
    this performs live network I/O. IPv6 and onion entries are passed
    through unchanged (appended after the filtered IPv4 list).
    """
    # Sift out ips by type
    ips_ipv4 = [ip for ip in ips if ip['net'] == 'ipv4']
    ips_ipv6 = [ip for ip in ips if ip['net'] == 'ipv6']
    ips_onion = [ip for ip in ips if ip['net'] == 'onion']
    # Filter IPv4 by ASN
    result = []
    asn_count = {}
    for ip in ips_ipv4:
        if len(result) == max_total:
            break
        try:
            # Reverse the octets to build the cymru.com ASN lookup name;
            # the TXT answer starts with the AS number.
            asn = int([x.to_text() for x in dns.resolver.query('.'.join(reversed(ip['ip'].split('.'))) + '.origin.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0])
            if asn not in asn_count:
                asn_count[asn] = 0
            if asn_count[asn] == max_per_asn:
                continue
            asn_count[asn] += 1
            result.append(ip)
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt;
            # consider narrowing to Exception.
            sys.stderr.write('ERR: Could not resolve ASN for "' + ip['ip'] + '"\n')
    # TODO: filter IPv6 by ASN
    # Add back non-IPv4
    result.extend(ips_ipv6)
    result.extend(ips_onion)
    return result
def main():
    """Read seeder dump lines from stdin and print filtered seed addresses.

    NOTE: this file is Python 2 (print statements below). The filter
    pipeline is order-dependent: parse, drop suspicious/unsynced/low-
    uptime/unknown-agent nodes, dedupe multi-port hosts, then cap per
    ASN and globally before printing in deterministic order.
    """
    lines = sys.stdin.readlines()
    ips = [parseline(line) for line in lines]
    # Skip entries with valid address.
    ips = [ip for ip in ips if ip is not None]
    # Skip entries from suspicious hosts.
    ips = [ip for ip in ips if ip['ip'] not in SUSPICIOUS_HOSTS]
    # Enforce minimal number of blocks.
    ips = [ip for ip in ips if ip['blocks'] >= MIN_BLOCKS]
    # Require service bit 1.
    ips = [ip for ip in ips if (ip['service'] & 1) == 1]
    # Require at least 50% 30-day uptime.
    ips = [ip for ip in ips if ip['uptime'] > 50]
    # Require a known and recent user agent.
    ips = [ip for ip in ips if PATTERN_AGENT.match(ip['agent'])]
    # Sort by availability (and use last success as tie breaker)
    ips.sort(key=lambda x: (x['uptime'], x['lastsuccess'], x['ip']), reverse=True)
    # Filter out hosts with multiple bsmcoin ports, these are likely abusive
    ips = filtermultiport(ips)
    # Look up ASNs and limit results, both per ASN and globally.
    ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS)
    # Sort the results by IP address (for deterministic output).
    ips.sort(key=lambda x: (x['net'], x['sortkey']))
    for ip in ips:
        if ip['net'] == 'ipv6':
            print '[%s]:%i' % (ip['ip'], ip['port'])
        else:
            print '%s:%i' % (ip['ip'], ip['port'])
| 32.788235 | 186 | 0.5601 |
73a5a8ae21b10ab20c1cbf5a95dd79df58b5df0d | 3,284 | py | Python | telegram_text2audio_bot.py | Makeroni/text2audiobot | 3667cbe8d50042eab9005cf72865808a52f7e9cc | [
"Apache-2.0"
] | null | null | null | telegram_text2audio_bot.py | Makeroni/text2audiobot | 3667cbe8d50042eab9005cf72865808a52f7e9cc | [
"Apache-2.0"
] | null | null | null | telegram_text2audio_bot.py | Makeroni/text2audiobot | 3667cbe8d50042eab9005cf72865808a52f7e9cc | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Install FFMpeg: apt-get install ffmpeg
# Install Telebot: git clone https://github.com/eternnoir/pyTelegramBotAPI.git
# Install Espeak apt-get install espeak
# Sustituir BASE_PATH por la ruta dodne se van a guardar los textos y los audio
# Crear una carpeta llamada "text" y otra "audio" en BASE_PATH para guardar los textos enviados y los audios generados
import telebot
import subprocess
import os
from datetime import datetime
import requests.packages.urllib3
requests.packages.urllib3.disable_warnings()
# Bot token is kept outside the repository. NOTE(review): the file is read in
# binary mode and stripped with a str argument, so this script is Python 2 only.
TOKEN = open(os.path.realpath('..') + "/tokens_telegram_bots/token_text2audio_bot.txt", 'rb').read().rstrip('\n')
# Base directory; must already contain "text" and "audio" subdirectories.
BASE_PATH = "/media/HDD2/telegram_bot"
def send_message(cid, text):
    """Send a text message to chat *cid* via the module-level bot."""
    bot.send_message(cid, text)
def send_audio(cid, audio):
    """Send an audio file object to chat *cid* via the module-level bot."""
    bot.send_audio(cid, audio)
def reply_to(message, text):
    """Reply to a Telegram *message* with *text* via the module-level bot."""
    bot.reply_to(message, text)
def touch(fname):
    """Update *fname*'s timestamps like touch(1), creating it if missing.

    The previous version only updated existing files and silently ignored
    missing ones, so it never actually "touched" a new file into existence.
    """
    if os.path.exists(fname):
        os.utime(fname, None)
    else:
        # Create an empty file, matching the behavior of the Unix touch command.
        open(fname, 'a').close()
def current_time():
    """Return the current local time formatted as YYYY-MM-DD_HH:MM:SS."""
    return datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
def textToFile(text):
    """Write *text* (plus a newline) to a file under BASE_PATH/text and return its path.

    NOTE(review): the filename uses the module-level `time` captured once at
    startup, so every message written by one bot process reuses the same
    timestamped filename — confirm whether per-message names were intended.
    """
    path_file = BASE_PATH + "/text/speech" + time + ".txt"
    touch(path_file)
    file = open(path_file, "w")
    file.write(str(text) + '\n')
    file.close()
    return path_file
def convertAudio(input_file, output_file, lang):
    """Synthesize *input_file*'s text with espeak and encode it to MP3 at *output_file*.

    *lang* selects the espeak voice: "es" -> spanish, anything else -> US
    English. NOTE(review): the command is run through a shell; the file
    paths are generated internally here, but if a caller ever passes
    user-controlled paths this becomes a shell-injection vector —
    consider subprocess with shell=False.
    """
    language = "english-us"
    if (str(lang) == "es"):
        language = "spanish"
    # espeak writes a WAV stream to stdout, which ffmpeg re-encodes to MP3.
    bashCommand = "/usr/bin/espeak -f " + input_file + " -v " + language + " --stdout | ffmpeg -i - -ar 44100 -ac 2 -ab 192k -f mp3 " + output_file
    os.system(bashCommand)
def system_call(command):
    """Run *command* through the shell and return its stdout with newlines removed.

    NOTE(review): under Python 2 `stdout.read()` returns a byte str, so the
    str-based replace works; this would raise TypeError on Python 3.
    """
    p = subprocess.Popen([command], stdout=subprocess.PIPE, shell=True)
    out = p.stdout.read()
    out = out.replace("\n", "")
    return out
# Timestamp captured once at startup; all files created by this process
# share it (see textToFile). Note this name shadows nothing here since the
# `time` module is not imported.
time = current_time()
bot = telebot.TeleBot(TOKEN)
@bot.message_handler(commands=['start', 'help'])
def send_welcome(message):
    """Reply to /start and /help with usage instructions."""
    reply_to(message, "This is the text to audio bot. Enter your message and I will send an audio with your message")
    reply_to(message, "Use: /convert YOUR TEXT")
    reply_to(message, "Spanish language Use: /convert YOUR TEXT es")
@bot.message_handler(commands=['convert'])
def command_convert(message):
    """Handle /convert: synthesize the message text to MP3 and send it back.

    NOTE(review): the language is detected from the last two characters of
    the text, so any message ending in "es"/"en" (e.g. "apples") loses its
    last two letters and is treated as a language tag — confirm intended.
    NOTE(review): only ValueError is caught below; other failures (I/O,
    Telegram API) propagate to the polling loop.
    """
    cid = message.chat.id
    try:
        check_string = message.text.replace("/convert", "")
        if str(check_string) == '':
            send_message(cid, "Please type a text to convert to audio")
        else:
            language = check_string[-2:].lower()
            final_text = check_string
            if (language == "es" or language == "en"):
                final_text = check_string[:-2]
            file_path = textToFile(final_text)
            # Random 5-char suffix (generated via a shell pipeline) keeps
            # concurrent conversions from clobbering each other's output.
            random_string = system_call("/usr/bin/head /dev/urandom | tr -dc A-Za-z0-9 | /usr/bin/head -c 5 | xargs echo").lower()
            path_file = BASE_PATH + "/audio/audio_" + time + "_" + random_string + ".mp3"
            bot.send_message(cid, "Converting file, please wait...")
            convertAudio(file_path, path_file, language)
            audio = open(path_file, 'rb')
            send_message(cid, "Sending file...")
            send_audio(cid, audio)
            send_message(cid, "File sended !! Thank you !!")
    except ValueError:
        send_message(cid, "Bot error, sorry...try again")
# Start the long-polling loop; none_stop keeps the bot running through API errors.
bot.polling(none_stop=True, interval=0)
| 35.311828 | 147 | 0.66352 |
e916d64944d2f5af541c91a8a3497e7605a1ff50 | 18,549 | py | Python | py/DREAM/Output/KineticQuantity.py | chalmersplasmatheory/DREAM | 715637ada94f5e35db16f23c2fd49bb7401f4a27 | [
"MIT"
] | 12 | 2020-09-07T11:19:10.000Z | 2022-02-17T17:40:19.000Z | py/DREAM/Output/KineticQuantity.py | chalmersplasmatheory/DREAM | 715637ada94f5e35db16f23c2fd49bb7401f4a27 | [
"MIT"
] | 110 | 2020-09-02T15:29:24.000Z | 2022-03-09T09:50:01.000Z | py/DREAM/Output/KineticQuantity.py | chalmersplasmatheory/DREAM | 715637ada94f5e35db16f23c2fd49bb7401f4a27 | [
"MIT"
] | 3 | 2021-05-21T13:24:31.000Z | 2022-02-11T14:43:12.000Z | # Base class for kinetic (radius + momentum + time) quantities
#
import matplotlib.animation as animation
import matplotlib.pyplot as plt
import numpy as np
import scipy
from matplotlib import animation
from . OutputException import OutputException
from . UnknownQuantity import UnknownQuantity
from .. import GeriMap
from .. Settings.MomentumGrid import TYPE_PXI, TYPE_PPARPPERP
class KineticQuantity(UnknownQuantity):
    """
    Base class for kinetic output quantities, i.e. quantities resolved on a
    full (time, radius, momentum2, momentum1) grid. Data is stored with
    shape (nt, nr, np2, np1).
    """

    def __init__(self, name, data, grid, output, momentumgrid=None, attr=None):
        """
        Constructor.

        :param name:         Name of this unknown quantity.
        :param data:         Data array of shape (nt, nr, np2, np1).
        :param grid:         Time/radial grid on which the quantity is defined.
        :param output:       Parent output object.
        :param momentumgrid: Momentum grid on which the quantity is defined.
        :param attr:         Optional list of data attributes.
        """
        # 'attr' previously defaulted to 'list()', a mutable default argument
        # shared between calls; normalize a missing value to a fresh list.
        if attr is None:
            attr = list()
        super(KineticQuantity, self).__init__(name=name, data=data, attr=attr, grid=grid, output=output)

        self.momentumgrid = momentumgrid

        # Determine from the data shape whether each dimension lives on the
        # cell grid or on the flux grid.
        if momentumgrid.p1.size == data.shape[3]:
            self.p1 = momentumgrid.p1
        elif momentumgrid.p1_f.size == data.shape[3]:
            self.p1 = momentumgrid.p1_f
        else:
            raise Exception("Unrecognized shape of data: {}. Expected (nt, nr, np2, np1) = ({}, {}, {}, {}).".format(data.shape, grid.t.size, grid.r.size, momentumgrid.p2.size, momentumgrid.p1.size))
        if momentumgrid.p2.size == data.shape[2]:
            self.p2 = momentumgrid.p2
        elif momentumgrid.p2_f.size == data.shape[2]:
            self.p2 = momentumgrid.p2_f
        else:
            raise Exception("Unrecognized shape of data: {}. Expected (nt, nr, np2, np1) = ({}, {}, {}, {}).".format(data.shape, grid.t.size, grid.r.size, momentumgrid.p2.size, momentumgrid.p1.size))
        if grid.r.size == data.shape[1]:
            self.radius = grid.r
        elif grid.r_f.size == data.shape[1]:
            self.radius = grid.r_f
        else:
            raise Exception("Unrecognized shape of data: {}. Expected (nt, nr, np2, np1) = ({}, {}, {}, {}).".format(data.shape, grid.t.size, grid.r.size, momentumgrid.p2.size, momentumgrid.p1.size))
        self.time = self.grid.t

    def __repr__(self):
        """
        Convert this object to an "official" string.
        """
        return self.__str__()

    def __str__(self):
        """
        Convert this object to a string.
        """
        return '({}) Kinetic quantity of size NT x NR x NP2 x NP1 = {} x {} x {} x {}\n:: {}\n:: Evolved using: {}\n{}'.format(self.name, self.data.shape[0], self.data.shape[1], self.data.shape[2], self.data.shape[3], self.description, self.description_eqn, self.data)

    def __getitem__(self, index):
        """
        Direct access to data.
        """
        return self.data[index]

    def angleAveraged(self, t=None, r=None, moment='distribution'):
        r"""
        Returns the angle-averaged distribution function. Depending on
        the input parameters, the whole or only some parts of the spatiotemporal
        distribution can be angle-averaged.
        This method can only be applied to distributions defined on p/xi
        momentum grids.
        Supported moments:
        - ``distribution``: :math:`\left\langle f \right\rangle_{\xi_0}`
        - ``density``: :math:`\left\langle V'f\right\rangle_{\xi_0}`
        - ``current``: :math:`\left\langle v\xi_0 V' f\right\rangle_{\xi_0}`
        - ...or a vector (or scalar) to weight the distribution function with.
        where :math:`\left\langle X \right\rangle_{\xi_0} = \int_{-1}^1 X\,\mathrm{d}\xi_0`.
        """
        if self.momentumgrid is None or self.momentumgrid.type != TYPE_PXI:
            raise OutputException("The angle average can only be calculated on p/xi grids.")
        if t is None: t = slice(None)
        if r is None: r = slice(None)
        data = self.data[t,r,:]
        if type(moment) == str:
            if moment == 'distribution':
                # Divide by range of xi
                data = data/2
            elif moment == 'density':
                data = data * self.momentumgrid.Vprime_VpVol[r,:]
            elif moment == 'current':
                vPar = self.momentumgrid.getBounceAveragedVpar()
                data = data * vPar[r,:] * self.momentumgrid.Vprime_VpVol[r,:] * scipy.constants.e
        elif type(moment) == float or type(moment) == np.ndarray:
            data = data * moment * self.momentumgrid.Vprime_VpVol[r,:]
        else:
            raise OutputException("Invalid type of parameter 'moment'.")
        # Integrate over xi (the second-to-last dimension) with the grid's
        # pitch-cell widths as quadrature weights.
        favg = np.sum(data * self.momentumgrid.DP2[r,:], axis=data.ndim-2)
        return favg

    def animate(self, keep=None, r=0, ax=None, repeat=False, repeat_delay=None, speed=None, blit=True, moment='distribution', save=None, dpi=None, **kwargs):
        """
        Creates an animation of the time evolution of the angle average of
        this kinetic quantity.

        :param list keep: List of time indices to keep in the plot.
        :param r: Radius to plot angle average for.
        :param ax: Axes object to use for plotting.
        :param bool repeat: If ``True``, repeats animation after it is finished.
        :param int repeat_delay: Number of milliseconds to wait before repeating animation.
        :param int speed: Number of milliseconds to show each frame.
        :param bool blit: If ``True``, use blitting to optimize drawing.
        :param str moment: Moment of distribution function to plot (same values as for :py:meth:`DREAM.Output.KineticQuantity.KineticQuantity.angleAveraged`).
        :param str save: If provided, saves the animation to the named file instead of showing it.
        :param int dpi: Video resolution (if saving animation to file).
        :param kwargs: Keyword arguments passed to ``ax.plot()``.
        """
        show = ax is None
        fig = None
        if ax is None:
            fig, ax = plt.subplots()
        else:
            fig = ax.figure

        favg = self.angleAveraged(r=r, moment=moment)

        def update_ani(num, kq, ax, lines, lbl, favg, keeplines, tfac, tunit, keep):
            lbl.set_text(r't = {:.3f} {}'.format(kq.time[num]*tfac, tunit))
            if keep is not None and num in keep:
                idx = keep.index(num)
            else:
                idx = None
            # Iterate over radii
            n = len(lines)
            for i in range(n):
                if favg.ndim == 3: d = favg[num,i,:]
                else: d = favg[num,:]
                # Update the line belonging to radius 'i'. (The original code
                # updated the loop-closure variable 'line', i.e. only the last
                # created line, for every value of 'i'.)
                lines[i].set_data(kq.p1, d)
                # Keep line after time step has passed?
                if idx is not None:
                    keeplines[idx*n+i].set_data(kq.p1, d)
            # Return all updated artists so blitting redraws every line.
            return tuple(lines) + (lbl,) + tuple(keeplines)

        if speed is None:
            speed = 50
        # Determine number of radii to plot
        if favg.ndim == 3:
            nr = favg.shape[1]
        elif favg.ndim == 2:
            nr = 1
        else:
            raise OutputException("Invalid number of dimensions selected to animate.")
        # Plot at t=0
        colors = GeriMap.get(N=favg.ndim)
        lines = []
        for i in range(nr):
            # Select data to plot
            if favg.ndim == 3: d = favg[0,i,:]
            else: d = favg[0,:]
            if 'color' not in kwargs:
                line, = ax.semilogy(self.p1, d, color=colors(i/(nr+1)), **kwargs)
            else:
                line, = ax.semilogy(self.p1, d, **kwargs)
            lines.append(line)
        # Create placeholders for the 'keep' lines
        keeplines = []
        if keep is not None:
            for i in range(len(keep)):
                for j in range(nr):
                    if 'color' not in kwargs:
                        l, = ax.plot([], [], linewidth=2, color=colors(j/(nr+1)), **kwargs)
                    else:
                        l, = ax.plot([], [], linewidth=2, **kwargs)
                    keeplines.append(l)
        # Set x/y limits
        fmax = np.amax(favg)
        xmin, xmax = self.p1[0], self.p1[-1]
        ymin, ymax = 1e-30*fmax, 10*fmax
        ax.set_xlim([xmin, xmax])
        ax.set_ylim([ymin ,ymax])
        # Determine the relevant time scale
        tmax = self.time[-1]
        idx = 0
        tfac = 1
        tunits = ['s', 'ms', 'µs', 'ns', 'ps']
        while tmax*tfac < 1 and idx < len(tunits)-1:
            idx += 1
            tfac = (1e3)**(idx)
        xp, yp = 0.60, 0.93
        lymin, lymax = np.log10(ymin), np.log10(ymax)
        tx = xmin+xp*(xmax-xmin)
        ty = lymin+yp*(lymax-lymin)
        txt = ax.text(tx, 10**ty, r't = {:.3f} {}'.format(self.time[0]*tfac, tunits[idx]), usetex=False)
        # NOTE(review): the x axis shows momentum (p1), but this label reads
        # 'r/a (m)' -- looks copied from a radial plot; confirm intent.
        ax.set_xlabel(r'$r/a$ (m)')
        # Create the animation
        ani = animation.FuncAnimation(fig, update_ani, frames=self.time.size,
            interval=speed, repeat_delay=repeat_delay, repeat=repeat, blit=blit,
            fargs=(self, ax, lines, txt, favg, keeplines, tfac, tunits[idx], keep))
        # Save animation?
        if save:
            writer = animation.FFMpegFileWriter(fps=1000/speed)
            writer.setup(fig, save, dpi=dpi)
            ani.save(save, writer=writer)
            print("Done saving video to '{}'.".format(save))
        if show:
            plt.show()
        return ani

    def get(self, t=None, r=None, p2=None, p1=None):
        """
        Returns data using the specified indexing. If an argument is ``None``,
        this method will return all elements along that dimension.
        """
        sel = [slice(None)] * 4
        if t is not None: sel[0] = t
        if r is not None: sel[1] = r
        if p2 is not None: sel[2] = p2
        if p1 is not None: sel[3] = p1
        return self.data[tuple(sel)]

    def moment(self, weight, t=None, r=None):
        """
        Evaluate a moment of this distribution function with the given
        weighting factor, i.e. integrate ``weight * f`` over momentum space
        at the selected time and radial indices.

        :param weight: Scalar or array broadcastable to (nt, nr, np2, np1).
        :param t:      Time index/indices to evaluate at (default: all).
        :param r:      Radial index/indices to evaluate at (default: all).
        """
        if t is None:
            t = range(len(self.time))
        if r is None:
            r = range(len(self.grid.r))
        if np.isscalar(t):
            t = np.asarray([t])
        if np.isscalar(r):
            r = np.asarray([r])
        if np.ndim(weight) != 4:
            # Broadcast the weight up to the full (nt, nr, np2, np1) shape.
            # (Using an elif chain: after the ndim==0 branch the weight is
            # already 4D, so the later branches must not re-trigger.)
            _weight = np.ones((self.time.size,self.grid.r.size,self.momentumgrid.p2.size,self.momentumgrid.p1.size))
            if np.ndim(weight) == 0:
                weight = _weight*weight
            elif np.ndim(weight) == 1:
                weight = _weight*weight[np.newaxis,np.newaxis,np.newaxis,:]
            elif np.ndim(weight) == 2:
                weight = _weight*weight[np.newaxis,np.newaxis,:]
            elif np.ndim(weight) == 3:
                weight = _weight*weight[np.newaxis,:]
        q = []
        for iT in range(len(t)):
            qr = []
            for iR in range(len(r)):
                qr.append(self.momentumgrid.integrate2D(self.data[t[iT],r[iR],:] * weight[t[iT],r[iR],:])[0])
            q.append(qr)
        return np.asarray(q)

    def plot(self, t=-1, r=0, ax=None, show=None, logarithmic=False, coordinates=None, **kwargs):
        """
        Visualize this kinetic quantity at one time and radius using a filled
        contour plot.

        :param t: Time index to visualize quantity at.
        :param r: Radial index to visualize quantity at.
        :param ax: Matplotlib Axes object to draw plot on.
        :param show: If ``True``, calls ``matplotlib.pyplot.show()`` with ``block=False`` after plotting the quantity.
        :param logarithmic: If ``True``, plots the base-10 logarithm of the quantity.
        :param coordinates: Determines which momentum coordinate system to use.
        :param kwargs: Keyword arguments passed on to ``matplotlib.Axes.contourf()`` method.
        """
        if self.momentumgrid is None:
            raise OutputException("Unable to plot kinetic quantity as its momentum grid has not been specified.")
        genax = ax is None
        if genax:
            ax = plt.axes()
            if show is None:
                show = True
        data = None
        if logarithmic:
            data = np.log10(self.data[t,r,:])
        else:
            data = self.data[t,r,:]
        if data.ndim != 2:
            raise OutputException("Data dimensionality is too high. Unable to visualize kinetic quantity.")
        if coordinates is None:
            cp = ax.contourf(self.p1, self.p2, data, cmap='GeriMap', **kwargs)
            ax.set_xlabel(self.momentumgrid.getP1TeXName())
            ax.set_ylabel(self.momentumgrid.getP2TeXName())
        # Accept 'spherical' or 'spherica' or 'spheric' or ... 's':
        elif coordinates == 'spherical'[:len(coordinates)]:
            cp = ax.contourf(self.momentumgrid.P, self.momentumgrid.XI, data, cmap='GeriMap', **kwargs)
            ax.set_xlabel(r'$p$')
            ax.set_ylabel(r'$\xi$')
        elif coordinates == 'cylindrical'[:len(coordinates)]:
            cp = ax.contourf(self.momentumgrid.PPAR, self.momentumgrid.PPERP, data, cmap='GeriMap', **kwargs)
            ax.set_xlabel(r'$p_\parallel$')
            ax.set_ylabel(r'$p_\perp$')
        else:
            raise OutputException("Unrecognized coordinate type: '{}'.".format(coordinates))
        cb = None
        if genax:
            cb = plt.colorbar(mappable=cp, ax=ax)
        if show:
            plt.show(block=False)
        return ax

    def plotPolar(self, t=-1, r=0, ax=None, show=None, colorbar=True, displayGrid=False, logarithmic=False, thetaMin=0, thetaMax=np.pi, maxMinScale=True, **kwargs):
        """
        Plot this kinetic quantity on a polar axis.

        t:           Time index to plot
        ax:          Matplotlib axes object to use for plotting.
        show:        If 'True', shows the plot immediately via a call to
                     'matplotlib.pyplot.show()' with 'block=False'. If
                     'None', this is interpreted as 'True' if 'ax' is
                     also 'None'.
        colorbar:    Specify wether or not to include a colorbar
        displayGrid: Specify wether or not to display a polar grid in the plot
        logarithmic: If 'True', plot logarithm of the data
        thetaMin:    Minimum pitch angle included in the plot
        thetaMax:    Maximum pitch angle included in the plot
        maxMinScale: If 'True', set tha max and min of the color scale to the
                     maximum and minimum values of the data stored by this object
                     over all time steps at the radial grid point specified by r

        RETURNS a matplotlib axis object and a colorbar object
        (which may be 'None' if not used).
        """
        if self.momentumgrid is None:
            raise OutputException("Unable to plot kinetic quantity as its momentum grid has not been specified.")
        # As we sometimes do not have very many xi-points, the field line parallel direction can
        # look "empty" if we just plot for the xi-points in the center of the grid cells
        # Therefore, we extend the pitch-angle grid to cover a whole round (with the first and
        # last point at the same angle) and then extend the data accordingly, so that contourf
        # interpolates over the field line parallel direction
        xi=self.momentumgrid.XI
        pitch_angle=np.concatenate((-np.arccos(xi),np.flip(np.arccos(xi))))
        pitch_angle=np.concatenate([pitch_angle,pitch_angle[:1]])
        p=self.momentumgrid.P
        p=np.concatenate((p,p))
        p=np.concatenate([p,p[:1]])
        genax = ax is None
        if genax:
            ax = plt.subplot(polar=True)
            ax.set_facecolor('k')
            ax.set_ylim([p[0,0],p[0,-1]])
            ax.set_xlim([thetaMin,thetaMax])
            if not displayGrid:
                ax.grid(None)
                ax.set_yticklabels([])
                ax.set_xticklabels([])
            if show is None:
                show = True
        data = None
        if logarithmic:
            data = np.log10(self.data[t,r,:])
        else:
            data = self.data[t,r,:]
        if data.ndim != 2:
            raise OutputException("Data dimensionality is too high. Unable to visualize kinetic quantity.")
        # Duplicate data in accordance with the extension of the pitch angle grid
        data_plot=np.concatenate((data,np.flip(data,0)))
        data_plot=np.concatenate((data_plot,data_plot[:1]))
        if maxMinScale:
            if logarithmic:
                cp=ax.contourf(pitch_angle,p,data_plot,cmap='GeriMap',levels=np.linspace(np.log10(np.min(np.abs(self.data[:,r,:]))),np.log10(np.max(np.abs(self.data[:,r,:])))),**kwargs)
            else:
                cp=ax.contourf(pitch_angle,p,data_plot,cmap='GeriMap',levels=np.linspace(np.min(self.data[:,r,:]),np.max(self.data[:,r,:])),**kwargs)
        else:
            cp=ax.contourf(pitch_angle,p,data_plot,cmap='GeriMap',**kwargs)
        cb = None
        if colorbar:
            cb = plt.colorbar(mappable=cp, ax=ax)
        if show:
            plt.show(block=False)
        return ax, cb

    def animatePolar(self, t=None, repeat=False, repeat_delay=None, speed=None, dpi=100, save=None,**kwargs):
        """
        Make an animation of poloidal plots of the present quantity,
        including the specified time steps.

        :param slice t: time steps to include in the animation
        :param bool repeat: If ``True``, repeats the animation.
        :param int repeat_delay: Time between consecutive animation runs in milliseconds
        :param int speed: delay between frames in milliseconds
        :param int dpi: animation resolution
        :param str save: title of the file (if any) into which the animation is saved
        """
        fig, ax = plt.subplots(1,1)
        if t is None:
            t = range(len(self.grid.t))
        ax, cb = self.plotPolar(show=False, t=0, **kwargs)

        def update_ani(t, kq, ax):
            ax.clear()
            # plotPolar() returns (axes, colorbar); keep only the axes.
            # (The original assigned the whole tuple to 'ax'.)
            ax, _ = kq.plotPolar(colorbar=False, show=False, t=t, **kwargs)

        # Create the animation
        ani = animation.FuncAnimation(fig, update_ani, frames=t,
            repeat=repeat, repeat_delay=repeat_delay, interval=speed,
            fargs=(self, ax))

        if save:
            # Derive the frame rate from the frame interval; 'fps' was
            # previously an undefined name here, raising NameError.
            fps = 1000.0/speed if speed else 30
            writer = animation.FFMpegFileWriter(fps=fps)
            writer.setup(fig, save, dpi=dpi)
            ani.save(save, writer=writer)
            print("Done saving video to '{}'.".format(save))
        plt.show()
| 38.166667 | 268 | 0.564181 |
daf05ae792e49e7232a8bfb981d5ef79bf27f2ed | 905 | py | Python | problems/leetcode/lt-912.py | neerajp99/algorithms | 1d6885d2a895821ac511fa8a46913d34db2511ca | [
"MIT"
] | 1 | 2021-06-17T07:59:42.000Z | 2021-06-17T07:59:42.000Z | problems/leetcode/lt-912.py | neerajp99/algorithms | 1d6885d2a895821ac511fa8a46913d34db2511ca | [
"MIT"
] | null | null | null | problems/leetcode/lt-912.py | neerajp99/algorithms | 1d6885d2a895821ac511fa8a46913d34db2511ca | [
"MIT"
] | 1 | 2022-01-13T08:42:31.000Z | 2022-01-13T08:42:31.000Z | # 912. Sort an Array
"""
Given an array of integers nums, sort the array in ascending order.
Input: nums = [5,2,3,1]
Output: [1,2,3,5]
"""
# Using Merge Sort
class Solution:
    def sortArray(self, nums: List[int]) -> List[int]:
        """Sort ``nums`` in ascending order using a stable merge sort.

        Running time: O(n log n), extra space O(n). The original merged via
        ``list.pop(0)``, which is O(n) per call and made the merge step
        quadratic; this version merges with two indices instead.
        """
        def merge(left: List[int], right: List[int]) -> List[int]:
            # Merge two sorted lists; '<=' keeps the sort stable.
            merged = []
            i = j = 0
            while i < len(left) and j < len(right):
                if left[i] <= right[j]:
                    merged.append(left[i])
                    i += 1
                else:
                    merged.append(right[j])
                    j += 1
            merged.extend(left[i:])
            merged.extend(right[j:])
            return merged

        def merge_sort(items: List[int]) -> List[int]:
            # Base case: lists of length <= 1 are already sorted.
            if len(items) <= 1:
                return items
            mid = len(items) // 2
            return merge(merge_sort(items[:mid]), merge_sort(items[mid:]))

        return merge_sort(nums)
012c8cb8c097ee1af275eeaf94f8033b99810185 | 10,547 | py | Python | test/hooks/test_hooks.py | bronsen/schemathesis | bf60043af9f9271dddd7bb74a4fa55b94a2fa27e | [
"MIT"
] | null | null | null | test/hooks/test_hooks.py | bronsen/schemathesis | bf60043af9f9271dddd7bb74a4fa55b94a2fa27e | [
"MIT"
] | null | null | null | test/hooks/test_hooks.py | bronsen/schemathesis | bf60043af9f9271dddd7bb74a4fa55b94a2fa27e | [
"MIT"
] | null | null | null | import pytest
from hypothesis import HealthCheck, given, settings
import schemathesis
from schemathesis.hooks import HookContext, HookDispatcher, HookScope
@pytest.fixture(autouse=True)
def reset_hooks():
    """Unregister all global hooks after each test so they don't leak."""
    yield
    schemathesis.hooks.unregister_all()
@pytest.fixture(params=["direct", "named"])
def global_hook(request):
    """Register a global ``before_generate_query`` hook keeping numeric ids.

    Parametrized over both registration styles: directly by function name,
    and by passing the hook name to ``register``.
    """
    if request.param == "direct":
        @schemathesis.hooks.register
        def before_generate_query(context, strategy):
            return strategy.filter(lambda x: x["id"].isdigit())
    if request.param == "named":
        @schemathesis.hooks.register("before_generate_query")
        def hook(context, strategy):
            return strategy.filter(lambda x: x["id"].isdigit())
@pytest.fixture
def schema(flask_app):
    """Schemathesis schema loaded from the test Flask (WSGI) application."""
    return schemathesis.from_wsgi("/schema.yaml", flask_app)
@pytest.fixture()
def dispatcher():
    """Fresh hook dispatcher with SCHEMA scope."""
    return HookDispatcher(scope=HookScope.SCHEMA)
@pytest.mark.hypothesis_nested
@pytest.mark.operations("custom_format")
@pytest.mark.usefixtures("global_hook")
def test_global_query_hook(schema, schema_url):
    """A globally registered query hook filters generated query parameters."""
    strategy = schema["/custom_format"]["GET"].as_strategy()
    @given(case=strategy)
    @settings(max_examples=3)
    def test(case):
        assert case.query["id"].isdigit()
    test()
@pytest.mark.hypothesis_nested
@pytest.mark.operations("payload")
def test_global_body_hook(schema):
    """A global ``before_generate_body`` hook constrains generated bodies."""
    @schemathesis.hooks.register
    def before_generate_body(context, strategy):
        return strategy.filter(lambda x: len(x["name"]) == 5)
    strategy = schema["/payload"]["POST"].as_strategy()
    @given(case=strategy)
    @settings(max_examples=3, suppress_health_check=[HealthCheck.filter_too_much])
    def test(case):
        assert len(case.body["name"]) == 5
    test()
@pytest.mark.hypothesis_nested
@pytest.mark.operations("create_user")
def test_case_hook(schema):
    """Test-scoped and global ``before_generate_case`` hooks both apply to a case."""
    dispatcher = HookDispatcher(scope=HookScope.TEST)
    @dispatcher.register
    def before_generate_case(context, strategy):
        def tune_case(case):
            case.body["extra"] = 42
            return case
        return strategy.map(tune_case)
    @schemathesis.hooks.register
    def before_generate_case(context, strategy):
        def tune_case(case):
            case.body["first_name"] = case.body["last_name"]
            return case
        return strategy.map(tune_case)
    strategy = schema["/users/"]["POST"].as_strategy(hooks=dispatcher)
    @given(case=strategy)
    @settings(max_examples=10, suppress_health_check=[HealthCheck.filter_too_much])
    def test(case):
        assert case.body["first_name"] == case.body["last_name"]
        assert case.body["extra"] == 42
    test()
@pytest.mark.hypothesis_nested
@pytest.mark.operations("custom_format")
def test_schema_query_hook(schema, schema_url):
    """A schema-level query hook filters generated query parameters."""
    @schema.hooks.register
    def before_generate_query(context, strategy):
        return strategy.filter(lambda x: x["id"].isdigit())
    strategy = schema["/custom_format"]["GET"].as_strategy()
    @given(case=strategy)
    @settings(max_examples=3)
    def test(case):
        assert case.query["id"].isdigit()
    test()
@pytest.mark.hypothesis_nested
@pytest.mark.usefixtures("global_hook")
@pytest.mark.operations("custom_format")
def test_hooks_combination(schema, schema_url):
    """Global and schema-level hooks for the same spec compose (both filters apply)."""
    @schema.hooks.register("before_generate_query")
    def extra(context, st):
        assert context.operation == schema["/custom_format"]["GET"]
        return st.filter(lambda x: int(x["id"]) % 2 == 0)
    strategy = schema["/custom_format"]["GET"].as_strategy()
    @given(case=strategy)
    @settings(max_examples=3)
    def test(case):
        assert case.query["id"].isdigit()
        assert int(case.query["id"]) % 2 == 0
    test()
def test_per_test_hooks(testdir, simple_openapi):
    """Per-test hooks via ``schema.hooks.apply`` work in any decorator order,
    stack with priority, and do not affect undecorated tests."""
    testdir.make_test(
        """
from hypothesis import strategies as st
def replacement(context, strategy):
    return st.just({"id": "foobar"})
@schema.hooks.apply(replacement, name="before_generate_query")
@schema.parametrize()
@settings(max_examples=1)
def test_a(case):
    assert case.query["id"] == "foobar"
@schema.parametrize()
@schema.hooks.apply(replacement, name="before_generate_query")
@settings(max_examples=1)
def test_b(case):
    assert case.query["id"] == "foobar"
def another_replacement(context, strategy):
    return st.just({"id": "foobaz"})
def before_generate_headers(context, strategy):
    return st.just({"value": "spam"})
@schema.parametrize()
@schema.hooks.apply(another_replacement, name="before_generate_query") # Higher priority
@schema.hooks.apply(replacement, name="before_generate_query")
@schema.hooks.apply(before_generate_headers)
@settings(max_examples=1)
def test_c(case):
    assert case.query["id"] == "foobaz"
    assert case.headers["value"] == "spam"
@schema.parametrize()
@settings(max_examples=1)
def test_d(case):
    assert case.query["id"] != "foobar"
""",
        schema=simple_openapi,
    )
    result = testdir.runpytest()
    result.assert_outcomes(passed=4)
def test_hooks_via_parametrize(testdir, simple_openapi):
    """Schema-level hooks registered before ``parametrize`` are visible from the test case."""
    testdir.make_test(
        """
@schema.hooks.register("before_generate_query")
def extra(context, st):
    return st.filter(lambda x: x["id"].isdigit() and int(x["id"]) % 2 == 0)
@schema.parametrize()
@settings(max_examples=1)
def test(case):
    assert case.operation.schema.hooks.get_all_by_name("before_generate_query")[0] is extra
    assert int(case.query["id"]) % 2 == 0
""",
        schema=simple_openapi,
    )
    result = testdir.runpytest()
    result.assert_outcomes(passed=1)
def test_register_invalid_hook_name(dispatcher):
    """Registering a function whose name matches no known hook raises TypeError."""
    with pytest.raises(TypeError, match="There is no hook with name 'hook'"):
        @dispatcher.register
        def hook():
            pass
def test_register_invalid_hook_spec(dispatcher):
    """Registering a hook with the wrong number of arguments raises TypeError."""
    with pytest.raises(TypeError, match="Hook 'before_generate_query' takes 2 arguments but 3 is defined"):
        @dispatcher.register
        def before_generate_query(a, b, c):
            pass
def test_save_test_function(schema):
    """``parametrize`` stores the decorated test function on the wrapped schema."""
    assert schema.test_function is None
    @schema.parametrize()
    def test(case):
        pass
    assert test._schemathesis_test.test_function is test
@pytest.mark.parametrize("apply_first", (True, False))
def test_local_dispatcher(schema, apply_first):
    """Per-test hooks go to a TEST-scope dispatcher, separate from schema-level
    hooks, regardless of the decorator order."""
    assert schema.hooks.scope == HookScope.SCHEMA
    # When there are schema-level hooks
    @schema.hooks.register("before_generate_query")
    def schema_hook(context, strategy):
        return strategy
    # And per-test hooks are applied
    def local_hook(context, strategy):
        return strategy
    # And order of decorators is any
    apply = schema.hooks.apply(local_hook, name="before_generate_cookies")
    parametrize = schema.parametrize()
    if apply_first:
        wrap = lambda x: parametrize(apply(x))
    else:
        wrap = lambda x: apply(parametrize(x))
    @wrap
    def test(case):
        pass
    # Then a hook dispatcher instance is attached to the test function
    assert isinstance(test._schemathesis_hooks, HookDispatcher)
    assert test._schemathesis_hooks.scope == HookScope.TEST
    # And this dispatcher contains only local hooks
    assert test._schemathesis_hooks.get_all_by_name("before_generate_cookies") == [local_hook]
    assert test._schemathesis_hooks.get_all_by_name("before_generate_query") == []
    # And the schema-level dispatcher still contains only schema-level hooks
    assert test._schemathesis_test.hooks.get_all_by_name("before_generate_query") == [schema_hook]
    assert test._schemathesis_test.hooks.get_all_by_name("before_generate_cookies") == []
@pytest.mark.hypothesis_nested
@pytest.mark.operations("custom_format")
def test_multiple_hooks_per_spec(schema):
    """Multiple hooks registered for the same spec run in registration order."""
    @schema.hooks.register("before_generate_query")
    def first_hook(context, strategy):
        return strategy.filter(lambda x: x["id"].isdigit())
    @schema.hooks.register("before_generate_query")
    def second_hook(context, strategy):
        return strategy.filter(lambda x: int(x["id"]) % 2 == 0)
    assert schema.hooks.get_all_by_name("before_generate_query") == [first_hook, second_hook]
    strategy = schema["/custom_format"]["GET"].as_strategy()
    @given(case=strategy)
    @settings(max_examples=3)
    def test(case):
        assert case.query["id"].isdigit()
        assert int(case.query["id"]) % 2 == 0
    test()
@pytest.mark.hypothesis_nested
@pytest.mark.operations("custom_format")
def test_before_process_path_hook(schema):
    """``before_process_path`` may mutate the raw path definition before generation."""
    @schema.hooks.register
    def before_process_path(context, path, methods):
        methods["get"]["parameters"][0]["name"] = "foo"
        methods["get"]["parameters"][0]["enum"] = ["bar"]
    strategy = schema["/custom_format"]["GET"].as_strategy()
    @given(case=strategy)
    @settings(max_examples=3)
    def test(case):
        assert case.query == {"foo": "bar"}
    test()
def test_register_wrong_scope(schema):
    """Registering a GLOBAL-scope hook on a SCHEMA-scope dispatcher raises ValueError."""
    with pytest.raises(
        ValueError,
        match=r"Cannot register hook 'before_load_schema' on SCHEMA scope dispatcher. "
        r"Use a dispatcher with GLOBAL scope\(s\) instead",
    ):
        @schema.hooks.register
        def before_load_schema(ctx, raw_schema):
            pass
def test_before_add_examples(testdir, simple_openapi):
    """``before_add_examples`` hooks can inject extra explicit examples; per-test
    hooks run before schema-level ones."""
    testdir.make_test(
        """
@schema.hooks.register
def before_add_examples(context, examples):
    new = schemathesis.models.Case(
        operation=context.operation,
        query={"foo": "bar"}
    )
    examples.append(new)
@schema.parametrize()
@settings(phases=[Phase.explicit])
def test_a(case):
    assert case.query == {"foo": "bar"}
def another_hook(context, examples):
    new = schemathesis.models.Case(
        operation=context.operation,
        query={"spam": "baz"}
    )
    examples.append(new)
IDX = 0
@schema.parametrize()
@schema.hooks.apply(another_hook, name="before_add_examples")
@settings(phases=[Phase.explicit])
def test_b(case):
    global IDX
    if IDX == 0:
        assert case.query == {"spam": "baz"}
    if IDX == 1:
        assert case.query == {"foo": "bar"}
    IDX += 1
""",
        schema=simple_openapi,
    )
    result = testdir.runpytest()
    result.assert_outcomes(passed=2)
def test_deprecated_attribute():
    """``HookContext.endpoint`` still works but emits a deprecation warning."""
    context = HookContext(1)
    with pytest.warns(None) as records:
        assert context.endpoint == context.operation == 1
    assert str(records[0].message) == (
        "Property `endpoint` is deprecated and will be removed in Schemathesis 4.0. Use `operation` instead."
    )
| 28.428571 | 109 | 0.690054 |
137e4ec8501c79515e4d19f2c30776ecf5c26293 | 1,044 | py | Python | packer.py | kyReid/Password-Cracker | 2cdc3e23d7bf6ddc924b791458f653d2d3d3c758 | [
"MIT"
] | null | null | null | packer.py | kyReid/Password-Cracker | 2cdc3e23d7bf6ddc924b791458f653d2d3d3c758 | [
"MIT"
] | null | null | null | packer.py | kyReid/Password-Cracker | 2cdc3e23d7bf6ddc924b791458f653d2d3d3c758 | [
"MIT"
] | null | null | null | import packer.bruteforce
import packer.dictionary
import packer.parse

# Entry point: parse CLI arguments and dispatch to the selected attack mode.
args = packer.parse.get_args()
if args.dict:
    # Dictionary attack: try every candidate from the supplied wordlist.
    if args.plock_file.lower().endswith('.zip'):
        packer.dictionary.dictionary_zip(args.wordlist, args.plock_file)
    if args.plock_file.lower().endswith('.pdf'):
        packer.dictionary.dictionary_pdf(args.wordlist, args.plock_file)
# warning...this can take a long time when using Combo_type is above 12
elif args.brute:
    # Brute-force attack: generate candidate character combinations.
    if args.plock_file.lower().endswith('.zip'):
        ascii_combo = packer.bruteforce.brute_force_combinations(
            args.combo_type)
        packer.bruteforce.brute_force_zip(
            args.plock_file, args.password_length, ascii_combo)
    if args.plock_file.lower().endswith('.pdf'):
        ascii_combo = packer.bruteforce.brute_force_combinations(
            args.combo_type)
        packer.bruteforce.brute_force_pdf(
            args.plock_file, args.password_length, ascii_combo)
else:
    # The stray "Red" second argument (a leftover from a colored-print
    # helper) was printed verbatim after the message; drop it.
    print("Flag Error: please input an attack flag '-b' or '-d'")
    exit(-1)
| 38.666667 | 72 | 0.704023 |
41b14fe71949899b13ea65dfd2712ae05cc2752e | 345 | py | Python | solutions/1165_single_row_keyboard.py | YiqunPeng/leetcode_pro | 7e6376984f9baec49a5e827d98330fe3d1b656f0 | [
"MIT"
] | null | null | null | solutions/1165_single_row_keyboard.py | YiqunPeng/leetcode_pro | 7e6376984f9baec49a5e827d98330fe3d1b656f0 | [
"MIT"
] | null | null | null | solutions/1165_single_row_keyboard.py | YiqunPeng/leetcode_pro | 7e6376984f9baec49a5e827d98330fe3d1b656f0 | [
"MIT"
] | null | null | null | class Solution:
def calculateTime(self, keyboard: str, word: str) -> int:
"""Hash table.
Running time: O(n) where n is the length of word.
"""
d = {keyboard[i]:i for i in range(26)}
res = 0
p = 0
for w in word:
res += abs(d[w] - p)
p = d[w]
return res
| 24.642857 | 61 | 0.463768 |
670ef9258ef90deaf95d1c423887ea1166b8e654 | 1,205 | py | Python | src/keri/app/cli/commands/sign.py | pfeairheller/keripy | a765caf5f99a0b909ddcd5f0c67df4e14e2cde8f | [
"Apache-2.0"
] | 10 | 2021-06-09T16:15:32.000Z | 2022-03-28T22:14:11.000Z | src/keri/app/cli/commands/sign.py | pfeairheller/keripy | a765caf5f99a0b909ddcd5f0c67df4e14e2cde8f | [
"Apache-2.0"
] | 47 | 2021-06-17T20:00:02.000Z | 2022-03-31T20:20:44.000Z | src/keri/app/cli/commands/sign.py | pfeairheller/keripy | a765caf5f99a0b909ddcd5f0c67df4e14e2cde8f | [
"Apache-2.0"
] | 6 | 2021-06-10T11:24:25.000Z | 2022-01-28T08:07:43.000Z | # -*- encoding: utf-8 -*-
"""
KERI
keri.kli.commands module
"""
import argparse
from keri import kering
from keri.app import habbing
# CLI wiring for the 'sign' subcommand: build the argument parser and
# dispatch parsed arguments to sign() via the 'handler' default.
parser = argparse.ArgumentParser(description='Sign an arbitrary string')
parser.set_defaults(handler=lambda args: sign(args))
parser.add_argument('--name', '-n', help='Human readable reference', required=True)
parser.add_argument('--text', '-t', help='Text or file (starts with "@") to sign', required=True)
def sign(args):
    """Sign an arbitrary string (or file contents) with the named identifier.

    Opens the habitat for ``args.name``, signs ``args.text`` -- or, when the
    text starts with '@', the contents of the referenced file -- and prints
    each resulting signature in qb64 form. Prints a friendly message when
    the prefix has not been incepted yet or the referenced file is missing.
    """
    name = args.name
    try:
        with habbing.existingHab(name=name) as hab:
            txt = args.text
            if txt.startswith("@"):
                # Read the payload from the referenced file. Use a context
                # manager; the original left the file handle open (leak).
                with open(txt[1:], "r") as f:
                    data = f.read()
            else:
                data = txt
            sigers = hab.mgr.sign(ser=data.encode("utf-8"),
                                  verfers=hab.kever.verfers,
                                  indexed=True)
            for idx, siger in enumerate(sigers):
                print("{}. {}".format(idx+1, siger.qb64))
    except kering.ConfigurationError:
        print(f"prefix for {name} does not exist, incept must be run first")
    except FileNotFoundError:
        print("unable to open file", args.text[1:])
| 28.023256 | 97 | 0.575104 |
473aafca6ff0d9127f70d2fa5765158e0d915024 | 91 | py | Python | src/villages/apps.py | pwelzel/bornhack-website | af794e6a2fba06e09626259c7768feb30ff394be | [
"BSD-3-Clause"
] | null | null | null | src/villages/apps.py | pwelzel/bornhack-website | af794e6a2fba06e09626259c7768feb30ff394be | [
"BSD-3-Clause"
] | null | null | null | src/villages/apps.py | pwelzel/bornhack-website | af794e6a2fba06e09626259c7768feb30ff394be | [
"BSD-3-Clause"
] | null | null | null | from django.apps import AppConfig
class VillagesConfig(AppConfig):
    """Django application configuration for the ``villages`` app."""
    # Full Python path label under which Django registers this application.
    name = 'villages'
| 15.166667 | 33 | 0.758242 |
3e6452b76c59b02a983f7e38a04d42ff6e9c3e5d | 1,069 | py | Python | liminal/runners/airflow/dag/__init__.py | ZionCervello/incubator-liminal | 6be6e6ac8216267b4aeb18cace43a019d2005003 | [
"Apache-2.0"
] | 107 | 2020-07-27T07:26:27.000Z | 2022-03-30T14:29:34.000Z | liminal/runners/airflow/dag/__init__.py | ZionCervello/incubator-liminal | 6be6e6ac8216267b4aeb18cace43a019d2005003 | [
"Apache-2.0"
] | 29 | 2020-10-20T13:13:18.000Z | 2022-01-20T09:25:26.000Z | liminal/runners/airflow/dag/__init__.py | ZionCervello/incubator-liminal | 6be6e6ac8216267b4aeb18cace43a019d2005003 | [
"Apache-2.0"
] | 38 | 2020-07-26T17:07:57.000Z | 2022-03-07T21:56:45.000Z | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
from liminal.core import environment as env
from liminal.runners.airflow.dag import liminal_register_dags
# Directory from which liminal pipeline definitions are loaded:
# <airflow home>/<default pipelines subdirectory>.
BASE_PATH = os.path.join(env.get_airflow_home_dir(), env.DEFAULT_PIPELINES_SUBDIR)
def register_dags():
    """Register all liminal DAGs found under BASE_PATH with Airflow."""
    return liminal_register_dags.register_dags(BASE_PATH)
| 38.178571 | 82 | 0.786717 |
8ece34409021689ec1b4c671613859e6a37bf6c3 | 5,069 | py | Python | scripts/devops_tasks/install_python_version.py | praveenkuttappan/azure-sdk-for-python | 4b79413667b7539750a6c7dde15737013a3d4bd5 | [
"MIT"
] | 2,728 | 2015-01-09T10:19:32.000Z | 2022-03-31T14:50:33.000Z | scripts/devops_tasks/install_python_version.py | v-xuto/azure-sdk-for-python | 9c6296d22094c5ede410bc83749e8df8694ccacc | [
"MIT"
] | 17,773 | 2015-01-05T15:57:17.000Z | 2022-03-31T23:50:25.000Z | scripts/devops_tasks/install_python_version.py | v-xuto/azure-sdk-for-python | 9c6296d22094c5ede410bc83749e8df8694ccacc | [
"MIT"
] | 1,916 | 2015-01-19T05:05:41.000Z | 2022-03-31T19:36:44.000Z | import platform
import json
import argparse
import urllib
import urllib.request
from subprocess import check_call, CalledProcessError
import sys
import os
import zipfile
import tarfile
import time
from packaging.version import Version
from packaging.version import parse
from packaging.version import InvalidVersion
# SOURCE OF THIS FILE: https://github.com/actions/python-versions
# this is the official mapping file for gh-actions to retrieve python installers
MANIFEST_LOCATION = "https://raw.githubusercontent.com/actions/python-versions/main/versions-manifest.json"
MAX_INSTALLER_RETRY = 3
CURRENT_UBUNTU_VERSION = "20.04" # full title is ubuntu-20.04
MAX_PRECACHED_VERSION = "3.9.0"
UNIX_INSTALL_ARRAY = ["sh", "setup.sh"]
WIN_INSTALL_ARRAY = ["pwsh", "setup.ps1"]
def download_installer(remote_path, local_path):
    """Fetch the installer at ``remote_path`` into ``local_path``, retrying on failure.

    Up to MAX_INSTALLER_RETRY attempts are made, sleeping 10 seconds between
    them; once the retry budget is exhausted the process exits with code 1.
    """
    attempt = 0
    while True:
        try:
            urllib.request.urlretrieve(remote_path, local_path)
            return
        except Exception as exc:
            print(exc)
            attempt += 1
            if attempt >= MAX_INSTALLER_RETRY:
                print(
                    "Unable to recover after attempting to download {} {} times".format(
                        remote_path, attempt
                    )
                )
                exit(1)
            time.sleep(10)
def install_selected_python_version(installer_url, installer_folder):
    """Download the python installer archive and run its setup script.

    The archive is downloaded into ``installer_folder``, extracted there, and
    the platform-appropriate setup script is invoked from that folder.  Exits
    the process with code 1 if the setup script fails.

    :param installer_url: URL of the installer archive (.zip on windows,
        .tar.gz elsewhere).
    :param installer_folder: Folder the archive is downloaded to, extracted
        into, and the setup script run from.
    """
    current_plat = platform.system().lower()
    installer_folder = os.path.normpath(os.path.abspath(installer_folder))
    if not os.path.exists(installer_folder):
        os.mkdir(installer_folder)
    # BUG FIX: the local file suffix must follow the *URL* being downloaded.
    # Previously this tested installer_folder.endswith("zip"), so zip payloads
    # were saved with a ".tar.gz" name.
    local_installer_ref = os.path.join(
        installer_folder,
        "local" + (".zip" if installer_url.endswith("zip") else ".tar.gz"),
    )
    download_installer(installer_url, local_installer_ref)
    # Extract with the format matching the platform's installer, then run the
    # setup script; the two platforms only differ in archive type and command.
    if current_plat == "windows":
        with zipfile.ZipFile(local_installer_ref, "r") as zip_file:
            zip_file.extractall(installer_folder)
        install_command = WIN_INSTALL_ARRAY
    else:
        with tarfile.open(local_installer_ref) as tar_file:
            tar_file.extractall(installer_folder)
        install_command = UNIX_INSTALL_ARRAY
    try:
        check_call(install_command, cwd=installer_folder)
    except CalledProcessError as err:
        print(err)
        exit(1)
def get_installer_url(requested_version, version_manifest):
    """Return the x64 installer entry for this platform and requested version.

    Raises IndexError when no matching installer exists; returns None when the
    manifest entry for the version is falsy.
    """
    plat = platform.system().lower()
    print("Current Platform Is {}".format(platform.platform()))
    if version_manifest[requested_version]:
        # Only 64-bit installers are of interest.
        candidates = [
            entry
            for entry in version_manifest[requested_version]["files"]
            if entry["arch"] == "x64"
        ]
        if plat == "windows":
            matches = [c for c in candidates if c["platform"] == "win32"]
        elif plat == "darwin":
            matches = [c for c in candidates if c["platform"] == plat]
        else:
            matches = [
                c
                for c in candidates
                if c["platform"] == "linux"
                and c["platform_version"] == CURRENT_UBUNTU_VERSION
            ]
        return matches[0]
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="This python script ensures that a requested python version is present in the hostedtoolcache on azure devops agents. It does this by retrieving new versions of python from the gh-actions python manifest."
)
parser.add_argument(
"version_spec",
nargs="?",
help=("The version specifier passed in to the UsePythonVersion extended task."),
)
parser.add_argument(
"--installer_folder",
dest="installer_folder",
help=(
"The folder where the found installer will be extracted into and run from."
),
)
args = parser.parse_args()
max_precached_version = Version(MAX_PRECACHED_VERSION)
try:
version_from_spec = Version(args.version_spec)
except InvalidVersion:
print("Invalid Version Spec. Skipping custom install.")
exit(0)
with urllib.request.urlopen(MANIFEST_LOCATION) as url:
version_manifest = json.load(url)
version_dict = {i["version"]: i for i in version_manifest}
if version_from_spec > max_precached_version:
print(
"Requested version {} is newer than versions pre-cached on agent. Invoking.".format(
args.version_spec
)
)
install_file_details = get_installer_url(args.version_spec, version_dict)
install_selected_python_version(
install_file_details["download_url"], args.installer_folder
)
| 32.082278 | 225 | 0.641152 |
0c2fcc40de4ed66c94578835f1c5fc5cce57edbc | 531 | py | Python | app1/models/roles.py | loitd/myflask | 276db36c698c440dc4f1af42ca34961294234ab3 | [
"Apache-2.0"
] | null | null | null | app1/models/roles.py | loitd/myflask | 276db36c698c440dc4f1af42ca34961294234ab3 | [
"Apache-2.0"
] | 4 | 2021-02-08T20:55:21.000Z | 2022-01-06T22:51:00.000Z | app1/models/roles.py | loitd/myflask | 276db36c698c440dc4f1af42ca34961294234ab3 | [
"Apache-2.0"
] | null | null | null | from datetime import datetime
from sqlalchemy import Sequence, text
from sqlalchemy.orm import sessionmaker
from app1 import db, loginmgr
class Role(db.Model):
__tablename__ = 'tbl_roles'
__bind_key__ = None # No Bind Key -> default db
id = db.Column(db.Integer, Sequence('role_id_seq'), primary_key=True, autoincrement=True)
role = db.Column(db.String(255), unique=True, nullable=False)
description = db.Column(db.String(500), nullable=True)
def __repr__(self):
return '<ROLE %r>' % self.role | 37.928571 | 93 | 0.717514 |
0bb56ab1daff26f4340ad6feb01e65c269bd8374 | 74 | py | Python | examples/3-python/hello.py | skipperkongen/hello-docker | 177993cfbcba1c32b3dd6fd4a0468005b70f3865 | [
"Apache-2.0"
] | null | null | null | examples/3-python/hello.py | skipperkongen/hello-docker | 177993cfbcba1c32b3dd6fd4a0468005b70f3865 | [
"Apache-2.0"
] | null | null | null | examples/3-python/hello.py | skipperkongen/hello-docker | 177993cfbcba1c32b3dd6fd4a0468005b70f3865 | [
"Apache-2.0"
] | null | null | null | from time import sleep
while True:
print('Hello world')
sleep(1) | 14.8 | 24 | 0.662162 |
30993eb79dea40176653280c9ea4718afb4ba319 | 9,191 | py | Python | platform/server/detect.py | funexpected/godot | c9a4e5cdbdf5e38d0c9bcc36abeebbb09fdd8398 | [
"MIT",
"Apache-2.0",
"CC-BY-4.0",
"Unlicense"
] | 2 | 2019-10-23T11:27:37.000Z | 2021-12-06T12:02:31.000Z | platform/server/detect.py | funexpected/godot | c9a4e5cdbdf5e38d0c9bcc36abeebbb09fdd8398 | [
"MIT",
"Apache-2.0",
"CC-BY-4.0",
"Unlicense"
] | 8 | 2020-03-03T13:35:53.000Z | 2021-08-19T10:57:47.000Z | platform/server/detect.py | funexpected/godot | c9a4e5cdbdf5e38d0c9bcc36abeebbb09fdd8398 | [
"MIT",
"Apache-2.0",
"CC-BY-4.0",
"Unlicense"
] | 2 | 2021-04-08T18:14:27.000Z | 2022-02-28T10:52:34.000Z | import os
import platform
import sys
# This file is mostly based on platform/x11/detect.py.
# If editing this file, make sure to apply relevant changes here too.
def is_active():
    """The server (headless) platform detector is always available."""
    return True
def get_name():
    """Human-readable name of this build platform."""
    return "Server"
def get_program_suffix():
    """Binary suffix: 'osx' when building on macOS, 'x11' everywhere else."""
    return "osx" if sys.platform == "darwin" else "x11"
def can_build():
    """Server builds are only supported on POSIX hosts."""
    return os.name == "posix"
def get_opts():
    """Return the SCons build option Variables specific to the server platform."""
    # Removed the unused EnumVariable import that was previously pulled in here.
    from SCons.Variables import BoolVariable

    return [
        BoolVariable("use_llvm", "Use the LLVM compiler", False),
        BoolVariable("use_static_cpp", "Link libgcc and libstdc++ statically for better portability", True),
        # Help strings below previously had unbalanced parentheses, e.g. "(ASAN))".
        BoolVariable("use_ubsan", "Use LLVM/GCC compiler undefined behavior sanitizer (UBSAN)", False),
        BoolVariable("use_asan", "Use LLVM/GCC compiler address sanitizer (ASAN)", False),
        BoolVariable("use_lsan", "Use LLVM/GCC compiler leak sanitizer (LSAN)", False),
        BoolVariable("use_tsan", "Use LLVM/GCC compiler thread sanitizer (TSAN)", False),
        BoolVariable("debug_symbols", "Add debugging symbols to release/release_debug builds", True),
        BoolVariable("use_msan", "Use LLVM/GCC compiler memory sanitizer (MSAN)", False),
        BoolVariable("separate_debug_symbols", "Create a separate file containing debugging symbols", False),
        BoolVariable("execinfo", "Use libexecinfo on systems where glibc is not available", False),
    ]
def get_flags():
    """Extra platform-wide build flags; the server target needs none."""
    return []
def configure(env):
    """Configure the SCons *env* for a server (headless) build.

    Applies optimization/debug flags for the selected target, compiler,
    sanitizer and LTO settings, resolves system-vs-builtin third-party
    dependencies (mostly via pkg-config), and appends platform defines/libs.
    """
    ## Build type
    if env["target"] == "release":
        if env["optimize"] == "speed":  # optimize for speed (default)
            env.Prepend(CCFLAGS=["-O3"])
        elif env["optimize"] == "size":  # optimize for size
            env.Prepend(CCFLAGS=["-Os"])
        if env["debug_symbols"]:
            env.Prepend(CCFLAGS=["-g2"])
    elif env["target"] == "release_debug":
        if env["optimize"] == "speed":  # optimize for speed (default)
            env.Prepend(CCFLAGS=["-O2"])
        elif env["optimize"] == "size":  # optimize for size
            env.Prepend(CCFLAGS=["-Os"])
        env.Prepend(CPPDEFINES=["DEBUG_ENABLED"])
        if env["debug_symbols"]:
            env.Prepend(CCFLAGS=["-g2"])
    elif env["target"] == "debug":
        env.Prepend(CCFLAGS=["-g3"])
        env.Prepend(CPPDEFINES=["DEBUG_ENABLED"])
        env.Append(LINKFLAGS=["-rdynamic"])
    ## Architecture
    is64 = sys.maxsize > 2 ** 32
    if env["bits"] == "default":
        env["bits"] = "64" if is64 else "32"
    ## Compiler configuration
    if "CXX" in env and "clang" in os.path.basename(env["CXX"]):
        # Convenience check to enforce the use_llvm overrides when CXX is clang(++)
        env["use_llvm"] = True
    if env["use_llvm"]:
        if "clang++" not in os.path.basename(env["CXX"]):
            env["CC"] = "clang"
            env["CXX"] = "clang++"
        env.extra_suffix = ".llvm" + env.extra_suffix
        env.Append(LIBS=["atomic"])
    # Any sanitizer gets an extra "s" suffix so build artifacts don't collide.
    if env["use_ubsan"] or env["use_asan"] or env["use_lsan"] or env["use_tsan"] or env["use_msan"]:
        env.extra_suffix += "s"
        if env["use_ubsan"]:
            env.Append(CCFLAGS=["-fsanitize=undefined"])
            env.Append(LINKFLAGS=["-fsanitize=undefined"])
        if env["use_asan"]:
            env.Append(CCFLAGS=["-fsanitize=address"])
            env.Append(LINKFLAGS=["-fsanitize=address"])
        if env["use_lsan"]:
            env.Append(CCFLAGS=["-fsanitize=leak"])
            env.Append(LINKFLAGS=["-fsanitize=leak"])
        if env["use_tsan"]:
            env.Append(CCFLAGS=["-fsanitize=thread"])
            env.Append(LINKFLAGS=["-fsanitize=thread"])
        if env["use_msan"]:
            env.Append(CCFLAGS=["-fsanitize=memory"])
            env.Append(LINKFLAGS=["-fsanitize=memory"])
    if env["use_lto"]:
        env.Append(CCFLAGS=["-flto"])
        if not env["use_llvm"] and env.GetOption("num_jobs") > 1:
            env.Append(LINKFLAGS=["-flto=" + str(env.GetOption("num_jobs"))])
        else:
            env.Append(LINKFLAGS=["-flto"])
        if not env["use_llvm"]:
            env["RANLIB"] = "gcc-ranlib"
            env["AR"] = "gcc-ar"
    env.Append(CCFLAGS=["-pipe"])
    env.Append(LINKFLAGS=["-pipe"])
    ## Dependencies
    # FIXME: Check for existence of the libs before parsing their flags with pkg-config
    # freetype depends on libpng and zlib, so bundling one of them while keeping others
    # as shared libraries leads to weird issues
    if env["builtin_freetype"] or env["builtin_libpng"] or env["builtin_zlib"]:
        env["builtin_freetype"] = True
        env["builtin_libpng"] = True
        env["builtin_zlib"] = True
    if not env["builtin_freetype"]:
        env.ParseConfig("pkg-config freetype2 --cflags --libs")
    if not env["builtin_libpng"]:
        env.ParseConfig("pkg-config libpng16 --cflags --libs")
    if not env["builtin_bullet"]:
        # We need at least version 2.89
        import subprocess
        bullet_version = subprocess.check_output(["pkg-config", "bullet", "--modversion"]).strip()
        if str(bullet_version) < "2.89":
            # Abort as system bullet was requested but too old
            print(
                "Bullet: System version {0} does not match minimal requirements ({1}). Aborting.".format(
                    bullet_version, "2.89"
                )
            )
            sys.exit(255)
        env.ParseConfig("pkg-config bullet --cflags --libs")
    if False:  # not env['builtin_assimp']:
        # FIXME: Add min version check
        env.ParseConfig("pkg-config assimp --cflags --libs")
    if not env["builtin_enet"]:
        env.ParseConfig("pkg-config libenet --cflags --libs")
    if not env["builtin_squish"]:
        env.ParseConfig("pkg-config libsquish --cflags --libs")
    if not env["builtin_zstd"]:
        env.ParseConfig("pkg-config libzstd --cflags --libs")
    # Sound and video libraries
    # Keep the order as it triggers chained dependencies (ogg needed by others, etc.)
    if not env["builtin_libtheora"]:
        env["builtin_libogg"] = False  # Needed to link against system libtheora
        env["builtin_libvorbis"] = False  # Needed to link against system libtheora
        env.ParseConfig("pkg-config theora theoradec --cflags --libs")
    else:
        list_of_x86 = ["x86_64", "x86", "i386", "i586"]
        if any(platform.machine() in s for s in list_of_x86):
            env["x86_libtheora_opt_gcc"] = True
    if not env["builtin_libvpx"]:
        env.ParseConfig("pkg-config vpx --cflags --libs")
    if not env["builtin_libvorbis"]:
        env["builtin_libogg"] = False  # Needed to link against system libvorbis
        env.ParseConfig("pkg-config vorbis vorbisfile --cflags --libs")
    if not env["builtin_opus"]:
        env["builtin_libogg"] = False  # Needed to link against system opus
        env.ParseConfig("pkg-config opus opusfile --cflags --libs")
    if not env["builtin_libogg"]:
        env.ParseConfig("pkg-config ogg --cflags --libs")
    if not env["builtin_libwebp"]:
        env.ParseConfig("pkg-config libwebp --cflags --libs")
    if not env["builtin_mbedtls"]:
        # mbedTLS does not provide a pkgconfig config yet. See https://github.com/ARMmbed/mbedtls/issues/228
        env.Append(LIBS=["mbedtls", "mbedcrypto", "mbedx509"])
    if not env["builtin_wslay"]:
        env.ParseConfig("pkg-config libwslay --cflags --libs")
    if not env["builtin_miniupnpc"]:
        # No pkgconfig file so far, hardcode default paths.
        env.Prepend(CPPPATH=["/usr/include/miniupnpc"])
        env.Append(LIBS=["miniupnpc"])
    # On Linux wchar_t should be 32-bits
    # 16-bit library shouldn't be required due to compiler optimisations
    if not env["builtin_pcre2"]:
        env.ParseConfig("pkg-config libpcre2-32 --cflags --libs")
    # Embree is only compatible with x86_64. Yet another unreliable hack that will break
    # cross-compilation, this will really need to be handle better. Thankfully only affects
    # people who disable builtin_embree (likely distro packagers).
    if env["tools"] and not env["builtin_embree"] and (is64 and platform.machine() == "x86_64"):
        # No pkgconfig file so far, hardcode expected lib name.
        env.Append(LIBS=["embree3"])
    ## Flags
    # Linkflags below this line should typically stay the last ones
    if not env["builtin_zlib"]:
        env.ParseConfig("pkg-config zlib --cflags --libs")
    env.Prepend(CPPPATH=["#platform/server"])
    env.Append(CPPDEFINES=["SERVER_ENABLED", "UNIX_ENABLED"])
    if platform.system() == "Darwin":
        env.Append(LINKFLAGS=["-framework", "Cocoa", "-framework", "Carbon", "-lz", "-framework", "IOKit"])
        env.Append(LIBS=["pthread"])
    if platform.system() == "Linux":
        env.Append(LIBS=["dl"])
    if platform.system().find("BSD") >= 0:
        env["execinfo"] = True
    if env["execinfo"]:
        env.Append(LIBS=["execinfo"])
    # Link those statically for portability (not supported on OSX)
    if sys.platform != "darwin" and env["use_static_cpp"]:
        env.Append(LINKFLAGS=["-static-libgcc", "-static-libstdc++"])
| 35.624031 | 109 | 0.621151 |
38a0c50df09fff43d9639f961e473f5798d7cb5a | 6,176 | py | Python | reciprocalspaceship/dataseries.py | kmdalton/reciprocalspaceship | 50655f077cb670ee86e88480f54621780c8e9f0d | [
"MIT"
] | null | null | null | reciprocalspaceship/dataseries.py | kmdalton/reciprocalspaceship | 50655f077cb670ee86e88480f54621780c8e9f0d | [
"MIT"
] | 3 | 2021-08-23T19:21:03.000Z | 2021-08-23T21:11:14.000Z | reciprocalspaceship/dataseries.py | JBGreisman/reciprocalspaceship | cf936cca64c5c387ace505416a047318efa9375f | [
"MIT"
] | null | null | null | import pandas as pd
import reciprocalspaceship as rs
class DataSeries(pd.Series):
    """
    One-dimensional ndarray with axis labels, representing a slice
    of a DataSet. DataSeries objects inherit methods from ``pd.Series``,
    and as such have support for statistical methods that automatically
    exclude missing data (represented as NaN).

    Operations between DataSeries align values on their associated index
    values so they do not need to have the same length.

    For more information on the attributes and methods available with
    DataSeries objects, see the `Pandas documentation
    <https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.Series.html>`_.

    Parameters
    ----------
    data : array-like, Iterable, dict, or scalar value
        data to be stored in DataSeries.
    index : array-like or Index
        Values must be hashable and have the same length as `data`.
        Non-unique index values are allowed. Will default to
        RangeIndex (0, 1, 2, ..., n) if not provided. If a dict is provided
        as `data` and a `index` is given, `index` will override the keys
        found in the dict.
    dtype : str, numpy.dtype, or ExtensionDtype, optional
        Data type for the DataSeries.
    name : str, optional
        The name to give to the DataSeries.
    copy : bool, default False
        Copy input data.
    """

    @property
    def _constructor(self):
        # Slicing/arithmetic results stay DataSeries instead of pd.Series.
        return DataSeries

    @property
    def _constructor_expanddim(self):
        # Expanding a DataSeries (e.g. to_frame) produces a rs.DataSet.
        return rs.DataSet

    def to_friedel_dtype(self):
        """
        Convert dtype of DataSeries to the Friedel equivalent. If there
        is not a Friedel equivalent dtype, the DataSeries is returned
        unchanged.

        Returns
        -------
        DataSeries

        See Also
        --------
        DataSeries.from_friedel_dtype : Convert dtype of DataSeries from the Friedel equivalent.

        Examples
        --------
        DataSeries has a Friedel equivalent:

        >>> s = rs.DataSeries([1, 2, 3], dtype="Intensity")
        >>> s.to_friedel_dtype()
        0    1.0
        1    2.0
        2    3.0
        dtype: FriedelIntensity

        DataSeries does not have a Friedel equivalent:

        >>> s = rs.DataSeries([1, 2, 3], dtype="HKL")
        >>> s.to_friedel_dtype()
        0    1
        1    2
        2    3
        dtype: HKL
        """
        if isinstance(self.dtype, rs.StructureFactorAmplitudeDtype):
            return self.astype(rs.FriedelStructureFactorAmplitudeDtype())
        elif isinstance(self.dtype, rs.IntensityDtype):
            return self.astype(rs.FriedelIntensityDtype())
        # Standard deviations are disambiguated by column name.  BUG FIX:
        # guard against an unnamed series -- self.name may be None, which
        # previously raised AttributeError on .upper().
        elif (isinstance(self.dtype, rs.StandardDeviationDtype) and
              self.name is not None and "SIGF" in self.name.upper()):
            return self.astype(rs.StandardDeviationFriedelSFDtype())
        elif (isinstance(self.dtype, rs.StandardDeviationDtype) and
              self.name is not None and "SIGI" in self.name.upper()):
            return self.astype(rs.StandardDeviationFriedelIDtype())
        return self

    def from_friedel_dtype(self):
        """
        Convert dtype of DataSeries from the Friedel equivalent. If
        DataSeries is not a Friedel-related dtype, it is returned
        unchanged.

        Returns
        -------
        DataSeries

        See Also
        --------
        DataSeries.to_friedel_dtype : Convert dtype of DataSeries to the Friedel equivalent

        Examples
        --------
        DataSeries has a Friedel equivalent:

        >>> s = rs.DataSeries([1, 2, 3], dtype="FriedelIntensity")
        >>> s.from_friedel_dtype()
        0    1.0
        1    2.0
        2    3.0
        dtype: Intensity

        DataSeries does not have a Friedel equivalent:

        >>> s = rs.DataSeries([1, 2, 3], dtype="HKL")
        >>> s.from_friedel_dtype()
        0    1
        1    2
        2    3
        dtype: HKL
        """
        if isinstance(self.dtype, rs.FriedelStructureFactorAmplitudeDtype):
            return self.astype(rs.StructureFactorAmplitudeDtype())
        elif isinstance(self.dtype, rs.FriedelIntensityDtype):
            return self.astype(rs.IntensityDtype())
        elif (isinstance(self.dtype, rs.StandardDeviationFriedelIDtype) or
              isinstance(self.dtype, rs.StandardDeviationFriedelSFDtype)):
            return self.astype(rs.StandardDeviationDtype())
        return self

    def infer_mtz_dtype(self):
        """
        Infer MTZ dtype from column name and underlying data.

        If name does not match a common MTZ column, the method will return
        an MTZInt or MTZReal depending on whether the data is composed of
        integers or floats, respectively. If the data is non-numeric,
        the returned dtype will be unchanged. If input dataseries is
        already a MTZDtype, it will be returned unchanged.

        Returns
        -------
        DataSeries

        See Also
        --------
        DataSet.infer_mtz_dtypes : Infer MTZ dtypes for columns in DataSet

        Examples
        --------
        Common intensity column name:

        >>> s = rs.DataSeries([1, 2, 3], name="I")
        >>> s.infer_mtz_dtype()
        0    1.0
        1    2.0
        2    3.0
        Name: I, dtype: Intensity

        Common intensity column name for anomalous data:

        >>> s = rs.DataSeries([1, 2, 3], name="I(+)")
        >>> s.infer_mtz_dtype()
        0    1.0
        1    2.0
        2    3.0
        Name: I(+), dtype: FriedelIntensity

        Ambiguous name case:

        >>> s = rs.DataSeries([1, 2, 3], name="Something")
        >>> s.infer_mtz_dtype()
        0    1
        1    2
        2    3
        Name: Something, dtype: MTZInt
        """
        # Imported lazily to avoid a circular import at module load time.
        from reciprocalspaceship.dtypes.inference import infer_mtz_dtype
        return infer_mtz_dtype(self)
| 30.423645 | 96 | 0.579339 |
3c655c50d249566db0801f2c4b450699cea5865c | 4,403 | py | Python | chunckedparse.py | zstewar1/ALOLoader | ff60adba860734e6ede08f2d5e22b25cd36b094b | [
"MIT"
] | null | null | null | chunckedparse.py | zstewar1/ALOLoader | ff60adba860734e6ede08f2d5e22b25cd36b094b | [
"MIT"
] | null | null | null | chunckedparse.py | zstewar1/ALOLoader | ff60adba860734e6ede08f2d5e22b25cd36b094b | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import aloobj
import argparse
import collections
import json
import pprint
import struct
import sys
def load_format(file):
    """Load a chunk-format description, converting its string keys to ints."""
    raw = json.load(file)
    return {int(chunk_id): spec for chunk_id, spec in raw.items()}
def parse_chunked(format, buf):
    """Parse a chunked byte stream into ``{chunk name: [parsed chunks]}``."""
    parsed = collections.defaultdict(list)
    while buf:
        chunk_id, size = unpack('<Ii', buf)
        # The sign bit of the size field flags a chunk containing sub-chunks.
        has_children = size < 0
        size &= 0x7fffffff
        spec = format.get(chunk_id)
        if spec:
            payload = buf[:size]
            if has_children:
                parsed[spec['name']].append(parse_chunked(format, payload))
            else:
                parsed[spec['name']].append(parse_chunk(spec, payload, parsed))
        del buf[:size]
    return parsed
def unpack(format, buf):
    """Unpack values from the front of ``buf``, consuming the bytes read.

    Single-element results are returned as the bare element rather than a
    1-tuple.
    """
    values = struct.unpack_from(format, buf)
    del buf[:struct.calcsize(format)]
    return values[0] if len(values) == 1 else values
def unpack_asciiz(buf):
    """Consume and return a NUL-terminated ascii string from the front of ``buf``.

    If no terminator is present, the entire remaining buffer is treated as the
    string and the buffer is emptied.
    """
    end = buf.find(b'\x00')
    if end < 0:
        # No terminator: take everything that is left.
        end = len(buf)
    text = buf[:end].decode(encoding='ascii')
    # The +1 also removes the NUL terminator (harmlessly past-the-end otherwise).
    del buf[:end + 1]
    return text
def parse_chunk(format, buf, parent):
    """Parse a single (non-container) chunk according to its format spec.

    :param format: Spec dict with a 'content' list; each entry has 'type'
        (a struct format string, 'asciiz', or 'struct' for a nested spec),
        an optional 'name' (unnamed entries are skipped as padding), an
        optional 'head' flag (2 bytes of field header to discard), and an
        optional 'count' which may be an int, 'max' (read until the buffer
        is exhausted), or a dict referencing a value parsed earlier into
        *parent*.
    :param buf: bytearray consumed as values are read.
    :param parent: chunk data parsed so far, used to resolve dict counts.
    """
    def read_one(spec, t):
        # Read a single value of type *t* from the front of buf.
        if t == 'asciiz':
            return unpack_asciiz(buf)
        if t == 'struct':
            return parse_chunk(spec, buf, parent)
        return unpack(t, buf)

    result = {}
    for c in format['content']:
        name = c.get('name')
        t = c['type']
        if c.get('head'):
            # Discard the 2-byte field header.
            del buf[:2]
        if name is None:
            # Unnamed entry: skip over it without storing anything.
            del buf[:struct.calcsize(t)]
            continue
        count = c.get('count')
        if isinstance(count, dict):
            # Count comes from a previously parsed chunk; always use the
            # first instance of that chunk type.
            count = parent[count['chunk_name']][0][count['property']] * count.get('scale', 1)
        if count is None:
            result[name] = read_one(c, t)
        elif count == 'max':
            values = []
            while buf:
                values.append(read_one(c, t))
            result[name] = values
        else:
            # BUG FIX: the fixed-count 'asciiz' branch previously read from an
            # undefined name ('buff') and raised NameError when exercised.
            result[name] = [read_one(c, t) for _ in range(count)]
    return result
def main(args):
    """Parse the chunked file per the json format and emit the chosen output."""
    with args.json_file as json_file, args.chunked_file as chunked_file,\
            args.output_file as output_file:
        chunk_format = load_format(json_file)
        parsed = parse_chunked(chunk_format, bytearray(chunked_file.read()))
        if args.output_format == 'dict':
            print(parsed, file=args.output_file)
        elif args.output_format == 'json':
            json.dump(parsed, output_file)
            print(file=args.output_file)
        elif args.output_format == 'obj':
            aloobj.dump(parsed, output_file)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Load chunked files based on a json descripton')
parser.add_argument(
'json_file', type=argparse.FileType('r'),
help='The json file which describes the chunked format to be used')
parser.add_argument(
'chunked_file', type=argparse.FileType('rb'),
help='The chunked file to be read using the specified format')
parser.add_argument(
'--output-format', '-f', type=str, choices=('dict', 'json', 'obj'),
default='dict', help='The output format of the resulting data')
parser.add_argument(
'--output-file', '-o', type=argparse.FileType('w'), default=sys.stdout,
help='where to store the output of the operation (default: stdout)')
main(parser.parse_args())
| 31.676259 | 88 | 0.574608 |
e4f9cf5ffe3becb4ec6f34b66a5d9867b47256f1 | 391 | py | Python | WebMirror/management/rss_parser_funcs/feed_parse_extractPankajtranslationWordpressCom.py | fake-name/ReadableWebProxy | ed5c7abe38706acc2684a1e6cd80242a03c5f010 | [
"BSD-3-Clause"
] | 193 | 2016-08-02T22:04:35.000Z | 2022-03-09T20:45:41.000Z | WebMirror/management/rss_parser_funcs/feed_parse_extractPankajtranslationWordpressCom.py | fake-name/ReadableWebProxy | ed5c7abe38706acc2684a1e6cd80242a03c5f010 | [
"BSD-3-Clause"
] | 533 | 2016-08-23T20:48:23.000Z | 2022-03-28T15:55:13.000Z | WebMirror/management/rss_parser_funcs/feed_parse_extractPankajtranslationWordpressCom.py | rrosajp/ReadableWebProxy | ed5c7abe38706acc2684a1e6cd80242a03c5f010 | [
"BSD-3-Clause"
] | 19 | 2015-08-13T18:01:08.000Z | 2021-07-12T17:13:09.000Z |
def extractPankajtranslationWordpressCom(item):
	'''
	Parser for 'pankajtranslation.wordpress.com'
	'''
	title = item['title']
	vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
	has_chapter_info = bool(chp or vol)
	if not has_chapter_info or 'preview' in title.lower():
		return None
	if 'WATTT' in item['tags']:
		return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
	return False
| 26.066667 | 89 | 0.7289 |
97d82ea697363cb8eaa6f850e72458c2a1140940 | 3,858 | py | Python | commands/preupgrade/__init__.py | brianjmurrell/leapp-repository | 0060a1c0f0502e33ed7b0bb14506d7274304b647 | [
"Apache-2.0"
] | null | null | null | commands/preupgrade/__init__.py | brianjmurrell/leapp-repository | 0060a1c0f0502e33ed7b0bb14506d7274304b647 | [
"Apache-2.0"
] | null | null | null | commands/preupgrade/__init__.py | brianjmurrell/leapp-repository | 0060a1c0f0502e33ed7b0bb14506d7274304b647 | [
"Apache-2.0"
] | null | null | null | import os
import sys
import uuid
from leapp.cli.commands import command_utils
from leapp.cli.commands.upgrade import breadcrumbs, util
from leapp.config import get_config
from leapp.exceptions import CommandError, LeappError
from leapp.logger import configure_logger
from leapp.utils.audit import Execution
from leapp.utils.clicmd import command, command_opt
from leapp.utils.output import beautify_actor_exception, report_errors, report_info, report_inhibitors
@command('preupgrade', help='Generate preupgrade report')
@command_opt('whitelist-experimental', action='append', metavar='ActorName', help='Enables experimental actors')
@command_opt('debug', is_flag=True, help='Enable debug mode', inherit=False)
@command_opt('verbose', is_flag=True, help='Enable verbose logging', inherit=False)
@command_opt('no-rhsm', is_flag=True, help='Use only custom repositories and skip actions'
' with Red Hat Subscription Manager')
@command_opt('enablerepo', action='append', metavar='<repoid>',
help='Enable specified repository. Can be used multiple times.')
@command_opt('channel',
help='Set preferred channel for the IPU target.',
choices=['ga', 'tuv', 'e4s', 'eus', 'aus'],
value_type=str.lower) # This allows the choices to be case insensitive
@command_opt('target', choices=command_utils.get_supported_target_versions(),
help='Specify RHEL version to upgrade to for {} detected upgrade flavour'.format(
command_utils.get_upgrade_flavour()))
@command_opt('report-schema', help='Specify report schema version for leapp-report.json', choices=['1.0.0', '1.1.0'],
default=get_config().get('report', 'schema'))
@breadcrumbs.produces_breadcrumbs
def preupgrade(args, breadcrumbs):
    """Run the in-place-upgrade workflow up to and including the reports phase.

    Produces the preupgrade report and answer files without performing any
    actual upgrade steps.  Must be executed as root; exits with status 1 when
    the workflow records a failure.
    """
    context = str(uuid.uuid4())
    cfg = get_config()
    util.handle_output_level(args)
    configuration = util.prepare_configuration(args)
    answerfile_path = cfg.get('report', 'answerfile')
    userchoices_path = cfg.get('report', 'userchoices')
    report_schema = util.process_report_schema(args, cfg)
    # Root is required; the workflow inspects and configures the system.
    if os.getuid():
        raise CommandError('This command has to be run under the root user.')
    # Record this run in the audit database under a fresh execution context id.
    e = Execution(context=context, kind='preupgrade', configuration=configuration)
    e.store()
    util.archive_logfiles()
    logger = configure_logger('leapp-preupgrade.log')
    os.environ['LEAPP_EXECUTION_ID'] = context
    try:
        repositories = util.load_repositories()
    except LeappError as exc:
        raise CommandError(exc.message)
    workflow = repositories.lookup_workflow('IPUWorkflow')()
    util.warn_if_unsupported(configuration)
    util.process_whitelist_experimental(repositories, workflow, configuration, logger)
    with beautify_actor_exception():
        workflow.load_answers(answerfile_path, userchoices_path)
        until_phase = 'ReportsPhase'
        logger.info('Executing workflow until phase: %s', until_phase)
        # Set the locale, so that the actors parsing command outputs that might be localized will not fail
        os.environ['LC_ALL'] = 'en_US.UTF-8'
        os.environ['LANG'] = 'en_US.UTF-8'
        workflow.run(context=context, until_phase=until_phase, skip_dialogs=True)
    logger.info("Answerfile will be created at %s", answerfile_path)
    workflow.save_answers(answerfile_path, userchoices_path)
    util.generate_report_files(context, report_schema)
    report_errors(workflow.errors)
    report_inhibitors(context)
    report_files = util.get_cfg_files('report', cfg)
    log_files = util.get_cfg_files('logs', cfg)
    report_info(report_files, log_files, answerfile_path, fail=workflow.failure)
    if workflow.failure:
        sys.exit(1)
def register(base_command):
    """
    Registers `leapp preupgrade`
    """
    # Attach the preupgrade sub-command to the top-level leapp CLI command.
    base_command.add_sub(preupgrade)
| 45.388235 | 117 | 0.722395 |
f1abaa3dfb6f716d937cefdc1f756d398cab2b25 | 1,303 | py | Python | bufrtools/util/bitmath.py | ocefpaf/bufrtools | c9d71532cdbb0d5bc1e2402d3387b5470d4a0546 | [
"MIT"
] | null | null | null | bufrtools/util/bitmath.py | ocefpaf/bufrtools | c9d71532cdbb0d5bc1e2402d3387b5470d4a0546 | [
"MIT"
] | null | null | null | bufrtools/util/bitmath.py | ocefpaf/bufrtools | c9d71532cdbb0d5bc1e2402d3387b5470d4a0546 | [
"MIT"
] | 1 | 2021-06-17T00:09:38.000Z | 2021-06-17T00:09:38.000Z | #!/usr/bin/env python
#-*- coding: utf-8 -*-
"""Utility functions for bit mangling."""
def shift_uint(value: int, full_bitlength: int, bit_offset: int, bitlen: int) -> bytes:
    """Shifts an unsigned integer and returns the byte array of the shifted value."""
    # Distance from the integer's LSB to its final position within the field.
    shift = full_bitlength - bitlen - bit_offset
    out = bytearray(full_bitlength // 8)
    for idx in range(len(out)):
        byte_shift = full_bitlength - 8 * (idx + 1)
        rel = shift - byte_shift
        if rel < 0:
            out[idx] = (value >> -rel) & 0xFF
        elif rel < 8:
            out[idx] = (value << rel) & 0xFF
    return out
def encode_uint(data: bytes, value: int, bit_offset: int, bitlen: int) -> bytearray:
    """Returns a bytearray that embeds the value in the stream of bytes.

    This function embeds the unsigned integer `value` in `data` after `bit_offset` bits
    and ensures that `value` occupies `bitlen` bits.
    """
    total_bits = len(data) * 8
    # Mask selecting the destination bit field, and the value shifted into place.
    field_mask = shift_uint((1 << bitlen) - 1, total_bits, bit_offset, bitlen)
    placed = shift_uint(value, total_bits, bit_offset, bitlen)
    merged = bytearray(len(data))
    for i, original in enumerate(data):
        # Keep bits outside the mask from data, take masked bits from placed.
        merged[i] = original ^ ((original ^ placed[i]) & field_mask[i])
    return merged
| 38.323529 | 99 | 0.642364 |
4377496113f3b19aaad009cefb3975de774cb73c | 41,881 | py | Python | zulip_bots/zulip_bots/game_handler.py | dimisjim/python-zulip-api | 095d5d8f4e1854a98025b8ee92d004fe373d6a88 | [
"Apache-2.0"
] | 351 | 2017-07-07T22:44:48.000Z | 2022-03-30T04:52:12.000Z | zulip_bots/zulip_bots/game_handler.py | dimisjim/python-zulip-api | 095d5d8f4e1854a98025b8ee92d004fe373d6a88 | [
"Apache-2.0"
] | 631 | 2017-07-06T18:56:47.000Z | 2022-03-16T20:32:50.000Z | zulip_bots/zulip_bots/game_handler.py | dimisjim/python-zulip-api | 095d5d8f4e1854a98025b8ee92d004fe373d6a88 | [
"Apache-2.0"
] | 371 | 2017-07-06T19:56:29.000Z | 2022-03-31T16:07:30.000Z | import json
import logging
import random
import re
from copy import deepcopy
from typing import Any, Dict, List, Tuple
from zulip_bots.lib import BotHandler
class BadMoveException(Exception):
    """Raised when a requested game move is illegal or malformed."""

    def __init__(self, message: str) -> None:
        self.message = message

    def __str__(self) -> str:
        return self.message
class SamePlayerMove(Exception):
    """Raised when a player attempts to move again out of turn."""

    def __init__(self, message: str) -> None:
        self.message = message

    def __str__(self) -> str:
        return self.message
class GameAdapter:
"""
Class that serves as a template to easily
create multiplayer games.
This class handles all commands, and creates
GameInstances which run the actual game logic.
"""
def __init__(
self,
game_name: str,
bot_name: str,
move_help_message: str,
move_regex: str,
model: Any,
gameMessageHandler: Any,
rules: str,
max_players: int = 2,
min_players: int = 2,
supports_computer: bool = False,
) -> None:
self.game_name = game_name
self.bot_name = bot_name
self.move_help_message = move_help_message
self.move_regex = re.compile(move_regex)
self.model = model
self.max_players = max_players
self.min_players = min_players
self.is_single_player = self.min_players == self.max_players == 1
self.supports_computer = supports_computer
self.gameMessageHandler = gameMessageHandler()
self.invites = {} # type: Dict[str, Dict[str, str]]
self.instances = {} # type: Dict[str, Any]
self.user_cache = {} # type: Dict[str, Dict[str, Any]]
self.pending_subject_changes = [] # type: List[str]
self.stream = "games"
self.rules = rules
# Values are [won, lost, drawn, total] new values can be added, but MUST be added to the end of the list.
def add_user_statistics(self, user: str, values: Dict[str, int]) -> None:
self.get_user_cache()
current_values = {} # type: Dict[str, int]
if "stats" in self.get_user_by_email(user).keys():
current_values = self.user_cache[user]["stats"]
for key, value in values.items():
if key not in current_values.keys():
current_values.update({key: 0})
current_values[key] += value
self.user_cache[user].update({"stats": current_values})
self.put_user_cache()
def help_message(self) -> str:
return """** {} Bot Help:**
*Preface all commands with @**{}***
* To start a game in a stream (*recommended*), type
`start game`
* To start a game against another player, type
`start game with @<player-name>`{}
* To play game with the current number of players, type
`play game`
* To quit a game at any time, type
`quit`
* To end a game with a draw, type
`draw`
* To forfeit a game, type
`forfeit`
* To see the leaderboard, type
`leaderboard`
* To withdraw an invitation, type
`cancel game`
* To see rules of this game, type
`rules`
{}""".format(
self.game_name,
self.get_bot_username(),
self.play_with_computer_help(),
self.move_help_message,
)
def help_message_single_player(self) -> str:
return """** {} Bot Help:**
*Preface all commands with @**{}***
* To start a game in a stream, type
`start game`
* To quit a game at any time, type
`quit`
* To see rules of this game, type
`rules`
{}""".format(
self.game_name, self.get_bot_username(), self.move_help_message
)
def get_commands(self) -> Dict[str, str]:
action = self.help_message_single_player()
return {
"accept": action,
"decline": action,
"register": action,
"draw": action,
"forfeit": action,
"leaderboard": action,
"join": action,
}
def manage_command(self, command: str, message: Dict[str, Any]) -> int:
commands = self.get_commands()
if command not in commands:
return 1
action = commands[command]
self.send_reply(message, action)
return 0
def already_in_game_message(self) -> str:
return "You are already in a game. Type `quit` to leave."
def confirm_new_invitation(self, opponent: str) -> str:
return (
"You've sent an invitation to play "
+ self.game_name
+ " with @**"
+ self.get_user_by_email(opponent)["full_name"]
+ "**"
)
def play_with_computer_help(self) -> str:
if self.supports_computer:
return "\n* To start a game with the computer, type\n`start game with` @**{}**".format(
self.get_bot_username()
)
return ""
def alert_new_invitation(self, game_id: str) -> str:
# Since the first player invites, the challenger is always the first player
player_email = self.get_players(game_id)[0]
sender_name = self.get_username_by_email(player_email)
return (
"**"
+ sender_name
+ " has invited you to play a game of "
+ self.game_name
+ ".**\n"
+ self.get_formatted_game_object(game_id)
+ "\n\n"
+ "Type ```accept``` to accept the game invitation\n"
+ "Type ```decline``` to decline the game invitation."
)
def confirm_invitation_accepted(self, game_id: str) -> str:
host = self.invites[game_id]["host"]
return "Accepted invitation to play **{}** from @**{}**.".format(
self.game_name, self.get_username_by_email(host)
)
def confirm_invitation_declined(self, game_id: str) -> str:
host = self.invites[game_id]["host"]
return "Declined invitation to play **{}** from @**{}**.".format(
self.game_name, self.get_username_by_email(host)
)
def send_message(self, to: str, content: str, is_private: bool, subject: str = "") -> None:
self.bot_handler.send_message(
dict(
type="private" if is_private else "stream", to=to, content=content, subject=subject
)
)
def send_reply(self, original_message: Dict[str, Any], content: str) -> None:
self.bot_handler.send_reply(original_message, content)
def usage(self) -> str:
return (
"""
Bot that allows users to play another user
or the computer in a game of """
+ self.game_name
+ """
To see the entire list of commands, type
@bot-name help
"""
)
def initialize(self, bot_handler: BotHandler) -> None:
self.bot_handler = bot_handler
self.get_user_cache()
self.email = self.bot_handler.email
self.full_name = self.bot_handler.full_name
def handle_message(self, message: Dict[str, Any], bot_handler: BotHandler) -> None:
try:
self.bot_handler = bot_handler
content = message["content"].strip()
sender = message["sender_email"].lower()
message["sender_email"] = message["sender_email"].lower()
if self.email not in self.user_cache.keys() and self.supports_computer:
self.add_user_to_cache(
{"sender_email": self.email, "sender_full_name": self.full_name}
)
if sender == self.email:
return
if sender not in self.user_cache.keys():
self.add_user_to_cache(message)
logging.info(f"Added {sender} to user cache")
if self.is_single_player:
if content.lower().startswith("start game with") or content.lower().startswith(
"play game"
):
self.send_reply(message, self.help_message_single_player())
return
else:
val = self.manage_command(content.lower(), message)
if val == 0:
return
if content.lower() == "help" or content == "":
if self.is_single_player:
self.send_reply(message, self.help_message_single_player())
else:
self.send_reply(message, self.help_message())
return
elif content.lower() == "rules":
self.send_reply(message, self.rules)
elif content.lower().startswith("start game with "):
self.command_start_game_with(message, sender, content)
elif content.lower() == "start game":
self.command_start_game(message, sender, content)
elif content.lower().startswith("play game"):
self.command_play(message, sender, content)
elif content.lower() == "accept":
self.command_accept(message, sender, content)
elif content.lower() == "decline":
self.command_decline(message, sender, content)
elif content.lower() == "quit":
self.command_quit(message, sender, content)
elif content.lower() == "register":
self.send_reply(
message,
"Hello @**{}**. Thanks for registering!".format(message["sender_full_name"]),
)
elif content.lower() == "leaderboard":
self.command_leaderboard(message, sender, content)
elif content.lower() == "join":
self.command_join(message, sender, content)
elif self.is_user_in_game(sender) != "":
self.parse_message(message)
elif (
self.move_regex.match(content) is not None
or content.lower() == "draw"
or content.lower() == "forfeit"
):
self.send_reply(
message, "You are not in a game at the moment. Type `help` for help."
)
else:
if self.is_single_player:
self.send_reply(message, self.help_message_single_player())
else:
self.send_reply(message, self.help_message())
except Exception as e:
logging.exception(str(e))
self.bot_handler.send_reply(message, f"Error {e}.")
def is_user_in_game(self, user_email: str) -> str:
for instance in self.instances.values():
if user_email in instance.players:
return instance.game_id
return ""
def command_start_game_with(self, message: Dict[str, Any], sender: str, content: str) -> None:
if not self.is_user_not_player(sender, message):
self.send_reply(message, self.already_in_game_message())
return
users = content.replace("start game with ", "").strip().split(", ")
self.create_game_lobby(message, users)
def command_start_game(self, message: Dict[str, Any], sender: str, content: str) -> None:
if message["type"] == "private":
if self.is_single_player:
self.send_reply(message, "You are not allowed to play games in private messages.")
return
else:
self.send_reply(
message,
"If you are starting a game in private messages, you must invite players. Type `help` for commands.",
)
if not self.is_user_not_player(sender, message):
self.send_reply(message, self.already_in_game_message())
return
self.create_game_lobby(message)
if self.is_single_player:
self.command_play(message, sender, content)
def command_accept(self, message: Dict[str, Any], sender: str, content: str) -> None:
if not self.is_user_not_player(sender, message):
self.send_reply(message, self.already_in_game_message())
return
game_id = self.set_invite_by_user(sender, True, message)
if game_id == "":
self.send_reply(message, "No active invites. Type `help` for commands.")
return
if message["type"] == "private":
self.send_reply(message, self.confirm_invitation_accepted(game_id))
self.broadcast(
game_id,
f"@**{self.get_username_by_email(sender)}** has accepted the invitation.",
)
self.start_game_if_ready(game_id)
def create_game_lobby(self, message: Dict[str, Any], users: List[str] = []) -> None:
if self.is_game_in_subject(message["subject"], message["display_recipient"]):
self.send_reply(message, "There is already a game in this stream.")
return
if len(users) > 0:
users = self.verify_users(users, message=message)
if len(users) + 1 < self.min_players:
self.send_reply(
message,
"You must have at least {} players to play.\nGame cancelled.".format(
self.min_players
),
)
return
if len(users) + 1 > self.max_players:
self.send_reply(
message,
f"The maximum number of players for this game is {self.max_players}.",
)
return
game_id = self.generate_game_id()
stream_subject = "###private###"
if message["type"] == "stream":
stream_subject = message["subject"]
self.invites[game_id] = {
"host": message["sender_email"].lower(),
"subject": stream_subject,
"stream": message["display_recipient"],
}
if message["type"] == "private":
self.invites[game_id]["stream"] = "games"
for user in users:
self.send_invite(game_id, user, message)
if message["type"] == "stream":
if len(users) > 0:
self.broadcast(
game_id,
'If you were invited, and you\'re here, type "@**{}** accept" to accept the invite!'.format(
self.get_bot_username()
),
include_private=False,
)
if len(users) + 1 < self.max_players:
self.broadcast(
game_id,
"**{}** wants to play **{}**. Type @**{}** join to play them!".format(
self.get_username_by_email(message["sender_email"]),
self.game_name,
self.get_bot_username(),
),
)
if self.is_single_player:
self.broadcast(
game_id,
"**{}** is now going to play {}!".format(
self.get_username_by_email(message["sender_email"]), self.game_name
),
)
if self.email in users:
self.broadcast(game_id, "Wait... That's me!", include_private=True)
if message["type"] == "stream":
self.broadcast(
game_id, f"@**{self.get_bot_username()}** accept", include_private=False
)
game_id = self.set_invite_by_user(self.email, True, {"type": "stream"})
self.start_game_if_ready(game_id)
def command_decline(self, message: Dict[str, Any], sender: str, content: str) -> None:
if not self.is_user_not_player(sender, message):
self.send_reply(message, self.already_in_game_message())
return
game_id = self.set_invite_by_user(sender, False, message)
if game_id == "":
self.send_reply(message, "No active invites. Type `help` for commands.")
return
self.send_reply(message, self.confirm_invitation_declined(game_id))
self.broadcast(
game_id,
f"@**{self.get_username_by_email(sender)}** has declined the invitation.",
)
if len(self.get_players(game_id, parameter="")) < self.min_players:
self.cancel_game(game_id)
def command_quit(self, message: Dict[str, Any], sender: str, content: str) -> None:
game_id = self.get_game_id_by_email(sender)
if message["type"] == "private" and self.is_single_player:
self.send_reply(message, "You are not allowed to play games in private messages.")
return
if game_id == "":
self.send_reply(message, "You are not in a game. Type `help` for all commands.")
sender_name = self.get_username_by_email(sender)
self.cancel_game(game_id, reason=f"**{sender_name}** quit.")
def command_join(self, message: Dict[str, Any], sender: str, content: str) -> None:
if not self.is_user_not_player(sender, message):
self.send_reply(message, self.already_in_game_message())
return
if message["type"] == "private":
self.send_reply(
message, "You cannot join games in private messages. Type `help` for all commands."
)
return
game_id = self.get_invite_in_subject(message["subject"], message["display_recipient"])
if game_id == "":
self.send_reply(
message, "There is not a game in this subject. Type `help` for all commands."
)
return
self.join_game(game_id, sender, message)
def command_play(self, message: Dict[str, Any], sender: str, content: str) -> None:
game_id = self.get_invite_in_subject(message["subject"], message["display_recipient"])
if game_id == "":
self.send_reply(
message, "There is not a game in this subject. Type `help` for all commands."
)
return
num_players = len(self.get_players(game_id))
if num_players >= self.min_players and num_players <= self.max_players:
self.start_game(game_id)
else:
self.send_reply(
message,
f"Join {self.max_players - num_players} more players to start the game",
)
def command_leaderboard(self, message: Dict[str, Any], sender: str, content: str) -> None:
stats = self.get_sorted_player_statistics()
num = 5 if len(stats) > 5 else len(stats)
top_stats = stats[0:num]
response = "**Most wins**\n\n"
raw_headers = ["games_won", "games_drawn", "games_lost", "total_games"]
headers = ["Player"] + [key.replace("_", " ").title() for key in raw_headers]
response += " | ".join(headers)
response += "\n" + " | ".join(" --- " for header in headers)
for player, stat in top_stats:
response += f"\n **{self.get_username_by_email(player)}** | "
values = [str(stat[key]) for key in raw_headers]
response += " | ".join(values)
self.send_reply(message, response)
return
def get_sorted_player_statistics(self) -> List[Tuple[str, Dict[str, int]]]:
players = []
for user_name, u in self.user_cache.items():
if "stats" in u.keys():
players.append((user_name, u["stats"]))
return sorted(
players,
key=lambda player: (
player[1]["games_won"],
player[1]["games_drawn"],
player[1]["total_games"],
),
reverse=True,
)
def send_invite(self, game_id: str, user_email: str, message: Dict[str, Any] = {}) -> None:
self.invites[game_id].update({user_email.lower(): "p"})
self.send_message(user_email, self.alert_new_invitation(game_id), True)
if message != {}:
self.send_reply(message, self.confirm_new_invitation(user_email))
def cancel_game(self, game_id: str, reason: str = "") -> None:
if game_id in self.invites.keys():
self.broadcast(game_id, "Game cancelled.\n" + reason)
del self.invites[game_id]
return
if game_id in self.instances.keys():
self.instances[game_id].broadcast("Game ended.\n" + reason)
del self.instances[game_id]
return
def start_game_if_ready(self, game_id: str) -> None:
players = self.get_players(game_id)
if len(players) == self.max_players:
self.start_game(game_id)
def start_game(self, game_id: str) -> None:
players = self.get_players(game_id)
subject = game_id
stream = self.invites[game_id]["stream"]
if self.invites[game_id]["subject"] != "###private###":
subject = self.invites[game_id]["subject"]
self.instances[game_id] = GameInstance(self, False, subject, game_id, players, stream)
self.broadcast(
game_id,
f"The game has started in #{stream} {self.instances[game_id].subject}"
+ "\n"
+ self.get_formatted_game_object(game_id),
)
del self.invites[game_id]
self.instances[game_id].start()
def get_formatted_game_object(self, game_id: str) -> str:
object = """> **Game `{}`**
> {}
> {}/{} players""".format(
game_id, self.game_name, self.get_number_of_players(game_id), self.max_players
)
if game_id in self.instances.keys():
instance = self.instances[game_id]
if not self.is_single_player:
object += "\n> **[Join Game](/#narrow/stream/{}/topic/{})**".format(
instance.stream, instance.subject
)
return object
def join_game(self, game_id: str, user_email: str, message: Dict[str, Any] = {}) -> None:
if len(self.get_players(game_id)) >= self.max_players:
if message != {}:
self.send_reply(message, "This game is full.")
return
self.invites[game_id].update({user_email: "a"})
self.broadcast(
game_id, f"@**{self.get_username_by_email(user_email)}** has joined the game"
)
self.start_game_if_ready(game_id)
    def get_players(self, game_id: str, parameter: str = "a") -> List[str]:
        """Return the players attached to a game, filtered by invite status.

        Each invitee in ``self.invites[game_id]`` maps to a status string
        ("p" = invited privately/pending, "a" = accepted, "pa" = accepted via
        private message).  ``parameter`` filters with a substring test, so:

        * ``"a"`` (default) matches "a" and "pa" (everyone who accepted);
        * ``"p"`` matches "p" and "pa" (users reachable by private message);
        * ``""`` matches every status (host plus all invitees).

        For a game that is already running, the instance's player list is
        returned, unless ``"p"`` is requested (instances track no
        private/pending state).
        """
        if game_id in self.invites.keys():
            players = []  # type: List[str]
            # Include the host unless we are collecting private-message
            # recipients for an invite that lives in a public stream (the
            # host already sees the stream messages).
            if (
                self.invites[game_id]["subject"] == "###private###" and "p" in parameter
            ) or "p" not in parameter:
                players = [self.invites[game_id]["host"]]
            for player, accepted in self.invites[game_id].items():
                # Skip the invite's metadata keys; every other key is an email.
                if player == "host" or player == "subject" or player == "stream":
                    continue
                if parameter in accepted:
                    players.append(player)
            return players
        if game_id in self.instances.keys() and "p" not in parameter:
            players = self.instances[game_id].players
            return players
        return []
def get_game_info(self, game_id: str) -> Dict[str, Any]:
game_info = {} # type: Dict[str, Any]
if game_id in self.instances.keys():
instance = self.instances[game_id]
game_info = {
"game_id": game_id,
"type": "instance",
"stream": instance.stream,
"subject": instance.subject,
"players": self.get_players(game_id),
}
if game_id in self.invites.keys():
invite = self.invites[game_id]
game_info = {
"game_id": game_id,
"type": "invite",
"stream": invite["stream"],
"subject": invite["subject"],
"players": self.get_players(game_id),
}
return game_info
def get_user_by_name(self, name: str) -> Dict[str, Any]:
name = name.strip()
for user in self.user_cache.values():
if "full_name" in user.keys():
if user["full_name"].lower() == name.lower():
return user
return {}
def get_number_of_players(self, game_id: str) -> int:
num = len(self.get_players(game_id))
return num
def parse_message(self, message: Dict[str, Any]) -> None:
game_id = self.is_user_in_game(message["sender_email"])
game = self.get_game_info(game_id)
if message["type"] == "private":
if self.is_single_player:
self.send_reply(message, self.help_message_single_player())
return
self.send_reply(
message,
"Join your game using the link below!\n\n{}".format(
self.get_formatted_game_object(game_id)
),
)
return
if game["subject"] != message["subject"] or game["stream"] != message["display_recipient"]:
if game_id not in self.pending_subject_changes:
self.send_reply(
message,
"Your current game is not in this subject. \n\
To move subjects, send your message again, otherwise join the game using the link below.\n\n\
{}".format(
self.get_formatted_game_object(game_id)
),
)
self.pending_subject_changes.append(game_id)
return
self.pending_subject_changes.remove(game_id)
self.change_game_subject(
game_id, message["display_recipient"], message["subject"], message
)
self.instances[game_id].handle_message(message["content"], message["sender_email"])
def change_game_subject(
self, game_id: str, stream_name: str, subject_name: str, message: Dict[str, Any] = {}
) -> None:
if self.get_game_instance_by_subject(stream_name, subject_name) is not None:
if message != {}:
self.send_reply(message, "There is already a game in this subject.")
return
if game_id in self.instances.keys():
self.instances[game_id].change_subject(stream_name, subject_name)
if game_id in self.invites.keys():
invite = self.invites[game_id]
invite["stream"] = stream_name
invite["subject"] = stream_name
    def set_invite_by_user(
        self, user_email: str, is_accepted: bool, message: Dict[str, Any]
    ) -> str:
        """Record a user's response to their pending game invitation.

        Scans every open invite for *user_email*.  On acceptance the user's
        status becomes "pa" (accepted over private message) or "a" (accepted
        in a stream); on decline the user is removed from the invite.

        Returns the id of the game whose invite was updated, or "" when the
        user has no pending invitation.
        """
        user_email = user_email.lower()
        for game, users in self.invites.items():
            if user_email in users.keys():
                if is_accepted:
                    # The "p" prefix marks players who interact privately, so
                    # broadcasts know to send them direct messages.
                    if message["type"] == "private":
                        users[user_email] = "pa"
                    else:
                        users[user_email] = "a"
                else:
                    users.pop(user_email)
                return game
        return ""
def add_user_to_cache(self, message: Dict[str, Any]) -> None:
user = {
"email": message["sender_email"].lower(),
"full_name": message["sender_full_name"],
"stats": {"total_games": 0, "games_won": 0, "games_lost": 0, "games_drawn": 0},
}
self.user_cache.update({message["sender_email"].lower(): user})
self.put_user_cache()
def put_user_cache(self) -> Dict[str, Any]:
user_cache_str = json.dumps(self.user_cache)
self.bot_handler.storage.put("users", user_cache_str)
return self.user_cache
def get_user_cache(self) -> Dict[str, Any]:
try:
user_cache_str = self.bot_handler.storage.get("users")
except KeyError:
return {}
self.user_cache = json.loads(user_cache_str)
return self.user_cache
def verify_users(self, users: List[str], message: Dict[str, Any] = {}) -> List[str]:
verified_users = []
failed = False
for u in users:
user = u.strip().lstrip("@**").rstrip("**")
if (
user == self.get_bot_username() or user == self.email
) and not self.supports_computer:
self.send_reply(message, "You cannot play against the computer in this game.")
if "@" not in user:
user_obj = self.get_user_by_name(user)
if user_obj == {}:
self.send_reply(
message,
"I don't know {}. Tell them to say @**{}** register".format(
u, self.get_bot_username()
),
)
failed = True
continue
user = user_obj["email"]
if self.is_user_not_player(user, message):
verified_users.append(user)
else:
failed = True
if failed:
return []
else:
return verified_users
def get_game_instance_by_subject(self, subject_name: str, stream_name: str) -> Any:
for instance in self.instances.values():
if instance.subject == subject_name and instance.stream == stream_name:
return instance
return None
def get_invite_in_subject(self, subject_name: str, stream_name: str) -> str:
for key, invite in self.invites.items():
if invite["subject"] == subject_name and invite["stream"] == stream_name:
return key
return ""
def is_game_in_subject(self, subject_name: str, stream_name: str) -> bool:
return (
self.get_invite_in_subject(subject_name, stream_name) != ""
or self.get_game_instance_by_subject(subject_name, stream_name) is not None
)
def is_user_not_player(self, user_email: str, message: Dict[str, Any] = {}) -> bool:
user = self.get_user_by_email(user_email)
if user == {}:
if message != {}:
self.send_reply(
message,
"I don't know {}. Tell them to use @**{}** register".format(
user_email, self.get_bot_username()
),
)
return False
for instance in self.instances.values():
if user_email in instance.players:
return False
for invite in self.invites.values():
for u in invite.keys():
if u == "host":
if user_email == invite["host"]:
return False
if u == user_email and "a" in invite[u]:
return False
return True
def generate_game_id(self) -> str:
id = ""
valid_characters = "abcdefghijklmnopqrstuvwxyz0123456789"
for i in range(6):
id += valid_characters[random.randrange(0, len(valid_characters))]
return id
def broadcast(self, game_id: str, content: str, include_private: bool = True) -> bool:
if include_private:
private_recipients = self.get_players(game_id, parameter="p")
if private_recipients is not None:
for user in private_recipients:
self.send_message(user, content, True)
if game_id in self.invites.keys():
if self.invites[game_id]["subject"] != "###private###":
self.send_message(
self.invites[game_id]["stream"],
content,
False,
self.invites[game_id]["subject"],
)
return True
if game_id in self.instances.keys():
self.send_message(
self.instances[game_id].stream, content, False, self.instances[game_id].subject
)
return True
return False
def get_username_by_email(self, user_email: str) -> str:
return self.get_user_by_email(user_email)["full_name"]
def get_user_by_email(self, user_email: str) -> Dict[str, Any]:
if user_email in self.user_cache:
return self.user_cache[user_email]
return {}
def get_game_id_by_email(self, user_email: str) -> str:
for instance in self.instances.values():
if user_email in instance.players:
return instance.game_id
for game_id in self.invites.keys():
players = self.get_players(game_id)
if user_email in players:
return game_id
return ""
def get_bot_username(self) -> str:
return self.bot_handler.full_name
class GameInstance:
"""
The GameInstance class handles the game logic for a certain game,
and is associated with a certain stream.
It gets player info from GameAdapter
It only runs when the game is being played, not in the invite
or waiting states.
"""
def __init__(
self,
gameAdapter: GameAdapter,
is_private: bool,
subject: str,
game_id: str,
players: List[str],
stream: str,
) -> None:
self.gameAdapter = gameAdapter
self.is_private = is_private
self.subject = subject
self.game_id = game_id
self.players = players
self.stream = stream
self.model = deepcopy(self.gameAdapter.model())
self.board = self.model.current_board
self.turn = random.randrange(0, len(players)) - 1
self.current_draw = {} # type: Dict[str, bool]
self.current_messages = [] # type: List[str]
self.is_changing_subject = False
def start(self) -> None:
self.current_messages.append(self.get_start_message())
self.current_messages.append(self.parse_current_board())
self.next_turn()
def change_subject(self, stream: str, subject: str) -> None:
self.stream = stream
self.subject = subject
self.current_messages.append(self.parse_current_board())
self.broadcast_current_message()
def get_player_text(self) -> str:
player_text = ""
for player in self.players:
player_text += f" @**{self.gameAdapter.get_username_by_email(player)}**"
return player_text
def get_start_message(self) -> str:
start_message = "Game `{}` started.\n*Remember to start your message with* @**{}**".format(
self.game_id, self.gameAdapter.get_bot_username()
)
if not self.is_private:
player_text = "\n**Players**"
player_text += self.get_player_text()
start_message += player_text
start_message += "\n" + self.gameAdapter.gameMessageHandler.game_start_message()
return start_message
def handle_message(self, content: str, player_email: str) -> None:
if content == "forfeit":
player_name = self.gameAdapter.get_username_by_email(player_email)
self.broadcast(f"**{player_name}** forfeited!")
self.end_game("except:" + player_email)
return
if content == "draw":
if player_email in self.current_draw.keys():
self.current_draw[player_email] = True
else:
self.current_draw = {p: False for p in self.players}
self.broadcast(
"**{}** has voted for a draw!\nType `draw` to accept".format(
self.gameAdapter.get_username_by_email(player_email)
)
)
self.current_draw[player_email] = True
if self.check_draw():
self.end_game("draw")
return
if self.is_turn_of(player_email):
self.handle_current_player_command(content)
else:
if self.gameAdapter.is_single_player:
self.broadcast("It's your turn")
else:
self.broadcast(
"It's **{}**'s ({}) turn.".format(
self.gameAdapter.get_username_by_email(self.players[self.turn]),
self.gameAdapter.gameMessageHandler.get_player_color(self.turn),
)
)
def broadcast(self, content: str) -> None:
self.gameAdapter.broadcast(self.game_id, content)
def check_draw(self) -> bool:
for d in self.current_draw.values():
if not d:
return False
return len(self.current_draw.values()) > 0
def handle_current_player_command(self, content: str) -> None:
re_result = self.gameAdapter.move_regex.match(content)
if re_result is None:
self.broadcast(self.gameAdapter.move_help_message)
return
self.make_move(content, False)
def make_move(self, content: str, is_computer: bool) -> None:
try:
self.model.make_move(content, self.turn, is_computer)
# Keep the turn of the same player
except SamePlayerMove as smp:
self.same_player_turn(content, smp.message, is_computer)
return
except BadMoveException as e:
self.broadcast(e.message)
self.broadcast(self.parse_current_board())
return
if not is_computer:
self.current_messages.append(
self.gameAdapter.gameMessageHandler.alert_move_message(
"**{}**".format(
self.gameAdapter.get_username_by_email(self.players[self.turn])
),
content,
)
)
self.current_messages.append(self.parse_current_board())
game_over = self.model.determine_game_over(self.players)
if game_over:
self.broadcast_current_message()
if game_over == "current turn":
game_over = self.players[self.turn]
self.end_game(game_over)
return
self.next_turn()
def is_turn_of(self, player_email: str) -> bool:
return self.players[self.turn].lower() == player_email.lower()
def same_player_turn(self, content: str, message: str, is_computer: bool) -> None:
if not is_computer:
self.current_messages.append(
self.gameAdapter.gameMessageHandler.alert_move_message(
"**{}**".format(
self.gameAdapter.get_username_by_email(self.players[self.turn])
),
content,
)
)
self.current_messages.append(self.parse_current_board())
# append custom message the game wants to give for the next move
self.current_messages.append(message)
game_over = self.model.determine_game_over(self.players)
if game_over:
self.broadcast_current_message()
if game_over == "current turn":
game_over = self.players[self.turn]
self.end_game(game_over)
return
self.current_messages.append(
"It's **{}**'s ({}) turn.".format(
self.gameAdapter.get_username_by_email(self.players[self.turn]),
self.gameAdapter.gameMessageHandler.get_player_color(self.turn),
)
)
self.broadcast_current_message()
if self.players[self.turn] == self.gameAdapter.email:
self.make_move("", True)
def next_turn(self) -> None:
self.turn += 1
if self.turn >= len(self.players):
self.turn = 0
if self.gameAdapter.is_single_player:
self.current_messages.append("It's your turn.")
else:
self.current_messages.append(
"It's **{}**'s ({}) turn.".format(
self.gameAdapter.get_username_by_email(self.players[self.turn]),
self.gameAdapter.gameMessageHandler.get_player_color(self.turn),
)
)
self.broadcast_current_message()
if self.players[self.turn] == self.gameAdapter.email:
self.make_move("", True)
def broadcast_current_message(self) -> None:
content = "\n\n".join(self.current_messages)
self.broadcast(content)
self.current_messages = []
def parse_current_board(self) -> Any:
return self.gameAdapter.gameMessageHandler.parse_board(self.model.current_board)
def end_game(self, winner: str) -> None:
loser = ""
if winner == "draw":
self.broadcast("It was a draw!")
elif winner.startswith("except:"):
loser = winner.lstrip("except:")
else:
winner_name = self.gameAdapter.get_username_by_email(winner)
self.broadcast(f"**{winner_name}** won! :tada:")
for u in self.players:
values = {"total_games": 1, "games_won": 0, "games_lost": 0, "games_drawn": 0}
if loser == "":
if u == winner:
values.update({"games_won": 1})
elif winner == "draw":
values.update({"games_drawn": 1})
else:
values.update({"games_lost": 1})
else:
if u == loser:
values.update({"games_lost": 1})
else:
values.update({"games_won": 1})
self.gameAdapter.add_user_statistics(u, values)
if self.gameAdapter.email in self.players:
self.send_win_responses(winner)
self.gameAdapter.cancel_game(self.game_id)
def send_win_responses(self, winner: str) -> None:
if winner == self.gameAdapter.email:
self.broadcast("I won! Well Played!")
elif winner == "draw":
self.broadcast("It was a draw! Well Played!")
else:
self.broadcast("You won! Nice!")
| 39.43597 | 121 | 0.561734 |
8936995facc903360636de6fd7c56bd02cd425f4 | 6,303 | py | Python | src/pymap3d/tests/test_geodetic.py | wrlssqi/pymap3d | bd91a5ff4e9066eb33fead3006ba9de191e2c5e5 | [
"BSD-2-Clause"
] | 116 | 2020-02-23T02:04:18.000Z | 2022-03-29T00:19:37.000Z | src/pymap3d/tests/test_geodetic.py | wrlssqi/pymap3d | bd91a5ff4e9066eb33fead3006ba9de191e2c5e5 | [
"BSD-2-Clause"
] | 19 | 2020-03-02T08:13:46.000Z | 2022-03-30T17:50:00.000Z | src/pymap3d/tests/test_geodetic.py | wrlssqi/pymap3d | bd91a5ff4e9066eb33fead3006ba9de191e2c5e5 | [
"BSD-2-Clause"
] | 28 | 2020-02-24T11:56:03.000Z | 2022-03-29T02:29:37.000Z | import pytest
from pytest import approx
from math import radians, nan, sqrt, isnan
import pymap3d as pm
lla0 = (42, -82, 200)
rlla0 = (radians(lla0[0]), radians(lla0[1]), lla0[2])
xyz0 = (660675.2518247, -4700948.68316, 4245737.66222)
ELL = pm.Ellipsoid()
A = ELL.semimajor_axis
B = ELL.semiminor_axis
atol_dist = 1e-6 # 1 micrometer
@pytest.mark.parametrize("lla", [(42, -82, 200), ([42], [-82], [200])], ids=("scalar", "list"))
def test_scalar_geodetic2ecef(lla):
    """
    geodetic2ecef should accept both scalar and list inputs.

    The list case exercises the array code path, which needs numpy, so it
    is skipped when numpy is unavailable.
    """
    # list inputs cannot be handled by the pure-python scalar path
    if isinstance(lla[0], list):
        pytest.importorskip("numpy")
    x0, y0, z0 = pm.geodetic2ecef(*lla)
    assert (x0, y0, z0) == approx(xyz0)
def test_3d_geodetic2ecef():
np = pytest.importorskip("numpy")
lla = (np.atleast_3d(42), np.atleast_3d(-82), np.atleast_3d(200))
x0, y0, z0 = pm.geodetic2ecef(*lla)
assert (x0, y0, z0) == approx(xyz0)
def test_scalar_ecef2geodetic():
    """
    ecef2geodetic should accept plain scalar floats and round-trip the
    reference ECEF point back to the geodetic reference coordinates.
    """
    lat, lon, alt = pm.ecef2geodetic(xyz0[0], xyz0[1], xyz0[2])
    # the inverse transform is approximate, hence the loose relative tolerance
    assert [lat, lon, alt] == approx(lla0, rel=1e-4)
def test_3d_ecef2geodetic():
np = pytest.importorskip("numpy")
xyz = (np.atleast_3d(xyz0[0]), np.atleast_3d(xyz0[1]), np.atleast_3d(xyz0[2]))
lat, lon, alt = pm.ecef2geodetic(*xyz)
assert [lat, lon, alt] == approx(lla0, rel=1e-4)
def test_array_ecef2geodetic():
    """
    tests ecef2geodetic can handle numpy array data in addition to singular floats
    """
    np = pytest.importorskip("numpy")
    # test values with no points inside ellipsoid
    lla0_array = (
        np.array([lla0[0], lla0[0]]),
        np.array([lla0[1], lla0[1]]),
        np.array([lla0[2], lla0[2]]),
    )
    # round-trip: geodetic -> ECEF -> geodetic should reproduce the input
    xyz = pm.geodetic2ecef(*lla0_array)
    lats, lons, alts = pm.ecef2geodetic(*xyz)
    assert lats == approx(lla0_array[0])
    assert lons == approx(lla0_array[1])
    assert alts == approx(lla0_array[2])
    # test values with some (but not all) points inside ellipsoid
    lla0_array_inside = (
        np.array([lla0[0], lla0[0]]),
        np.array([lla0[1], lla0[1]]),
        np.array([lla0[2], -lla0[2]]),  # negative altitude -> below the surface
    )
    xyz = pm.geodetic2ecef(*lla0_array_inside)
    lats, lons, alts = pm.ecef2geodetic(*xyz)
    assert lats == approx(lla0_array_inside[0])
    assert lons == approx(lla0_array_inside[1])
    assert alts == approx(lla0_array_inside[2])
def test_xarray():
    """geodetic2ecef/ecef2geodetic accept xarray.DataArray inputs."""
    xarray = pytest.importorskip("xarray")
    xr_lla = xarray.DataArray(list(lla0))
    xyz = pm.geodetic2ecef(*xr_lla)
    assert xyz == approx(xyz0)
    # %%
    xr_xyz = xarray.DataArray(list(xyz0))
    lla = pm.ecef2geodetic(*xr_xyz)
    assert lla == approx(lla0)
def test_pandas():
    """geodetic2ecef accepts pandas Series and DataFrame columns."""
    pandas = pytest.importorskip("pandas")
    pd_lla = pandas.Series(lla0)
    xyz = pm.geodetic2ecef(*pd_lla)
    assert xyz == approx(xyz0)
    # %% dataframe degenerates to series
    pd_lla = pandas.DataFrame([[*lla0], [*lla0]], columns=["lat", "lon", "alt_m"])
    xyz = pm.geodetic2ecef(pd_lla["lat"], pd_lla["lon"], pd_lla["alt_m"])
    # each returned coordinate is a Series; compare the underlying values
    assert xyz[0].values == approx(xyz0[0])
    assert xyz[1].values == approx(xyz0[1])
    assert xyz[2].values == approx(xyz0[2])
def test_ecef():
    """Scalar geodetic<->ECEF round trip, radians mode, and input validation."""
    xyz = pm.geodetic2ecef(*lla0)
    assert xyz == approx(xyz0)
    # radians input (deg=False) must agree with the degrees result
    x, y, z = pm.geodetic2ecef(*rlla0, deg=False)
    assert x == approx(xyz[0])
    assert y == approx(xyz[1])
    assert z == approx(xyz[2])
    # latitude outside [-90, 90] is rejected
    with pytest.raises(ValueError):
        pm.geodetic2ecef(-100, lla0[1], lla0[2])
    assert pm.ecef2geodetic(*xyz) == approx(lla0)
    assert pm.ecef2geodetic(*xyz, deg=False) == approx(rlla0)
    # point on the equatorial plane at lon 45 deg, 1 m below the surface
    assert pm.ecef2geodetic((A - 1) / sqrt(2), (A - 1) / sqrt(2), 0) == approx([0, 45, -1])
# Known geodetic -> ECEF pairs 1 m below the surface at cardinal points
# (A = semi-major axis, B = semi-minor axis of the reference ellipsoid).
@pytest.mark.parametrize(
    "lla, xyz",
    [
        ((0, 0, -1), (A - 1, 0, 0)),
        ((0, 90, -1), (0, A - 1, 0)),
        ((0, -90, -1), (0, -A + 1, 0)),
        ((90, 0, -1), (0, 0, B - 1)),
        ((90, 15, -1), (0, 0, B - 1)),
        ((-90, 0, -1), (0, 0, -B + 1)),
    ],
)
def test_geodetic2ecef(lla, xyz):
    # absolute tolerance atol_dist is 1 micrometer (module constant)
    assert pm.geodetic2ecef(*lla) == approx(xyz, abs=atol_dist)
# Inverse direction for the same cardinal points.
@pytest.mark.parametrize(
    "xyz, lla",
    [
        ((A - 1, 0, 0), (0, 0, -1)),
        ((0, A - 1, 0), (0, 90, -1)),
        ((0, 0, B - 1), (90, 0, -1)),
        ((0, 0, -B + 1), (-90, 0, -1)),
        ((-A + 1, 0, 0), (0, 180, -1)),
    ],
)
def test_ecef2geodetic(xyz, lla):
    lat, lon, alt = pm.ecef2geodetic(*xyz)
    assert lat == approx(lla[0])
    assert lon == approx(lla[1])
    assert alt == approx(lla[2])
# aer: (azimuth deg, elevation deg, slant range m) observed from lla0.
@pytest.mark.parametrize(
    "aer,lla,lla0",
    [
        ((33, 77, 1000), (42.0016981935, -81.99852, 1174.374035), (42, -82, 200)),
        ((0, 90, 10000), (0, 0, 10000), (0, 0, 0)),
    ],
)
def test_aer_geodetic(aer, lla, lla0):
    lat1, lon1, alt1 = pm.aer2geodetic(*aer, *lla0)
    assert lat1 == approx(lla[0])
    assert lon1 == approx(lla[1])
    assert alt1 == approx(lla[2])
    # scalar inputs must stay plain floats (no accidental array promotion)
    assert isinstance(lat1, float)
    assert isinstance(lon1, float)
    assert isinstance(alt1, float)
    raer = (radians(aer[0]), radians(aer[1]), aer[2])
    rlla0 = (radians(lla0[0]), radians(lla0[1]), lla0[2])
    # radians mode must agree with the degrees computation
    assert pm.aer2geodetic(*raer, *rlla0, deg=False) == approx(
        (radians(lla[0]), radians(lla[1]), lla[2])
    )
    # negative slant range is invalid
    with pytest.raises(ValueError):
        pm.aer2geodetic(aer[0], aer[1], -1, *lla0)
    # inverse transform recovers the original AER within 0.1%
    assert pm.geodetic2aer(*lla, *lla0) == approx(aer, rel=1e-3)
    assert pm.geodetic2aer(radians(lla[0]), radians(lla[1]), lla[2], *rlla0, deg=False) == approx(
        raer, rel=1e-3
    )
def test_scalar_nan():
    """NaN inputs propagate to NaN outputs for scalar transforms."""
    a, e, r = pm.geodetic2aer(nan, nan, nan, *lla0)
    assert isnan(a) and isnan(e) and isnan(r)
    lat, lon, alt = pm.aer2geodetic(nan, nan, nan, *lla0)
    assert isnan(lat) and isnan(lon) and isnan(alt)
def test_allnan():
    """All-NaN arrays stay all-NaN through geodetic2aer/aer2geodetic."""
    np = pytest.importorskip("numpy")
    anan = np.empty((10, 10))
    anan.fill(nan)
    assert np.isnan(pm.geodetic2aer(anan, anan, anan, *lla0)).all()
    assert np.isnan(pm.aer2geodetic(anan, anan, anan, *lla0)).all()
def test_somenan():
    """A NaN row must not corrupt the valid rows of a vectorized call."""
    np = pytest.importorskip("numpy")
    xyz = np.stack((xyz0, (nan, nan, nan)))
    lat, lon, alt = pm.ecef2geodetic(xyz[:, 0], xyz[:, 1], xyz[:, 2])
    # first (valid) point still converts correctly
    assert (lat[0], lon[0], alt[0]) == approx(lla0)
| 28.138393 | 98 | 0.592734 |
4886bbbb6fa5ef88b71363b9bfc9104b6a1fa2e9 | 2,616 | py | Python | tests/codegen/allocation_lifetime_test.py | tobiasholenstein/dace | 38fb56d12b59aa8dfe8bb1ff0068e29c5c75efc9 | [
"BSD-3-Clause"
] | null | null | null | tests/codegen/allocation_lifetime_test.py | tobiasholenstein/dace | 38fb56d12b59aa8dfe8bb1ff0068e29c5c75efc9 | [
"BSD-3-Clause"
] | null | null | null | tests/codegen/allocation_lifetime_test.py | tobiasholenstein/dace | 38fb56d12b59aa8dfe8bb1ff0068e29c5c75efc9 | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2019-2020 ETH Zurich and the DaCe authors. All rights reserved.
""" Tests different allocation lifetimes. """
import dace
import numpy as np
N = dace.symbol('N')
def test_alloc_persistent_register():
    """ Tries to allocate persistent register array. Should fail. """
    @dace.program
    def lifetimetest(input: dace.float64[N]):
        tmp = dace.ndarray([1], input.dtype)
        return tmp + 1
    sdfg: dace.SDFG = lifetimetest.to_sdfg()
    # Register storage combined with Persistent lifetime is contradictory:
    # registers cannot outlive the program, so validation must reject it.
    sdfg.arrays['tmp'].storage = dace.StorageType.Register
    sdfg.arrays['tmp'].lifetime = dace.AllocationLifetime.Persistent
    try:
        sdfg.validate()
        raise AssertionError('SDFG should not be valid')
    except dace.sdfg.InvalidSDFGError:
        print('Exception caught, test passed')
def test_alloc_persistent():
    """Persistent scratch memory keeps its value across compiled-SDFG calls."""
    @dace.program
    def persistentmem(output: dace.int32[1]):
        tmp = dace.ndarray([1],
                           output.dtype,
                           lifetime=dace.AllocationLifetime.Persistent)
        # First call (output == 1) initializes tmp; later calls accumulate.
        if output[0] == 1.0:
            tmp[0] = 0
        else:
            tmp[0] += 3
        output[0] = tmp[0]
    # Repeatedly invoke program. Since memory is persistent, output is expected
    # to increase with each call
    csdfg = persistentmem.compile()
    value = np.ones([1], dtype=np.int32)
    csdfg(output=value)
    assert value[0] == 1
    value[0] = 2
    csdfg(output=value)
    assert value[0] == 3
    csdfg(output=value)
    assert value[0] == 6
    # drop the compiled object so the persistent buffer is released
    del csdfg
def test_alloc_persistent_threadlocal():
    """Same accumulation test, but with CPU thread-local persistent storage."""
    @dace.program
    def persistentmem(output: dace.int32[1]):
        tmp = dace.ndarray([1],
                           output.dtype,
                           storage=dace.StorageType.CPU_ThreadLocal,
                           lifetime=dace.AllocationLifetime.Persistent)
        if output[0] == 1.0:
            # single-iteration maps exercise the thread-local code path
            for i in dace.map[0:1]:
                tmp[i] = 0
        else:
            for i in dace.map[0:1]:
                tmp[i] += 3
                output[i] = tmp[i]
    # Repeatedly invoke program. Since memory is persistent, output is expected
    # to increase with each call
    csdfg = persistentmem.compile()
    value = np.ones([1], dtype=np.int32)
    csdfg(output=value)
    assert value[0] == 1
    value[0] = 2
    csdfg(output=value)
    assert value[0] == 3
    csdfg(output=value)
    assert value[0] == 6
    del csdfg
if __name__ == '__main__':
    # Allow running this test module directly, without pytest.
    test_alloc_persistent_register()
    test_alloc_persistent()
    test_alloc_persistent_threadlocal()
| 29.727273 | 80 | 0.580657 |
28ee0316bb0fbc3d3715e9cbdcc24c63bdd82d0f | 475 | py | Python | scripts/table_reader_demo.py | kekec14/pyLSV2 | 9280e04364816bdadd5d20282d76c52a133a96e1 | [
"MIT"
] | null | null | null | scripts/table_reader_demo.py | kekec14/pyLSV2 | 9280e04364816bdadd5d20282d76c52a133a96e1 | [
"MIT"
] | null | null | null | scripts/table_reader_demo.py | kekec14/pyLSV2 | 9280e04364816bdadd5d20282d76c52a133a96e1 | [
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Demo: parse every Heidenhain table file shipped in ../data/ with pyLSV2.
import logging
import pathlib
from pyLSV2 import TableReader
logging.basicConfig(level=logging.DEBUG)
if __name__ == "__main__":
    data_dir = pathlib.Path('../data/')
    # one sub-folder per control family (iTNC 530, Pilot 640, TNC 640)
    for subfolder in ('table_itnc530', 'table_pilot640', 'table_tnc640'):
        current_folder = data_dir.joinpath(subfolder)
        for table_path in current_folder.glob('*.*'):
            # fresh reader per file; parse_table logs its findings
            tr = TableReader()
            tr.parse_table(table_path)
e9d41e539b436c9200856ad9d4b0db7c5ddd9d2a | 27,866 | py | Python | venv/Lib/site-packages/mysql-connector-2.2.9/tests/__init__.py | hyd2018/Python-site-packages | be02187ce7773ad735c1f0ad7a73d59e4578ffdb | [
"MIT"
] | null | null | null | venv/Lib/site-packages/mysql-connector-2.2.9/tests/__init__.py | hyd2018/Python-site-packages | be02187ce7773ad735c1f0ad7a73d59e4578ffdb | [
"MIT"
] | null | null | null | venv/Lib/site-packages/mysql-connector-2.2.9/tests/__init__.py | hyd2018/Python-site-packages | be02187ce7773ad735c1f0ad7a73d59e4578ffdb | [
"MIT"
] | null | null | null | # MySQL Connector/Python - MySQL driver written in Python.
# Copyright (c) 2013, 2016, Oracle and/or its affiliates. All rights reserved.
# MySQL Connector/Python is licensed under the terms of the GPLv2
# <http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most
# MySQL Connectors. There are special exceptions to the terms and
# conditions of the GPLv2 as it is applied to this software, see the
# FOSS License Exception
# <http://www.mysql.com/about/legal/licensing/foss-exception.html>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Unittests
"""
import os
import sys
import re
import socket
import datetime
import inspect
import platform
import unittest
import logging
import shutil
import subprocess
import errno
import traceback
from imp import load_source
from functools import wraps
from pkgutil import walk_packages
LOGGER_NAME = "myconnpy_tests"
LOGGER = logging.getLogger(LOGGER_NAME)
PY2 = sys.version_info[0] == 2
_CACHED_TESTCASES = []
try:
from unittest.util import strclass
except ImportError:
# Python v2
from unittest import _strclass as strclass # pylint: disable=E0611
try:
from unittest.case import SkipTest
except ImportError:
if sys.version_info[0:2] == (3, 1):
from unittest import SkipTest
elif sys.version_info[0:2] == (2, 6):
# Support skipping tests for Python v2.6
from tests.py26 import test_skip, test_skip_if, SkipTest
unittest.skip = test_skip
unittest.skipIf = test_skip_if
else:
LOGGER.error("Could not initialize Python's unittest module")
sys.exit(1)
from lib.cpy_distutils import get_mysql_config_info
SSL_AVAILABLE = True
try:
import ssl
except ImportError:
SSL_AVAILABLE = False
# Note that IPv6 support for Python is checked here, but it can be disabled
# when the bind_address of MySQL was not set to '::1'.
IPV6_AVAILABLE = socket.has_ipv6
OLD_UNITTEST = sys.version_info[0:2] in [(2, 6)]
if os.name == 'nt':
WINDOWS_VERSION = platform.win32_ver()[1]
WINDOWS_VERSION_INFO = [0] * 2
for i, value in enumerate(WINDOWS_VERSION.split('.')[0:2]):
WINDOWS_VERSION_INFO[i] = int(value)
WINDOWS_VERSION_INFO = tuple(WINDOWS_VERSION_INFO)
else:
WINDOWS_VERSION = None
WINDOWS_VERSION_INFO = ()
# Following dictionary holds messages which were added by test cases
# but only logged at the end.
MESSAGES = {
'WARNINGS': [],
'INFO': [],
'SKIPPED': [],
}
OPTIONS_INIT = False
MYSQL_SERVERS_NEEDED = 1
MYSQL_SERVERS = []
MYSQL_VERSION = ()
MYSQL_VERSION_TXT = ''
MYSQL_DUMMY = None
MYSQL_DUMMY_THREAD = None
SSL_DIR = os.path.join('tests', 'data', 'ssl')
SSL_CA = os.path.abspath(os.path.join(SSL_DIR, 'tests_CA_cert.pem'))
SSL_CERT = os.path.abspath(os.path.join(SSL_DIR, 'tests_client_cert.pem'))
SSL_KEY = os.path.abspath(os.path.join(SSL_DIR, 'tests_client_key.pem'))
TEST_BUILD_DIR = None
MYSQL_CAPI = None
DJANGO_VERSION = None
FABRIC_CONFIG = None
__all__ = [
'MySQLConnectorTests',
'MySQLxTests',
'get_test_names', 'printmsg',
'LOGGER_NAME',
'DummySocket',
'SSL_DIR',
'get_test_modules',
'MESSAGES',
'setup_logger',
'install_connector',
'TEST_BUILD_DIR',
]
class DummySocket(object):
    """Dummy socket class

    This class helps to test socket connection without actually making any
    network activity. It is a proxy class using socket.socket: unknown
    attributes are delegated to a real (never connected) socket object,
    while recv/send and friends operate on in-memory buffers.
    """
    def __init__(self, *args):
        self._socket = socket.socket(*args)
        # bytes the fake "server" will hand out via recv()/recv_into()
        self._server_replies = bytearray(b'')
        # every buffer passed to send()/sendall(), in order
        self._client_sends = []
        # when non-zero, recv()/recv_into()/send() raise socket.error(errno)
        self._raise_socket_error = 0
    def __getattr__(self, attr):
        # Fall back to the real socket for anything not overridden here.
        return getattr(self._socket, attr)
    def raise_socket_error(self, err=errno.EPERM):
        # Arm the next recv/recv_into/send call to fail with errno *err*.
        self._raise_socket_error = err
    def recv(self, bufsize=4096, flags=0):
        if self._raise_socket_error:
            raise socket.error(self._raise_socket_error)
        # Hand out up to bufsize bytes and consume them from the buffer.
        res = self._server_replies[0:bufsize]
        self._server_replies = self._server_replies[bufsize:]
        return res
    def recv_into(self, buffer_, nbytes=0, flags=0):
        if self._raise_socket_error:
            raise socket.error(self._raise_socket_error)
        if nbytes == 0:
            nbytes = len(buffer_)
        try:
            buffer_[0:nbytes] = self._server_replies[0:nbytes]
        except (IndexError, TypeError) as err:
            # buffer_ does not support slice assignment -> nothing written
            return 0
        except ValueError:
            # NOTE(review): length-mismatch on slice assignment is ignored
            # and the replies are consumed anyway -- confirm intentional.
            pass
        self._server_replies = self._server_replies[nbytes:]
        return len(buffer_)
    def send(self, string, flags=0):
        if self._raise_socket_error:
            raise socket.error(self._raise_socket_error)
        # Record what the client tried to send; report full delivery.
        self._client_sends.append(bytearray(string))
        return len(string)
    def sendall(self, string, flags=0):
        self._client_sends.append(bytearray(string))
        return None
    def add_packet(self, packet):
        # Queue one server reply packet.
        self._server_replies += packet
    def add_packets(self, packets):
        # Queue several server reply packets in order.
        for packet in packets:
            self._server_replies += packet
    def reset(self):
        # Clear error flag and both direction buffers.
        self._raise_socket_error = 0
        self._server_replies = bytearray(b'')
        self._client_sends = []
    def get_address(self):
        return 'dummy'
def get_test_modules():
    """Get list of Python modules containing tests

    This function scans the tests/ folder for Python modules which name
    start with 'test_'. It will return the dotted name of the module with
    submodules together with the first line of the doc string found in
    the module.

    The result is a sorted list of tuples and each tuple is
    (name, module_dotted_path, description)

    For example:
    ('cext_connection', 'tests.cext.cext_connection', 'This module..')

    Returns a list of tuples.
    """
    global _CACHED_TESTCASES
    # Result is cached module-wide: scanning imports every test module.
    if _CACHED_TESTCASES:
        return _CACHED_TESTCASES
    testcases = []
    # Extracts the short name after the last 'test_' prefix.
    pattern = re.compile('.*test_(.*)')
    for finder, name, is_pkg in walk_packages(__path__, prefix=__name__+'.'):
        # Skip non-test modules and suites whose prerequisites are absent
        # (Django, Fabric, or the C Extension).
        if ('.test_' not in name or
                ('django' in name and not DJANGO_VERSION) or
                ('fabric' in name and not FABRIC_CONFIG) or
                ('cext' in name and not MYSQL_CAPI)):
            continue
        module_path = os.path.join(finder.path, name.split('.')[-1] + '.py')
        dsc = '(description not available)'
        try:
            mod = load_source(name, module_path)
        except IOError as exc:
            # Not Python source files
            continue
        except ImportError as exc:
            # Likely the C Extension failed to load; diagnose and abort.
            check_c_extension(exc)
        else:
            try:
                dsc = mod.__doc__.splitlines()[0]
            except AttributeError:
                # No description available
                pass
        testcases.append((pattern.match(name).group(1), name, dsc))
    testcases.sort(key=lambda x: x[0], reverse=False)
    # 'Unimport' modules so they can be correctly imported when tests run
    for _, module, _ in testcases:
        sys.modules.pop(module, None)
    _CACHED_TESTCASES = testcases
    return testcases
def get_test_names():
    """Get test names

    This function gets the names of Python modules containing tests. The
    name is parsed from files prefixed with 'test_'. For example,
    'test_cursor.py' has name 'cursor'.

    Returns a list of strings.
    """
    # The first element of each get_test_modules() tuple is already the
    # parsed short name; the regex previously compiled here was never used.
    return [mod[0] for mod in get_test_modules()]
def set_nr_mysql_servers(number):
    """Set the number of MySQL servers needed

    Record how many MySQL servers the unit tests require. The *number*
    argument must be an integer between 1 and 16 (16 being the hard
    limit). Test modules call this near the top, right after imports.
    The module-wide requirement only ever grows; asking for fewer
    servers than already requested is a no-op.

    Raises AttributeError on invalid input.
    """
    global MYSQL_SERVERS_NEEDED  # pylint: disable=W0603
    if not isinstance(number, int) or not 1 <= number <= 16:
        raise AttributeError(
            "number of MySQL servers should be a value between 1 and 16")
    MYSQL_SERVERS_NEEDED = max(MYSQL_SERVERS_NEEDED, number)
def fake_hostname():
    """Return a fake hostname

    Produce an 8-character hexadecimal string derived from 4 random
    bytes, usable as a fake hostname. No domain name is appended.

    Returns a string.
    """
    raw = os.urandom(4)
    if PY2:
        # Python 2: iterating bytes yields 1-char strings, so ord() first.
        return ''.join('%02x' % ord(byte) for byte in raw)
    return ''.join('%02x' % byte for byte in raw)
def get_mysqlx_config(name=None, index=None):
    """Get MySQLx enabled server configuration for running MySQL server

    If no name is given, then we will return the configuration of the
    first added.
    """
    # No selector at all: default to the first registered server.
    if not name and not index:
        return MYSQL_SERVERS[0].xplugin_config.copy()
    if name:
        # Linear search by server name; a copy is returned so callers
        # may mutate the dict freely.
        for server in MYSQL_SERVERS:
            if server.name == name:
                return server.xplugin_config.copy()
    elif index:
        # NOTE(review): index 0 is falsy and is handled by the first
        # branch above -- confirm callers never rely on index=0 here.
        return MYSQL_SERVERS[index].xplugin_config.copy()
    # Name not found (or no branch matched): explicit None.
    return None
def get_mysql_config(name=None, index=None):
    """Get MySQL server configuration for running MySQL server

    If no name is given, then we will return the configuration of the
    first added.
    """
    # Mirrors get_mysqlx_config() but returns the classic-protocol
    # client configuration instead of the X Plugin configuration.
    if not name and not index:
        return MYSQL_SERVERS[0].client_config.copy()
    if name:
        for server in MYSQL_SERVERS:
            if server.name == name:
                return server.client_config.copy()
    elif index:
        # NOTE(review): index 0 is falsy and falls into the first branch.
        return MYSQL_SERVERS[index].client_config.copy()
    # Name not found: explicit None.
    return None
def have_engine(cnx, engine):
    """Check support for given storage engine

    This function checks if the MySQL server accessed through cnx has
    support for the storage engine.

    Returns True or False.
    """
    have = False
    engine = engine.lower()
    cur = cnx.cursor()
    # Should use INFORMATION_SCHEMA, but play nice with v4.1
    cur.execute("SHOW ENGINES")
    rows = cur.fetchall()
    for row in rows:
        # row[0] = engine name, row[1] = support status ('YES'/'NO'/...)
        if row[0].lower() == engine:
            if row[1].lower() == 'yes':
                have = True
            break
    cur.close()
    return have
def cmp_result(result1, result2):
    """Compare results (list of tuples) coming from MySQL

    For certain results, like SHOW VARIABLES or SHOW WARNINGS, the
    row order is unpredictable. This helper therefore checks that both
    results have the same length and that each row of result1 appears
    somewhere in result2.

    Non-comparable inputs (e.g. None) compare as unequal.

    Returns True or False.
    """
    try:
        if len(result1) != len(result2):
            return False
        for row in result1:
            if row not in result2:
                return False
    except TypeError:
        # Narrowed from a bare ``except:``: len()/``in`` on unsized or
        # non-container input raises TypeError; anything else is a bug
        # that should surface instead of being silently swallowed.
        return False
    return True
class UTCTimeZone(datetime.tzinfo):
    """Concrete tzinfo representing UTC: zero offset, no daylight saving."""
    def __init__(self):
        # Stateless; nothing to initialize.
        pass
    def utcoffset(self, dt):
        # UTC is, by definition, zero hours from itself.
        return datetime.timedelta(hours=0)
    def dst(self, dt):
        # UTC never observes daylight saving time.
        return datetime.timedelta(hours=0)
    def tzname(self, dt):
        return 'UTC'
class TestTimeZone(datetime.tzinfo):
    """Fixed-offset time zone for tests; offset given in whole hours."""
    def __init__(self, hours=0):
        # Keep the raw hour count and build timedeltas on demand.
        self._hours = hours
    def utcoffset(self, dt):
        return datetime.timedelta(hours=self._hours)
    def dst(self, dt):
        # This test zone never applies a DST correction.
        return datetime.timedelta(hours=0)
    def tzname(self, dt):
        return 'TestZone'
def cnx_config(**extra_config):
    """Decorator factory for test methods needing a connection config.

    Ensures the test instance carries a ``config`` dict (seeded from
    get_mysql_config() when missing) and merges *extra_config* into it
    before invoking the decorated test method.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            if not hasattr(self, 'config'):
                self.config = get_mysql_config()
            # Overlay the per-test overrides onto the base configuration.
            for option, value in extra_config.items():
                self.config[option] = value
            func(self, *args, **kwargs)
        return wrapper
    return decorator
def foreach_cnx(*cnx_classes, **extra_config):
    """Decorator factory: run a test method once per connection class.

    With no positional arguments the instance's ``all_cnx_classes`` list
    is used (pure-Python and, when available, C Extension connection).
    ``extra_config`` entries are merged into ``self.config`` first.
    """
    def _use_cnx(func):
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            if not hasattr(self, 'config'):
                self.config = get_mysql_config()
            if extra_config:
                for key, value in extra_config.items():
                    self.config[key] = value
            for cnx_class in cnx_classes or self.all_cnx_classes:
                try:
                    self.cnx = cnx_class(**self.config)
                    # Rename the test so failures show which class ran.
                    self._testMethodName = "{0} (using {1})".format(
                        func.__name__, cnx_class.__name__)
                except Exception as exc:
                    if hasattr(self, 'cnx'):
                        # We will rollback/close later
                        pass
                    else:
                        # No connection at all: report and abort the test.
                        traceback.print_exc(file=sys.stdout)
                        raise exc
                try:
                    func(self, *args, **kwargs)
                except Exception as exc:
                    traceback.print_exc(file=sys.stdout)
                    raise exc
                finally:
                    # Best-effort cleanup after every iteration.
                    try:
                        self.cnx.rollback()
                        self.cnx.close()
                    except:
                        # Might already be closed.
                        pass
        return wrapper
    return _use_cnx
class MySQLConnectorTests(unittest.TestCase):
    """Base test case for Connector/Python tests.

    Collects the available connection classes (pure Python and, if the
    C Extension loads, the C-based one) and provides helper assertions
    plus compatibility shims for older Python/unittest versions.
    """
    def __init__(self, methodName='runTest'):
        # Imported lazily so merely importing this module does not
        # require Connector/Python to be installed yet.
        from mysql.connector import connection
        self.all_cnx_classes = [connection.MySQLConnection]
        self.maxDiff = 64
        try:
            # Probe for the C Extension; the import result is unused.
            import _mysql_connector
            from mysql.connector import connection_cext
        except ImportError:
            self.have_cext = False
        else:
            self.have_cext = True
            self.all_cnx_classes.append(connection_cext.CMySQLConnection)
        super(MySQLConnectorTests, self).__init__(methodName=methodName)
    def __str__(self):
        # Shorten "testsNN.test_foo.Class.method" to "foo.Class.method".
        classname = strclass(self.__class__)
        return "{classname}.{method}".format(
            method=self._testMethodName,
            classname=re.sub(r"tests\d*.test_", "", classname)
        )
    def check_attr(self, obj, attrname, default):
        # Assert *obj* exposes *attrname* and that it defaults to *default*.
        cls_name = obj.__class__.__name__
        self.assertTrue(
            hasattr(obj, attrname),
            "{name} object has no '{attr}' attribute".format(
                name=cls_name, attr=attrname))
        self.assertEqual(
            default,
            getattr(obj, attrname),
            "{name} object's '{attr}' should "
            "default to {type_} '{default}'".format(
                name=cls_name,
                attr=attrname,
                type_=type(default).__name__,
                default=default))
    def check_method(self, obj, method):
        # Assert *obj* has *method* and that it is a bound method.
        cls_name = obj.__class__.__name__
        self.assertTrue(
            hasattr(obj, method),
            "{0} object has no '{1}' method".format(cls_name, method))
        self.assertTrue(
            inspect.ismethod(getattr(obj, method)),
            "{0} object defines {1}, but is not a method".format(
                cls_name, method))
    def check_args(self, function, supported_arguments):
        # Verify *function*'s keyword arguments and defaults exactly
        # match *supported_arguments* (no extra, no missing, no drift).
        # NOTE(review): inspect.getargspec() is removed in Python 3.11;
        # migrating to inspect.signature() should be considered.
        argspec = inspect.getargspec(function)
        function_arguments = dict(zip(argspec[0][1:], argspec[3]))
        for argument, default in function_arguments.items():
            try:
                self.assertEqual(
                    supported_arguments[argument],
                    default,
                    msg="Argument '{0}' has wrong default".format(argument))
            except KeyError:
                self.fail("Found unsupported or new argument '%s'" % argument)
        for argument, default in supported_arguments.items():
            if not argument in function_arguments:
                self.fail("Supported argument '{0}' fails".format(argument))
    # unittest's addSkip signature changed in Python 3.4; pick the shim
    # matching the running interpreter at class-definition time.
    if sys.version_info[0:2] >= (3, 4):
        def _addSkip(self, result, test_case, reason):
            add_skip = getattr(result, 'addSkip', None)
            if add_skip:
                add_skip(test_case, self._testMethodName + ': ' + reason)
    else:
        def _addSkip(self, result, reason):
            add_skip = getattr(result, 'addSkip', None)
            if add_skip:
                add_skip(self, self._testMethodName + ': ' + reason)
    if sys.version_info[0:2] == (2, 6):
        # Backport handy asserts from 2.7
        def assertIsInstance(self, obj, cls, msg=None):
            if not isinstance(obj, cls):
                # NOTE(review): unittest.util has safe_repr but no repr();
                # this failure path would raise AttributeError -- confirm.
                msg = "{0} is not an instance of {1}".format(
                    unittest.util.safe_repr(obj), unittest.util.repr(cls))
                self.fail(self._formatMessage(msg, msg))
        def assertGreater(self, a, b, msg=None):
            if not a > b:
                msg = "{0} not greater than {1}".format(
                    unittest.util.safe_repr(a), unittest.util.safe_repr(b))
                self.fail(self._formatMessage(msg, msg))
    def run(self, result=None):
        # Python 2.6 lacks native skip support; emulate it here, then
        # delegate to the standard unittest runner.
        if sys.version_info[0:2] == (2, 6):
            test_method = getattr(self, self._testMethodName)
            if (getattr(self.__class__, "__unittest_skip__", False) or
                    getattr(test_method, "__unittest_skip__", False)):
                # We skipped a class
                try:
                    why = (
                        getattr(self.__class__, '__unittest_skip_why__', '')
                        or
                        getattr(test_method, '__unittest_skip_why__', '')
                    )
                    self._addSkip(result, why)
                finally:
                    result.stopTest(self)
                return
        if PY2:
            return super(MySQLConnectorTests, self).run(result)
        else:
            return super().run(result)
    def check_namedtuple(self, tocheck, attrs):
        # Assert the namedtuple *tocheck* exposes every name in *attrs*.
        for attr in attrs:
            try:
                getattr(tocheck, attr)
            except AttributeError:
                self.fail("Attribute '{0}' not part of namedtuple {1}".format(
                    attr, tocheck))
class TestsCursor(MySQLConnectorTests):
    """Base test case providing setup/cleanup of a simple test table."""
    def _test_execute_setup(self, cnx, tbl="myconnpy_cursor", engine="MyISAM"):
        # Drop any leftover table first so the CREATE cannot collide.
        self._test_execute_cleanup(cnx, tbl)
        stmt_create = (
            "CREATE TABLE {table} "
            "(col1 INT, col2 VARCHAR(30), PRIMARY KEY (col1))"
            "ENGINE={engine}").format(
                table=tbl, engine=engine)
        try:
            cur = cnx.cursor()
            cur.execute(stmt_create)
        except Exception as err:  # pylint: disable=W0703
            # self.fail() raises, so cur.close() below only runs on success.
            self.fail("Failed setting up test table; {0}".format(err))
        cur.close()
    def _test_execute_cleanup(self, cnx, tbl="myconnpy_cursor"):
        # Best-effort drop; IF EXISTS makes a missing table a no-op.
        stmt_drop = "DROP TABLE IF EXISTS {table}".format(table=tbl)
        try:
            cur = cnx.cursor()
            cur.execute(stmt_drop)
        except Exception as err:  # pylint: disable=W0703
            self.fail("Failed cleaning up test table; {0}".format(err))
        cur.close()
class CMySQLConnectorTests(MySQLConnectorTests):
    """Base test case for tests exercising the MySQL C API extension."""
    def connc_connect_args(self, recache=False):
        """Get connection arguments for the MySQL C API

        Get the connection arguments suitable for the MySQL C API
        from the Connector/Python arguments. This method sets the member
        variable connc_kwargs as well as returning a copy of connc_kwargs.

        If recache is True, the information stored in connc_kwargs will
        be refreshed.

        :param recache: Force rebuilding the cached argument dict.
        :return: Dictionary containing connection arguments.
        :rtype: dict
        """
        self.config = get_mysql_config().copy()
        # Bug fixes: the original called the non-existent method
        # ``self.hasattr(...)`` (AttributeError on every invocation) and
        # filled the misspelled, never-initialized ``self.connect_kwargs``.
        if not hasattr(self, 'connc_kwargs') or recache is True:
            connect_args = [
                "host", "user", "password", "database",
                "port", "unix_socket", "client_flags"
            ]
            self.connc_kwargs = {}
            for key, value in self.config.items():
                if key in connect_args:
                    self.connc_kwargs[key] = value
        # Return a copy so callers cannot mutate the cached dict.
        return self.connc_kwargs.copy()
class CMySQLCursorTests(CMySQLConnectorTests):
    """Base test case managing test tables over a C Extension connection."""
    # NOTE: class-level list shared by all instances; tables are tracked
    # here so tearDown can drop whatever a test created.
    _cleanup_tables = []
    def setUp(self):
        self.config = get_mysql_config()
        # Import here allowed
        from mysql.connector.connection_cext import CMySQLConnection
        self.cnx = CMySQLConnection(**self.config)
    def tearDown(self):
        self.cleanup_tables(self.cnx)
        self.cnx.close()
    def setup_table(self, cnx, tbl="myconnpy_cursor", engine="InnoDB"):
        """Create the test table *tbl*, dropping any previous instance."""
        self.cleanup_table(cnx, tbl)
        stmt_create = (
            "CREATE TABLE {table} "
            "(col1 INT AUTO_INCREMENT, "
            "col2 VARCHAR(30), "
            "col3 INT NOT NULL DEFAULT 0, "
            "PRIMARY KEY (col1))"
            "ENGINE={engine}").format(
                table=tbl, engine=engine)
        try:
            cnx.cmd_query(stmt_create)
        except Exception as err:  # pylint: disable=W0703
            cnx.rollback()
            self.fail("Failed setting up test table; {0}".format(err))
        else:
            cnx.commit()
        # Only reached on success (self.fail raises); register for cleanup.
        self._cleanup_tables.append(tbl)
    def cleanup_table(self, cnx, tbl="myconnpy_cursor"):
        """Drop *tbl* (if it exists) and unregister it from cleanup."""
        stmt_drop = "DROP TABLE IF EXISTS {table}".format(table=tbl)
        # Explicit rollback: uncommited changes could otherwise block
        cnx.rollback()
        try:
            cnx.cmd_query(stmt_drop)
        except Exception as err:  # pylint: disable=W0703
            self.fail("Failed cleaning up test table; {0}".format(err))
        if tbl in self._cleanup_tables:
            self._cleanup_tables.remove(tbl)
    def cleanup_tables(self, cnx):
        """Drop every registered test table."""
        # Bug fix: iterate over a snapshot -- cleanup_table() removes
        # entries from _cleanup_tables, and mutating the list while
        # iterating it made every other table be skipped.
        for tbl in list(self._cleanup_tables):
            self.cleanup_table(cnx, tbl)
class MySQLxTests(MySQLConnectorTests):
    """Base test case for X DevAPI (mysqlx) tests.

    Inherits all helpers from MySQLConnectorTests and repeats the
    Python 2.6 skip emulation in run().
    """
    def __init__(self, methodName="runTest"):
        super(MySQLxTests, self).__init__(methodName=methodName)
    def run(self, result=None):
        # Python 2.6 lacks native skip support; emulate it before
        # delegating to the regular unittest machinery.
        if sys.version_info[0:2] == (2, 6):
            test_method = getattr(self, self._testMethodName)
            if (getattr(self.__class__, "__unittest_skip__", False) or
                    getattr(test_method, "__unittest_skip__", False)):
                # We skipped a class
                try:
                    why = (
                        getattr(self.__class__, '__unittest_skip_why__', '')
                        or
                        getattr(test_method, '__unittest_skip_why__', '')
                    )
                    self._addSkip(result, why)
                finally:
                    result.stopTest(self)
                return
        if PY2:
            return super(MySQLxTests, self).run(result)
        else:
            return super().run(result)
def printmsg(msg=None):
    """Print *msg* to stdout, doing nothing when it is None."""
    if msg is None:
        return
    print(msg)
# NOTE(review): this definition unconditionally shadows the SkipTest
# imported from unittest at the top of the module, so unittest will not
# treat tests raising this class as skipped -- confirm this is intended.
class SkipTest(Exception):
    """Exception compatible with SkipTest of Python v2.7 and later"""
def _id(obj):
"""Function defined in unittest.case which is needed for decorators"""
return obj
def test_skip(reason):
"""Skip test
This decorator is used by Python v2.6 code to keep compatible with
Python v2.7 (and later) unittest.skip.
"""
def decorator(test):
if not isinstance(test, (type, types.ClassType)):
@wraps(test)
def wrapper(*args, **kwargs):
raise SkipTest(reason)
test = wrapper
test.__unittest_skip__ = True
test.__unittest_skip_why__ = reason
return test
return decorator
def test_skip_if(condition, reason):
    """Skip test if condition is true

    This decorator is used by Python v2.6 code to keep compatible with
    Python v2.7 (and later) unittest.skipIf.
    """
    # Either a real skip decorator or the identity decorator.
    return test_skip(reason) if condition else _id
def setup_logger(logger, debug=False, logfile=None):
    """Configure *logger* with a single stream or file handler.

    :param logger: logging.Logger instance to configure.
    :param debug: When True set level DEBUG, otherwise INFO.
    :param logfile: When given, log to this file instead of the console.
    """
    formatter = logging.Formatter(
        "%(asctime)s [%(name)s:%(levelname)s] %(message)s")
    if logfile:
        handler = logging.FileHandler(logfile)
    else:
        handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger.setLevel(logging.DEBUG if debug else logging.INFO)
    # Bug fix: the original attached the handler to the module-level
    # LOGGER regardless of which logger was passed in, silently ignoring
    # the *logger* argument for handler setup.
    logger.handlers = []  # We only need one handler
    logger.addHandler(handler)
def install_connector(root_dir, install_dir, connc_location=None):
    """Install Connector/Python in working directory

    Runs ``setup.py clean --all install`` in *root_dir*, installing into
    *install_dir*. When *connc_location* is given, a static build against
    that MySQL Connector/C installation is requested. On failure the
    captured build output is written to myconnpy_install.log and the
    process exits with status 1.
    """
    logfile = 'myconnpy_install.log'
    LOGGER.info("Installing Connector/Python in {0}".format(install_dir))
    try:
        # clean up previous run
        if os.path.exists(logfile):
            os.unlink(logfile)
        shutil.rmtree(install_dir)
    except OSError:
        # Nothing to clean; that's fine.
        pass
    cmd = [
        sys.executable,
        'setup.py',
        'clean', '--all',  # necessary for removing the build/
    ]
    cmd.extend([
        'install',
        '--root', install_dir,
        '--install-lib', '.'
    ])
    if connc_location:
        cmd.extend(['--static', '--with-mysql-capi', connc_location])
    prc = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                           stderr=subprocess.STDOUT, stdout=subprocess.PIPE,
                           cwd=root_dir)
    stdout = prc.communicate()[0]
    # Bug fix: was ``is not 0`` -- identity comparison on an int literal,
    # which is implementation-dependent; compare by value instead.
    if prc.returncode != 0:
        with open(logfile, 'wb') as logfp:
            logfp.write(stdout)
        LOGGER.error("Failed installing Connector/Python, see {log}".format(
            log=logfile))
        sys.exit(1)
def check_c_extension(exc=None):
    """Check whether we can load the C Extension

    Uses the module-level MYSQL_CAPI setting (mysql_config tool location
    on POSIX, or the Connector/C installation directory on Windows) to
    determine the MySQL client library directory, then diagnoses why the
    _mysql_connector extension failed to import. Exits the process with
    status 1 when the extension is unusable.

    :param exc: An ImportError raised by a previous import attempt; when
                None, the import is attempted here.
    """
    if not MYSQL_CAPI:
        # C Extension testing not requested; nothing to check.
        return
    # Environment variable the dynamic loader consults on this platform.
    if platform.system() == "Darwin":
        libpath_var = 'DYLD_LIBRARY_PATH'
    elif platform.system() == "Windows":
        libpath_var = 'PATH'
    else:
        libpath_var = 'LD_LIBRARY_PATH'
    if not os.path.exists(MYSQL_CAPI):
        LOGGER.error("MySQL Connector/C not available using '%s'", MYSQL_CAPI)
    if os.name == 'posix':
        # MYSQL_CAPI may be an installation dir or mysql_config itself.
        if os.path.isdir(MYSQL_CAPI):
            mysql_config = os.path.join(MYSQL_CAPI, 'bin', 'mysql_config')
        else:
            mysql_config = MYSQL_CAPI
        lib_dir = get_mysql_config_info(mysql_config)['lib_dir']
    elif os.path.isdir(MYSQL_CAPI):
        lib_dir = os.path.join(MYSQL_CAPI, 'lib')
    else:
        LOGGER.error("C Extension not supported on %s", os.name)
        sys.exit(1)
    error_msg = ''
    if not exc:
        try:
            import _mysql_connector
        except ImportError as exc:
            error_msg = str(exc).strip()
    else:
        assert(isinstance(exc, ImportError))
        error_msg = str(exc).strip()
    if not error_msg:
        # Nothing to do
        return
    # Bug fix: pattern made a raw string -- '\s' in a plain string is an
    # invalid escape sequence (DeprecationWarning, future SyntaxError).
    match = re.match(r'.*Library not loaded:\s(.+)\n.*', error_msg)
    if match:
        lib_name = match.group(1)
        LOGGER.error(
            "MySQL Client library not loaded. Make sure the shared library "
            "'%s' can be loaded by Python. Tip: Add folder '%s' to "
            "environment variable '%s'.",
            lib_name, lib_dir, libpath_var)
        sys.exit(1)
    else:
        LOGGER.error("C Extension not available: %s", error_msg)
        sys.exit(1)
| 30.825221 | 79 | 0.608196 |
2d8554e5040a14df82126771ba99a4a0230423a5 | 462 | py | Python | core/base/generator.py | nanato12/private-line-bot | 862ccdb7ed1aa0299d94927b7207133842732fa4 | [
"MIT"
] | 4 | 2021-06-13T20:20:23.000Z | 2022-03-23T01:25:30.000Z | core/base/generator.py | nanato12/private-line-bot | 862ccdb7ed1aa0299d94927b7207133842732fa4 | [
"MIT"
] | null | null | null | core/base/generator.py | nanato12/private-line-bot | 862ccdb7ed1aa0299d94927b7207133842732fa4 | [
"MIT"
] | null | null | null | from abc import ABCMeta, abstractmethod
from ast import Constant, Expr
class BaseGenerator(metaclass=ABCMeta):
ast_body: list
def _ast_add_license(self) -> None:
"""append Expr to ast body"""
with open("LICENSE") as license_file:
self.ast_body.append(
Expr(value=Constant(value=f"\n{license_file.read()}")),
)
@abstractmethod
def generate(self) -> None:
"""generate py file"""
| 24.315789 | 71 | 0.616883 |
9f1a8c668377131709916d202aba9773029de2f1 | 1,423 | py | Python | 301/removeinvalidparentheses.py | cccccccccccccc/Myleetcode | fb3fa6df7c77feb2d252feea7f3507569e057c70 | [
"Apache-2.0"
] | null | null | null | 301/removeinvalidparentheses.py | cccccccccccccc/Myleetcode | fb3fa6df7c77feb2d252feea7f3507569e057c70 | [
"Apache-2.0"
] | null | null | null | 301/removeinvalidparentheses.py | cccccccccccccc/Myleetcode | fb3fa6df7c77feb2d252feea7f3507569e057c70 | [
"Apache-2.0"
] | null | null | null | from typing import List
class Solution:
def isValid(self, s: str) ->bool:
count = 0
for i in s:
if i == '(':
count+=1
if i == ')':
count -=1
if count< 0:
return False
return count == 0
def dfs(self,s: str,start: int,l: int,r: int,ans: List[str]):
if l == 0 and r == 0:
if self.isValid(s):
ans.append(s)
return
for i in range(start,len(s)):
if i != start and s[i] == s[i-1]:
continue
if s[i] == ')' and r>0:
tmpstr = s[0:i]+s[i+1:]
self.dfs(tmpstr,i,l,r-1,ans)
if s[i] == '(' and r == 0 and l > 0:
tmpstr = s[0:i]+s[i+1:]
self.dfs(tmpstr,i,l-1,r,ans)
return
def removeInvalidParentheses(self, s: str) -> List[str]:
ans = []
l = 0
r = 0
start = 0
if self.isValid(s) == True:
ans.append(s)
return ans
for i in s:
if i == '(':
l +=1
if i == ')':
if l == 0:
r+=1
else:
l-=1
self.dfs(s,start,l,r,ans)
return ans
# Quick manual smoke test of the solver on a few sample inputs.
A = Solution()
a = "()())()"  # classic example with two distinct minimal fixes
b=")("  # requires deleting both characters
c= "x("  # non-paren characters must be preserved
print(A.removeInvalidParentheses(c))
5602f140263b47b43c11fc9847b020ed726de216 | 3,706 | py | Python | experiments/Sample_Conv_Expt.py | BethanyL/PDEKoopman2 | 0006a312c3dae21e50aa55c7a0b2855b399eb66a | [
"MIT"
] | 10 | 2020-11-03T18:40:18.000Z | 2022-03-14T20:16:08.000Z | experiments/Sample_Conv_Expt.py | BethanyL/PDEKoopman2 | 0006a312c3dae21e50aa55c7a0b2855b399eb66a | [
"MIT"
] | null | null | null | experiments/Sample_Conv_Expt.py | BethanyL/PDEKoopman2 | 0006a312c3dae21e50aa55c7a0b2855b399eb66a | [
"MIT"
] | 4 | 2021-05-27T16:30:23.000Z | 2022-03-14T20:16:09.000Z | """Sample experiment file using convolutional layers for autoencoder."""
import random as r
import sys
from tensorflow import keras
from tensorflow.keras.regularizers import l1_l2
from tensorflow.keras.activations import relu
from utils import run_experiment, getdatasize
# Add the architecture path for the DenseResBlock and rel_mse
sys.path.append("../architecture/")
from ConvResBlock import ConvResBlock
from RelMSE import RelMSE
# Experiment name for saving results
expt_name = 'KS_Expt'
# Prefix of training/validation files - use relative path from this file
data_file_prefix = '../data/KS_Eqn'
# Set size of latent space, and retrieve the size of the data
n_latent = 21
_, len_time, n_inputs = getdatasize(data_file_prefix)
# Set other parameters
data_train_len = 20 # Number of training data files
L_diag = False # Whether the dynamics matrix is forced to be diagonal
num_shifts = 50 # Number of time steps to include in prediction loss
num_shifts_middle = 50 # Number of time steps to include in linearity loss
loss_weights = [1, 1, 1, 1, 1] # Weights of 5 loss functions
# Set up encoder and decoder configuration dict(s)
activation = relu
initializer = keras.initializers.VarianceScaling()
regularizer = l1_l2(0, 1e-8)
convlay_config = {'kernel_size': 4,
'strides': 1,
'padding': 'SAME',
'activation': activation,
'kernel_initializer': initializer,
'kernel_regularizer': regularizer}
poollay_config = {'pool_size': 2,
'strides': 2,
'padding': 'VALID'}
dense_config = {'activation': activation,
'kernel_initializer': initializer,
'kernel_regularizer': regularizer}
output_config = {'activation': None,
'kernel_initializer': initializer,
'kernel_regularizer': regularizer}
outer_config = {'n_inputs': n_inputs,
'num_filters': [8, 16, 32, 64],
'convlay_config': convlay_config,
'poollay_config': poollay_config,
'dense_config': dense_config,
'output_config': output_config}
inner_config = {'kernel_regularizer': regularizer}
# Set up network configuration dict
network_config = {'n_inputs': n_inputs,
'n_latent': n_latent,
'len_time': len_time,
'num_shifts': num_shifts,
'num_shifts_middle': num_shifts_middle,
'outer_encoder': ConvResBlock(**outer_config),
'outer_decoder': ConvResBlock(**outer_config),
'inner_config': inner_config,
'L_diag': L_diag}
# Aggregate all the training options in one dictionary
training_options = {'aec_only_epochs': 3,
'init_full_epochs': 15,
'best_model_epochs': 300,
'num_init_models': 20,
'loss_fn': RelMSE(),
'optimizer': keras.optimizers.Adam,
'optimizer_opts': {},
'batch_size': 32,
'data_train_len': data_train_len,
'loss_weights': loss_weights}
#
# Launch the Experiment
#
# Get a random number generator seed
random_seed = r.randint(0, 10**(10))
# Set the custom objects used in the model (for loading purposes)
custom_objs = {"RelMSE": RelMSE}
# And run the experiment!
run_experiment(random_seed=random_seed,
expt_name=expt_name,
data_file_prefix=data_file_prefix,
training_options=training_options,
network_config=network_config,
custom_objects=custom_objs)
| 34.962264 | 75 | 0.635186 |
490037e9fdeb1cbbbb9ca29a65c6fe940f45ccad | 4,552 | py | Python | parcellearning/gauss/gauss.py | kristianeschenburg/parcellearning | 93811f7d11c1c5583d8f541c7629dbbaa1785304 | [
"BSD-3-Clause"
] | 6 | 2017-07-03T23:11:29.000Z | 2022-01-04T16:41:57.000Z | parcellearning/gauss/gauss.py | kristianeschenburg/parcellearning | 93811f7d11c1c5583d8f541c7629dbbaa1785304 | [
"BSD-3-Clause"
] | null | null | null | parcellearning/gauss/gauss.py | kristianeschenburg/parcellearning | 93811f7d11c1c5583d8f541c7629dbbaa1785304 | [
"BSD-3-Clause"
] | 1 | 2018-04-24T18:01:19.000Z | 2018-04-24T18:01:19.000Z | from parcellearning.conv.gausconv import GAUSConv
import numpy as np
import dgl
from dgl import data
from dgl.data import DGLDataset
import dgl.function as fn
from dgl.nn.pytorch import edge_softmax
import torch
import torch.nn as nn
import torch.nn.functional as F
class GAUSS(nn.Module):
    """
    Stacked GAUSConv network (graph attention style) for node classification.

    Parameters:
    - - - - -
    in_dim: int
        input feature dimension
    num_layers: int
        number of GAUSConv layers before the output projection
    num_hidden: int
        number of nodes per hidden layer
    num_classes: int
        number of output classes
    num_kernels: list of length (2)
        number of independent kernels per layer (multi-kernel mechanisms)
        num_kernels[0] = hidden kernels
        num_kernels[1] = output kernels
    activation: callable
        activation function applied inside every hidden GAUSConv
    feat_drop: float
        layer-wise feature dropout rate [0,1]
    krnl_drop: float
        mechanism-wise kernel dropout rate [0,1]
    negative_slope: float
        negative slope of leaky ReLU
    allow_zero_in_degree: bool
        forwarded to every GAUSConv layer (default True)
    """

    def __init__(self,
                 in_dim,
                 num_layers,
                 num_hidden,
                 num_classes,
                 num_kernels,
                 activation,
                 feat_drop,
                 krnl_drop,
                 negative_slope,
                 allow_zero_in_degree=True):

        super(GAUSS, self).__init__()

        # Readable range checks (previously written as obscure
        # min(0, x) >= 0 / max(1, x) == 1 comparisons; the old message also
        # claimed "less than 1" although 1 was accepted).
        if feat_drop:
            assert 0 <= feat_drop <= 1, 'Dropout rate must be between 0 and 1'
        if krnl_drop:
            assert 0 <= krnl_drop <= 1, 'Dropout rate must be between 0 and 1'

        self.num_layers = num_layers
        self.num_kernels = num_kernels[0]
        self.num_out_kernels = num_kernels[-1]
        self.gauss_layers = nn.ModuleList()
        self.activation = activation

        # input layer
        self.gauss_layers.append(GAUSConv(in_feats=in_dim,
                                          out_feats=num_hidden,
                                          num_kernels=self.num_kernels,
                                          feat_drop=feat_drop,
                                          krnl_drop=krnl_drop,
                                          negative_slope=negative_slope,
                                          activation=self.activation,
                                          allow_zero_in_degree=allow_zero_in_degree))

        # hidden layers: multi-kernel outputs are concatenated in forward(),
        # so each subsequent layer consumes num_hidden * num_kernels features
        for l in range(1, num_layers):
            self.gauss_layers.append(GAUSConv(in_feats=num_hidden*self.num_kernels,
                                              out_feats=num_hidden,
                                              num_kernels=self.num_kernels,
                                              feat_drop=feat_drop,
                                              krnl_drop=krnl_drop,
                                              negative_slope=negative_slope,
                                              activation=self.activation,
                                              allow_zero_in_degree=allow_zero_in_degree))

        # output projection (no activation; kernels averaged in forward())
        self.gauss_layers.append(GAUSConv(in_feats=num_hidden*self.num_kernels,
                                          out_feats=num_classes,
                                          num_kernels=self.num_out_kernels,
                                          feat_drop=feat_drop,
                                          krnl_drop=krnl_drop,
                                          negative_slope=negative_slope,
                                          activation=None,
                                          allow_zero_in_degree=allow_zero_in_degree))

    def forward(self, g=None, inputs=None, **kwds):
        """
        Run the network.

        Parameters:
        - - - - -
        g: DGL Graph
            the graph
        inputs: tensor
            node features

        Returns:
        - - - - -
        logits: tensor
            per-node class scores, averaged over the output kernels
        """
        h = inputs
        for l in range(self.num_layers):
            h = self.gauss_layers[l](g, h)
            # concatenate the per-kernel outputs into one feature vector
            h = h.flatten(1)

        # output projection: mean over the output kernels
        logits = self.gauss_layers[-1](g, h).mean(1)

        return logits

    def save(self, filename):
        """Save the model parameters (state_dict) to *filename*."""
        torch.save(self.state_dict(), filename)
| 32.985507 | 89 | 0.496485 |
26c2d54867a46ea3ed773162912b6ef5fb627f17 | 1,978 | py | Python | my_school/my_school/wsgi.py | coderfromanotherearth/School-GIS | 5675972f8dca8f28172da0cdfc3824a87b33d831 | [
"MIT"
] | null | null | null | my_school/my_school/wsgi.py | coderfromanotherearth/School-GIS | 5675972f8dca8f28172da0cdfc3824a87b33d831 | [
"MIT"
] | 1 | 2020-06-05T22:36:02.000Z | 2020-06-05T22:36:02.000Z | my_school/my_school/wsgi.py | joelabrahamkeerickal/School-GIS | 5675972f8dca8f28172da0cdfc3824a87b33d831 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2017 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
"""
WSGI config for my_school project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os

# setdefault only applies when DJANGO_SETTINGS_MODULE is not already set in
# the environment, so deployments can still override the settings module.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "my_school.settings")

# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()

# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| 40.367347 | 79 | 0.725986 |
cab4821738bf9a5214d40a91b01d0a0bef67b876 | 38,840 | py | Python | GUI/Mserial.py | manatee-fluidics/Manatee | 7a1da6393589bde69745c89b47853bd4259333f9 | [
"Apache-2.0"
] | null | null | null | GUI/Mserial.py | manatee-fluidics/Manatee | 7a1da6393589bde69745c89b47853bd4259333f9 | [
"Apache-2.0"
] | null | null | null | GUI/Mserial.py | manatee-fluidics/Manatee | 7a1da6393589bde69745c89b47853bd4259333f9 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Tue Feb 07 20:57:35 2017
@author: User
"""
from math import sqrt, exp
import struct
import serial
import time
from math import log
import numpy as np
import multiprocessing as mp
import queue
from threading import Thread
from threading import Event as tEvent
"""
overwrite settings:
settings = {}
pumps = [{"pos" : 0,
"target" : 200,
"speed" : 1.0,
"time" : 60,
"volume" : 1.0,
"max_pos" : 9000,
"enabled" : True},
{"pos" : 0,
"target" : 200,
"speed" : 1.0,
"time" : 60,
"volume" : 1.0,
"max_pos" : 9000,
"enabled" : True},
{"pos" : 0,
"target" : 200,
"speed" : 1.0,
"time" : 60,
"volume" : 1.0,
"max_pos" : 9000,
"enabled" : True},
{"pos" : 0,
"target" : 200,
"speed" : 1.0,
"time" : 60,
"volume" : 1.0,
"max_pos" : 9000,
"enabled" : True},
{"pos" : 0,
"target" : 200,
"speed" : 1.0,
"time" : 60,
"volume" : 1.0,
"max_pos" : 9000,
"enabled" : True}
]
settings["pumps"] = pumps
settings["port"] = "COM9"
settings["baud"] = "250000"
pickle.dump( settings, open( "save.p", "wb" ) )
queue_in = mp.Queue()
queue_out = mp.Queue()
M_serial = M_serial(queue_in, queue_out)
M_serial.connect("COM10", 250000)
M_serial.disconnect()
M_serial.set_alarm_value(0, 1, 5)
M_serial.activate_alarm(0,1,1)
M_serial.sound_alarm (0, 1)
M_serial.sound_alarm (0, 0)
M_serial.home(0)
M_serial.set_speed(0,2.5)
M_serial.set_position_rel(0,20)
M_serial.set_position_abs(0,30)
M_serial.set_target(0,10)
M_serial.regulate(0,1)
M_serial.wait_volume(0,5000)
M_serial.regulate(0,0)
M_serial.wait_time(0,50)
M_serial.regulate(0,0)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000011111,0b000000001]) #channel adress (sensor program), slave address, payload
M_serial.send_to_slave(0,8,0x21,[0,0,0]) #channel adress (sensor program), slave address, payload
M_serial.send_to_slave(0,8,[0,0,0b01000001])
M_serial.get_pressure(0)
M_serial.get_position(0)
M_serial.get_speed(0)
M_serial.commands_buffered(0)
M_serial.online(1)
M_serial.set_target(0, 50)
M_serial.start_cycle(0, 0)
M_serial.home(0)
M_serial.regulate(0,1)
M_serial.wait_time(0, 3)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000000,0b0000010000])
M_serial.wait_time(0, 3)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000000,0b0000000000])
M_serial.regulate(0,0)
M_serial.wait_time(0, 3600)
M_serial.stop_cycle(0)
M_serial.online(0)
M_serial.get_position(0)
M_serial.commands_buffered(0)
fixing (1-pfa 2-triton 5-pbs)
M_serial.upload(1)
M_serial.set_target(0, 50)
M_serial.home(0)
M_serial.regulate(0,1)
M_serial.wait_time(0, 60)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000000,0b0000010000])
M_serial.wait_time(0, 120) #60
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000000,0b0000000001])
M_serial.wait_time(0, 120) #60
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000000,0b0000000000])
M_serial.home(0)
M_serial.wait_time(0, 30*60) #30*60
M_serial.regulate(0,1)
M_serial.wait_time(0, 60)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000000,0b0000010000])
M_serial.wait_time(0, 120) #60
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000000,0b0000000010])
M_serial.wait_time(0, 120) #60
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000000,0b0000000000])
M_serial.home(0)
M_serial.wait_time(0, 15*60) #15*60
M_serial.regulate(0,1)
M_serial.wait_time(0, 60)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000000,0b0000010000])
M_serial.wait_time(0, 120) #60
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000000,0b0000000000])
M_serial.home(0)
M_serial.upload(0)
M_serial.upload(0)
M_serial.home(0)
M_serial.set_target(0, 50)
M_serial.regulate(0,1)
M_serial.wait_time(0, 30)
M_serial.start_cycle(0, 0)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000011111,0b000000001]) #channel adress (sensor program), slave address, payload
M_serial.wait_time(0, 30)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000010,0b0000000010])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000001,0b0000000001])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000100,0b0000000100])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000001,0b0000000001])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000001000,0b0000001000])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000001,0b0000000001])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000010000,0b0000000010])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000001,0b0000000001])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000010,0b0000000100])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000001,0b0000000001])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000100,0b0000001000])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000001,0b0000000001])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000001000,0b0000000010])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000001,0b0000000001])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000010000,0b0000000100])
M_serial.wait_time(0, 1)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000001,0b0000000001])
M_serial.wait_time(0, 30)
M_serial.stop_cycle(0)
M_serial.upload(1)
M_serial.upload(0)
M_serial.eeprom(1)
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000000,0b0000000001])
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000000,0b0000000010])
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000000,0b0000000100])
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000000,0b0000001000])
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000000,0b0000010000])
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000000,0b0000000000])
M_serial.send_to_slave(0,8,0x21,[0b00000000,0b000000000,0b0000011111])
M_serial.set_target(0, 50)
M_serial.regulate(0,1)
M_serial.regulate(0,0)
M_serial.home(0)
M_serial.get_position(0)
M_serial.upload(1)
M_serial.home(0)
M_serial.set_target(0, 10)
M_serial.regulate(0,1)
M_serial.wait_time(0, 3)
M_serial.start_cycle(0, 3)
M_serial.send_to_slave(0,8,[0,0,0b01000001])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,[0,0,0b01110001])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,[0,0,0b01110000])
M_serial.wait_time(0, 3)
M_serial.send_to_slave(0,8,[0,0,0b00000000])
M_serial.wait_time(0, 4)
M_serial.send_to_slave(0,8,[0,0,0b00010010])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,[0,0,0b00010000])
M_serial.wait_time(0, 3)
M_serial.send_to_slave(0,8,[0,0,0b01000001])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,[0,0,0b00000000])
M_serial.wait_time(0, 3)
M_serial.send_to_slave(0,8,[0,0,0b00100100])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,[0,0,0b00100000])
M_serial.wait_time(0, 3)
M_serial.send_to_slave(0,8,[0,0,0b01000001])
M_serial.wait_time(0, 10)
M_serial.send_to_slave(0,8,[0,0,0b00000000])
M_serial.wait_time(0, 3)
M_serial.stop_cycle(0)
M_serial.upload(0)
M_serial.send_to_slave(0,8,0x21,[0,0,0b01110001])
M_serial.change_setting(0, 0, 0.1)
M_serial.change_setting(0, 1, 0.0001)
M_serial.change_setting(0, 2, 0.0001)
M_serial.change_setting(0, 3, 4000)
M_serial.change_setting(0, 4, 1)
M_serial.change_setting(0, 4, 0.369836)
M_serial.change_setting(0, 5, 33.28524)
M_serial.change_setting(0, 6, 2.5)
M_serial.change_setting(4, 7, 6) #linking
M_serial.change_setting(4, 8, 0.018)
M_serial.change_setting(4, 9, 0.04)
M_serial.change_setting(4, 10, 0)
M_serial.change_setting(0, 7, 1)
M_serial.change_setting(1, 7, 0)
M_serial.change_setting(2, 7, 0)
M_serial.change_setting(3, 7, 0)
M_serial.change_setting(4, 7, 0)
M_serial.get_position(0)
M_serial.regulate(0,1)
M_serial.regulate(0)
M_serial.set_position(2, 70000)
M_serial.home()
M_serial.set_target(0,10)
M_serial.set_valve(1,1)
M_serial.set_valve(1,0)
M_serial.wait(10)
M_serial.get_pressure(0b00000001)
M_serial.clear_buffer()
M_serial.upload(0)
M_serial.set_target(0, 32.14532452462)
M_serial.start_cycle(24)
M_serial.home()
M_serial.regulate(1)
M_serial.wait(60)
M_serial.set_valve(2,1)
M_serial.wait(30)
M_serial.set_valve(2,0)
M_serial.regulate(0)
M_serial.wait(60*60*4)
M_serial.stop_cycle()
M_serial.upload(1)
M_serial.eeprom(1)
M_serial.eeprom(1)
M_serial.eeprom(0)
M_serial.upload(0)
M_serial.upload(1)
M_serial.clear_buffer()
M_serial.upload(0)
M_serial.set_target(0, 10)
M_serial.set_target(1, 20)
M_serial.regulate(1)
M_serial.wait(60)
M_serial.start_cycle(0)
M_serial.set_valve(1,1)
M_serial.wait(30)
M_serial.set_valve(1,0)
M_serial.wait(1)
M_serial.set_valve(2,1)
M_serial.wait(15)
M_serial.set_valve(2,0)
M_serial.wait(1)
M_serial.stop_cycle()
M_serial.upload(1)
M_serial.eeprom(1)
M_serial.upload(0)
M_serial.start_cycle(5)
M_serial.wait(5)
M_serial.set_valve(2,1)
M_serial.wait(5)
M_serial.set_valve(2,0)
M_serial.stop_cycle()
M_serial.upload(1)
M_serial.eeprom(1)
"""
"""
struct {[START_BYTE][START_BYTE][COMMAND][ADDRESS][PAYLOAD3][PAYLOAD2][PAYLOAD1][PAYLOAD0][CHECKSUM1][CHECKSUM0][END_BYTE]}
setTarget(pumpAddress, targetPressure) 0x10, Sets a pump group's desired regulation pressure in sensor value (0-1024). Example: setTarget(2, 512).
setIO(ioAddress, ioValue) 0x11, Sets an IO to be an input or output. Alias for input/output can also be 0/1, respectively. Example: setIO(2, input).
setPosition(pumpAddress, distance) 0x12, Sets a pump's desired position in steps.
getPosition(pumpAddress) 0x13, Returns a pump's current distance from home in steps. A pumpAddress of 5 gets all connected pumps' distances Example: getPosition(2)
startCalibrate(pumpAddress) 0x14, [Not implemented] Begins calibration process, which allows user to change the maximum distance away from home.
movePump(pumpAddress, distance) 0x15, Moves a pump by [distance] steps relative from current position. If the given distance will move the pump more than the maximum allowed distance, the command will not execute. A pumpAddress of 5 moves all connected pumps. Example movePump(3, 1000)
getPressure(pumpAddress) 0x16, Returns a pump's current pressure in sensor read (0-1024). A pumpAddress of 5 shows all connected pumps' pressures. Example: getPressure(2)
getIO(ioAddress) 0x17, Returns current state of an IO - whether it is an input or output, and if an output, whether it is triggered or not. An IOAddress of 5 returns all connected IOs' values. Example: getIO(2)
triggerIO(ioAddress) 0x18, If the IO at ioAddress is an output, commands that IO to send a 1 ms pulse. An ioAddress of 5 triggers all IO's set as outputs. Example: triggerIO(2)
homePump(pumpAddress) 0x19, Homes all pumps.
Regulate(state) 0x20, Start(state=1)/Stop(state=0) regulation of pumps. Example: Regulate(1)
setValve(valveAddress, valveState) 0x21, Manually opens or closes a valve. A pumpAddress of 5 opens/closes all valves. If a valve is already at the desired state, the command effectively does nothing. Alias for close/open can also be 0/1, respectively. Example: setValve(2, open)
setCoupling(couplingState) 0x22, [Not implemented] Manually opens or closes the coupling valve. If the valve is already at the desired state, the command effectively does nothing. Alias for close/open can also be 0/1, respectively. Example: setCoupling(close)
getFlags() 0x23, Returns current status flags in human-readable format. The flag bits, in order, are [reserved_bit1, reserved_bit2, reserved_bit3, is_busy, is_regulating, is_homing, trigger_sent, error]. Example: getFlags()
getError() 0x24, [Not implemented] Returns error code.
primePumps() 0x25, Primes the pumps.
startCycle() 0x26, Start a cycle
stopCycle() 0x27, Stops the currently running cycle.
wait() 0x28, Waits for the specified interval before executing the next buffered command.
eeprom(state) 0x29, Write commands to(state =1) or read form (state = 0) EEPROM for stand alone program.
upload(state) 0x30, Put controller into upload mode (1), so commands are not executed/(0) finish upload and write to EEPROM
clearBuffer() 0x31, Clear command buffer
setSpeed() 0x32, set motor speed for channel can be - for home position
moveConstant() 0x33, move channel pumps constantly to max or home. set_speed to control speed
readSetting(pumpAddress, settingAddress) 0x34,
changeSetting(pumpAddress, settingAddress, value) 0x35,
"""
class M_serial():
    def __init__(self, MT_queue):
        """Set up protocol tables and per-pump state, then start the queue-handler thread.

        MT_queue -- queue used to push controller responses back to the main/GUI thread.
        """
        # Serial opcodes keyed by name; each value is a single opcode byte.
        self.hex_codes = {
                #message bytes
                "START_BYTE" : b'\xaa',
                "END_BYTE" : b'\x55',
                #program commands
                "setSpeed" : b'\x10',
                "setTarget" : b'\x11',
                "setPositionAbs" : b'\x12',
                "setPositionRel" : b'\x13',
                "setValve" : b'\x14',
                "homePump" : b'\x15',
                "regulate" : b'\x16',
                "sendToSlave" : b'\x17',
                "waitTime" : b'\x18',
                "waitVol" : b'\x19',
                "waitSlave" : b'\x20',
                "startCycle" : b'\x21',
                "stopCycle" : b'\x22',
                "activateAlarm" : b'\x23',
                "setAlarmValue" : b'\x24',
                "setAlarmAction" : b'\x25',
                "soundAlarm" : b'\x26',
                #status commands
                "upload" : b'\x40',
                "online" : b'\x41',
                "getPosition" : b'\x42',
                "getSpeed" : b'\x43',
                "getPressure" : b'\x44',
                "clearBuffer" : b'\x45',
                "readSettings" : b'\x46',
                "changeSetting" : b'\x47',
                "commandsBuffered" : b'\x48',
                "reset" : b'\x49'} #add codes for responses?
        self.ser = None  # serial handle; set by connect(), cleared by disconnect()
        self.MT_queue = MT_queue
        self.flags = [] #need to implement
        self.pumps = []
        self.serial_buffer = b''  # raw bytes accumulated for process_serial()
        # Firmware setting slot index -> human-readable setting name
        # (11 settings per pump; mirrors the change_setting address math).
        self.machine_setting_link = { 0:"Kps",
                                     1:"Kis",
                                     2:"Kds",
                                     3:"motor_calibs",
                                     4:"volume_factors",
                                     5:"max_steps",
                                     6:"max_speeds",
                                     7:"active",
                                     8:"pressure_coeff_as",
                                     9:"pressure_coeff_bs",
                                     10:"sensor_units"}
        self.settings = {}
        # One value per channel (5 pumps) for every setting.
        for k in self.machine_setting_link.values():
            self.settings[k] = [0] * 5
        self.I2C = {}
        # One pump helper per channel; `pump` is presumably defined later in
        # this file — TODO confirm.
        for i in range(5):
            self.pumps.append(pump(self, i))
        #handle queue thread: deals with commands coming from other threads
        self.stop_event_queue = tEvent()
        self.handleThread = Thread(target = self.handle_queue, args=(self.stop_event_queue, ))
        self.handleThread.start()
    def connect(self, port, baud):
        """Open the serial link to the controller.

        port -- serial port name (e.g. 'COM9'); the special value 'Test'
                substitutes a TestSer stub instead of real hardware.
        baud -- baud rate for the connection (e.g. 250000).
        """
        self.port = port
        self.baud = baud
        if self.port == 'Test':
            # Offline/dry-run mode: TestSer mimics the serial interface.
            self.ser = TestSer()
        else:
            self.ser = serial.Serial(
                port=self.port,
                baudrate=self.baud,
                parity=serial.PARITY_NONE,
                stopbits=serial.STOPBITS_ONE,
                bytesize=serial.EIGHTBITS,
                timeout=None)  # timeout=None -> blocking reads
        # Monitor thread is currently disabled:
        #self.stop_event_monitor = tEvent()
        #self.monitorThread = Thread(target = self.monitor_thread, args=(self.stop_event_monitor, ))
        #self.monitorThread.start()
        #question does the firmware send a response?
def disconnect(self):
if self.ser != None:
self.ser.close()
#self.stop_event_monitor.set()
self.ser = None
def kill(self):
self.stop_event_queue.set()
def checksum(self, message):
"""calculates fletcher checksum from hex string and appends it with the end byte"""
sum1 = sum2 = 0
for v in message:
sum1 = (sum1 + v) % 255
sum2 = (sum2 + sum1) % 255
return message + bytes([sum2]) + bytes([sum1]) + self.hex_codes["END_BYTE"]
def send_serial(self, message):
if self.ser != None:
#print "sent to firmware: "
#print message
#print [bytes([ord(x)) for x in message]
self.ser.write(message)
#time.sleep(10)
else:
print ("Error, not connected!")
def send_serial_delayed(self, delay, messages):
if self.ser != None:
time.sleep(delay)
for message in messages:
self.ser.write(message)
else:
print ("Error, not connected!")
#Serial commands
def set_speed(self, address, target):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["setSpeed"] + bytes([address]) + struct.pack("f", target)
self.send_serial(self.checksum(message))
def set_target(self, address, target):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["setTarget"] + bytes([address]) + struct.pack("f", target)
self.send_serial(self.checksum(message))
def set_position_abs(self, address, target):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["setPositionAbs"] + bytes([address]) + struct.pack("f", target)
self.send_serial(self.checksum(message))
def set_position_rel(self, address, target):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["setPositionRel"] + bytes([address]) + struct.pack("f", target)
self.send_serial(self.checksum(message))
def set_valve(self, valvenumber, state):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["setValve"] + bytes([valvenumber]) + bytes([state]) + b'\x00\x00\x00'
self.send_serial(self.checksum(message))
def home(self, address, direction):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["homePump"] + bytes([address]) + bytes([direction]) + b'\x00\x00\x00'
self.send_serial(self.checksum(message))
def regulate(self, address, state):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["regulate"] + bytes([address]) + struct.pack("f", state)
self.send_serial(self.checksum(message))
def send_to_slave(self, sladdress, command, payload):
#address: first 5 bytes: slave address, last 3 bytes schedulder address
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["sendToSlave"] + bytes([sladdress]) + bytes([command]) + bytes([payload[2]]) + bytes([payload[1]]) + bytes([payload[0]])
self.send_serial(self.checksum(message))
def wait_time(self, seconds):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["waitTime"] + b'\x00' + struct.pack("f", seconds)
self.send_serial(self.checksum(message))
def wait_slave(self, sladdress, input_num):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["waitSlave"] + bytes([sladdress]) + bytes([input_num]) + b'\x00\x00\x00'
self.send_serial(self.checksum(message))
def wait_volume(self, address, volume):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["waitVol"] + bytes([address]) + struct.pack("f", volume)
self.send_serial(self.checksum(message))
def start_cycle(self, n_cycle):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["startCycle"] + b'\x00' + struct.pack("f", n_cycle)
self.send_serial(self.checksum(message))
def stop_cycle(self):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["stopCycle"] + b'\x00\x00\x00\x00\x00'
self.send_serial(self.checksum(message))
def activate_alarm(self, address, alarm_type, state):
#alarm types: 0 max pressure; 1 min pressure; 2 timeout; 3 max speed; 4 min speed
#address: first 5 bytes: alarm type, last 3 bytes schedulder address, state:1 active - 0:inactive
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["activateAlarm"] + bytes([8*alarm_type+address]) + bytes([state]) + b'\x00\x00\x00'
self.send_serial(self.checksum(message))
def set_alarm_value(self, address, alarm_type, value):
#address: first 5 bytes: alarm type, last 3 bytes schedulder address
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["setAlarmValue"] + bytes([8*alarm_type+address]) + struct.pack("f", value)
self.send_serial(self.checksum(message))
def set_alarm_action(self, address, alarm_type, action):
#alarm actions: 0 beep and continue; 1 beep and stop; 2 beep and home;
#address: first 5 bytes: alarm type, last 3 bytes schedulder address, status:1 active - 0:inactive
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["activateAlarm"] + bytes([8*alarm_type+address]) + bytes([action])+ b'\x00\x00\x00'
self.send_serial(self.checksum(message))
def sound_alarm(self, state):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["soundAlarm"] + b'\x00' + bytes([state])+ b'\x00\x00\x00'
self.send_serial(self.checksum(message))
def upload(self, state):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["upload"] + b'\x00' + struct.pack("f", state)
self.send_serial(self.checksum(message))
def online(self, state):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["online"] + b'\x00' + struct.pack("f", state)
self.send_serial(self.checksum(message))
def get_position(self, address):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["getPosition"] + bytes([address]) + b'\x00\x00\x00\x00'
self.send_serial(self.checksum(message))
def get_speed(self, address):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["getSpeed"] + bytes([address]) + b'\x00\x00\x00\x00'
self.send_serial(self.checksum(message))
def get_pressure(self, address):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["getPressure"] + bytes([address]) + b'\x00\x00\x00\x00'
self.send_serial(self.checksum(message))
def clear_buffer(self):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["clearBuffer"] + b'\x00\x00\x00\x00\x00'
self.send_serial(self.checksum(message))
def read_settings(self):
message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["readSettings"] + b'\x00\x00\x00\x00\x00'
self.send_serial(self.checksum(message))
def change_setting(self, pumpAddress, settingAddress, value):
    # Firmware stores settings as a flat array of 11 entries per pump, so the
    # wire address is pump*11 + setting offset (process_serial decodes the
    # same way: line[3] / 11 and line[3] % 11).
    address = pumpAddress*11 + settingAddress
    # Payload: the new setting value as a 4-byte little-endian float.
    message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["changeSetting"] + bytes([address]) + struct.pack("f", value)
    self.send_serial(self.checksum(message))
def commands_buffered(self, address):
    # Ask how many commands pump 'address' currently has buffered; the reply
    # is decoded in process_serial() and forwarded as FromController_Buffer.
    message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["commandsBuffered"] + bytes([address]) + b'\x00\x00\x00\x00'
    self.send_serial(self.checksum(message))
def reset(self, address):
    # NOTE(review): 'address' is accepted but never used -- the frame always
    # carries zero address/payload bytes, so this appears to reset the whole
    # controller rather than one pump.  Confirm against firmware before
    # changing, since callers may rely on the global reset.
    message = self.hex_codes["START_BYTE"] + self.hex_codes["START_BYTE"] + self.hex_codes["reset"] + b'\x00\x00\x00\x00\x00'
    self.send_serial(self.checksum(message))
def process_serial(self):
    """Decode one 13-byte response frame from the firmware, if available.

    Frame layout: [start, start, opcode, address, 4-byte LE float payload,
    2 spare bytes, end byte, CR, LF].  A well-formed frame is decoded and
    forwarded to the UI thread via MT_queue; on a framing error the whole
    buffer is discarded so the stream can resynchronise.
    """
    line = self.serial_buffer
    if len(line) >= 13:
        # Validate framing before trusting any payload bytes.
        if line[0] == self.hex_codes["START_BYTE"][0] and line[1] == self.hex_codes["START_BYTE"][0] and line[10] == self.hex_codes["END_BYTE"][0]:
            if line[2] == self.hex_codes["readSettings"][0]:
                # Settings are addressed as pump*11 + offset (11 per pump),
                # mirroring change_setting() on the outgoing side.
                pumpAddress = int(line[3] / 11)
                settingAddress = int(line[3] % 11)
                value = np.float32(struct.unpack("f", line[4:8]))[0]
                self.settings[self.machine_setting_link[settingAddress]][pumpAddress] = value
                self.MT_queue.put(["FromController_Settings", self.settings])
            elif line[2] == self.hex_codes["getPosition"][0]:
                pumpAddress = int(line[3])
                value = np.float32(struct.unpack("f", line[4:8]))[0]
                self.MT_queue.put(["FromController_Position", [pumpAddress, value]])
            elif line[2] == self.hex_codes["getSpeed"][0]:
                pumpAddress = int(line[3])
                value = np.float32(struct.unpack("f", line[4:8]))[0]
                self.MT_queue.put(["FromController_Speed", [pumpAddress, value]])
            elif line[2] == self.hex_codes["getPressure"][0]:
                pumpAddress = int(line[3])
                value = np.float32(struct.unpack("f", line[4:8]))[0]
                self.MT_queue.put(["FromController_Pressure", [pumpAddress, value]])
            elif line[2] == self.hex_codes["commandsBuffered"][0]:
                pumpAddress = int(line[3])
                value = np.float32(struct.unpack("f", line[4:8]))[0]
                self.MT_queue.put(["FromController_Buffer", [pumpAddress, value]])
            elif line[2] == self.hex_codes["sendToSlave"][0]:
                slaveAddress = int(line[3])
                value = np.float32(struct.unpack("f", line[4:8]))[0]
                # BUG FIX: 'device' used to be left unbound (NameError) or
                # stale from a previous frame when the id was not 1/2/3;
                # default it explicitly for unknown ids.
                if value == 1:
                    device = "24 bit switch board"
                elif value == 2:
                    device = "12 bit TTL board"
                elif value == 3:
                    device = "8 bit multipinch board"
                else:
                    device = "unknown device"
                self.I2C[slaveAddress] = device
                self.MT_queue.put(["FromController_I2C", self.I2C])
            # Consume the processed frame (13 bytes incl. trailing CRLF).
            self.serial_buffer = self.serial_buffer[13:]
        else:
            # Framing error: drop everything and wait for a clean frame.
            self.serial_buffer = b''
def handle_queue(self, stop_event):
    """Worker loop: dispatch UI commands from MT_queue to the serial
    protocol methods, and feed bytes arriving from the firmware into
    process_serial().  Runs until ``stop_event`` is set.

    Commands whose tag is not handled here are returned to the queue for
    another consumer.
    """
    while not stop_event.is_set():
        # --- Drain all pending commands from the inter-thread queue. ---
        try:
            while True:
                command, q_data = self.MT_queue.get_nowait()
                if "Serial_" in command:
                    print("Serial processing ")
                # BUG FIX: the dispatch used to be split into two separate
                # if-chains, so commands handled by the first chain
                # (Connect/Disconnect/SendSerial) also fell through to the
                # second chain's trailing 'put it back' else and were
                # re-queued and re-executed forever.  A single elif chain
                # dispatches each command exactly once.
                if command == "Serial_Connect":
                    self.connect(q_data[0], int(q_data[1]))
                elif command == "Serial_Disconnect":
                    self.disconnect()
                elif command == "Serial_SendSerial":
                    self.send_serial(q_data)
                elif command == "Serial_Target":
                    self.set_target(int(q_data[0]), float(q_data[1]))
                elif command == "Serial_GetPressure":
                    self.get_pressure(int(q_data))
                elif command == "Serial_GetPosition":
                    self.get_position(int(q_data))
                elif command == "Serial_GetSpeed":
                    self.get_speed(int(q_data))
                elif command == "Serial_Home":
                    self.home(int(q_data[0]), int(q_data[1]))
                elif command == "Serial_StartAdjust":
                    self.regulate(int(q_data[0]), 1)
                elif command == "Serial_StopAdjust":
                    self.regulate(int(q_data[0]), 0)
                elif command == "Serial_StartConstant":
                    self.move_constant(int(q_data[0]), 1)
                elif command == "Serial_StopConstant":
                    self.move_constant(int(q_data[0]), 0)
                elif command == "Serial_Solenoid":
                    self.set_valve(int(q_data[0]), int(q_data[1]))
                elif command == "Serial_Speed":
                    self.set_speed(int(q_data[0]), float(q_data[1]))
                elif command == "Serial_MoveRel":
                    self.set_position_rel(int(q_data[0]), float(q_data[1]))
                elif command == "Serial_MoveAbs":
                    self.set_position_abs(int(q_data[0]), float(q_data[1]))
                elif command == "Serial_SendSetting":
                    self.change_setting(int(q_data[0]), int(q_data[1]), float(q_data[2]))
                elif command == "Serial_GetSettings":
                    self.read_settings()
                elif command == "Serial_SendSlave":
                    self.send_to_slave(int(q_data[0]), int(q_data[1]), q_data[2])
                elif command == "Serial_CommandsBuffered":
                    self.commands_buffered(int(q_data[0]))
                elif command == "Serial_StartCycle":
                    self.start_cycle(int(q_data[0]))
                elif command == "Serial_StopCycle":
                    self.stop_cycle()
                elif command == "Serial_ActivateAlarm":
                    self.activate_alarm(int(q_data[0]), int(q_data[1]), int(q_data[2]))
                elif command == "Serial_SetAlarmValue":
                    self.set_alarm_value(int(q_data[0]), int(q_data[1]), float(q_data[2]))
                elif command == "Serial_SetAlarmAction":
                    self.set_alarm_action(int(q_data[0]), int(q_data[1]), int(q_data[2]))
                elif command == "Serial_SoundAlarm":
                    self.sound_alarm(int(q_data[0]))
                elif command == "Serial_Upload":
                    self.upload(int(q_data[0]))
                elif command == "Serial_Online":
                    self.online(int(q_data[0]))
                elif command == "Serial_WaitTime":
                    self.wait_time(float(q_data[0]))
                elif command == "Serial_WaitSlave":
                    self.wait_slave(int(q_data[0]), int(q_data[1]))
                elif command == "Serial_WaitVolume":
                    self.wait_volume(int(q_data[0]), float(q_data[1]))
                else:
                    # Not ours: return it to the queue for another consumer.
                    self.MT_queue.put([command, q_data])
        except Exception:
            # Queue drained (queue.Empty) or a handler raised; either way,
            # fall through and service the serial port.  (Was a bare
            # 'except:', which also swallowed KeyboardInterrupt/SystemExit.)
            pass
        # --- If connected, feed incoming firmware bytes to the parser. ---
        if self.ser is not None:
            while self.ser.inWaiting() > 0:
                line = self.ser.readline()
                self.serial_buffer = self.serial_buffer + line
                self.process_serial()
class TestSer():
    """In-memory stand-in for a pyserial port, used for offline testing.

    Mimics the subset of the Serial API this code uses (inWaiting, readline,
    write).  The buffer is pre-loaded with a canned settings dump; write()
    appends canned reply frames for the query opcodes it recognises.
    """
    def __init__(self):
        # Pre-canned firmware output: a banner followed by a full settings
        # dump (opcode 0x46 = 'F').  Lines are exactly what Serial.readline()
        # would return, CRLF-terminated; frames whose payload happens to
        # contain 0x0A are split across two "lines", as a real readline()
        # would split them.
        self.buffer = [
            b'START Manatee connected!\r\n',
            b'END\r\n',
            b'\xaa\xaaF\x00\xcd\xcc\xcc=\x00\x00U\r\n',
            b'\xaa\xaaF\x01\x17\xb7\xd18\x00\x00U\r\n',
            b'\xaa\xaaF\x02\x17\xb7\xd18\x00\x00U\r\n',
            b'\xaa\xaaF\x03\x00\x00zE\x00\x00U\r\n',
            b"\xaa\xaaF\x04'\x9b D\x00\x00U\r\n",
            b'\xaa\xaaF\x05\xf4\xb3\x91G\x00\x00U\r\n',
            b'\xaa\xaaF\x06\x00\x000@\x00\x00U\r\n',
            b'\xaa\xaaF\x07\x00\x00\x80?\x00\x00U\r\n',
            b'\xaa\xaaF\x08\xbct\x93<\x00\x00U\r\n',
            b'\xaa\xaaF\t\n',
            b'\xd7#=\x00\x00U\r\n',
            b'\xaa\xaaF\n',
            b'\x00\x00\x00\x00\x00\x00U\r\n',
            b'\xaa\xaaF\x0b\xcd\xcc\xcc=\x00\x00U\r\n',
            b'\xaa\xaaF\x0c\x17\xb7\xd18\x00\x00U\r\n',
            b'\xaa\xaaF\r\x17\xb7\xd18\x00\x00U\r\n',
            b'\xaa\xaaF\x0e\x00\x00zE\x00\x00U\r\n',
            b'\xaa\xaaF\x0f\x02\xeb\xb8C\x00\x00U\r\n',
            b'\xaa\xaaF\x10\xf4\t\x02G\x00\x00U\r\n',
            b'\xaa\xaaF\x11\x00\x00 @\x00\x00U\r\n',
            b'\xaa\xaaF\x12\x00\x00\x80?\x00\x00U\r\n',
            b'\xaa\xaaF\x13\xbct\x93<\x00\x00U\r\n',
            b'\xaa\xaaF\x14\n',
            b'\xd7#=\x00\x00U\r\n',
            b'\xaa\xaaF\x15\x00\x00\x00\x00\x00\x00U\r\n',
            b'\xaa\xaaF\x16\xcd\xcc\xcc=\x00\x00U\r\n',
            b'\xaa\xaaF\x17\x17\xb7\xd18\x00\x00U\r\n',
            b'\xaa\xaaF\x18\x00\x00\x00\x00\x00\x00U\r\n',
            b'\xaa\xaaF\x19\x00\x00zE\x00\x00U\r\n',
            b'\xaa\xaaF\x1a\x02\xeb\xb8C\x00\x00U\r\n',
            b'\xaa\xaaF\x1b\xf4\t\x02G\x00\x00U\r\n',
            b'\xaa\xaaF\x1c\x00\x00 @\x00\x00U\r\n',
            b'\xaa\xaaF\x1d\x00\x00\x80?\x00\x00U\r\n',
            b'\xaa\xaaF\x1e\xbct\x93<\x00\x00U\r\n',
            b'\xaa\xaaF\x1f\n',
            b'\xd7#=\x00\x00U\r\n',
            b'\xaa\xaaF \x00\x00\x7fC\x00\x00U\r\n',
            b'\xaa\xaaF!\xcd\xcc\xcc=\x00\x00U\r\n',
            b'\xaa\xaaF"\x17\xb7\xd18\x00\x00U\r\n',
            b'\xaa\xaaF#o\x12\x83:\x00\x00U\r\n',
            b'\xaa\xaaF$\x00\x00zE\x00\x00U\r\n',
            b'\xaa\xaaF%\x02\xeb\xb8C\x00\x00U\r\n',
            b'\xaa\xaaF&\xf4\t\x02G\x00\x00U\r\n',
            b"\xaa\xaaF'\x00\x00 @\x00\x00U\r\n",
            b'\xaa\xaaF(\x00\x00\x80?\x00\x00U\r\n',
            b'\xaa\xaaF)\xbct\x93<\x00\x00U\r\n',
            b'\xaa\xaaF*\n',
            b'\xd7#=\x00\x00U\r\n',
            b'\xaa\xaaF+\x00\x00\x00\x00\x00\x00U\r\n',
            b'\xaa\xaaF,\xcd\xcc\xcc=\x00\x00U\r\n',
            b'\xaa\xaaF-\x17\xb7\xd18\x00\x00U\r\n',
            b'\xaa\xaaF.o\x12\x83:\x00\x00U\r\n',
            b'\xaa\xaaF/\x00\x00zE\x00\x00U\r\n',
            b'\xaa\xaaF0\x02\xeb\xb8C\x00\x00U\r\n',
            b'\xaa\xaaF1\xf4\t\x02G\x00\x00U\r\n',
            b'\xaa\xaaF2\x00\x00 @\x00\x00U\r\n',
            b'\xaa\xaaF3\x00\x00\x80?\x00\x00U\r\n',
            b'\xaa\xaaF4\xbct\x93<\x00\x00U\r\n',
            b'\xaa\xaaF5\n',
            b'\xd7#=\x00\x00U\r\n',
            b'\xaa\xaaF6\x00\x00\x00\x00\x00\x00U\r\n']
        # Opcodes this mock knows how to answer.
        self.hex_codes = {
            "getPosition" : b'\x42',
            "getSpeed" : b'\x43',
            "getPressure" : b'\x44'}
        # tester message: b'\xaa\xaaB\x05\x00\x00\x00\x00\xa6\x9cU' 42
        # tester message: b'\xaa\xaaC\x05\x00\x00\x00\x00\xac\x9dU'43
        # tester message: b'\xaa\xaaD\x05\x00\x00\x00\x00\xb2\x9eU'44
    def inWaiting(self):
        # pyserial API: number of bytes waiting; here, lines remaining.
        return(len(self.buffer))
    def readline(self):
        # Pop lines in FIFO order, like a real serial stream.
        return self.buffer.pop(0)
    def write(self, message):
        # Inspect the opcode (byte 2) of the outgoing frame and queue canned
        # reply frames for pumps 0-4; unrecognised opcodes are ignored.
        hcode = message[2]
        if hcode == self.hex_codes["getPosition"][0]:
            self.buffer.append(b'\xaa\xaaB\x00\xf4\xb3\x11G\x00\x00U\r\n')
            self.buffer.append(b'\xaa\xaaB\x01\xdd\t\x82F\x00\x00U\r\n')
            self.buffer.append(b'\xaa\xaaB\x02\xdd\t\x82F\x00\x00U\r\n')
            self.buffer.append(b'\xaa\xaaB\x03\xdd\t\x82F\x00\x00U\r\n')
            self.buffer.append(b'\xaa\xaaB\x04\xdd\t\x82F\x00\x00U\r\n')
        elif hcode == self.hex_codes["getSpeed"][0]:
            self.buffer.append(b'\xaa\xaaC\x00\x00\x00\x00\x80\x00\x00U\r\n')
            self.buffer.append(b'\xaa\xaaC\x01\x00\x00\x00\x80\x00\x00U\r\n')
            self.buffer.append(b'\xaa\xaaC\x02\x00\x00\x00\x80\x00\x00U\r\n')
            self.buffer.append(b'\xaa\xaaC\x03\x00\x00\x00\x80\x00\x00U\r\n')
            self.buffer.append(b'\xaa\xaaC\x04\x00\x00\x00\x80\x00\x00U\r\n')
        elif hcode == self.hex_codes["getPressure"][0]:
            self.buffer.append(b'\xaa\xaaD\x00*\x18UB\x00\x00U\r\n')
            self.buffer.append(b'\xaa\xaaD\x01*\x18UB\x00\x00U\r\n')
            self.buffer.append(b'\xaa\xaaD\x02\xba\x1dUB\x00\x00U\r\n')
            self.buffer.append(b'\xaa\xaaD\x03\xba\x1dUB\x00\x00U\r\n')
            self.buffer.append(b'\xaa\xaaD\x04\xba\x1dUB\x00\x00U\r\n')
        #print ("tetser message: %s"%message)
class pump():
    """Per-pump state container tracked by the controller/GUI layer."""
    def __init__(self, master, number):
        self.id = number        # pump address/index
        self.master = master    # owning controller object
        self.position = 0       # last reported plunger position
        self.target = 0         # commanded target
        self.speed = 0          # last reported speed
        self.time = 0
        self.volume = 0
        self.max_pos = 0        # travel limit
        self.p_history = []     # history of pressure samples
| 38.228346 | 299 | 0.590474 |
2e16920464d4ac6fbdebc9c30bca641521b90d56 | 11,042 | py | Python | gsshapy/orm/snw.py | Aquaveo/gsshapy | dcba14b6e2870524fb743eab00a9ffdd0ad31f40 | [
"BSD-3-Clause"
] | 7 | 2019-05-23T07:33:50.000Z | 2022-02-15T21:44:37.000Z | gsshapy/orm/snw.py | Aquaveo/gsshapy | dcba14b6e2870524fb743eab00a9ffdd0ad31f40 | [
"BSD-3-Clause"
] | 31 | 2016-09-20T17:13:51.000Z | 2019-05-29T11:47:04.000Z | gsshapy/orm/snw.py | Aquaveo/gsshapy | dcba14b6e2870524fb743eab00a9ffdd0ad31f40 | [
"BSD-3-Clause"
] | 12 | 2016-07-08T20:32:00.000Z | 2021-04-19T16:22:40.000Z | """
********************************************************************************
* Name: SnowSimulationModel
* Author: Nathan Swain
* Created On: May 16, 2013
* Copyright: (c) Brigham Young University 2013
* License: BSD 2-Clause
********************************************************************************
"""
from __future__ import unicode_literals
__all__ = ['NwsrfsFile',
'NwsrfsRecord',
'OrographicGageFile',
'OrographicMeasurement']
from datetime import datetime
from sqlalchemy import ForeignKey, Column
from sqlalchemy.types import Integer, Float, DateTime, String
from sqlalchemy.orm import relationship
from . import DeclarativeBase
from ..base.file_base import GsshaPyFileObjectBase
class NwsrfsFile(DeclarativeBase, GsshaPyFileObjectBase):
    """
    Object interface for the NWSRFS Snow File.

    The contents of this file is abstracted into one supporting object: :class:`.NwsrfsRecord`.

    See: `GSSHAWIKI <http://www.gsshawiki.com/Snow_Card_Inputs_-_Optional>`_
    """
    __tablename__ = 'snw_nwsrfs_files'

    tableName = __tablename__  #: Database tablename

    # Primary and Foreign Keys
    id = Column(Integer, autoincrement=True, primary_key=True)  #: PK

    # Value Columns
    numBands = Column(Integer, nullable=False)  #: INTEGER
    fileExtension = Column(String, default='txt')  #: STRING

    # Relationship Properties
    nwsrfsRecords = relationship('NwsrfsRecord', back_populates='nwsrfsFile')  #: RELATIONSHIP
    projectFile = relationship('ProjectFile', uselist=False, back_populates='nwsrfsFile')  #: RELATIONSHIP

    def __init__(self):
        """
        Constructor
        """
        GsshaPyFileObjectBase.__init__(self)

    def _read(self, directory, filename, session, path, name, extension, spatial, spatialReferenceID, replaceParamFile):
        """
        NWSRFS Read from File Method

        Parses the header ('Number_Bands:' line), skips the column-header
        row, and turns every remaining row into an :class:`.NwsrfsRecord`.
        """
        # Set file extension property
        self.fileExtension = extension

        # Open file and parse
        with open(path, 'r') as nwsrfsFile:
            for line in nwsrfsFile:
                sline = line.strip().split()

                # Cases
                if sline[0].lower() == 'number_bands:':
                    # NOTE(review): kept as the raw string token; the Integer
                    # column coerces it when flushed to the DB -- confirm.
                    self.numBands = sline[1]
                elif sline[0].lower() == 'lower_elevation':
                    # Column-header row of the data table -- skip it.
                    """DO NOTHING"""
                else:
                    # Data row: create GSSHAPY NwsrfsRecord object
                    record = NwsrfsRecord(lowerElev=sline[0],
                                          upperElev=sline[1],
                                          mfMin=sline[2],
                                          mfMax=sline[3],
                                          scf=sline[4],
                                          frUse=sline[5],
                                          tipm=sline[6],
                                          nmf=sline[7],
                                          fua=sline[8],
                                          plwhc=sline[9])

                    # Associate NwsrfsRecord with NwsrfsFile (the record is
                    # attached via the relationship back-reference).
                    record.nwsrfsFile = self

    def _write(self, session, openFile, replaceParamFile):
        """
        NWSRFS Write to File Method

        Emits the header followed by one fixed-width row per record; each
        column is space-padded to the width used in the header line.
        """
        # Write lines
        openFile.write('Number_Bands: %s\n' % self.numBands)
        openFile.write('Lower_Elevation Upper_Elevation MF_Min MF_Max SCF FR_USE TIPM NMF FUA PCWHC\n')

        # Retrieve NwsrfsRecords
        records = self.nwsrfsRecords

        for record in records:
            openFile.write('%s%s%s%s%.1f%s%.1f%s%.1f%s%.1f%s%.1f%s%.1f%s%.1f%s%.1f\n' % (
                record.lowerElev,
                ' ' * (17 - len(str(record.lowerElev))),  # Num Spaces
                record.upperElev,
                ' ' * (17 - len(str(record.upperElev))),  # Num Spaces
                record.mfMin,
                ' ' * (8 - len(str(record.mfMin))),  # Num Spaces
                record.mfMax,
                ' ' * (8 - len(str(record.mfMax))),  # Num Spaces
                record.scf,
                ' ' * (5 - len(str(record.scf))),  # Num Spaces
                record.frUse,
                ' ' * (8 - len(str(record.frUse))),  # Num Spaces
                record.tipm,
                ' ' * (6 - len(str(record.tipm))),  # Num Spaces
                record.nmf,
                ' ' * (5 - len(str(record.nmf))),  # Num Spaces
                record.fua,
                ' ' * (5 - len(str(record.fua))),  # Num Spaces
                record.plwhc))
class NwsrfsRecord(DeclarativeBase):
    """
    Object containing data for a single NWSRFS record from the NWSRFS snow file.

    One instance corresponds to one elevation-band row of the file's data table.
    """
    __tablename__ = 'snw_nwsrfs_records'

    tableName = __tablename__  #: Database tablename

    # Primary and Foreign Keys
    id = Column(Integer, autoincrement=True, primary_key=True)  #: PK
    nwsrfsFileID = Column(Integer, ForeignKey('snw_nwsrfs_files.id'))  #: FK

    # Value Columns
    lowerElev = Column(Integer, nullable=False)  #: INTEGER
    upperElev = Column(Integer, nullable=False)  #: INTEGER
    mfMin = Column(Float, nullable=False)  #: FLOAT
    mfMax = Column(Float, nullable=False)  #: FLOAT
    scf = Column(Float, nullable=False)  #: FLOAT
    frUse = Column(Float, nullable=False)  #: FLOAT
    tipm = Column(Float, nullable=False)  #: FLOAT
    nmf = Column(Float, nullable=False)  #: FLOAT
    fua = Column(Float, nullable=False)  #: FLOAT
    plwhc = Column(Float, nullable=False)  #: FLOAT

    # Relationship Properties
    nwsrfsFile = relationship('NwsrfsFile', back_populates='nwsrfsRecords')  #: RELATIONSHIP

    def __init__(self, lowerElev, upperElev, mfMin, mfMax, scf, frUse, tipm, nmf, fua, plwhc):
        """
        Constructor
        """
        self.lowerElev = lowerElev
        self.upperElev = upperElev
        self.mfMin = mfMin
        self.mfMax = mfMax
        self.scf = scf
        self.frUse = frUse
        self.tipm = tipm
        self.nmf = nmf
        self.fua = fua
        self.plwhc = plwhc

    def __repr__(self):
        return '<ElevationNWSRFS: LowerElev=%s, UpperElev=%s, MFMin=%s, MFMax=%s, SCF=%s, FRUse=%s, TIPM=%s, NMF=%s, FUA=%s, PLWHC=%s>' % (
            self.lowerElev,
            self.upperElev,
            self.mfMin,
            self.mfMax,
            self.scf,
            self.frUse,
            self.tipm,
            self.nmf,
            self.fua,
            self.plwhc)
class OrographicGageFile(DeclarativeBase, GsshaPyFileObjectBase):
    """
    Object interface for the Orographic Gage File.

    The contents of this file is abstracted into one supporting object: :class:`.OrographicMeasurement`.

    See: `GSSHAWIKI <http://www.gsshawiki.com/Snow_Card_Inputs_-_Optional>`_
    """
    __tablename__ = 'snw_orographic_gage_files'

    tableName = __tablename__  #: Database tablename

    # Primary and Foreign Keys
    id = Column(Integer, autoincrement=True, primary_key=True)  #: PK

    # Value Columns
    numSites = Column(Integer, nullable=False)  #: INTEGER
    elevBase = Column(Float, nullable=False)  #: FLOAT
    elev2 = Column(Float, nullable=False)  #: FLOAT
    fileExtension = Column(String, default='txt')  #: STRING

    # Relationship Properties
    orographicMeasurements = relationship('OrographicMeasurement', back_populates='orographicGageFile')  #: RELATIONSHIP
    projectFile = relationship('ProjectFile', uselist=False, back_populates='orographicGageFile')  #: RELATIONSHIP

    def __init__(self):
        """
        Constructor
        """
        GsshaPyFileObjectBase.__init__(self)

    def _read(self, directory, filename, session, path, name, extension, spatial, spatialReferenceID, replaceParamFile):
        """
        Orographic Gage File Read from File Method

        Parses the three header lines, skips the column-header row, and
        turns every data row into an :class:`.OrographicMeasurement` with a
        datetime built from the year/month/day/hour columns.
        """
        # Set file extension property
        self.fileExtension = extension

        # Open file and parse into HmetRecords
        with open(path, 'r') as orthoFile:
            for line in orthoFile:
                sline = line.strip().split()

                # Cases
                if sline[0].lower() == 'num_sites:':
                    # NOTE(review): header values kept as raw strings; the
                    # numeric columns coerce them on DB flush -- confirm.
                    self.numSites = sline[1]
                elif sline[0].lower() == 'elev_base':
                    self.elevBase = sline[1]
                elif sline[0].lower() == 'elev_2':
                    self.elev2 = sline[1]
                elif sline[0].lower() == 'year':
                    # Column-header row -- skip it.
                    """DO NOTHING"""
                else:
                    # Create datetime object from the first four columns
                    dateTime = datetime(year=int(sline[0]),
                                        month=int(sline[1]),
                                        day=int(sline[2]),
                                        hour=int(sline[3]))

                    # Create GSSHAPY OrthoMeasurement object
                    measurement = OrographicMeasurement(dateTime=dateTime,
                                                        temp2=sline[4])

                    # Associate OrthoMeasurement with OrthographicGageFile
                    self.orographicMeasurements.append(measurement)

    def _write(self, session, openFile, replaceParamFile):
        """
        Orographic Gage File Write to File Method

        Emits the three header lines, the column-header row, then one
        space-padded row per measurement.
        """
        # Write lines
        openFile.write('Num_Sites:    %s\n' % self.numSites)
        openFile.write('Elev_Base     %s\n' % self.elevBase)
        openFile.write('Elev_2        %s\n' % self.elev2)
        openFile.write('Year    Month   Day     Hour    Temp_2\n')

        # Retrieve OrographicMeasurements
        measurements = self.orographicMeasurements

        for measurement in measurements:
            dateTime = measurement.dateTime
            openFile.write('%s%s%s%s%s%s%s%s%.3f\n' % (
                dateTime.year,
                '    ',
                dateTime.month,
                ' ' * (8 - len(str(dateTime.month))),
                dateTime.day,
                ' ' * (8 - len(str(dateTime.day))),
                dateTime.hour,
                ' ' * (8 - len(str(dateTime.hour))),
                measurement.temp2))
class OrographicMeasurement(DeclarativeBase):
    """
    Object containing data for a single orographic gage as defined in the orographic gage file.

    One instance corresponds to one timestamped temperature row of the file.
    """
    __tablename__ = 'snw_orographic_measurements'

    tableName = __tablename__  #: Database tablename

    # Primary and Foreign Keys
    id = Column(Integer, autoincrement=True, primary_key=True)  #: PK
    orthoGageID = Column(Integer, ForeignKey('snw_orographic_gage_files.id'))  #: FK

    # Value Columns
    dateTime = Column(DateTime)  #: DATETIME
    temp2 = Column(Float)  #: FLOAT

    # Relationship Properties
    orographicGageFile = relationship('OrographicGageFile', back_populates='orographicMeasurements')  #: RELATIONSHIP

    def __init__(self, dateTime, temp2):
        """
        Constructor
        """
        self.dateTime = dateTime
        self.temp2 = temp2

    def __repr__(self):
        return '<OroMeasurement: DateTime=%s, Temp2=%s>' % (self.dateTime, self.temp2)
a609fd191e01dcb47c50d3130133f42bfedb2e97 | 108,136 | py | Python | src/tests/unit/fixtures/platform/mock_process.py | fslds/carbon-black-cloud-sdk-python | 248a3c63d6b36d6fcdbcb3f51fb7751f062ed372 | [
"MIT"
] | 24 | 2020-10-16T22:07:38.000Z | 2022-03-24T14:58:03.000Z | src/tests/unit/fixtures/platform/mock_process.py | fslds/carbon-black-cloud-sdk-python | 248a3c63d6b36d6fcdbcb3f51fb7751f062ed372 | [
"MIT"
] | 63 | 2020-10-26T18:26:15.000Z | 2022-03-31T17:31:02.000Z | src/tests/unit/fixtures/platform/mock_process.py | fslds/carbon-black-cloud-sdk-python | 248a3c63d6b36d6fcdbcb3f51fb7751f062ed372 | [
"MIT"
] | 10 | 2020-11-09T11:54:23.000Z | 2022-03-24T20:44:00.000Z | """Mock responses for process queries."""
GET_PROCESS_RESP = {}
GET_PROCESS_VALIDATION_RESP = {
"valid": True,
"value_search_query": False
}
GET_PROCESS_VALIDATION_RESP_INVALID = {
"invalid_message": "Invalid Query Parameter",
"valid": False,
"value_search_query": False,
"invalid_trigger_offset": 0
}
POST_PROCESS_SEARCH_JOB_RESP = {
"job_id": "2c292717-80ed-4f0d-845f-779e09470920"
}
POST_TREE_SEARCH_JOB_RESP = {
"job_id": "ee158f11-4dfb-4ae2-8f1a-7707b712226d"
}
GET_TREE_SEARCH_JOB_RESP = {
"contacted": 34,
"completed": 34
}
GET_PROCESS_SEARCH_JOB_RESULTS_RESP = {
"results": [
{
"backend_timestamp": "2020-09-11T19:35:02.972Z",
"childproc_count": 0,
"crossproc_count": 787,
"device_external_ip": "192.168.0.1",
"device_group_id": 0,
"device_id": 1234567,
"device_internal_ip": "192.168.0.2",
"device_name": "Windows10Device",
"device_os": "WINDOWS",
"device_policy_id": 12345,
"device_timestamp": "2020-09-11T19:32:12.821Z",
"enriched": True,
"enriched_event_type": [
"INJECT_CODE",
"SYSTEM_API_CALL"
],
"event_type": [
"crossproc"
],
"filemod_count": 0,
"ingress_time": 1599852859660,
"legacy": True,
"modload_count": 1,
"netconn_count": 0,
"org_id": "test",
"parent_guid": "test-0002b226-00000001-00000000-1d6225bbba74c00procsearchparent",
"parent_hash": [
"9090e0e44e14709fb09b23b98572e0e61c810189e2de8f7156021bc81c3b1bb6",
"bccc12eb2ef644e662a63a023fb83f9b"
],
"parent_name": "c:\\windows\\system32\\services.exe",
"parent_pid": 644,
"process_cmdline": [
"\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\""
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "test-0034d5f2-00000ba0-00000000-1d68709850fe522getprocjobres",
"process_hash": [
"5920199e4fbfa47c1717b863814722148a353e54f8c10912cf1f991a1c86309d",
"c7084336325dc8eadfb1e8ff876921c4"
],
"process_name": "c:\\program files\\vmware\\vmware tools\\vmtoolsd.exe",
"process_pid": [
5653,
16139
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_username": [
"Username"
],
"regmod_count": 1,
"scriptload_count": 0,
"ttp": [
"ENUMERATE_PROCESSES",
"INJECT_CODE",
"MITRE_T1003_CREDENTIAL_DUMP",
"MITRE_T1005_DATA_FROM_LOCAL_SYS",
"MITRE_T1055_PROCESS_INJECT",
"MITRE_T1057_PROCESS_DISCOVERY",
"RAM_SCRAPING",
"READ_SECURITY_DATA"
]
}
],
"num_found": 616,
"num_available": 1,
"contacted": 45,
"completed": 45
}
GET_PROCESS_SEARCH_JOB_RESULTS_RESP_WATCHLIST_ALERT = {
"results": [
{
"backend_timestamp": "2020-09-11T19:35:02.972Z",
"childproc_count": 0,
"crossproc_count": 787,
"device_external_ip": "192.168.0.1",
"device_group_id": 0,
"device_id": 1234567,
"device_internal_ip": "192.168.0.2",
"device_name": "Windows10Device",
"device_os": "WINDOWS",
"device_policy_id": 12345,
"device_timestamp": "2020-09-11T19:32:12.821Z",
"enriched": True,
"enriched_event_type": [
"INJECT_CODE",
"SYSTEM_API_CALL"
],
"event_type": [
"crossproc"
],
"filemod_count": 0,
"ingress_time": 1599852859660,
"legacy": True,
"modload_count": 1,
"netconn_count": 0,
"org_id": "test",
"parent_guid": "test-0034d5f2-00000284-00000000-1d687097e9cf7b5",
"parent_hash": [
"9090e0e44e14709fb09b23b98572e0e61c810189e2de8f7156021bc81c3b1bb6",
"bccc12eb2ef644e662a63a023fb83f9b"
],
"parent_name": "c:\\windows\\system32\\services.exe",
"parent_pid": 644,
"process_cmdline": [
"\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\""
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "WNEXFKQ7-0002b226-000015bd-00000000-1d6225bbba74c00",
"process_hash": [
"f2c7d894abe8ac0b4c2a597caa6b3efe7ad2bdb4226845798d954c5ab9c9bf15",
"12384336325dc8eadfb1e8ff876921c4"
],
"process_name": "c:\\program files\\vmware\\vmware tools\\vmtoolsd.exe",
"process_pid": [
3909
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_username": [
"Username"
],
"regmod_count": 1,
"scriptload_count": 0,
"ttp": [
"ENUMERATE_PROCESSES",
"INJECT_CODE",
"MITRE_T1003_CREDENTIAL_DUMP",
"MITRE_T1005_DATA_FROM_LOCAL_SYS",
"MITRE_T1055_PROCESS_INJECT",
"MITRE_T1057_PROCESS_DISCOVERY",
"RAM_SCRAPING",
"READ_SECURITY_DATA"
]
}
],
"num_found": 6168,
"num_available": 1,
"contacted": 45,
"completed": 45
}
# Search-job results before any shard has been contacted (no progress yet).
GET_PROCESS_SEARCH_JOB_RESULTS_RESP_ZERO = {
    "results": [],
    "num_found": 616,
    "num_available": 1,
    "contacted": 0,
    "completed": 0
}

# Search-job results while the query is still running (contacted > completed).
GET_PROCESS_SEARCH_JOB_RESULTS_RESP_STILL_QUERYING = {
    "results": [],
    "num_found": 616,
    "num_available": 1,
    "contacted": 10,
    "completed": 0
}
GET_PROCESS_SEARCH_JOB_RESULTS_RESP_1 = {
"results": [
{
"backend_timestamp": "2020-09-11T19:35:02.972Z",
"childproc_count": 0,
"crossproc_count": 787,
"device_external_ip": "192.168.0.1",
"device_group_id": 0,
"device_id": 1234567,
"device_internal_ip": "192.168.0.2",
"device_name": "Windows10Device",
"device_os": "WINDOWS",
"device_policy_id": 12345,
"device_timestamp": "2020-09-11T19:32:12.821Z",
"enriched": True,
"enriched_event_type": [
"INJECT_CODE",
"SYSTEM_API_CALL"
],
"event_type": [
"crossproc"
],
"filemod_count": 0,
"ingress_time": 1599852859660,
"legacy": True,
"modload_count": 1,
"netconn_count": 0,
"org_id": "test",
"parent_guid": "test-0034d5f2-00000284-00000000-1d687097e9cf7b5",
"parent_hash": [
"9090e0e44e14709fb09b23b98572e0e61c810189e2de8f7156021bc81c3b1bb6",
"bccc12eb2ef644e662a63a023fb83f9b"
],
"parent_name": "c:\\windows\\system32\\services.exe",
"parent_pid": 644,
"process_cmdline": [
"\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\""
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "test-003513bc-00000001-00000000-1d640200c9a61d7",
"process_hash": [
"f2c7d894abe8ac0b4c2a597caa6b3efe7ad2bdb4226845798d954c5ab9c9bf15",
"12384336325dc8eadfb1e8ff876921c4"
],
"process_name": "c:\\program files\\vmware\\vmware tools\\vmtoolsd.exe",
"process_pid": [
3909
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_username": [
"Username"
],
"regmod_count": 1,
"scriptload_count": 0,
"ttp": [
"ENUMERATE_PROCESSES",
"INJECT_CODE",
"MITRE_T1003_CREDENTIAL_DUMP",
"MITRE_T1005_DATA_FROM_LOCAL_SYS",
"MITRE_T1055_PROCESS_INJECT",
"MITRE_T1057_PROCESS_DISCOVERY",
"RAM_SCRAPING",
"READ_SECURITY_DATA"
]
}
],
"num_found": 6168,
"num_available": 1,
"contacted": 45,
"completed": 45
}
GET_PROCESS_SEARCH_JOB_RESULTS_RESP_NO_PARENT_GUID = {
"results": [
{
"backend_timestamp": "2020-09-11T19:35:02.972Z",
"childproc_count": 0,
"crossproc_count": 787,
"device_external_ip": "192.168.0.1",
"device_group_id": 0,
"device_id": 1234567,
"device_internal_ip": "192.168.0.2",
"device_name": "Windows10Device",
"device_os": "WINDOWS",
"device_policy_id": 12345,
"device_timestamp": "2020-09-11T19:32:12.821Z",
"enriched": True,
"enriched_event_type": [
"INJECT_CODE",
"SYSTEM_API_CALL"
],
"event_type": [
"crossproc"
],
"filemod_count": 0,
"ingress_time": 1599852859660,
"legacy": True,
"modload_count": 1,
"netconn_count": 0,
"org_id": "test",
"parent_hash": [
"9090e0e44e14709fb09b23b98572e0e61c810189e2de8f7156021bc81c3b1bb6",
"bccc12eb2ef644e662a63a023fb83f9b"
],
"parent_name": "c:\\windows\\system32\\services.exe",
"parent_pid": 644,
"process_cmdline": [
"\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\""
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "test-003513bc-00000001-00000000-1d640200c9a61d7",
"process_hash": [
"f2c7d894abe8ac0b4c2a597caa6b3efe7ad2bdb4226845798d954c5ab9c9bf15",
"12384336325dc8eadfb1e8ff876921c4"
],
"process_name": "c:\\program files\\vmware\\vmware tools\\vmtoolsd.exe",
"process_pid": [
3909
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_username": [
"Username"
],
"regmod_count": 1,
"scriptload_count": 0,
"ttp": [
"ENUMERATE_PROCESSES",
"INJECT_CODE",
"MITRE_T1003_CREDENTIAL_DUMP",
"MITRE_T1005_DATA_FROM_LOCAL_SYS",
"MITRE_T1055_PROCESS_INJECT",
"MITRE_T1057_PROCESS_DISCOVERY",
"RAM_SCRAPING",
"READ_SECURITY_DATA"
]
}
],
"num_found": 6168,
"num_available": 1,
"contacted": 45,
"completed": 45
}
GET_PROCESS_SEARCH_JOB_RESULTS_RESP_NO_PID = {
"results": [
{
"backend_timestamp": "2020-09-11T19:35:02.972Z",
"childproc_count": 0,
"crossproc_count": 787,
"device_external_ip": "192.168.0.1",
"device_group_id": 0,
"device_id": 1234567,
"device_internal_ip": "192.168.0.2",
"device_name": "Windows10Device",
"device_os": "WINDOWS",
"device_policy_id": 12345,
"device_timestamp": "2020-09-11T19:32:12.821Z",
"enriched": True,
"enriched_event_type": [
"INJECT_CODE",
"SYSTEM_API_CALL"
],
"event_type": [
"crossproc"
],
"filemod_count": 0,
"ingress_time": 1599852859660,
"legacy": True,
"modload_count": 1,
"netconn_count": 0,
"org_id": "test",
"parent_guid": "test-0034d5f2-00000284-00000000-1d687097e9cf7b5",
"parent_hash": [
"9090e0e44e14709fb09b23b98572e0e61c810189e2de8f7156021bc81c3b1bb6",
"bccc12eb2ef644e662a63a023fb83f9b"
],
"parent_name": "c:\\windows\\system32\\services.exe",
"parent_pid": 644,
"process_cmdline": [
"\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\""
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "test-003513bc-00000001-00000000-1d640200c9a61d7",
"process_hash": [
"f2c7d894abe8ac0b4c2a597caa6b3efe7ad2bdb4226845798d954c5ab9c9bf15",
"12384336325dc8eadfb1e8ff876921c4"
],
"process_name": "c:\\program files\\vmware\\vmware tools\\vmtoolsd.exe",
"process_reputation": "TRUSTED_WHITE_LIST",
"process_username": [
"Username"
],
"regmod_count": 1,
"scriptload_count": 0,
"ttp": [
"ENUMERATE_PROCESSES",
"INJECT_CODE",
"MITRE_T1003_CREDENTIAL_DUMP",
"MITRE_T1005_DATA_FROM_LOCAL_SYS",
"MITRE_T1055_PROCESS_INJECT",
"MITRE_T1057_PROCESS_DISCOVERY",
"RAM_SCRAPING",
"READ_SECURITY_DATA"
]
}
],
"num_found": 6168,
"num_available": 1,
"contacted": 45,
"completed": 45
}
GET_PROCESS_SEARCH_JOB_RESULTS_RESP_2 = {
"results": [
{
"backend_timestamp": "2020-09-11T19:35:02.972Z",
"childproc_count": 0,
"crossproc_count": 787,
"device_external_ip": "192.168.0.1",
"device_group_id": 0,
"device_id": 1234567,
"device_internal_ip": "192.168.0.2",
"device_name": "Windows10Device",
"device_os": "WINDOWS",
"device_policy_id": 12345,
"device_timestamp": "2020-09-11T19:32:12.821Z",
"enriched": True,
"enriched_event_type": [
"INJECT_CODE",
"SYSTEM_API_CALL"
],
"event_type": [
"crossproc"
],
"filemod_count": 0,
"ingress_time": 1599852859660,
"legacy": True,
"modload_count": 1,
"netconn_count": 0,
"org_id": "test",
"parent_guid": "test-0034d5f2-00000284-00000000-1d687097e9cf7b5",
"parent_hash": [
"9090e0e44e14709fb09b23b98572e0e61c810189e2de8f7156021bc81c3b1bb6",
"bccc12eb2ef644e662a63a023fb83f9b"
],
"parent_name": "c:\\windows\\system32\\services.exe",
"parent_pid": 644,
"process_cmdline": [
"\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\""
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "test-003513bc-00000001-00000000-1d640200c9a61d7",
"process_hash": [
"63d423ea882264dbb157a965c200306212fc5e1c6ddb8cbbb0f1d3b51ecd82e6",
"45684336325dc8eadfb1e8ff876921c4"
],
"process_name": "c:\\program files\\vmware\\vmware tools\\vmtoolsd.exe",
"process_pid": [
788
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_username": [
"Username"
],
"regmod_count": 1,
"scriptload_count": 0,
"ttp": [
"ENUMERATE_PROCESSES",
"INJECT_CODE",
"MITRE_T1003_CREDENTIAL_DUMP",
"MITRE_T1005_DATA_FROM_LOCAL_SYS",
"MITRE_T1055_PROCESS_INJECT",
"MITRE_T1057_PROCESS_DISCOVERY",
"RAM_SCRAPING",
"READ_SECURITY_DATA"
]
}
],
"num_found": 6168,
"num_available": 1,
"contacted": 45,
"completed": 45
}
# Mock async process-search job results payload: a single crossproc event row
# for vmtoolsd.exe. Unlike the other result fixtures, this row has no
# "process_hash" field. num_found is 6168 but only one result is available;
# all 45 contacted shards report completed.
GET_PROCESS_SEARCH_JOB_RESULTS_RESP_3 = {
    "results": [
        {
            "backend_timestamp": "2020-09-11T19:35:02.972Z",
            "childproc_count": 0,
            "crossproc_count": 787,
            "device_external_ip": "192.168.0.1",
            "device_group_id": 0,
            "device_id": 1234567,
            "device_internal_ip": "192.168.0.2",
            "device_name": "Windows10Device",
            "device_os": "WINDOWS",
            "device_policy_id": 12345,
            "device_timestamp": "2020-09-11T19:32:12.821Z",
            "enriched": True,
            "enriched_event_type": [
                "INJECT_CODE",
                "SYSTEM_API_CALL"
            ],
            "event_type": [
                "crossproc"
            ],
            "filemod_count": 0,
            "ingress_time": 1599852859660,
            "legacy": True,
            "modload_count": 1,
            "netconn_count": 0,
            "org_id": "test",
            "parent_guid": "test-0034d5f2-00000284-00000000-1d687097e9cf7b5",
            "parent_hash": [
                "9090e0e44e14709fb09b23b98572e0e61c810189e2de8f7156021bc81c3b1bb6",
                "bccc12eb2ef644e662a63a023fb83f9b"
            ],
            "parent_name": "c:\\windows\\system32\\services.exe",
            "parent_pid": 644,
            "process_cmdline": [
                "\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\""
            ],
            "process_effective_reputation": "TRUSTED_WHITE_LIST",
            "process_guid": "test-003513bc-00000001-00000000-1d640200c9a61d7",
            "process_name": "c:\\program files\\vmware\\vmware tools\\vmtoolsd.exe",
            "process_pid": [
                788
            ],
            "process_reputation": "TRUSTED_WHITE_LIST",
            "process_username": [
                "Username"
            ],
            "regmod_count": 1,
            "scriptload_count": 0,
            "ttp": [
                "ENUMERATE_PROCESSES",
                "INJECT_CODE",
                "MITRE_T1003_CREDENTIAL_DUMP",
                "MITRE_T1005_DATA_FROM_LOCAL_SYS",
                "MITRE_T1055_PROCESS_INJECT",
                "MITRE_T1057_PROCESS_DISCOVERY",
                "RAM_SCRAPING",
                "READ_SECURITY_DATA"
            ]
        }
    ],
    "num_found": 6168,
    "num_available": 1,
    "contacted": 45,
    "completed": 45
}
# Mock results for a process search that resolves a process's parent: the
# single result's "parent_guid" is the sentinel value "parentofparent",
# letting tests follow the parent chain one level further up.
GET_PROCESS_SEARCH_PARENT_JOB_RESULTS_RESP = {
    "results": [
        {
            "backend_timestamp": "2020-09-11T19:35:02.972Z",
            "childproc_count": 0,
            "crossproc_count": 787,
            "device_external_ip": "192.168.0.1",
            "device_group_id": 0,
            "device_id": 1234567,
            "device_internal_ip": "192.168.0.2",
            "device_name": "Windows10Device",
            "device_os": "WINDOWS",
            "device_policy_id": 12345,
            "device_timestamp": "2020-09-11T19:32:12.821Z",
            "enriched": True,
            "enriched_event_type": [
                "INJECT_CODE",
                "SYSTEM_API_CALL"
            ],
            "event_type": [
                "crossproc"
            ],
            "filemod_count": 0,
            "ingress_time": 1599852859660,
            "legacy": True,
            "modload_count": 1,
            "netconn_count": 0,
            "org_id": "test",
            "parent_guid": "parentofparent",
            "parent_hash": [
                "9090e0e44e14709fb09b23b98572e0e61c810189e2de8f7156021bc81c3b1bb6",
                "bccc12eb2ef644e662a63a023fb83f9b"
            ],
            "parent_name": "c:\\windows\\system32\\services.exe",
            "parent_pid": 644,
            "process_cmdline": [
                "\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\""
            ],
            "process_effective_reputation": "TRUSTED_WHITE_LIST",
            "process_guid": "test-0002b226-00000001-00000000-1d6225bbba74c01",
            "process_hash": [
                "5920199e4fbfa47c1717b863814722148a353e54f8c10912cf1f991a1c86309d",
                "c7084336325dc8eadfb1e8ff876921c4"
            ],
            "process_name": "c:\\program files\\vmware\\vmware tools\\vmtoolsd.exe",
            "process_pid": [
                2976
            ],
            "process_reputation": "TRUSTED_WHITE_LIST",
            "process_username": [
                "Username"
            ],
            "regmod_count": 1,
            "scriptload_count": 0,
            "ttp": [
                "ENUMERATE_PROCESSES",
                "INJECT_CODE",
                "MITRE_T1003_CREDENTIAL_DUMP",
                "MITRE_T1005_DATA_FROM_LOCAL_SYS",
                "MITRE_T1055_PROCESS_INJECT",
                "MITRE_T1057_PROCESS_DISCOVERY",
                "RAM_SCRAPING",
                "READ_SECURITY_DATA"
            ]
        }
    ],
    "num_found": 6168,
    "num_available": 1,
    "contacted": 45,
    "completed": 45
}
# Variant of the parent-search results fixture whose single result carries
# NO parent fields at all (no parent_guid/parent_hash/parent_name/parent_pid),
# so tests can exercise the "process has no known parent" path.
GET_PROCESS_SEARCH_PARENT_JOB_RESULTS_RESP_1 = {
    "results": [
        {
            "backend_timestamp": "2020-09-11T19:35:02.972Z",
            "childproc_count": 0,
            "crossproc_count": 787,
            "device_external_ip": "192.168.0.1",
            "device_group_id": 0,
            "device_id": 1234567,
            "device_internal_ip": "192.168.0.2",
            "device_name": "Windows10Device",
            "device_os": "WINDOWS",
            "device_policy_id": 12345,
            "device_timestamp": "2020-09-11T19:32:12.821Z",
            "enriched": True,
            "enriched_event_type": [
                "INJECT_CODE",
                "SYSTEM_API_CALL"
            ],
            "event_type": [
                "crossproc"
            ],
            "filemod_count": 0,
            "ingress_time": 1599852859660,
            "legacy": True,
            "modload_count": 1,
            "netconn_count": 0,
            "org_id": "test",
            "process_cmdline": [
                "\"C:\\Program Files\\VMware\\VMware Tools\\vmtoolsd.exe\""
            ],
            "process_effective_reputation": "TRUSTED_WHITE_LIST",
            "process_guid": "test-0002b226-00000001-00000000-1d6225bbba74c01",
            "process_hash": [
                "5920199e4fbfa47c1717b863814722148a353e54f8c10912cf1f991a1c86309d",
                "c7084336325dc8eadfb1e8ff876921c4"
            ],
            "process_name": "c:\\program files\\vmware\\vmware tools\\vmtoolsd.exe",
            "process_pid": [
                2976
            ],
            "process_reputation": "TRUSTED_WHITE_LIST",
            "process_username": [
                "Username"
            ],
            "regmod_count": 1,
            "scriptload_count": 0,
            "ttp": [
                "ENUMERATE_PROCESSES",
                "INJECT_CODE",
                "MITRE_T1003_CREDENTIAL_DUMP",
                "MITRE_T1005_DATA_FROM_LOCAL_SYS",
                "MITRE_T1055_PROCESS_INJECT",
                "MITRE_T1057_PROCESS_DISCOVERY",
                "RAM_SCRAPING",
                "READ_SECURITY_DATA"
            ]
        }
    ],
    "num_found": 6168,
    "num_available": 1,
    "contacted": 45,
    "completed": 45
}
# Mock process-search job status/metadata response: all 45 shards completed,
# and the echoed "query" block shows the Solr-style parameters (field list,
# collapse filter, process_guid query) the job was submitted with.
GET_PROCESS_SEARCH_JOB_RESP = {
    "contacted": 45,
    "completed": 45,
    "query": {
        "cb.max_backend_timestamp": 1599853172000,
        "cb.min_backend_timestamp": 0,
        "cb.min_device_timestamp": 0,
        "cb.preview_results": 500,
        "cb.use_agg": True,
        "facet": False,
        "fl": "*,parent_hash,parent_name,process_cmdline,backend_timestamp,device_external_ip,device_group,device_internal_ip,device_os,process_effective_reputation,process_reputation,ttp",  # noqa: E501
        "fq": "{!collapse field=process_collapse_id sort='max(0,legacy) asc,device_timestamp desc'}",
        "q": "(process_guid:test-0034d5f2-00000ba0-00000000-1d68709850fe521)",
        "rows": 500,
        "start": 0
    },
    "search_initiated_time": 1599853172533,
    "connector_id": "ABCDEFGH"
}
# Mock process-summary response with a fully populated tree: a "parent"
# (systemd), the "process" itself (bash with two PIDs), two "children"
# (mpcmdrun.exe runs) and one sibling (nm-dispatcher). All 30 contacted
# shards report completed, so the summary is final.
GET_PROCESS_SUMMARY_RESP = {
    "completed": 30,
    "contacted": 30,
    "exception": "",
    "summary": {
        "children": [
            {
                "_process_filename": "mpcmdrun.exe",
                "backend_timestamp": "2020-12-03T20:33:19.002Z",
                "childproc_count": 1,
                "crossproc_count": 5,
                "device_external_ip": "24.243.76.124",
                "device_group_id": 0,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-03T20:31:13.097Z",
                "filemod_count": 1,
                "has_children": True,
                "hits": False,
                "ingress_time": 1607027590489,
                "modload_count": 18,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
                "parent_hash": [
                    "9520a99e77d6196d0d09833146424113",
                    "dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
                ],
                "parent_name": "c:\\windows\\system32\\svchost.exe",
                "parent_pid": 2924,
                "process_cmdline": [
                    "\"C:\\Program Files\\Windows Defender\\mpcmdrun.exe\" -wddisable"
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-00050603-0000157c-00000000-1d6c9b339b4a0cd",
                "process_hash": [
                    "cc4f6cbde75f08afdcefb95087149a5d",
                    "885557be148de55f6a127ea26ac457b9415e3e3baf30266d82b9d19b89e78ee4"
                ],
                "process_name": "c:\\program files\\windows defender\\mpcmdrun.exe",
                "process_pid": [
                    5500
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-12-03T20:31:05.847Z",
                "process_terminated": True,
                "process_username": [
                    "NT AUTHORITY\\LOCAL SERVICE"
                ],
                "regmod_count": 0,
                "scriptload_count": 0
            },
            {
                "_process_filename": "mpcmdrun.exe",
                "backend_timestamp": "2020-12-02T05:59:53.548Z",
                "childproc_count": 1,
                "crossproc_count": 4,
                "device_external_ip": "24.243.76.124",
                "device_group": "schumaker-test",
                "device_group_id": 1706,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-02T05:45:15.950Z",
                "filemod_count": 1,
                "has_children": True,
                "hits": False,
                "ingress_time": 1606888776302,
                "modload_count": 16,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
                "parent_hash": [
                    "9520a99e77d6196d0d09833146424113",
                    "dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
                ],
                "parent_name": "c:\\windows\\system32\\svchost.exe",
                "parent_pid": 2924,
                "process_cmdline": [
                    "\"C:\\Program Files\\Windows Defender\\mpcmdrun.exe\" -wdenable"
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-00050603-00001d78-00000000-1d6c86e4f3c4a8f",
                "process_hash": [
                    "cc4f6cbde75f08afdcefb95087149a5d",
                    "885557be148de55f6a127ea26ac457b9415e3e3baf30266d82b9d19b89e78ee4"
                ],
                "process_name": "c:\\program files\\windows defender\\mpcmdrun.exe",
                "process_pid": [
                    7544
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-12-02T05:45:15.531Z",
                "process_terminated": True,
                "process_username": [
                    "NT AUTHORITY\\LOCAL SERVICE"
                ],
                "regmod_count": 0,
                "scriptload_count": 0
            }
        ],
        "parent": {
            "_process_filename": "systemd",
            "backend_timestamp": "2020-08-28T19:12:07.989Z",
            "childproc_count": 0,
            "crossproc_count": 0,
            "device_external_ip": "34.56.78.90",
            "device_group_id": 0,
            "device_id": 176678,
            "device_name": "devr-dev",
            "device_os": "LINUX",
            "device_policy": "sm-restrictive",
            "device_policy_id": 11200,
            "device_timestamp": "2020-08-28T19:10:02.123Z",
            "filemod_count": 0,
            "has_children": True,
            "hits": False,
            "ingress_time": 1598641901273,
            "modload_count": 0,
            "netconn_count": 0,
            "org_id": "ABCD1234",
            "process_effective_reputation": "NOT_LISTED",
            "process_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
            "process_hash": [
                "e4b9902024ac32b3ca37f6b4c9b841e8",
                "81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85"
            ],
            "process_name": "/usr/lib/systemd/systemd",
            "process_pid": [
                1
            ],
            "process_reputation": "NOT_LISTED",
            "process_start_time": "2020-05-04T21:34:03.968Z",
            "regmod_count": 0,
            "scriptload_count": 0
        },
        "process": {
            "_process_filename": "bash",
            "backend_timestamp": "2020-08-28T19:16:11.959Z",
            "childproc_count": 333580,
            "crossproc_count": 0,
            "device_external_ip": "34.56.78.90",
            "device_group_id": 0,
            "device_id": 176678,
            "device_name": "devr-dev",
            "device_os": "LINUX",
            "device_policy": "sm-restrictive",
            "device_policy_id": 11200,
            "device_timestamp": "2020-08-28T19:14:41.231Z",
            "filemod_count": 0,
            "ingress_time": 1598642141411,
            "modload_count": 0,
            "netconn_count": 0,
            "org_id": "ABCD1234",
            "parent_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
            "parent_hash": [
                "81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85",
                "e4b9902024ac32b3ca37f6b4c9b841e8"
            ],
            "parent_name": "/usr/lib/systemd/systemd",
            "parent_pid": 1,
            "process_cmdline": [
                "/bin/bash /usr/sbin/ksmtuned"
            ],
            "process_effective_reputation": "NOT_LISTED",
            "process_guid": "ABCD1234-0002b226-00001615-00000000-1d6225bbba75e5e",
            "process_hash": [
                "c7084336325dc8eadfb1e8ff876921c4",
                "5920199e4fbfa47c1717b863814722148a353e54f8c10912cf1f991a1c86309d"
            ],
            "process_name": "/usr/bin/bash",
            "process_pid": [
                5653,
                16139
            ],
            "process_reputation": "NOT_LISTED",
            "process_start_time": "2020-05-04T21:34:03.968Z",
            "process_username": [
                "root"
            ],
            "regmod_count": 0,
            "scriptload_count": 0
        },
        "siblings": [
            {
                "_process_filename": "nm-dispatcher",
                "backend_timestamp": "2020-08-19T20:55:33.446Z",
                "childproc_count": 1,
                "crossproc_count": 0,
                "device_external_ip": "34.56.78.90",
                "device_group_id": 0,
                "device_id": 176678,
                "device_name": "devr-dev",
                "device_os": "LINUX",
                "device_policy": "sm-restrictive",
                "device_policy_id": 11200,
                "device_timestamp": "2020-08-19T20:54:44.980Z",
                "filemod_count": 0,
                "has_children": True,
                "hits": False,
                "ingress_time": 1597870506825,
                "modload_count": 0,
                "netconn_count": 0,
                "org_id": "ABCD1234",
                "parent_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
                "parent_hash": [
                    "81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85",
                    "e4b9902024ac32b3ca37f6b4c9b841e8"
                ],
                "parent_name": "/usr/lib/systemd/systemd",
                "parent_pid": 1,
                "process_cmdline": [
                    "/usr/libexec/nm-dispatcher"
                ],
                "process_effective_reputation": "NOT_LISTED",
                "process_guid": "ABCD1234-0002b226-00005742-00000000-1d6766af7bedb39",
                "process_hash": [
                    "04b2450579a663c964f3960cd0cf93a8",
                    "2206d95e0a435aadad6c84b5bce370d076137efecf6adbbf6feddbe0515fb17e"
                ],
                "process_name": "/usr/libexec/nm-dispatcher",
                "process_pid": [
                    22338
                ],
                "process_reputation": "NOT_LISTED",
                "process_start_time": "2020-08-19T20:54:44.909Z",
                "process_username": [
                    "root"
                ],
                "regmod_count": 0,
                "scriptload_count": 0
            }
        ]
    }
}
# Variant of the process-summary fixture whose "process" entry deliberately
# OMITS the "process_pid" field (and whose "children" list is empty), for
# testing code paths that must tolerate a summary with no PID.
GET_PROCESS_SUMMARY_RESP_NO_PID = {
    "completed": 30,
    "contacted": 30,
    "exception": "",
    "summary": {
        "children": [],
        "parent": {
            "_process_filename": "systemd",
            "backend_timestamp": "2020-08-28T19:12:07.989Z",
            "childproc_count": 0,
            "crossproc_count": 0,
            "device_external_ip": "34.56.78.90",
            "device_group_id": 0,
            "device_id": 176678,
            "device_name": "devr-dev",
            "device_os": "LINUX",
            "device_policy": "sm-restrictive",
            "device_policy_id": 11200,
            "device_timestamp": "2020-08-28T19:10:02.123Z",
            "filemod_count": 0,
            "has_children": True,
            "hits": False,
            "ingress_time": 1598641901273,
            "modload_count": 0,
            "netconn_count": 0,
            "org_id": "ABCD1234",
            "process_effective_reputation": "NOT_LISTED",
            "process_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
            "process_hash": [
                "e4b9902024ac32b3ca37f6b4c9b841e8",
                "81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85"
            ],
            "process_name": "/usr/lib/systemd/systemd",
            "process_pid": [
                1
            ],
            "process_reputation": "NOT_LISTED",
            "process_start_time": "2020-05-04T21:34:03.968Z",
            "regmod_count": 0,
            "scriptload_count": 0
        },
        "process": {
            "_process_filename": "bash",
            "backend_timestamp": "2020-08-28T19:16:11.959Z",
            "childproc_count": 333580,
            "crossproc_count": 0,
            "device_external_ip": "34.56.78.90",
            "device_group_id": 0,
            "device_id": 176678,
            "device_name": "devr-dev",
            "device_os": "LINUX",
            "device_policy": "sm-restrictive",
            "device_policy_id": 11200,
            "device_timestamp": "2020-08-28T19:14:41.231Z",
            "filemod_count": 0,
            "ingress_time": 1598642141411,
            "modload_count": 0,
            "netconn_count": 0,
            "org_id": "ABCD1234",
            "parent_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
            "parent_hash": [
                "81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85",
                "e4b9902024ac32b3ca37f6b4c9b841e8"
            ],
            "parent_name": "/usr/lib/systemd/systemd",
            "parent_pid": 1,
            "process_cmdline": [
                "/bin/bash /usr/sbin/ksmtuned"
            ],
            "process_effective_reputation": "NOT_LISTED",
            "process_guid": "ABCD1234-0002b226-00001615-00000000-1d6225bbba75e5e",
            "process_hash": [
                "c7084336325dc8eadfb1e8ff876921c4",
                "5920199e4fbfa47c1717b863814722148a353e54f8c10912cf1f991a1c86309d"
            ],
            "process_name": "/usr/bin/bash",
            "process_reputation": "NOT_LISTED",
            "process_start_time": "2020-05-04T21:34:03.968Z",
            "process_username": [
                "root"
            ],
            "regmod_count": 0,
            "scriptload_count": 0
        },
        "siblings": [
            {
                "_process_filename": "nm-dispatcher",
                "backend_timestamp": "2020-08-19T20:55:33.446Z",
                "childproc_count": 1,
                "crossproc_count": 0,
                "device_external_ip": "34.56.78.90",
                "device_group_id": 0,
                "device_id": 176678,
                "device_name": "devr-dev",
                "device_os": "LINUX",
                "device_policy": "sm-restrictive",
                "device_policy_id": 11200,
                "device_timestamp": "2020-08-19T20:54:44.980Z",
                "filemod_count": 0,
                "has_children": True,
                "hits": False,
                "ingress_time": 1597870506825,
                "modload_count": 0,
                "netconn_count": 0,
                "org_id": "ABCD1234",
                "parent_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
                "parent_hash": [
                    "81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85",
                    "e4b9902024ac32b3ca37f6b4c9b841e8"
                ],
                "parent_name": "/usr/lib/systemd/systemd",
                "parent_pid": 1,
                "process_cmdline": [
                    "/usr/libexec/nm-dispatcher"
                ],
                "process_effective_reputation": "NOT_LISTED",
                "process_guid": "ABCD1234-0002b226-00005742-00000000-1d6766af7bedb39",
                "process_hash": [
                    "04b2450579a663c964f3960cd0cf93a8",
                    "2206d95e0a435aadad6c84b5bce370d076137efecf6adbbf6feddbe0515fb17e"
                ],
                "process_name": "/usr/libexec/nm-dispatcher",
                "process_pid": [
                    22338
                ],
                "process_reputation": "NOT_LISTED",
                "process_start_time": "2020-08-19T20:54:44.909Z",
                "process_username": [
                    "root"
                ],
                "regmod_count": 0,
                "scriptload_count": 0
            }
        ]
    }
}
# Variant of the process-summary fixture whose "process" entry deliberately
# OMITS the "process_hash" field (and whose "children" list is empty), for
# testing code paths that must tolerate a summary with no hash.
GET_PROCESS_SUMMARY_RESP_NO_HASH = {
    "completed": 30,
    "contacted": 30,
    "exception": "",
    "summary": {
        "children": [],
        "parent": {
            "_process_filename": "systemd",
            "backend_timestamp": "2020-08-28T19:12:07.989Z",
            "childproc_count": 0,
            "crossproc_count": 0,
            "device_external_ip": "34.56.78.90",
            "device_group_id": 0,
            "device_id": 176678,
            "device_name": "devr-dev",
            "device_os": "LINUX",
            "device_policy": "sm-restrictive",
            "device_policy_id": 11200,
            "device_timestamp": "2020-08-28T19:10:02.123Z",
            "filemod_count": 0,
            "has_children": True,
            "hits": False,
            "ingress_time": 1598641901273,
            "modload_count": 0,
            "netconn_count": 0,
            "org_id": "ABCD1234",
            "process_effective_reputation": "NOT_LISTED",
            "process_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
            "process_hash": [
                "e4b9902024ac32b3ca37f6b4c9b841e8",
                "81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85"
            ],
            "process_name": "/usr/lib/systemd/systemd",
            "process_pid": [
                1
            ],
            "process_reputation": "NOT_LISTED",
            "process_start_time": "2020-05-04T21:34:03.968Z",
            "regmod_count": 0,
            "scriptload_count": 0
        },
        "process": {
            "_process_filename": "bash",
            "backend_timestamp": "2020-08-28T19:16:11.959Z",
            "childproc_count": 333580,
            "crossproc_count": 0,
            "device_external_ip": "34.56.78.90",
            "device_group_id": 0,
            "device_id": 176678,
            "device_name": "devr-dev",
            "device_os": "LINUX",
            "device_policy": "sm-restrictive",
            "device_policy_id": 11200,
            "device_timestamp": "2020-08-28T19:14:41.231Z",
            "filemod_count": 0,
            "ingress_time": 1598642141411,
            "modload_count": 0,
            "netconn_count": 0,
            "org_id": "ABCD1234",
            "parent_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
            "parent_hash": [
                "81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85",
                "e4b9902024ac32b3ca37f6b4c9b841e8"
            ],
            "parent_name": "/usr/lib/systemd/systemd",
            "parent_pid": 1,
            "process_cmdline": [
                "/bin/bash /usr/sbin/ksmtuned"
            ],
            "process_effective_reputation": "NOT_LISTED",
            "process_guid": "ABCD1234-0002b226-00001615-00000000-1d6225bbba75e5e",
            "process_name": "/usr/bin/bash",
            "process_pid": [
                5653,
                16139
            ],
            "process_reputation": "NOT_LISTED",
            "process_start_time": "2020-05-04T21:34:03.968Z",
            "process_username": [
                "root"
            ],
            "regmod_count": 0,
            "scriptload_count": 0
        },
        "siblings": [
            {
                "_process_filename": "nm-dispatcher",
                "backend_timestamp": "2020-08-19T20:55:33.446Z",
                "childproc_count": 1,
                "crossproc_count": 0,
                "device_external_ip": "34.56.78.90",
                "device_group_id": 0,
                "device_id": 176678,
                "device_name": "devr-dev",
                "device_os": "LINUX",
                "device_policy": "sm-restrictive",
                "device_policy_id": 11200,
                "device_timestamp": "2020-08-19T20:54:44.980Z",
                "filemod_count": 0,
                "has_children": True,
                "hits": False,
                "ingress_time": 1597870506825,
                "modload_count": 0,
                "netconn_count": 0,
                "org_id": "ABCD1234",
                "parent_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
                "parent_hash": [
                    "81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85",
                    "e4b9902024ac32b3ca37f6b4c9b841e8"
                ],
                "parent_name": "/usr/lib/systemd/systemd",
                "parent_pid": 1,
                "process_cmdline": [
                    "/usr/libexec/nm-dispatcher"
                ],
                "process_effective_reputation": "NOT_LISTED",
                "process_guid": "ABCD1234-0002b226-00005742-00000000-1d6766af7bedb39",
                "process_hash": [
                    "04b2450579a663c964f3960cd0cf93a8",
                    "2206d95e0a435aadad6c84b5bce370d076137efecf6adbbf6feddbe0515fb17e"
                ],
                "process_name": "/usr/libexec/nm-dispatcher",
                "process_pid": [
                    22338
                ],
                "process_reputation": "NOT_LISTED",
                "process_start_time": "2020-08-19T20:54:44.909Z",
                "process_username": [
                    "root"
                ],
                "regmod_count": 0,
                "scriptload_count": 0
            }
        ]
    }
}
# Edge-case fixture: a summary job that has contacted zero shards and
# therefore carries an empty "summary" payload.
GET_PROCESS_SUMMARY_RESP_ZERO_CONTACTED = {
    "completed": 0,
    "contacted": 0,
    "exception": "",
    "summary": {}
}
# Edge-case fixture: a summary job still in progress — only 5 of 10
# contacted shards completed, so "summary" is still empty.
GET_PROCESS_SUMMARY_RESP_STILL_QUERYING = {
    "completed": 5,
    "contacted": 10,
    "exception": "",
    "summary": {}
}
# Summary fixture variant: "parent" is an empty dict, the "process" entry
# (csrss.exe) has no "process_pid", and "children" contains three entries —
# the second and third of which are identical duplicates. "contacted"/
# "completed" appear at the end of the dict rather than the top.
GET_PROCESS_SUMMARY_RESP_1 = {
    "exception": "",
    "summary": {
        "process": {
            "_process_filename": "csrss.exe",
            "backend_timestamp": "2020-12-03T20:34:38.889Z",
            "childproc_count": 0,
            "crossproc_count": 0,
            "device_external_ip": "24.243.76.124",
            "device_group_id": 0,
            "device_id": 329219,
            "device_internal_ip": "172.16.115.191",
            "device_name": "desktop-8qonquj",
            "device_os": "WINDOWS",
            "device_policy": "default",
            "device_policy_id": 2198,
            "device_timestamp": "2020-12-03T20:32:21.866Z",
            "filemod_count": 0,
            "has_children": False,
            "ingress_time": 1607027652665,
            "modload_count": 0,
            "netconn_count": 0,
            "org_id": "WNEXFKQ7",
            "parent_guid": "WNEXFKQ7-00050603-000001f4-00000000-1d6c86e28008165",
            "parent_hash": [
                "5f48638e3397204c2c63d7b76d025d62302d0e45fc5055c0a692b0bbc7e6b337",
                "858e3da84c5389952e1ad3701e410f61"
            ],
            "parent_name": "c:\\windows\\system32\\smss.exe",
            "parent_pid": 500,
            "process_cmdline": [
                "%SystemRoot%\\system32\\csrss.exe ObjectDirectory=\\Windows SharedSection="
                "1024,20480,768 Windows=On SubSystemType=Windows ServerDll=basesrv,1 ServerDll"
                "=winsrv:UserServerDllInitialization,3 ServerDll=sxssrv,4 ProfileControl=Off MaxRequestThreads=16"
            ],
            "process_effective_reputation": "TRUSTED_WHITE_LIST",
            "process_guid": "WNEXFKQ7-00050603-00000204-00000000-1d6c86e2801cd1b",
            "process_hash": [
                "12384336325dc8eadfb1e8ff876921c4",
                "f2c7d894abe8ac0b4c2a597caa6b3efe7ad2bdb4226845798d954c5ab9c9bf15"
            ],
            "process_name": "c:\\windows\\system32\\csrss.exe",
            "process_reputation": "TRUSTED_WHITE_LIST",
            "process_start_time": "2020-12-02T05:44:09.717Z",
            "process_username": [
                "NT AUTHORITY\\SYSTEM"
            ],
            "regmod_count": 0,
            "scriptload_count": 0
        },
        "siblings": [
            {
                "_process_filename": "winlogon.exe",
                "backend_timestamp": "2020-12-03T20:34:38.889Z",
                "childproc_count": 0,
                "crossproc_count": 0,
                "device_external_ip": "24.243.76.124",
                "device_group_id": 0,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-03T20:32:08.646Z",
                "filemod_count": 0,
                "has_children": True,
                "hits": False,
                "ingress_time": 1607027652665,
                "modload_count": 0,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-000001f4-00000000-1d6c86e28008165",
                "parent_hash": [
                    "5f48638e3397204c2c63d7b76d025d62302d0e45fc5055c0a692b0bbc7e6b337",
                    "858e3da84c5389952e1ad3701e410f61"
                ],
                "parent_name": "c:\\windows\\system32\\smss.exe",
                "parent_pid": 500,
                "process_cmdline": [
                    "winlogon.exe"
                ],
                "process_effective_reputation": "LOCAL_WHITE",
                "process_guid": "WNEXFKQ7-00050603-0000025c-00000000-1d6c86e280d8ba9",
                "process_hash": [
                    "fd9aad3ea144d4c893eb0ccbff394a83",
                    "d6df7bbd93e84f5e9aec4f2d36fb04b8168e62010eae617f386c10c73b9136e6"
                ],
                "process_name": "c:\\windows\\system32\\winlogon.exe",
                "process_pid": [
                    604
                ],
                "process_reputation": "ADAPTIVE_WHITE_LIST",
                "process_start_time": "2020-12-02T05:44:09.794Z",
                "process_username": [
                    "NT AUTHORITY\\SYSTEM"
                ],
                "regmod_count": 0,
                "scriptload_count": 0
            }
        ],
        "parent": {},
        "children": [
            {
                "_process_filename": "mpcmdrun.exe",
                "backend_timestamp": "2020-12-03T20:33:19.002Z",
                "childproc_count": 1,
                "crossproc_count": 5,
                "device_external_ip": "24.243.76.124",
                "device_group_id": 0,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-03T20:31:13.097Z",
                "filemod_count": 1,
                "has_children": True,
                "hits": False,
                "ingress_time": 1607027590489,
                "modload_count": 18,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
                "parent_hash": [
                    "9520a99e77d6196d0d09833146424113",
                    "dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
                ],
                "parent_name": "c:\\windows\\system32\\svchost.exe",
                "parent_pid": 2924,
                "process_cmdline": [
                    "\"C:\\Program Files\\Windows Defender\\mpcmdrun.exe\" -wddisable"
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-00050603-0000157c-00000000-1d6c9b339b4a0cd",
                "process_hash": [
                    "cc4f6cbde75f08afdcefb95087149a5d",
                    "885557be148de55f6a127ea26ac457b9415e3e3baf30266d82b9d19b89e78ee4"
                ],
                "process_name": "c:\\program files\\windows defender\\mpcmdrun.exe",
                "process_pid": [
                    5500
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-12-03T20:31:05.847Z",
                "process_terminated": True,
                "process_username": [
                    "NT AUTHORITY\\LOCAL SERVICE"
                ],
                "regmod_count": 0,
                "scriptload_count": 0
            },
            {
                "_process_filename": "mpcmdrun.exe",
                "backend_timestamp": "2020-12-02T05:59:53.548Z",
                "childproc_count": 1,
                "crossproc_count": 4,
                "device_external_ip": "24.243.76.124",
                "device_group": "schumaker-test",
                "device_group_id": 1706,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-02T05:45:15.950Z",
                "filemod_count": 1,
                "has_children": True,
                "hits": False,
                "ingress_time": 1606888776302,
                "modload_count": 16,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
                "parent_hash": [
                    "9520a99e77d6196d0d09833146424113",
                    "dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
                ],
                "parent_name": "c:\\windows\\system32\\svchost.exe",
                "parent_pid": 2924,
                "process_cmdline": [
                    "\"C:\\Program Files\\Windows Defender\\mpcmdrun.exe\" -wdenable"
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-00050603-00001d78-00000000-1d6c86e4f3c4a8f",
                "process_hash": [
                    "cc4f6cbde75f08afdcefb95087149a5d",
                    "885557be148de55f6a127ea26ac457b9415e3e3baf30266d82b9d19b89e78ee4"
                ],
                "process_name": "c:\\program files\\windows defender\\mpcmdrun.exe",
                "process_pid": [
                    7544
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-12-02T05:45:15.531Z",
                "process_terminated": True,
                "process_username": [
                    "NT AUTHORITY\\LOCAL SERVICE"
                ],
                "regmod_count": 0,
                "scriptload_count": 0
            },
            {
                "_process_filename": "mpcmdrun.exe",
                "backend_timestamp": "2020-12-02T05:59:53.548Z",
                "childproc_count": 1,
                "crossproc_count": 4,
                "device_external_ip": "24.243.76.124",
                "device_group": "schumaker-test",
                "device_group_id": 1706,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-02T05:45:15.950Z",
                "filemod_count": 1,
                "has_children": True,
                "hits": False,
                "ingress_time": 1606888776302,
                "modload_count": 16,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
                "parent_hash": [
                    "9520a99e77d6196d0d09833146424113",
                    "dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
                ],
                "parent_name": "c:\\windows\\system32\\svchost.exe",
                "parent_pid": 2924,
                "process_cmdline": [
                    "\"C:\\Program Files\\Windows Defender\\mpcmdrun.exe\" -wdenable"
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-00050603-00001d78-00000000-1d6c86e4f3c4a8f",
                "process_hash": [
                    "cc4f6cbde75f08afdcefb95087149a5d",
                    "885557be148de55f6a127ea26ac457b9415e3e3baf30266d82b9d19b89e78ee4"
                ],
                "process_name": "c:\\program files\\windows defender\\mpcmdrun.exe",
                "process_pid": [
                    7544
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-12-02T05:45:15.531Z",
                "process_terminated": True,
                "process_username": [
                    "NT AUTHORITY\\LOCAL SERVICE"
                ],
                "regmod_count": 0,
                "scriptload_count": 0
            }
        ]
    },
    "contacted": 34,
    "completed": 34
}
GET_PROCESS_SUMMARY_RESP_2 = {
"exception": "",
"summary": {
"process": {
"_process_filename": "svchost.exe",
"backend_timestamp": "2020-12-03T20:34:38.889Z",
"childproc_count": 3,
"crossproc_count": 40,
"device_external_ip": "24.243.76.124",
"device_group_id": 0,
"device_id": 329219,
"device_internal_ip": "172.16.115.191",
"device_name": "desktop-8qonquj",
"device_os": "WINDOWS",
"device_policy": "default",
"device_policy_id": 2198,
"device_timestamp": "2020-12-03T20:32:20.912Z",
"filemod_count": 0,
"has_children": True,
"hits": False,
"ingress_time": 1607027652665,
"modload_count": 101,
"netconn_count": 0,
"org_id": "WNEXFKQ7",
"parent_guid": "WNEXFKQ7-00050603-00000270-00000000-1d6c86e280fbff8",
"parent_hash": [
"1b6ffe1f5480675fc618b42247ef49a1c60ca99d2d53271b3472557e3bea2e8a",
"2bd115a27b60b74bbeb31013519ac199"
],
"parent_name": "c:\\windows\\system32\\services.exe",
"parent_pid": 624,
"process_cmdline": [
"C:\\Windows\\System32\\svchost.exe -k LocalServiceNetworkRestricted -p -s wscsvc"
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
"process_hash": [
"45684336325dc8eadfb1e8ff876921c4",
"dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
],
"process_name": "c:\\windows\\system32\\svchost.exe",
"process_pid": [
788
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_start_time": "2020-12-02T05:44:12.137Z",
"process_username": [
"NT AUTHORITY\\LOCAL SERVICE"
],
"regmod_count": 5,
"scriptload_count": 0
},
"siblings": [
{
"_process_filename": "spoolsv.exe",
"backend_timestamp": "2020-12-03T20:34:38.889Z",
"childproc_count": 2,
"crossproc_count": 35,
"device_external_ip": "24.243.76.124",
"device_group_id": 0,
"device_id": 329219,
"device_internal_ip": "172.16.115.191",
"device_name": "desktop-8qonquj",
"device_os": "WINDOWS",
"device_policy": "default",
"device_policy_id": 2198,
"device_timestamp": "2020-12-03T20:32:18.693Z",
"filemod_count": 405,
"has_children": True,
"hits": False,
"ingress_time": 1607027652665,
"modload_count": 382,
"netconn_count": 0,
"org_id": "WNEXFKQ7",
"parent_guid": "WNEXFKQ7-00050603-00000270-00000000-1d6c86e280fbff8",
"parent_hash": [
"1b6ffe1f5480675fc618b42247ef49a1c60ca99d2d53271b3472557e3bea2e8a",
"2bd115a27b60b74bbeb31013519ac199"
],
"parent_name": "c:\\windows\\system32\\services.exe",
"parent_pid": 624,
"process_cmdline": [
"C:\\Windows\\System32\\spoolsv.exe"
],
"process_effective_reputation": "LOCAL_WHITE",
"process_guid": "WNEXFKQ7-00050603-00000944-00000000-1d6c86e29169d10",
"process_hash": [
"94170797d822cd195f8f92da9def082f",
"f45ca80e151494a7394dcd1958ee94c0b83fe3f7b9e281fa1e626e71ff6c2604"
],
"process_name": "c:\\windows\\system32\\spoolsv.exe",
"process_pid": [
2372
],
"process_reputation": "COMMON_WHITE_LIST",
"process_start_time": "2020-12-02T05:44:11.531Z",
"process_username": [
"NT AUTHORITY\\SYSTEM"
],
"regmod_count": 445,
"scriptload_count": 0
},
{
"_process_filename": "wmiapsrv.exe",
"backend_timestamp": "2020-12-02T06:00:54.384Z",
"childproc_count": 0,
"crossproc_count": 3,
"device_external_ip": "24.243.76.124",
"device_group": "schumaker-test",
"device_group_id": 1706,
"device_id": 329219,
"device_internal_ip": "172.16.115.191",
"device_name": "desktop-8qonquj",
"device_os": "WINDOWS",
"device_policy": "default",
"device_policy_id": 2198,
"device_timestamp": "2020-12-02T05:46:50.369Z",
"filemod_count": 0,
"has_children": False,
"ingress_time": 1606888837162,
"modload_count": 21,
"netconn_count": 0,
"org_id": "WNEXFKQ7",
"parent_guid": "WNEXFKQ7-00050603-00000270-00000000-1d6c86e280fbff8",
"parent_hash": [
"1b6ffe1f5480675fc618b42247ef49a1c60ca99d2d53271b3472557e3bea2e8a",
"2bd115a27b60b74bbeb31013519ac199"
],
"parent_name": "c:\\windows\\system32\\services.exe",
"parent_pid": 624,
"process_cmdline": [
"C:\\Windows\\system32\\wbem\\WmiApSrv.exe"
],
"process_effective_reputation": "LOCAL_WHITE",
"process_guid": "WNEXFKQ7-00050603-000020f8-00000000-1d6c86e87b1e2be",
"process_hash": [
"55e21dfb7ec2394903e5ca62fdca21e6",
"55c2021f06d28696843672ff90e242c33c4cf6d30cdf0b2d9dcf07d8282cfc19"
],
"process_name": "c:\\windows\\system32\\wbem\\wmiapsrv.exe",
"process_pid": [
8440
],
"process_reputation": "ADAPTIVE_WHITE_LIST",
"process_start_time": "2020-12-02T05:46:50.254Z",
"process_terminated": True,
"process_username": [
"NT AUTHORITY\\SYSTEM"
],
"regmod_count": 1,
"scriptload_count": 0
}
],
"parent": {
"_process_filename": "services.exe",
"backend_timestamp": "2020-12-03T20:34:38.889Z",
"childproc_count": 243,
"crossproc_count": 39,
"device_external_ip": "24.243.76.124",
"device_group_id": 0,
"device_id": 329219,
"device_internal_ip": "172.16.115.191",
"device_name": "desktop-8qonquj",
"device_os": "WINDOWS",
"device_policy": "default",
"device_policy_id": 2198,
"device_timestamp": "2020-12-03T20:32:13.397Z",
"filemod_count": 0,
"has_children": True,
"hits": False,
"ingress_time": 1607027652665,
"modload_count": 53,
"netconn_count": 0,
"org_id": "WNEXFKQ7",
"parent_guid": "WNEXFKQ7-00050603-000001fc-00000000-1d6c86e2801246d",
"parent_hash": [
"d5e122606054fa0b03db3ee8cf9ea7701e523875e2bdb87581ad7232ffc9308e",
"e83650f70459a027aa596e1a73c961a1"
],
"parent_name": "c:\\windows\\system32\\wininit.exe",
"parent_pid": 508,
"process_cmdline": [
"C:\\Windows\\system32\\services.exe"
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "WNEXFKQ7-00050603-00000270-00000000-1d6c86e280fbff8",
"process_hash": [
"2bd115a27b60b74bbeb31013519ac199",
"1b6ffe1f5480675fc618b42247ef49a1c60ca99d2d53271b3472557e3bea2e8a"
],
"process_name": "c:\\windows\\system32\\services.exe",
"process_pid": [
624
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_start_time": "2020-12-02T05:44:09.808Z",
"process_username": [
"NT AUTHORITY\\SYSTEM"
],
"regmod_count": 254,
"scriptload_count": 0
},
"children": [
{
"_process_filename": "mpcmdrun.exe",
"backend_timestamp": "2020-12-03T20:33:19.002Z",
"childproc_count": 1,
"crossproc_count": 5,
"device_external_ip": "24.243.76.124",
"device_group_id": 0,
"device_id": 329219,
"device_internal_ip": "172.16.115.191",
"device_name": "desktop-8qonquj",
"device_os": "WINDOWS",
"device_policy": "default",
"device_policy_id": 2198,
"device_timestamp": "2020-12-03T20:31:13.097Z",
"filemod_count": 1,
"has_children": True,
"hits": False,
"ingress_time": 1607027590489,
"modload_count": 18,
"netconn_count": 0,
"org_id": "WNEXFKQ7",
"parent_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
"parent_hash": [
"9520a99e77d6196d0d09833146424113",
"dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
],
"parent_name": "c:\\windows\\system32\\svchost.exe",
"parent_pid": 2924,
"process_cmdline": [
"\"C:\\Program Files\\Windows Defender\\mpcmdrun.exe\" -wddisable"
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "WNEXFKQ7-00050603-0000157c-00000000-1d6c9b339b4a0cd",
"process_hash": [
"cc4f6cbde75f08afdcefb95087149a5d",
"885557be148de55f6a127ea26ac457b9415e3e3baf30266d82b9d19b89e78ee4"
],
"process_name": "c:\\program files\\windows defender\\mpcmdrun.exe",
"process_pid": [
5500
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_start_time": "2020-12-03T20:31:05.847Z",
"process_terminated": True,
"process_username": [
"NT AUTHORITY\\LOCAL SERVICE"
],
"regmod_count": 0,
"scriptload_count": 0
},
{
"_process_filename": "mpcmdrun.exe",
"backend_timestamp": "2020-12-02T05:59:53.548Z",
"childproc_count": 1,
"crossproc_count": 4,
"device_external_ip": "24.243.76.124",
"device_group": "schumaker-test",
"device_group_id": 1706,
"device_id": 329219,
"device_internal_ip": "172.16.115.191",
"device_name": "desktop-8qonquj",
"device_os": "WINDOWS",
"device_policy": "default",
"device_policy_id": 2198,
"device_timestamp": "2020-12-02T05:45:15.950Z",
"filemod_count": 1,
"has_children": True,
"hits": False,
"ingress_time": 1606888776302,
"modload_count": 16,
"netconn_count": 0,
"org_id": "WNEXFKQ7",
"parent_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
"parent_hash": [
"9520a99e77d6196d0d09833146424113",
"dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
],
"parent_name": "c:\\windows\\system32\\svchost.exe",
"parent_pid": 2924,
"process_cmdline": [
"\"C:\\Program Files\\Windows Defender\\mpcmdrun.exe\" -wdenable"
],
"process_effective_reputation": "TRUSTED_WHITE_LIST",
"process_guid": "WNEXFKQ7-00050603-00001d78-00000000-1d6c86e4f3c4a8f",
"process_hash": [
"cc4f6cbde75f08afdcefb95087149a5d",
"885557be148de55f6a127ea26ac457b9415e3e3baf30266d82b9d19b89e78ee4"
],
"process_name": "c:\\program files\\windows defender\\mpcmdrun.exe",
"process_pid": [
7544
],
"process_reputation": "TRUSTED_WHITE_LIST",
"process_start_time": "2020-12-02T05:45:15.531Z",
"process_terminated": True,
"process_username": [
"NT AUTHORITY\\LOCAL SERVICE"
],
"regmod_count": 0,
"scriptload_count": 0
}
]
},
"contacted": 34,
"completed": 34
}
# Mock process-tree query response: a tree rooted at systemd with a single
# msiexec.exe child event.  Field names match a Carbon Black Cloud-style
# process API payload — presumably consumed by process-tree model tests;
# confirm against the test module that imports this fixture.
GET_PROCESS_TREE_STR = {
    "exception": "",
    "tree": {
        "children": [
            {
                "_process_filename": "msiexec.exe",
                "backend_timestamp": "2020-10-15T05:44:47.387Z",
                "device_external_ip": "144.121.3.50",
                "device_group": "schumaker-test",
                "device_group_id": 1706,
                "device_id": 199106,
                "device_internal_ip": "10.210.161.66",
                "device_name": "w10prov1703x86",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-10-15T05:43:45.336Z",
                "enriched": True,
                "enriched_event_type": "SYSTEM_API_CALL",
                "event_type": "crossproc",
                "has_children": False,
                "ingress_time": 1602740641018,
                "legacy": True,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-000309c2-000002c4-00000000-1d6a1c1f161a86a",
                "parent_hash": [
                    "bd3036f60f1438c82900a29221e3a4912a89bfe904d01aad70c781ef514df0b3"
                ],
                "parent_name": "c:\\windows\\system32\\services.exe",
                "parent_pid": 708,
                "process_cmdline": [
                    "C:\\WINDOWS\\system32\\msiexec.exe /V"
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-000309c2-00000454-00000000-1d6a2b6252ba18e",
                "process_hash": [
                    "f9a3eee1c3a4067702bc9a59bc894285",
                    "8e2aa014d7729cbfee95671717646ee480561f22e2147dae87a75c18d7369d99"
                ],
                "process_name": "c:\\windows\\system32\\msiexec.exe",
                "process_pid": [
                    1108
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-10-15T05:43:44.537Z",
                "process_username": [
                    "NT AUTHORITY\\SYSTEM"
                ],
                "ttp": [
                    "ENUMERATE_PROCESSES",
                    "MITRE_T1057_PROCESS_DISCOVERY"
                ]
            }
        ],
        # Root of the tree: PID 1 (systemd) on a Linux device.
        "device_id": 176678,
        "device_name": "devr-dev",
        "device_os": "LINUX",
        "device_policy": "sm-restrictive",
        "device_policy_id": 11200,
        "device_timestamp": "2020-08-28T19:10:02.123Z",
        "filemod_count": 0,
        "has_children": True,
        "hits": False,
        "ingress_time": 1598641901273,
        "modload_count": 0,
        "netconn_count": 0,
        "org_id": "ABCD1234",
        "process_effective_reputation": "NOT_LISTED",
        "process_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
        "process_hash": [
            "e4b9902024ac32b3ca37f6b4c9b841e8",
            "81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85"
        ],
        "process_name": "/usr/lib/systemd/systemd",
        "process_pid": [1],
        "process_reputation": "NOT_LISTED",
        "process_start_time": "2020-05-04T21:34:03.968Z",
        "regmod_count": 0,
        "scriptload_count": 0
    }
}
# Mock process-summary response: the queried process (svchost.exe), one
# sibling (cb.exe), the parent (systemd) and one child (wermgr.exe).
GET_PROCESS_SUMMARY_STR = {
    "exception": "",
    "summary": {
        "process": {
            "_process_filename": "svchost.exe",
            "backend_timestamp": "2020-10-22T16:27:52.931Z",
            "device_external_ip": "144.121.3.50",
            "device_group_id": 0,
            "device_id": 199106,
            "device_internal_ip": "10.210.161.66",
            "device_name": "w10prov1703x86",
            "device_os": "WINDOWS",
            "device_policy": "lyon_test",
            "device_policy_id": 30241,
            "device_timestamp": "2020-10-22T16:15:05.925Z",
            "enriched": True,
            "enriched_event_type": "NETWORK",
            "event_type": "netconn",
            "has_children": True,
            "hits": True,
            "ingress_time": 1603384040142,
            "legacy": True,
            "org_id": "WNEXFKQ7",
            "parent_guid": "WNEXFKQ7-000309c2-000002c4-00000000-1d6a1c1f161a86a",
            "parent_hash": [
                "bd3036f60f1438c82900a29221e3a4912a89bfe904d01aad70c781ef514df0b3"
            ],
            "parent_name": "c:\\windows\\system32\\services.exe",
            "parent_pid": 708,
            "process_cmdline": [
                "C:\\WINDOWS\\system32\\svchost.exe -k netsvcs -p"
            ],
            "process_effective_reputation": "TRUSTED_WHITE_LIST",
            "process_guid": "WNEXFKQ7-000309c2-00000478-00000000-1d6a1c1f2b02805",
            "process_hash": [
                "a7296c1245ee76768d581c6330dade06",
                "5be0de7f915ba819d4ba048db7a2a87f6f3253fdd4865dc418181a0d6a031caa"
            ],
            "process_name": "c:\\windows\\system32\\svchost.exe",
            "process_pid": [
                1144
            ],
            "process_reputation": "TRUSTED_WHITE_LIST",
            "process_start_time": "2020-10-14T00:35:42.639Z",
            "process_username": [
                "NT AUTHORITY\\SYSTEM"
            ],
            "sensor_action": [
                "DENY",
                "BLOCK"
            ],
            "ttp": [
                "POLICY_DENY"
            ]
        },
        "siblings": [
            {
                "_process_filename": "cb.exe",
                "backend_timestamp": "2020-10-22T16:14:38.964Z",
                "device_external_ip": "144.121.3.50",
                "device_group_id": 0,
                "device_id": 199106,
                "device_name": "w10prov1703x86",
                "device_os": "WINDOWS",
                "device_policy": "lyon_test",
                "device_policy_id": 30241,
                "device_timestamp": "2020-10-22T16:11:30.514Z",
                "enriched": True,
                "enriched_event_type": "NETWORK",
                "event_type": "netconn",
                "has_children": False,
                "hits": False,
                "ingress_time": 1603383254356,
                "legacy": True,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-000309c2-000002c4-00000000-1d6a1c1f161a86a",
                "parent_hash": [
                    "bd3036f60f1438c82900a29221e3a4912a89bfe904d01aad70c781ef514df0b3"
                ],
                "parent_name": "c:\\windows\\system32\\services.exe",
                "parent_pid": 708,
                "process_cmdline": [
                    "C:\\WINDOWS\\CarbonBlack\\cb.exe"
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-000309c2-00000980-00000000-1d6a1c1f41ae014",
                "process_hash": [
                    "b5a2c3084251ad5ce53e02f071fa7dc9",
                    "ae600593a0a6915cf5ecbf96b4cb1d0e1d165339bde136c351bf606127c5dcec"
                ],
                "process_name": "c:\\windows\\carbonblack\\cb.exe",
                "process_pid": [
                    2432
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-10-14T00:35:45.017Z",
                "process_username": [
                    "NT AUTHORITY\\SYSTEM"
                ],
                "sensor_action": [
                    "DENY",
                    "BLOCK"
                ],
                "ttp": [
                    "POLICY_DENY"
                ]
            }
        ],
        "parent": {
            "_process_filename": "systemd",
            "backend_timestamp": "2020-08-28T19:12:07.989Z",
            "childproc_count": 0,
            "crossproc_count": 0,
            "device_external_ip": "34.56.78.90",
            "device_group_id": 0,
            "device_id": 176678,
            "device_name": "devr-dev",
            "device_os": "LINUX",
            "device_policy": "sm-restrictive",
            "device_policy_id": 11200,
            "device_timestamp": "2020-08-28T19:10:02.123Z",
            "filemod_count": 0,
            "has_children": True,
            "hits": False,
            "ingress_time": 1598641901273,
            "modload_count": 0,
            "netconn_count": 0,
            "org_id": "ABCD1234",
            "process_effective_reputation": "NOT_LISTED",
            "process_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
            "process_hash": [
                "e4b9902024ac32b3ca37f6b4c9b841e8",
                "81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85"
            ],
            "process_name": "/usr/lib/systemd/systemd",
            "process_pid": [
                1
            ],
            "process_reputation": "NOT_LISTED",
            "process_start_time": "2020-05-04T21:34:03.968Z",
            "regmod_count": 0,
            "scriptload_count": 0
        },
        "children": [
            {
                "_process_filename": "wermgr.exe",
                "backend_timestamp": "2020-10-22T16:28:23.556Z",
                "device_external_ip": "144.121.3.50",
                "device_group_id": 0,
                "device_id": 199106,
                "device_internal_ip": "10.210.161.66",
                "device_name": "w10prov1703x86",
                "device_os": "WINDOWS",
                "device_policy": "lyon_test",
                "device_policy_id": 30241,
                "device_timestamp": "2020-10-22T16:15:06.065Z",
                "enriched": True,
                "enriched_event_type": "NETWORK",
                "event_type": "netconn",
                "has_children": False,
                "hits": True,
                "ingress_time": 1603384060202,
                "legacy": True,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-000309c2-00000478-00000000-1d6a1c1f2b02805",
                "parent_hash": [
                    "5be0de7f915ba819d4ba048db7a2a87f6f3253fdd4865dc418181a0d6a031caa"
                ],
                "parent_name": "c:\\windows\\system32\\svchost.exe",
                "parent_pid": 1144,
                "process_cmdline": [
                    "C:\\WINDOWS\\system32\\wermgr.exe -upload"
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-000309c2-000004f8-00000000-1d6a88e80c541a3",
                "process_hash": [
                    "2ae75e810f4dd1fb36607f66e7e1d80b",
                    "db703055ec0641e7e96e22a62bf075547b480c51ea9e163d94e33452894b885c"
                ],
                "process_name": "c:\\windows\\system32\\wermgr.exe",
                "process_pid": [
                    1272
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-10-22T16:15:05.324Z",
                "process_username": [
                    "NT AUTHORITY\\SYSTEM"
                ],
                "sensor_action": [
                    "DENY",
                    "BLOCK"
                ],
                "ttp": [
                    "POLICY_DENY"
                ]
            }
        ],
        "process_guid": "b31019a5-d69b-4aba-b36d-0b29fe2e7f61"
    }
}
# Mock process-summary response whose "children" field is None — exercises
# the consumer's handling of a summary with no child processes.
GET_PROCESS_SUMMARY_RESP_NO_CHILDREN = {
    "exception": "",
    "summary": {
        "process": {
            "_process_filename": "svchost.exe",
            "backend_timestamp": "2020-12-03T20:34:38.889Z",
            "childproc_count": 3,
            "crossproc_count": 40,
            "device_external_ip": "24.243.76.124",
            "device_group_id": 0,
            "device_id": 329219,
            "device_internal_ip": "172.16.115.191",
            "device_name": "desktop-8qonquj",
            "device_os": "WINDOWS",
            "device_policy": "default",
            "device_policy_id": 2198,
            "device_timestamp": "2020-12-03T20:32:20.912Z",
            "filemod_count": 0,
            "has_children": True,
            "hits": False,
            "ingress_time": 1607027652665,
            "modload_count": 101,
            "netconn_count": 0,
            "org_id": "WNEXFKQ7",
            "parent_guid": "WNEXFKQ7-00050603-00000270-00000000-1d6c86e280fbff8",
            "parent_hash": [
                "1b6ffe1f5480675fc618b42247ef49a1c60ca99d2d53271b3472557e3bea2e8a",
                "2bd115a27b60b74bbeb31013519ac199"
            ],
            "parent_name": "c:\\windows\\system32\\services.exe",
            "parent_pid": 624,
            "process_cmdline": [
                "C:\\Windows\\System32\\svchost.exe -k LocalServiceNetworkRestricted -p -s wscsvc"
            ],
            "process_effective_reputation": "TRUSTED_WHITE_LIST",
            "process_guid": "WNEXFKQ7-00050603-00000b6c-00000000-1d6c86e29731218",
            "process_hash": [
                "45684336325dc8eadfb1e8ff876921c4",
                "dd191a5b23df92e12a8852291f9fb5ed594b76a28a5a464418442584afd1e048"
            ],
            "process_name": "c:\\windows\\system32\\svchost.exe",
            "process_pid": [
                788
            ],
            "process_reputation": "TRUSTED_WHITE_LIST",
            "process_start_time": "2020-12-02T05:44:12.137Z",
            "process_username": [
                "NT AUTHORITY\\LOCAL SERVICE"
            ],
            "regmod_count": 5,
            "scriptload_count": 0
        },
        "siblings": [
            {
                "_process_filename": "spoolsv.exe",
                "backend_timestamp": "2020-12-03T20:34:38.889Z",
                "childproc_count": 2,
                "crossproc_count": 35,
                "device_external_ip": "24.243.76.124",
                "device_group_id": 0,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-03T20:32:18.693Z",
                "filemod_count": 405,
                "has_children": True,
                "hits": False,
                "ingress_time": 1607027652665,
                "modload_count": 382,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-00000270-00000000-1d6c86e280fbff8",
                "parent_hash": [
                    "1b6ffe1f5480675fc618b42247ef49a1c60ca99d2d53271b3472557e3bea2e8a",
                    "2bd115a27b60b74bbeb31013519ac199"
                ],
                "parent_name": "c:\\windows\\system32\\services.exe",
                "parent_pid": 624,
                "process_cmdline": [
                    "C:\\Windows\\System32\\spoolsv.exe"
                ],
                "process_effective_reputation": "LOCAL_WHITE",
                "process_guid": "WNEXFKQ7-00050603-00000944-00000000-1d6c86e29169d10",
                "process_hash": [
                    "94170797d822cd195f8f92da9def082f",
                    "f45ca80e151494a7394dcd1958ee94c0b83fe3f7b9e281fa1e626e71ff6c2604"
                ],
                "process_name": "c:\\windows\\system32\\spoolsv.exe",
                "process_pid": [
                    2372
                ],
                "process_reputation": "COMMON_WHITE_LIST",
                "process_start_time": "2020-12-02T05:44:11.531Z",
                "process_username": [
                    "NT AUTHORITY\\SYSTEM"
                ],
                "regmod_count": 445,
                "scriptload_count": 0
            },
            {
                "_process_filename": "wmiapsrv.exe",
                "backend_timestamp": "2020-12-02T06:00:54.384Z",
                "childproc_count": 0,
                "crossproc_count": 3,
                "device_external_ip": "24.243.76.124",
                "device_group": "schumaker-test",
                "device_group_id": 1706,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-02T05:46:50.369Z",
                "filemod_count": 0,
                "has_children": False,
                "ingress_time": 1606888837162,
                "modload_count": 21,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-00000270-00000000-1d6c86e280fbff8",
                "parent_hash": [
                    "1b6ffe1f5480675fc618b42247ef49a1c60ca99d2d53271b3472557e3bea2e8a",
                    "2bd115a27b60b74bbeb31013519ac199"
                ],
                "parent_name": "c:\\windows\\system32\\services.exe",
                "parent_pid": 624,
                "process_cmdline": [
                    "C:\\Windows\\system32\\wbem\\WmiApSrv.exe"
                ],
                "process_effective_reputation": "LOCAL_WHITE",
                "process_guid": "WNEXFKQ7-00050603-000020f8-00000000-1d6c86e87b1e2be",
                "process_hash": [
                    "55e21dfb7ec2394903e5ca62fdca21e6",
                    "55c2021f06d28696843672ff90e242c33c4cf6d30cdf0b2d9dcf07d8282cfc19"
                ],
                "process_name": "c:\\windows\\system32\\wbem\\wmiapsrv.exe",
                "process_pid": [
                    8440
                ],
                "process_reputation": "ADAPTIVE_WHITE_LIST",
                "process_start_time": "2020-12-02T05:46:50.254Z",
                "process_terminated": True,
                "process_username": [
                    "NT AUTHORITY\\SYSTEM"
                ],
                "regmod_count": 1,
                "scriptload_count": 0
            }
        ],
        "parent": {
            "_process_filename": "services.exe",
            "backend_timestamp": "2020-12-03T20:34:38.889Z",
            "childproc_count": 243,
            "crossproc_count": 39,
            "device_external_ip": "24.243.76.124",
            "device_group_id": 0,
            "device_id": 329219,
            "device_internal_ip": "172.16.115.191",
            "device_name": "desktop-8qonquj",
            "device_os": "WINDOWS",
            "device_policy": "default",
            "device_policy_id": 2198,
            "device_timestamp": "2020-12-03T20:32:13.397Z",
            "filemod_count": 0,
            "has_children": True,
            "hits": False,
            "ingress_time": 1607027652665,
            "modload_count": 53,
            "netconn_count": 0,
            "org_id": "WNEXFKQ7",
            "parent_guid": "WNEXFKQ7-00050603-000001fc-00000000-1d6c86e2801246d",
            "parent_hash": [
                "d5e122606054fa0b03db3ee8cf9ea7701e523875e2bdb87581ad7232ffc9308e",
                "e83650f70459a027aa596e1a73c961a1"
            ],
            "parent_name": "c:\\windows\\system32\\wininit.exe",
            "parent_pid": 508,
            "process_cmdline": [
                "C:\\Windows\\system32\\services.exe"
            ],
            "process_effective_reputation": "TRUSTED_WHITE_LIST",
            "process_guid": "WNEXFKQ7-00050603-00000270-00000000-1d6c86e280fbff8",
            "process_hash": [
                "2bd115a27b60b74bbeb31013519ac199",
                "1b6ffe1f5480675fc618b42247ef49a1c60ca99d2d53271b3472557e3bea2e8a"
            ],
            "process_name": "c:\\windows\\system32\\services.exe",
            "process_pid": [
                624
            ],
            "process_reputation": "TRUSTED_WHITE_LIST",
            "process_start_time": "2020-12-02T05:44:09.808Z",
            "process_username": [
                "NT AUTHORITY\\SYSTEM"
            ],
            "regmod_count": 254,
            "scriptload_count": 0
        },
        # Deliberately None (not an empty list): the no-children edge case.
        "children": None
    },
    "contacted": 34,
    "completed": 34
}
# Mock tree-only response: "summary" fields are all None and the payload is
# carried under "tree" (ngen.exe with one mscorsvw.exe child).
GET_TREE_RESP = {
    "exception": "",
    "summary": {
        "process": None,
        "siblings": None,
        "parent": None,
        "children": None
    },
    "tree": {
        "_process_filename": "ngen.exe",
        "backend_timestamp": "2020-12-03T19:47:23.199Z",
        "childproc_count": 1,
        "children": [
            {
                "_process_filename": "mscorsvw.exe",
                "backend_timestamp": "2020-12-03T19:47:23.199Z",
                "childproc_count": 0,
                "children": [],
                "crossproc_count": 3,
                "device_external_ip": "24.243.76.124",
                "device_group_id": 0,
                "device_id": 329219,
                "device_internal_ip": "172.16.115.191",
                "device_name": "desktop-8qonquj",
                "device_os": "WINDOWS",
                "device_policy": "default",
                "device_policy_id": 2198,
                "device_timestamp": "2020-12-03T19:44:25.004Z",
                "filemod_count": 0,
                "has_children": False,
                "ingress_time": 1607024805760,
                "modload_count": 14,
                "netconn_count": 0,
                "org_id": "WNEXFKQ7",
                "parent_guid": "WNEXFKQ7-00050603-000008a0-00000000-1d6c9acb438f08d",
                "parent_hash": [
                    "0eb067650f90e1af3b660c229a58d5e4c505a928847349e06dadb5e88df713f4",
                    "660254c8228b83705c80374d47f570f1"
                ],
                "parent_name": "c:\\windows\\microsoft.net\\framework64\\v4.0.30319\\ngen.exe",
                "parent_pid": 2208,
                # Adjacent string literals: one command line split for length.
                "process_cmdline": [
                    "C:\\Windows\\Microsoft.NET\\Framework64\\v4.0.30319\\mscorsvw.exe"
                    " -StartupEvent 1b4 -InterruptEvent 0 -NGENProcess 168 -Pipe 174 -Comment \"NGen Worker Process\""
                ],
                "process_effective_reputation": "TRUSTED_WHITE_LIST",
                "process_guid": "WNEXFKQ7-00050603-0000066c-00000000-1d6c9acb43e29bb",
                "process_hash": [
                    "a0b98e5e57de8f666a04ac3eec86d25b",
                    "60def9905f16bd5d10684afea17ab3a9accdd8ba4a6e06240e84e3acb5f94e3d"
                ],
                "process_name": "c:\\windows\\microsoft.net\\framework64\\v4.0.30319\\mscorsvw.exe",
                "process_pid": [
                    1644
                ],
                "process_reputation": "TRUSTED_WHITE_LIST",
                "process_start_time": "2020-12-03T19:44:24.953Z",
                "process_terminated": True,
                "process_username": [
                    "NT AUTHORITY\\SYSTEM"
                ],
                "regmod_count": 0,
                "scriptload_count": 0
            }
        ],
        "crossproc_count": 4,
        "device_external_ip": "24.243.76.124",
        "device_group_id": 0,
        "device_id": 329219,
        "device_internal_ip": "172.16.115.191",
        "device_name": "desktop-8qonquj",
        "device_os": "WINDOWS",
        "device_policy": "default",
        "device_policy_id": 2198,
        "device_timestamp": "2020-12-03T19:44:25.020Z",
        "filemod_count": 5,
        "has_children": True,
        "hits": False,
        "ingress_time": 1607024805760,
        "modload_count": 11,
        "netconn_count": 0,
        "org_id": "WNEXFKQ7",
        "parent_guid": "WNEXFKQ7-00050603-000023fc-00000000-1d6c9acae2c7003",
        "parent_hash": [
            "6e4b6d2af6d99dcc1de0e097ea51d43a",
            "c4db063d8de31c0a64d172950f857509ee97baa488d8678d48eb6e75b17527b0"
        ],
        "parent_name": "c:\\windows\\microsoft.net\\framework64\\v4.0.30319\\ngentask.exe",
        "parent_pid": 9212,
        # Adjacent string literals: one command line split for length.
        "process_cmdline": [
            "\"C:\\Windows\\Microsoft.NET\\Framework64\\v4.0.30319\\ngen.exe\" "
            "install \"System.Core, Version=4.0.0.0, Culture=neutral, PublicKeyToken="
            "b77a5c561934e089\" /NoDependencies /noroot /version:v4.0.30319 /LegacyServiceBehavior"
        ],
        "process_effective_reputation": "TRUSTED_WHITE_LIST",
        "process_guid": "WNEXFKQ7-00050603-000008a0-00000000-1d6c9acb438f08d",
        "process_hash": [
            "660254c8228b83705c80374d47f570f1",
            "0eb067650f90e1af3b660c229a58d5e4c505a928847349e06dadb5e88df713f4"
        ],
        "process_name": "c:\\windows\\microsoft.net\\framework64\\v4.0.30319\\ngen.exe",
        "process_pid": [
            2208
        ],
        "process_reputation": "TRUSTED_WHITE_LIST",
        "process_start_time": "2020-12-03T19:44:24.919Z",
        "process_terminated": True,
        "process_username": [
            "NT AUTHORITY\\SYSTEM"
        ],
        "regmod_count": 0,
        "scriptload_count": 0
    },
    "contacted": 34,
    "completed": 34
}
# Mock "NOT_FOUND" responses: exception set, every summary field None.
GET_PROCESS_NOT_FOUND = {
    "exception": "NOT_FOUND",
    "summary": {
        "process": None,
        "siblings": None,
        "parent": None,
        "children": None
    },
    "contacted": 33,
    "completed": 33
}
# Identical payload kept under a second name, presumably so summary-specific
# tests can reference their own fixture — confirm against the consumers.
GET_PROCESS_SUMMARY_NOT_FOUND = {
    "exception": "NOT_FOUND",
    "summary": {
        "process": None,
        "siblings": None,
        "parent": None,
        "children": None
    },
    "contacted": 33,
    "completed": 33
}
# Mock response to submitting a process-details job: just the job id.
POST_PROCESS_DETAILS_JOB_RESP = {
    'job_id': 'ccc47a52-9a61-4c77-8652-8a03dc187b98'
}
# Job status: completed == contacted means the job is finished.
GET_PROCESS_DETAILS_JOB_STATUS_RESP = {
    'contacted': 16,
    'completed': 16
}
# Job status: completed < contacted means the job is still in progress.
GET_PROCESS_DETAILS_JOB_STATUS_IN_PROGRESS_RESP = {
    'contacted': 16,
    'completed': 8
}
# Mock process-details job results: one result record (a /usr/bin/bash
# process with three PIDs) plus an all-zero "no results" variant below.
GET_PROCESS_DETAILS_JOB_RESULTS_RESP = {
    'contacted': 16,
    'completed': 16,
    'num_available': 1,
    'num_found': 1,
    'results': [
        {
            "backend_timestamp": "2020-08-28T19:14:40.394Z",
            "childproc_count": 333576,
            "crossproc_count": 0,
            "device_external_ip": "34.56.78.90",
            "device_group_id": 0,
            "device_id": 176678,
            "device_location": "UNKNOWN",
            "device_name": "devr-dev",
            "device_os": "LINUX",
            "device_os_version": "CentOS 7.6-1810",
            "device_policy": "sm-restrictive",
            "device_policy_id": 11200,
            "device_target_priority": "MEDIUM",
            "device_timestamp": "2020-08-28T19:12:41.178Z",
            "document_guid": "6Gqoe-abQXu-k9LagGOoQg",
            "filemod_count": 0,
            "ingress_time": 1598642021337,
            "modload_count": 0,
            "netconn_count": 0,
            "org_id": "test",
            "parent_effective_reputation": "NOT_LISTED",
            "parent_guid": "ABCD1234-0002b226-00000001-00000000-1d6225bbba75e43",
            "parent_hash": [
                "81b37dcb0321108e564d528df827580153ab64005be3bcafd5162e9e7e707e85",
                "e4b9902024ac32b3ca37f6b4c9b841e8"
            ],
            "parent_name": "/usr/lib/systemd/systemd",
            "parent_pid": 1,
            "parent_publisher_state": [
                "FILE_SIGNATURE_STATE_NOT_SIGNED"
            ],
            "parent_reputation": "NOT_LISTED",
            "process_cmdline": [
                "/usr/bin/gitea"
            ],
            "process_cmdline_length": [
                14
            ],
            "process_effective_reputation": "NOT_LISTED",
            "process_guid": "80dab519-3b5f-4502-afad-da87cd58a4c3",
            "process_hash": [
                "285044ad8f8b9322d0cc5e929e2cc18c",
                "5975d972eea6b1c53ef9a69452797439ed5bf63fae72e1780761ea1c2cb6976a"
            ],
            "process_name": "/usr/bin/bash",
            "process_pid": [
                10111,
                10222,
                10333
            ],
            "process_publisher_state": [
                "FILE_SIGNATURE_STATE_NOT_SIGNED"
            ],
            "process_reputation": "NOT_LISTED",
            "process_sha256": "5975d972eea6b1c53ef9a69452797439ed5bf63fae72e1780761ea1c2cb6976a",
            "process_start_time": "2020-05-04T21:34:03.968Z",
            "process_username": [
                "root"
            ],
            "regmod_count": 0,
            "scriptload_count": 0
        }
    ]
}
# Empty-results variant of the job-results payload.
GET_PROCESS_DETAILS_JOB_RESULTS_RESP_ZERO = {
    'contacted': 0,
    'completed': 0,
    'num_available': 0,
    'num_found': 0,
    'results': []
}
# Mock facet-search response: one "ranges" bucket series (daily buckets on
# backend_timestamp) and two "terms" facets.  The EXPECTED_* constants
# further down are derived from this payload.
GET_FACET_SEARCH_RESULTS_RESP = {
    "ranges": [
        {
            "start": "2020-10-20T00:00:00Z",
            "end": "2020-11-12T00:00:00Z",
            "bucket_size": "+1DAY",
            "field": "backend_timestamp",
            "values": [
                {"total": 1555, "name": "2020-10-20T00:00:00Z"},
                {"total": 1970, "name": "2020-10-21T00:00:00Z"},
                {"total": 7727, "name": "2020-10-22T00:00:00Z"},
                {"total": 2453, "name": "2020-10-23T00:00:00Z"},
                {"total": 37, "name": "2020-11-11T00:00:00Z"}
            ]
        }
    ],
    "terms": [
        {
            "values": [
                {"total": 797, "id": "2020-10-22T20:56:31.215Z", "name": "2020-10-22T20:56:31.215Z"},
                {"total": 708, "id": "2020-10-19T22:35:43.547Z", "name": "2020-10-19T22:35:43.547Z"},
                {"total": 518, "id": "2020-10-09T14:17:55.189Z", "name": "2020-10-09T14:17:55.189Z"},
                {"total": 83, "id": "2020-11-12T01:40:04.682Z", "name": "2020-11-12T01:40:04.682Z"},
                {"total": 26, "id": "2020-07-30T14:15:50.415Z", "name": "2020-07-30T14:15:50.415Z"},
                {"total": 9, "id": "2020-10-20T18:09:58.469Z", "name": "2020-10-20T18:09:58.469Z"},
                {"total": 9, "id": "2020-10-23T05:48:32.744Z", "name": "2020-10-23T05:48:32.744Z"},
                {"total": 8, "id": "2020-08-24T18:46:30.369Z", "name": "2020-08-24T18:46:30.369Z"},
                {"total": 7, "id": "2020-09-23T15:03:29.263Z", "name": "2020-09-23T15:03:29.263Z"}
            ],
            "field": "backend_timestamp"
        },
        {
            "values": [
                {"total": 38, "id": "2020-10-19T21:25:06.668Z", "name": "2020-10-19T21:25:06.668Z"},
                {"total": 13, "id": "2020-10-22T20:48:22.188Z", "name": "2020-10-22T20:48:22.188Z"},
                {"total": 5, "id": "2020-07-30T14:12:47.986Z", "name": "2020-07-30T14:12:47.986Z"},
                {"total": 4, "id": "2020-11-12T01:38:10.788Z", "name": "2020-11-12T01:38:10.788Z"},
                {"total": 2, "id": "2020-07-30T14:10:41.125Z", "name": "2020-07-30T14:10:41.125Z"},
                {"total": 2, "id": "2020-09-23T15:05:23.758Z", "name": "2020-09-23T15:05:23.758Z"},
                {"total": 2, "id": "2020-10-20T15:53:30.260Z", "name": "2020-10-20T15:53:30.260Z"},
                {"total": 1, "id": "2020-10-23T05:36:34.300Z", "name": "2020-10-23T05:36:34.300Z"},
                {"total": 1, "id": "2020-08-24T17:32:31.211Z", "name": "2020-08-24T17:32:31.211Z"}
            ],
            "field": "device_timestamp"
        }
    ],
    "num_found": 23753,
    "contacted": 36,
    "completed": 36
}
# Expected "terms" facets after post-processing GET_FACET_SEARCH_RESULTS_RESP:
# the same value lists, re-keyed by facet field name.
EXPECTED_PROCESS_FACETS = {
    "backend_timestamp": [
        {"total": 797, "id": "2020-10-22T20:56:31.215Z", "name": "2020-10-22T20:56:31.215Z"},
        {"total": 708, "id": "2020-10-19T22:35:43.547Z", "name": "2020-10-19T22:35:43.547Z"},
        {"total": 518, "id": "2020-10-09T14:17:55.189Z", "name": "2020-10-09T14:17:55.189Z"},
        {"total": 83, "id": "2020-11-12T01:40:04.682Z", "name": "2020-11-12T01:40:04.682Z"},
        {"total": 26, "id": "2020-07-30T14:15:50.415Z", "name": "2020-07-30T14:15:50.415Z"},
        {"total": 9, "id": "2020-10-20T18:09:58.469Z", "name": "2020-10-20T18:09:58.469Z"},
        {"total": 9, "id": "2020-10-23T05:48:32.744Z", "name": "2020-10-23T05:48:32.744Z"},
        {"total": 8, "id": "2020-08-24T18:46:30.369Z", "name": "2020-08-24T18:46:30.369Z"},
        {"total": 7, "id": "2020-09-23T15:03:29.263Z", "name": "2020-09-23T15:03:29.263Z"}
    ],
    "device_timestamp": [
        {"total": 38, "id": "2020-10-19T21:25:06.668Z", "name": "2020-10-19T21:25:06.668Z"},
        {"total": 13, "id": "2020-10-22T20:48:22.188Z", "name": "2020-10-22T20:48:22.188Z"},
        {"total": 5, "id": "2020-07-30T14:12:47.986Z", "name": "2020-07-30T14:12:47.986Z"},
        {"total": 4, "id": "2020-11-12T01:38:10.788Z", "name": "2020-11-12T01:38:10.788Z"},
        {"total": 2, "id": "2020-07-30T14:10:41.125Z", "name": "2020-07-30T14:10:41.125Z"},
        {"total": 2, "id": "2020-09-23T15:05:23.758Z", "name": "2020-09-23T15:05:23.758Z"},
        {"total": 2, "id": "2020-10-20T15:53:30.260Z", "name": "2020-10-20T15:53:30.260Z"},
        {"total": 1, "id": "2020-10-23T05:36:34.300Z", "name": "2020-10-23T05:36:34.300Z"},
        {"total": 1, "id": "2020-08-24T17:32:31.211Z", "name": "2020-08-24T17:32:31.211Z"}
    ]
}
# Expected "ranges" facets for the same response, re-keyed by field name.
EXPECTED_PROCESS_RANGES_FACETS = {
    "backend_timestamp": [
        {"total": 1555, "name": "2020-10-20T00:00:00Z"},
        {"total": 1970, "name": "2020-10-21T00:00:00Z"},
        {"total": 7727, "name": "2020-10-22T00:00:00Z"},
        {"total": 2453, "name": "2020-10-23T00:00:00Z"},
        {"total": 37, "name": "2020-11-11T00:00:00Z"}
    ]
}
# Minimal facet-search response: one range bucket and one term value,
# with contacted == completed == 0.
GET_FACET_SEARCH_RESULTS_RESP_1 = {
    "ranges": [
        {
            "start": "2020-10-20T00:00:00Z",
            "end": "2020-11-12T00:00:00Z",
            "bucket_size": "+1DAY",
            "field": "backend_timestamp",
            "values": [
                {"total": 1555, "name": "2020-10-20T00:00:00Z"}
            ]
        }
    ],
    "terms": [
        {
            "values": [
                {"total": 797, "id": "2020-10-22T20:56:31.215Z", "name": "2020-10-22T20:56:31.215Z"}
            ],
            "field": "backend_timestamp"
        }
    ],
    "num_found": 0,
    "contacted": 0,
    "completed": 0
}
# Same payload but completed (2) < contacted (10): an unfinished search.
GET_FACET_SEARCH_RESULTS_RESP_NOT_COMPLETE = {
    "ranges": [
        {
            "start": "2020-10-20T00:00:00Z",
            "end": "2020-11-12T00:00:00Z",
            "bucket_size": "+1DAY",
            "field": "backend_timestamp",
            "values": [
                {"total": 1555, "name": "2020-10-20T00:00:00Z"}
            ]
        }
    ],
    "terms": [
        {
            "values": [
                {"total": 797, "id": "2020-10-22T20:56:31.215Z", "name": "2020-10-22T20:56:31.215Z"}
            ],
            "field": "backend_timestamp"
        }
    ],
    "num_found": 0,
    "contacted": 10,
    "completed": 2
}
| 37.915849 | 203 | 0.492759 |
945032684fd35ccdff9775b831412e14c11e286e | 20,939 | py | Python | silx/math/test/test_HistogramndLut_nominal.py | vincefn/silx | 4b239abfc90d2fa7d6ab61425f8bfc7b83c0f444 | [
"CC0-1.0",
"MIT"
] | 1 | 2016-10-26T11:05:46.000Z | 2016-10-26T11:05:46.000Z | silx/math/test/test_HistogramndLut_nominal.py | vincefn/silx | 4b239abfc90d2fa7d6ab61425f8bfc7b83c0f444 | [
"CC0-1.0",
"MIT"
] | 7 | 2016-10-19T09:27:26.000Z | 2020-01-24T13:26:56.000Z | silx/math/test/test_HistogramndLut_nominal.py | payno/silx | 13301e61627f98fa837008250ac74a0627a7a560 | [
"CC0-1.0",
"MIT"
] | null | null | null | # coding: utf-8
# /*##########################################################################
# Copyright (C) 2016 European Synchrotron Radiation Facility
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# ############################################################################*/
"""
Nominal tests of the HistogramndLut function.
"""
import unittest
import numpy as np
from silx.math import HistogramndLut
def _get_bin_edges(histo_range, n_bins, n_dims):
edges = []
for i_dim in range(n_dims):
edges.append(histo_range[i_dim, 0] +
np.arange(n_bins[i_dim] + 1) *
(histo_range[i_dim, 1] - histo_range[i_dim, 0]) /
n_bins[i_dim])
return tuple(edges)
# ==============================================================
# ==============================================================
# ==============================================================
class _TestHistogramndLut_nominal(unittest.TestCase):
"""
Unit tests of the HistogramndLut class.
"""
ndims = None
def setUp(self):
ndims = self.ndims
self.tested_dim = ndims-1
if ndims is None:
raise ValueError('ndims class member not set.')
sample = np.array([5.5, -3.3,
0., -0.5,
3.3, 8.8,
-7.7, 6.0,
-4.0])
weights = np.array([500.5, -300.3,
0.01, -0.5,
300.3, 800.8,
-700.7, 600.6,
-400.4])
n_elems = len(sample)
if ndims == 1:
shape = (n_elems,)
else:
shape = (n_elems, ndims)
self.sample = np.zeros(shape=shape, dtype=sample.dtype)
if ndims == 1:
self.sample = sample
else:
self.sample[..., ndims-1] = sample
self.weights = weights
# the tests are performed along one dimension,
# all the other bins indices along the other dimensions
# are expected to be 2
# (e.g : when testing a 2D sample : [0, x] will go into
# bin [2, y] because of the bin ranges [-2, 2] and n_bins = 4
# for the first dimension)
self.other_axes_index = 2
self.histo_range = np.repeat([[-2., 2.]], ndims, axis=0)
self.histo_range[ndims-1] = [-4., 6.]
self.n_bins = np.array([4]*ndims)
self.n_bins[ndims-1] = 5
if ndims == 1:
def fill_histo(h, v, dim, op=None):
if op:
h[:] = op(h[:], v)
else:
h[:] = v
self.fill_histo = fill_histo
else:
def fill_histo(h, v, dim, op=None):
idx = [self.other_axes_index]*len(h.shape)
idx[dim] = slice(0, None)
if op:
h[idx] = op(h[idx], v)
else:
h[idx] = v
self.fill_histo = fill_histo
def test_nominal_bin_edges(self):
instance = HistogramndLut(self.sample,
self.histo_range,
self.n_bins)
bin_edges = instance.bins_edges
expected_edges = _get_bin_edges(self.histo_range,
self.n_bins,
self.ndims)
for i_edges, edges in enumerate(expected_edges):
self.assertTrue(np.array_equal(bin_edges[i_edges],
expected_edges[i_edges]),
msg='Testing bin_edges for dim {0}'
''.format(i_edges+1))
def test_nominal_histo_range(self):
instance = HistogramndLut(self.sample,
self.histo_range,
self.n_bins)
histo_range = instance.histo_range
self.assertTrue(np.array_equal(histo_range, self.histo_range))
def test_nominal_last_bin_closed(self):
instance = HistogramndLut(self.sample,
self.histo_range,
self.n_bins)
last_bin_closed = instance.last_bin_closed
self.assertEqual(last_bin_closed, False)
instance = HistogramndLut(self.sample,
self.histo_range,
self.n_bins,
last_bin_closed=True)
last_bin_closed = instance.last_bin_closed
self.assertEqual(last_bin_closed, True)
instance = HistogramndLut(self.sample,
self.histo_range,
self.n_bins,
last_bin_closed=False)
last_bin_closed = instance.last_bin_closed
self.assertEqual(last_bin_closed, False)
def test_nominal_n_bins_array(self):
    """n_bins given as a per-dimension array must be reported unchanged."""
    requested_bins = np.arange(self.ndims) + 10
    histo_lut = HistogramndLut(self.sample,
                               self.histo_range,
                               requested_bins)
    self.assertTrue(np.array_equal(histo_lut.n_bins, requested_bins))
def test_nominal_n_bins_scalar(self):
    """A scalar n_bins must be broadcast to every dimension."""
    requested_bins = 10
    broadcast_bins = np.array([requested_bins] * self.ndims)
    histo_lut = HistogramndLut(self.sample,
                               self.histo_range,
                               requested_bins)
    self.assertTrue(np.array_equal(histo_lut.n_bins, broadcast_bins))
def test_nominal_histo_ref(self):
    """histo()/weighted_histo() return copies by default; with
    copy=False they return references into the instance state.
    """
    expected_h_tpl = np.array([2, 1, 1, 1, 1])
    expected_c_tpl = np.array([-700.7, -0.5, 0.01, 300.3, 500.5])
    expected_h = np.zeros(shape=self.n_bins, dtype=np.double)
    expected_c = np.zeros(shape=self.n_bins, dtype=np.double)
    # Spread the 1D templates along the tested axis (see setUp:
    # other axes only carry data at index self.other_axes_index).
    self.fill_histo(expected_h, expected_h_tpl, self.ndims-1)
    self.fill_histo(expected_c, expected_c_tpl, self.ndims-1)
    instance = HistogramndLut(self.sample,
                              self.histo_range,
                              self.n_bins)
    instance.accumulate(self.weights)
    histo = instance.histo()
    w_histo = instance.weighted_histo()
    histo_ref = instance.histo(copy=False)
    w_histo_ref = instance.weighted_histo(copy=False)
    self.assertTrue(np.array_equal(histo, expected_h))
    self.assertTrue(np.array_equal(w_histo, expected_c))
    self.assertTrue(np.array_equal(histo_ref, expected_h))
    self.assertTrue(np.array_equal(w_histo_ref, expected_c))
    # Mutating the references must alter the internal state...
    histo_ref[0, ...] = histo_ref[0, ...] + 10
    w_histo_ref[0, ...] = w_histo_ref[0, ...] + 20
    # ...while the earlier copies stay untouched...
    self.assertTrue(np.array_equal(histo, expected_h))
    self.assertTrue(np.array_equal(w_histo, expected_c))
    self.assertFalse(np.array_equal(histo_ref, expected_h))
    self.assertFalse(np.array_equal(w_histo_ref, expected_c))
    # ...and fresh copies must reflect the mutation.
    histo_2 = instance.histo()
    w_histo_2 = instance.weighted_histo()
    self.assertFalse(np.array_equal(histo_2, expected_h))
    self.assertFalse(np.array_equal(w_histo_2, expected_c))
    self.assertTrue(np.array_equal(histo_2, histo_ref))
    self.assertTrue(np.array_equal(w_histo_2, w_histo_ref))
def test_nominal_accumulate_once(self):
    """A single accumulate() must produce the expected bin counts
    (uint32) and weighted sums (float64 here).
    """
    expected_h_tpl = np.array([2, 1, 1, 1, 1])
    expected_c_tpl = np.array([-700.7, -0.5, 0.01, 300.3, 500.5])
    expected_h = np.zeros(shape=self.n_bins, dtype=np.double)
    expected_c = np.zeros(shape=self.n_bins, dtype=np.double)
    self.fill_histo(expected_h, expected_h_tpl, self.ndims-1)
    self.fill_histo(expected_c, expected_c_tpl, self.ndims-1)
    instance = HistogramndLut(self.sample,
                              self.histo_range,
                              self.n_bins)
    instance.accumulate(self.weights)
    histo = instance.histo()
    w_histo = instance.weighted_histo()
    self.assertEqual(w_histo.dtype, np.float64)
    self.assertEqual(histo.dtype, np.uint32)
    self.assertTrue(np.array_equal(histo, expected_h))
    self.assertTrue(np.array_equal(w_histo, expected_c))
    # Reading again must give the same result: the accessors must not
    # consume or reset the accumulated state.
    self.assertTrue(np.array_equal(instance.histo(), expected_h))
    self.assertTrue(np.array_equal(instance.weighted_histo(),
                                   expected_c))
def test_nominal_accumulate_twice(self):
    """Two accumulate() calls must add up: every count and weighted
    sum is twice the single-call value.
    """
    expected_h_tpl = np.array([2, 1, 1, 1, 1])
    expected_c_tpl = np.array([-700.7, -0.5, 0.01, 300.3, 500.5])
    expected_h = np.zeros(shape=self.n_bins, dtype=np.double)
    expected_c = np.zeros(shape=self.n_bins, dtype=np.double)
    self.fill_histo(expected_h, expected_h_tpl, self.ndims-1)
    self.fill_histo(expected_c, expected_c_tpl, self.ndims-1)
    # calling accumulate twice -> expectations are doubled
    expected_h *= 2
    expected_c *= 2
    instance = HistogramndLut(self.sample,
                              self.histo_range,
                              self.n_bins)
    instance.accumulate(self.weights)
    instance.accumulate(self.weights)
    histo = instance.histo()
    w_histo = instance.weighted_histo()
    self.assertEqual(w_histo.dtype, np.float64)
    self.assertEqual(histo.dtype, np.uint32)
    self.assertTrue(np.array_equal(histo, expected_h))
    self.assertTrue(np.array_equal(w_histo, expected_c))
    self.assertTrue(np.array_equal(instance.histo(), expected_h))
    self.assertTrue(np.array_equal(instance.weighted_histo(),
                                   expected_c))
def test_nominal_apply_lut_once(self):
    """apply_lut() returns the histograms directly without touching
    the instance state: histo()/weighted_histo() must stay None.
    """
    expected_h_tpl = np.array([2, 1, 1, 1, 1])
    expected_c_tpl = np.array([-700.7, -0.5, 0.01, 300.3, 500.5])
    expected_h = np.zeros(shape=self.n_bins, dtype=np.double)
    expected_c = np.zeros(shape=self.n_bins, dtype=np.double)
    self.fill_histo(expected_h, expected_h_tpl, self.ndims-1)
    self.fill_histo(expected_c, expected_c_tpl, self.ndims-1)
    instance = HistogramndLut(self.sample,
                              self.histo_range,
                              self.n_bins)
    histo, w_histo = instance.apply_lut(self.weights)
    self.assertEqual(w_histo.dtype, np.float64)
    self.assertEqual(histo.dtype, np.uint32)
    self.assertTrue(np.array_equal(histo, expected_h))
    self.assertTrue(np.array_equal(w_histo, expected_c))
    # assertIsNone instead of assertEqual(..., None): if a regression
    # ever made these return arrays, assertEqual would raise a
    # confusing "ambiguous truth value" ValueError instead of failing
    # with a clear message.
    self.assertIsNone(instance.histo())
    self.assertIsNone(instance.weighted_histo())
def test_nominal_apply_lut_twice(self):
    """Calling apply_lut() twice, reusing the output arrays, must
    accumulate into them in place and leave the instance state None.
    """
    expected_h_tpl = np.array([2, 1, 1, 1, 1])
    expected_c_tpl = np.array([-700.7, -0.5, 0.01, 300.3, 500.5])
    expected_h = np.zeros(shape=self.n_bins, dtype=np.double)
    expected_c = np.zeros(shape=self.n_bins, dtype=np.double)
    self.fill_histo(expected_h, expected_h_tpl, self.ndims-1)
    self.fill_histo(expected_c, expected_c_tpl, self.ndims-1)
    # calling apply_lut twice -> expectations are doubled
    expected_h *= 2
    expected_c *= 2
    instance = HistogramndLut(self.sample,
                              self.histo_range,
                              self.n_bins)
    histo, w_histo = instance.apply_lut(self.weights)
    histo_2, w_histo_2 = instance.apply_lut(self.weights,
                                            histo=histo,
                                            weighted_histo=w_histo)
    # assertIs expresses object identity directly; comparing id()
    # values with assertEqual obscures the intent.
    self.assertIs(histo_2, histo)
    self.assertIs(w_histo_2, w_histo)
    self.assertEqual(w_histo.dtype, np.float64)
    self.assertEqual(histo.dtype, np.uint32)
    self.assertTrue(np.array_equal(histo, expected_h))
    self.assertTrue(np.array_equal(w_histo, expected_c))
    # assertIsNone avoids the ambiguous-truth-value pitfall of
    # assertEqual(array, None).
    self.assertIsNone(instance.histo())
    self.assertIsNone(instance.weighted_histo())
def test_nominal_accumulate_last_bin_closed(self):
    """Same as accumulate_once but with last_bin_closed=True: the
    last bin picks up one more sample (count 2 instead of 1, weighted
    sum 1101.1 instead of 500.5), i.e. a value on the upper range
    edge is now included.
    """
    expected_h_tpl = np.array([2, 1, 1, 1, 2])
    expected_c_tpl = np.array([-700.7, -0.5, 0.01, 300.3, 1101.1])
    expected_h = np.zeros(shape=self.n_bins, dtype=np.double)
    expected_c = np.zeros(shape=self.n_bins, dtype=np.double)
    self.fill_histo(expected_h, expected_h_tpl, self.ndims-1)
    self.fill_histo(expected_c, expected_c_tpl, self.ndims-1)
    instance = HistogramndLut(self.sample,
                              self.histo_range,
                              self.n_bins,
                              last_bin_closed=True)
    instance.accumulate(self.weights)
    histo = instance.histo()
    w_histo = instance.weighted_histo()
    self.assertEqual(w_histo.dtype, np.float64)
    self.assertEqual(histo.dtype, np.uint32)
    self.assertTrue(np.array_equal(histo, expected_h))
    self.assertTrue(np.array_equal(w_histo, expected_c))
def test_nominal_accumulate_weight_min_max(self):
    """Samples whose weight falls outside [weight_min, weight_max]
    must be ignored entirely: neither counted nor summed (first and
    last template entries drop to zero).
    """
    weight_min = -299.9
    weight_max = 499.9
    expected_h_tpl = np.array([0, 1, 1, 1, 0])
    expected_c_tpl = np.array([0., -0.5, 0.01, 300.3, 0.])
    expected_h = np.zeros(shape=self.n_bins, dtype=np.double)
    expected_c = np.zeros(shape=self.n_bins, dtype=np.double)
    self.fill_histo(expected_h, expected_h_tpl, self.ndims-1)
    self.fill_histo(expected_c, expected_c_tpl, self.ndims-1)
    instance = HistogramndLut(self.sample,
                              self.histo_range,
                              self.n_bins)
    instance.accumulate(self.weights,
                        weight_min=weight_min,
                        weight_max=weight_max)
    histo = instance.histo()
    w_histo = instance.weighted_histo()
    self.assertEqual(w_histo.dtype, np.float64)
    self.assertEqual(histo.dtype, np.uint32)
    self.assertTrue(np.array_equal(histo, expected_h))
    self.assertTrue(np.array_equal(w_histo, expected_c))
def test_nominal_accumulate_forced_int32(self):
    """
    double weights, int32 weighted_histogram

    Forcing dtype=np.int32 makes the weighted sums accumulate as
    int32, truncating the double weights (hence the integer expected
    values below, e.g. -700.7 -> -700).
    """
    expected_h_tpl = np.array([2, 1, 1, 1, 1])
    expected_c_tpl = np.array([-700, 0, 0, 300, 500])
    expected_h = np.zeros(shape=self.n_bins, dtype=np.double)
    expected_c = np.zeros(shape=self.n_bins, dtype=np.double)
    self.fill_histo(expected_h, expected_h_tpl, self.ndims-1)
    self.fill_histo(expected_c, expected_c_tpl, self.ndims-1)
    instance = HistogramndLut(self.sample,
                              self.histo_range,
                              self.n_bins,
                              dtype=np.int32)
    instance.accumulate(self.weights)
    histo = instance.histo()
    w_histo = instance.weighted_histo()
    self.assertEqual(w_histo.dtype, np.int32)
    self.assertEqual(histo.dtype, np.uint32)
    self.assertTrue(np.array_equal(histo, expected_h))
    self.assertTrue(np.array_equal(w_histo, expected_c))
def test_nominal_accumulate_forced_float32(self):
    """
    int32 weights, float32 weighted_histogram

    Forcing dtype=np.float32 makes the weighted sums accumulate as
    float32 even though the weights are fed in as int32.
    """
    expected_h_tpl = np.array([2, 1, 1, 1, 1])
    expected_c_tpl = np.array([-700., 0., 0., 300., 500.])
    expected_h = np.zeros(shape=self.n_bins, dtype=np.double)
    expected_c = np.zeros(shape=self.n_bins, dtype=np.float32)
    self.fill_histo(expected_h, expected_h_tpl, self.ndims-1)
    self.fill_histo(expected_c, expected_c_tpl, self.ndims-1)
    instance = HistogramndLut(self.sample,
                              self.histo_range,
                              self.n_bins,
                              dtype=np.float32)
    instance.accumulate(self.weights.astype(np.int32))
    histo = instance.histo()
    w_histo = instance.weighted_histo()
    self.assertEqual(w_histo.dtype, np.float32)
    self.assertEqual(histo.dtype, np.uint32)
    self.assertTrue(np.array_equal(histo, expected_h))
    self.assertTrue(np.array_equal(w_histo, expected_c))
def test_nominal_accumulate_int32(self):
    """
    int32 weights

    Without an explicit dtype, the weighted histogram adopts the
    dtype of the weights (int32 here).
    """
    expected_h_tpl = np.array([2, 1, 1, 1, 1])
    expected_c_tpl = np.array([-700, 0, 0, 300, 500])
    expected_h = np.zeros(shape=self.n_bins, dtype=np.double)
    expected_c = np.zeros(shape=self.n_bins, dtype=np.int32)
    self.fill_histo(expected_h, expected_h_tpl, self.ndims-1)
    self.fill_histo(expected_c, expected_c_tpl, self.ndims-1)
    instance = HistogramndLut(self.sample,
                              self.histo_range,
                              self.n_bins)
    instance.accumulate(self.weights.astype(np.int32))
    histo = instance.histo()
    w_histo = instance.weighted_histo()
    self.assertEqual(w_histo.dtype, np.int32)
    self.assertEqual(histo.dtype, np.uint32)
    self.assertTrue(np.array_equal(histo, expected_h))
    self.assertTrue(np.array_equal(w_histo, expected_c))
def test_nominal_accumulate_int32_double(self):
    """
    int32 weights

    First accumulate with int32 weights (fixing the accumulator
    dtype to int32), then with the original double weights: the
    second call is truncated into the existing int32 accumulator,
    so every expected value simply doubles.
    """
    expected_h_tpl = np.array([2, 1, 1, 1, 1])
    expected_c_tpl = np.array([-700, 0, 0, 300, 500])
    expected_h = np.zeros(shape=self.n_bins, dtype=np.double)
    expected_c = np.zeros(shape=self.n_bins, dtype=np.int32)
    self.fill_histo(expected_h, expected_h_tpl, self.ndims-1)
    self.fill_histo(expected_c, expected_c_tpl, self.ndims-1)
    instance = HistogramndLut(self.sample,
                              self.histo_range,
                              self.n_bins)
    instance.accumulate(self.weights.astype(np.int32))
    instance.accumulate(self.weights)
    histo = instance.histo()
    w_histo = instance.weighted_histo()
    expected_h *= 2
    expected_c *= 2
    self.assertEqual(w_histo.dtype, np.int32)
    self.assertEqual(histo.dtype, np.uint32)
    self.assertTrue(np.array_equal(histo, expected_h))
    self.assertTrue(np.array_equal(w_histo, expected_c))
def testNoneNativeTypes(self):
    """Construction must accept samples in non-native byte order."""
    # The original bound the swapped dtypes to a variable named
    # ``type``, shadowing the builtin; use descriptive names instead.
    big_endian_dtype = self.sample.dtype.newbyteorder("B")
    little_endian_dtype = self.sample.dtype.newbyteorder("L")
    # Only checks that construction does not raise for either order;
    # the instances themselves are not needed.
    HistogramndLut(self.sample.astype(big_endian_dtype),
                   self.histo_range,
                   self.n_bins)
    HistogramndLut(self.sample.astype(little_endian_dtype),
                   self.histo_range,
                   self.n_bins)
class TestHistogramndLut_nominal_1d(_TestHistogramndLut_nominal):
    """Nominal HistogramndLut test battery run on 1D samples."""
    ndims = 1
class TestHistogramndLut_nominal_2d(_TestHistogramndLut_nominal):
    """Nominal HistogramndLut test battery run on 2D samples."""
    ndims = 2
class TestHistogramndLut_nominal_3d(_TestHistogramndLut_nominal):
    """Nominal HistogramndLut test battery run on 3D samples."""
    ndims = 3
# ==============================================================
# ==============================================================
# ==============================================================
# Concrete TestCase classes collected into the module test suite,
# one per sample dimensionality.
test_cases = (TestHistogramndLut_nominal_1d,
              TestHistogramndLut_nominal_2d,
              TestHistogramndLut_nominal_3d,)
def suite():
    """Build the TestSuite aggregating every class in ``test_cases``."""
    test_suite = unittest.TestSuite()
    load = unittest.defaultTestLoader.loadTestsFromTestCase
    for test_class in test_cases:
        test_suite.addTests(load(test_class))
    return test_suite
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main(defaultTest="suite")
| 35.67121 | 80 | 0.570132 |
53de379f48364f457acd42685817961b8a032b82 | 1,988 | py | Python | cms/migrations/0017_pagetype.py | Mario-Kart-Felix/django-cms | 6d68439fe7fd59d000f99e27c1f2135a3f9c816a | [
"BSD-3-Clause"
] | 5,659 | 2015-01-01T02:42:30.000Z | 2020-10-07T02:38:29.000Z | cms/migrations/0017_pagetype.py | rpep/django-cms | 53dddb106f45963f9f8393d434b4313fa3bbdf54 | [
"BSD-3-Clause"
] | 3,264 | 2015-01-02T10:11:48.000Z | 2020-10-08T13:15:07.000Z | cms/migrations/0017_pagetype.py | rpep/django-cms | 53dddb106f45963f9f8393d434b4313fa3bbdf54 | [
"BSD-3-Clause"
] | 2,132 | 2015-01-01T11:28:21.000Z | 2020-10-06T09:09:11.000Z | from django.db import migrations, models
def update_descendants(parent, **data):
    """Recursively apply ``data`` to every page below ``parent``:
    each level's children are bulk-updated, then each child's own
    subtree is visited depth first."""
    children = parent.children
    children.update(**data)
    for node in children.iterator():
        update_descendants(node, **data)
def migrate_to_page_types(apps, schema_editor):
    """Forward data migration: flag the legacy "page types" trees.

    Every draft page with reverse_id 'page_types' (and its whole
    subtree, plus the published counterpart if one exists) gets
    ``is_page_type=True``; the magic reverse_id is cleared from the
    root pages.
    """
    Page = apps.get_model('cms', 'Page')
    # Respect the database this migration is being run against.
    db_alias = schema_editor.connection.alias
    page_types = Page.objects.using(db_alias).filter(
        reverse_id='page_types',
        publisher_is_draft=True,
    )
    for page_types_root in page_types:
        update_descendants(page_types_root, is_page_type=True)
        # Remove reverse id from draft page
        page_types_root.reverse_id = ''
        page_types_root.is_page_type = True
        page_types_root.save(update_fields=['reverse_id', 'is_page_type'])
        page_types_root_public = page_types_root.publisher_public
        if page_types_root_public:
            # very strange case.. technically page-types should never be published.
            # but nothing actually prevents it, so update public pages (if any).
            update_descendants(page_types_root_public, is_page_type=True)
            # Remove reverse id from public page
            page_types_root_public.reverse_id = ''
            page_types_root_public.is_page_type = True
            page_types_root_public.save(update_fields=['reverse_id', 'is_page_type'])
class Migration(migrations.Migration):
    """Adds the ``is_page_type`` flag and the ``PageType`` proxy model,
    then marks the existing 'page_types' trees via a data migration."""

    dependencies = [
        ('cms', '0016_auto_20160608_1535'),
    ]
    operations = [
        migrations.AddField(
            model_name='page',
            name='is_page_type',
            field=models.BooleanField(default=False),
        ),
        # Proxy over cms.Page: no new table, no default permissions.
        migrations.CreateModel(
            name='PageType',
            fields=[
            ],
            options={
                'default_permissions': [],
                'proxy': True,
            },
            bases=('cms.page',),
        ),
        # Forward-only data step; reversing is a no-op.
        migrations.RunPython(migrate_to_page_types, migrations.RunPython.noop),
    ]
| 31.555556 | 85 | 0.635312 |
a7c94b3d222890f1dda096e5c44650b91a21b0fd | 7,332 | py | Python | textory/textures.py | sebiegli/textory | 8cf5110d40dbf7898e9f4186b233e3abf4cd24f9 | [
"MIT"
] | null | null | null | textory/textures.py | sebiegli/textory | 8cf5110d40dbf7898e9f4186b233e3abf4cd24f9 | [
"MIT"
] | null | null | null | textory/textures.py | sebiegli/textory | 8cf5110d40dbf7898e9f4186b233e3abf4cd24f9 | [
"MIT"
] | null | null | null | #! /usr/bin/python
# -*- coding: utf-8 -*-
import numpy as np
import functools
import dask.array as da
from .util import neighbour_diff_squared, _dask_neighbour_diff_squared, _win_view_stat, window_sum, xr_wrapper
@xr_wrapper
def variogram(x, lag=1, win_size=5, win_geom="square", **kwargs):
    """Moving window variogram of ``x`` for the given lag.

    Parameters
    ----------
    x : array like
        Input array
    lag : int
        Lag distance for the variogram. Defaults to 1.
    win_size : int, optional
        Side length of the (win_size x win_size) moving window.
    win_geom : {"square", "round"}
        Geometry of the kernel. Defaults to square.

    Returns
    -------
    array like
        Array where each element holds the variogram of the window
        centred on that element.
    """
    # Dask arrays need the chunk-aware implementation.
    if isinstance(x, da.core.Array):
        diff_func = _dask_neighbour_diff_squared
    else:
        diff_func = neighbour_diff_squared
    squared_diff = diff_func(x, lag=lag, func="nd_variogram")
    return window_sum(squared_diff, lag=lag, win_size=win_size,
                      win_geom=win_geom)
@xr_wrapper
def pseudo_cross_variogram(x, y, lag=1, win_size=5, win_geom="square", **kwargs):
    """Moving window pseudo-variogram between ``x`` and ``y`` for the
    given lag.

    Parameters
    ----------
    x, y : array like
        Input array
    lag : int
        Lag distance for the variogram. Defaults to 1.
    win_size : int, optional
        Side length of the (win_size x win_size) moving window.
    win_geom : {"square", "round"}
        Geometry of the kernel. Defaults to square.

    Returns
    -------
    array like
        Array where each element holds the pseudo-variogram between
        the two arrays for the window centred on that element.
    """
    # Dask arrays need the chunk-aware implementation.
    if isinstance(x, da.core.Array):
        diff_func = _dask_neighbour_diff_squared
    else:
        diff_func = neighbour_diff_squared
    squared_diff = diff_func(x, y, lag, func="nd_variogram")
    return window_sum(squared_diff, lag=lag, win_size=win_size,
                      win_geom=win_geom)
@xr_wrapper
def cross_variogram(x, y, lag=1, win_size=5, win_geom="square", **kwargs):
    """
    Calculate moving window cross-variogram with specified
    lag for the two arrays.

    Parameters
    ----------
    x, y : array like
        Input array
    lag : int
        Lag distance for variogram, defaults to 1.
    win_size : int, optional
        Length of one side of window. Window will be of size window*window.
    win_geom : {"square", "round"}
        Geometry of the kernel. Defaults to square.

    Returns
    -------
    array like
        Array where each element is the cross-variogram
        between the two arrays of the window around the element.
    """
    # (Docstring previously said "pseudo-variogram" — copy/paste from
    # pseudo_cross_variogram; this function uses "nd_cross_variogram".)
    # Dask arrays need the chunk-aware implementation.
    if isinstance(x, da.core.Array):
        diff = _dask_neighbour_diff_squared(x, y, lag, func="nd_cross_variogram")
    else:
        diff = neighbour_diff_squared(x, y, lag, func="nd_cross_variogram")
    # Sum the per-pixel differences over the moving window.
    res = window_sum(diff, lag=lag, win_size=win_size, win_geom=win_geom)
    return res
@xr_wrapper
def madogram(x, lag=1, win_size=5, win_geom="square", **kwargs):
    """Moving window madogram of ``x`` for the given lag.

    Parameters
    ----------
    x : array like
        Input array
    lag : int
        Lag distance for the madogram. Defaults to 1.
    win_size : int, optional
        Side length of the (win_size x win_size) moving window.
    win_geom : {"square", "round"}
        Geometry of the kernel. Defaults to square.

    Returns
    -------
    array like
        Array where each element holds the madogram of the window
        centred on that element.
    """
    # Dask arrays need the chunk-aware implementation.
    if isinstance(x, da.core.Array):
        diff_func = _dask_neighbour_diff_squared
    else:
        diff_func = neighbour_diff_squared
    neighbour_diff = diff_func(x, lag=lag, func="nd_madogram")
    return window_sum(neighbour_diff, lag=lag, win_size=win_size,
                      win_geom=win_geom)
@xr_wrapper
def rodogram(x, lag=1, win_size=5, win_geom="square", **kwargs):
    """
    Calculate moving window rodogram with specified
    lag for array.

    Parameters
    ----------
    x : array like
        Input array
    lag : int
        Lag distance for variogram, defaults to 1.
    win_size : int, optional
        Length of one side of window. Window will be of size window*window.
    win_geom : {"square", "round"}
        Geometry of the kernel. Defaults to square.

    Returns
    -------
    array like
        Array where each element is the rodogram of the window around the element
    """
    # (Docstring previously said "madogram" in Returns — copy/paste;
    # this function uses "nd_rodogram".)
    # Dask arrays need the chunk-aware implementation.
    if isinstance(x, da.core.Array):
        diff = _dask_neighbour_diff_squared(x, lag=lag, func="nd_rodogram")
    else:
        diff = neighbour_diff_squared(x, lag=lag, func="nd_rodogram")
    # Sum the per-pixel differences over the moving window.
    res = window_sum(diff, lag=lag, win_size=win_size, win_geom=win_geom)
    return res
@xr_wrapper
def window_statistic(x, stat="nanmean", win_size=5, **kwargs):
    """
    Calculate the specified statistic with a moving window of size `win_size`.

    Parameters
    ----------
    x : array like
        Input array
    stat : {"nanmean", "nanmax", "nanmin", "nanmedian", "nanstd"}
        Statistical measure to calculate.
    win_size : int, optional
        Length of one side of window. Window will be of size window*window.
        Must be odd.

    Returns
    -------
    array like

    Raises
    ------
    ValueError
        If ``win_size`` is even.

    Todo
    ----
    - checking if array dimensions are multiple of win_size. pad if not
    - make sure that each chunk is multiple of win_size in map_overlap
    """
    if win_size % 2 == 0:
        # BUG FIX: the original executed ``raise("...")`` — raising a
        # plain string yields "TypeError: exceptions must derive from
        # BaseException" instead of reporting the real problem.
        raise ValueError("Window size must be odd.")
    # Bind the fixed parameters so the windowed statistic can be
    # mapped over array chunks.
    pcon = functools.partial(_win_view_stat, win_size=win_size, stat=stat)
    if isinstance(x, da.core.Array):
        # Overlap chunks by the window radius so border windows see
        # their neighbours; pad the outer border with NaN.
        conv_padding = int(win_size // 2)
        res = x.map_overlap(pcon, depth={0: conv_padding, 1: conv_padding},
                            boundary={0: np.nan, 1: np.nan})  # , trim=False)
    else:
        res = pcon(x)
    return res
#def variogram_diff_old(band1, band2, lag=None, window=None):
#band2 = np.pad(band2, ((1,1),(1,1)), mode="edge")
#out = np.zeros(band1.shape, dtype=band1.dtype.name)
##left and right neighbour
#out = (band1 - band2[1:-1,2::])**2
#out += (band1 - band2[1:-1,0:-2:])**2
##above and below neighbours
#out += (band1 - band2[2::,1:-1])**2
#out += (band1 - band2[0:-2,1:-1])**2
##left diagonal neighbours
#out += (band1 - band2[0:-2,0:-2])**2
#out += (band1 - band2[2::,0:-2])**2
##right diagonal neigbours
#out += (band1 -band2[0:-2,2::])**2
#out += (band1 - band2[2::,2::])**2
#return out
#def variogram_diff_loop(band1, band2, lag=1, window=None):
#band2 = np.pad(band2, ((lag,lag),(lag,lag)), mode="edge")
#out = np.zeros(band1.shape, dtype=band1.dtype.name)
#win = 2*lag + 1
#radius = int(win/2)
#r = list(range(win))
#for x in r:
#x_off = x - radius
#if x == min(r) or x == max(r):
#y_r = r
#else:
#y_r = [max(r), min(r)]
#for y in y_r:
#y_off = y - radius
#out += (band1 - band2[y_off:-y_off, x_off:-x_off])**2
#return out
| 28.752941 | 122 | 0.614157 |
102ed9caf6d5236c31873fcd5208412e9712ea9d | 10,144 | py | Python | silx/opencl/sift/test/test_convol.py | vallsv/silx | 834bfe9272af99096faa360e1ad96291bf46a2ac | [
"CC0-1.0",
"MIT"
] | 1 | 2017-08-03T15:51:42.000Z | 2017-08-03T15:51:42.000Z | silx/opencl/sift/test/test_convol.py | vallsv/silx | 834bfe9272af99096faa360e1ad96291bf46a2ac | [
"CC0-1.0",
"MIT"
] | 7 | 2016-10-19T09:27:26.000Z | 2020-01-24T13:26:56.000Z | silx/opencl/sift/test/test_convol.py | vallsv/silx | 834bfe9272af99096faa360e1ad96291bf46a2ac | [
"CC0-1.0",
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Project: Sift implementation in Python + OpenCL
# https://github.com/silx-kit/silx
#
# Copyright (C) 2013-2017 European Synchrotron Radiation Facility, Grenoble, France
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
"""
Test suite for all preprocessing kernels.
"""
from __future__ import division, print_function
__authors__ = ["Jérôme Kieffer", "Pierre Paleo"]
__contact__ = "jerome.kieffer@esrf.eu"
__license__ = "MIT"
__copyright__ = "2013 European Synchrotron Radiation Facility, Grenoble, France"
__date__ = "25/06/2018"
import os
import time
import logging
import numpy
try:
import scipy.misc
import scipy.ndimage
except ImportError:
scipy = None
import unittest
from silx.opencl import ocl
if ocl:
import pyopencl.array
from ..utils import calc_size, get_opencl_code
logger = logging.getLogger(__name__)
def my_blur(img, kernel):
    """
    Hand made implementation of a separable gaussian blur with OUR
    kernel, which differs from Scipy's gaussian filter if ksize is even.

    :param img: 2D image to blur
    :param kernel: 1D convolution kernel, applied along both axes
    :return: blurred image with the same shape as ``img``
    """
    # scipy.ndimage.filters is a long-deprecated alias namespace
    # (removed in recent SciPy); call scipy.ndimage.convolve1d directly.
    tmp1 = scipy.ndimage.convolve1d(img, kernel, axis=-1, mode="reflect")
    return scipy.ndimage.convolve1d(tmp1, kernel, axis=0, mode="reflect")
@unittest.skipUnless(scipy and ocl, "scipy or opencl not available")
class TestConvol(unittest.TestCase):
    """Checks the SIFT OpenCL separable convolution kernels against a
    scipy.ndimage based reference implementation."""

    @classmethod
    def setUpClass(cls):
        """Create the OpenCL context/queue shared by all tests."""
        super(TestConvol, cls).setUpClass()
        if ocl:
            cls.ctx = ocl.create_context()
            if logger.getEffectiveLevel() <= logging.INFO:
                # Profiling-enabled queue so kernel timings can be logged.
                cls.PROFILE = True
                cls.queue = pyopencl.CommandQueue(cls.ctx, properties=pyopencl.command_queue_properties.PROFILING_ENABLE)
            else:
                cls.PROFILE = False
                cls.queue = pyopencl.CommandQueue(cls.ctx)
            device = cls.ctx.devices[0]
            device_id = device.platform.get_devices().index(device)
            platform_id = pyopencl.get_platforms().index(device.platform)
            cls.max_wg = ocl.platforms[platform_id].devices[device_id].max_work_group_size
            # logger.warning("max_work_group_size: %s on (%s, %s)", cls.max_wg, platform_id, device_id)

    @classmethod
    def tearDownClass(cls):
        """Release the OpenCL context/queue."""
        super(TestConvol, cls).tearDownClass()
        cls.ctx = None
        cls.queue = None

    def setUp(self):
        """Upload a test image and build the convolution kernels."""
        # BUG FIX: was ``if scipy and ocl is None`` which, due to
        # operator precedence, only bailed out when scipy was present
        # AND ocl was missing; guard both dependencies.
        if scipy is None or ocl is None:
            return
        if hasattr(scipy.misc, "ascent"):
            self.input = scipy.misc.ascent().astype(numpy.float32)
        else:
            self.input = scipy.misc.lena().astype(numpy.float32)
        # Odd, non-square crop to exercise non-aligned image sizes.
        self.input = numpy.ascontiguousarray(self.input[0:507, 0:209])
        self.gpu_in = pyopencl.array.to_device(self.queue, self.input)
        self.gpu_tmp = pyopencl.array.empty(self.queue, self.input.shape, dtype=numpy.float32, order="C")
        self.gpu_out = pyopencl.array.empty(self.queue, self.input.shape, dtype=numpy.float32, order="C")
        kernel_src = get_opencl_code(os.path.join("sift", "convolution.cl"))
        self.program = pyopencl.Program(self.ctx, kernel_src).build()
        self.IMAGE_W = numpy.int32(self.input.shape[-1])
        self.IMAGE_H = numpy.int32(self.input.shape[0])
        # Pick a workgroup shape compatible with the device limit.
        if self.max_wg < 512:
            if self.max_wg > 1:
                self.wg = (self.max_wg, 1)
            else:
                self.wg = (1, 1)
        else:
            self.wg = (256, 2)
        self.shape = calc_size((self.input.shape[1], self.input.shape[0]), self.wg)

    def tearDown(self):
        """Drop references to the image, program and GPU buffers."""
        self.input = None
        # self.gpudata.release()
        self.program = None
        self.gpu_in = self.gpu_tmp = self.gpu_out = None

    def test_convol_hor(self):
        """
        tests the horizontal convolution kernel
        """
        for sigma in [2, 15 / 8.]:
            ksize = int(8 * sigma + 1)
            # Build a normalized gaussian kernel.
            x = numpy.arange(ksize) - (ksize - 1.0) / 2.0
            gaussian = numpy.exp(-(x / sigma) ** 2 / 2.0).astype(numpy.float32)
            gaussian /= gaussian.sum(dtype=numpy.float32)
            gpu_filter = pyopencl.array.to_device(self.queue, gaussian)
            t0 = time.time()
            k1 = self.program.horizontal_convolution(self.queue, self.shape, self.wg,
                                                     self.gpu_in.data, self.gpu_out.data, gpu_filter.data, numpy.int32(ksize), self.IMAGE_W, self.IMAGE_H)
            res = self.gpu_out.get()
            t1 = time.time()
            ref = scipy.ndimage.filters.convolve1d(self.input, gaussian, axis=-1, mode="reflect")
            t2 = time.time()
            delta = abs(ref - res).max()
            # self.assert_ is a deprecated alias of assertTrue.
            if ksize % 2 == 0:  # we have a problem with even kernels !!!
                self.assertTrue(delta < 50, "sigma= %s delta=%s" % (sigma, delta))
            else:
                self.assertTrue(delta < 1e-4, "sigma= %s delta=%s" % (sigma, delta))
            logger.info("sigma= %s delta=%s" % (sigma, delta))
            if self.PROFILE:
                logger.info("Global execution time: CPU %.3fms, GPU: %.3fms." % (1000.0 * (t2 - t1), 1000.0 * (t1 - t0)))
                logger.info("Horizontal convolution took %.3fms" % (1e-6 * (k1.profile.end - k1.profile.start)))

    # BUG FIX: was ``@unittest.skipIf(scipy and ocl is None, ...)``
    # which (precedence) skipped only when scipy was importable and
    # ocl was None; skipUnless matches the class-level guard.
    @unittest.skipUnless(scipy and ocl, "scipy or opencl not available")
    def test_convol_vert(self):
        """
        tests the vertical convolution kernel
        """
        for sigma in [2, 15 / 8.]:
            ksize = int(8 * sigma + 1)
            # Build a normalized gaussian kernel.
            x = numpy.arange(ksize) - (ksize - 1.0) / 2.0
            gaussian = numpy.exp(-(x / sigma) ** 2 / 2.0).astype(numpy.float32)
            gaussian /= gaussian.sum(dtype=numpy.float32)
            gpu_filter = pyopencl.array.to_device(self.queue, gaussian)
            t0 = time.time()
            k1 = self.program.vertical_convolution(self.queue, self.shape, self.wg,
                                                   self.gpu_in.data,
                                                   self.gpu_out.data,
                                                   gpu_filter.data,
                                                   numpy.int32(ksize),
                                                   self.IMAGE_W, self.IMAGE_H)
            res = self.gpu_out.get()
            t1 = time.time()
            ref = scipy.ndimage.filters.convolve1d(self.input, gaussian, axis=0, mode="reflect")
            t2 = time.time()
            delta = abs(ref - res).max()
            if ksize % 2 == 0:  # we have a problem with even kernels !!!
                self.assertTrue(delta < 50, "sigma= %s delta=%s" % (sigma, delta))
            else:
                self.assertTrue(delta < 1e-4, "sigma= %s delta=%s" % (sigma, delta))
            logger.info("sigma= %s delta=%s" % (sigma, delta))
            if self.PROFILE:
                logger.info("Global execution time: CPU %.3fms, GPU: %.3fms." % (1000.0 * (t2 - t1), 1000.0 * (t1 - t0)))
                logger.info("Vertical convolution took %.3fms" % (1e-6 * (k1.profile.end - k1.profile.start)))

    def test_convol(self):
        """
        tests the full (horizontal then vertical) separable convolution
        """
        for sigma in [2, 15 / 8.]:
            ksize = int(8 * sigma + 1)
            # Build a normalized gaussian kernel.
            x = numpy.arange(ksize) - (ksize - 1.0) / 2.0
            gaussian = numpy.exp(-(x / sigma) ** 2 / 2.0).astype(numpy.float32)
            gaussian /= gaussian.sum(dtype=numpy.float32)
            gpu_filter = pyopencl.array.to_device(self.queue, gaussian)
            t0 = time.time()
            k1 = self.program.horizontal_convolution(self.queue, self.shape, self.wg,
                                                     self.gpu_in.data, self.gpu_tmp.data, gpu_filter.data, numpy.int32(ksize), self.IMAGE_W, self.IMAGE_H)
            k2 = self.program.vertical_convolution(self.queue, self.shape, self.wg,
                                                   self.gpu_tmp.data, self.gpu_out.data, gpu_filter.data, numpy.int32(ksize), self.IMAGE_W, self.IMAGE_H)
            res = self.gpu_out.get()
            k2.wait()
            t1 = time.time()
            ref = my_blur(self.input, gaussian)
            # ref = scipy.ndimage.filters.gaussian_filter(self.input, sigma, mode="reflect")
            t2 = time.time()
            delta = abs(ref - res).max()
            if ksize % 2 == 0:  # we have a problem with even kernels !!!
                self.assertTrue(delta < 50, "sigma= %s delta=%s" % (sigma, delta))
            else:
                self.assertTrue(delta < 1e-4, "sigma= %s delta=%s" % (sigma, delta))
            logger.info("sigma= %s delta=%s" % (sigma, delta))
            if self.PROFILE:
                logger.info("Global execution time: CPU %.3fms, GPU: %.3fms." % (1000.0 * (t2 - t1), 1000.0 * (t1 - t0)))
                logger.info("Horizontal convolution took %.3fms and vertical convolution took %.3fms" % (1e-6 * (k1.profile.end - k1.profile.start),
                                                                                                         1e-6 * (k2.profile.end - k2.profile.start)))
def suite():
    """Build the TestSuite with the convolution tests."""
    test_suite = unittest.TestSuite()
    for test_name in ("test_convol", "test_convol_hor", "test_convol_vert"):
        test_suite.addTest(TestConvol(test_name))
    return test_suite
| 44.884956 | 148 | 0.590694 |
cb2337bcf788afccdc9c5a689352e723a99564df | 41,681 | py | Python | sdk/search/azure-search-documents/azure/search/documents/_internal/_generated/operations/_documents_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | 8 | 2021-01-13T23:44:08.000Z | 2021-03-17T10:13:36.000Z | sdk/search/azure-search-documents/azure/search/documents/_internal/_generated/operations/_documents_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | 2 | 2021-11-03T06:10:36.000Z | 2021-12-01T06:29:39.000Z | sdk/search/azure-search-documents/azure/search/documents/_internal/_generated/operations/_documents_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | 1 | 2021-05-19T02:55:10.000Z | 2021-05-19T02:55:10.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class DocumentsOperations(object):
"""DocumentsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.search.documents.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
    # Pipeline client used to send the HTTP requests.
    self._client = client
    # msrest serializer/deserializer for request and response bodies.
    self._serialize = serializer
    self._deserialize = deserializer
    # Service configuration (endpoint, index_name, ...).
    self._config = config
def count(
self,
request_options=None, # type: Optional["models.RequestOptions"]
**kwargs # type: Any
):
# type: (...) -> int
"""Queries the number of documents in the index.
:param request_options: Parameter group.
:type request_options: ~azure.search.documents.models.RequestOptions
:keyword callable cls: A custom type or function that will be passed the direct response
:return: long, or the result of cls(response)
:rtype: long
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[int]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
_x_ms_client_request_id = None
if request_options is not None:
_x_ms_client_request_id = request_options.x_ms_client_request_id
api_version = "2020-06-30"
accept = "application/json"
# Construct URL
url = self.count.metadata['url'] # type: ignore
path_format_arguments = {
'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
'indexName': self._serialize.url("self._config.index_name", self._config.index_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
if _x_ms_client_request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.SearchError, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('long', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
count.metadata = {'url': '/docs/$count'} # type: ignore
    def search_get(
        self,
        search_text=None,  # type: Optional[str]
        search_options=None,  # type: Optional["models.SearchOptions"]
        request_options=None,  # type: Optional["models.RequestOptions"]
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.SearchDocumentsResult"
        """Searches for documents in the index.

        :param search_text: A full-text search query expression; Use "*" or omit this parameter to
         match all documents.
        :type search_text: str
        :param search_options: Parameter group.
        :type search_options: ~azure.search.documents.models.SearchOptions
        :param request_options: Parameter group.
        :type request_options: ~azure.search.documents.models.RequestOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SearchDocumentsResult, or the result of cls(response)
        :rtype: ~azure.search.documents.models.SearchDocumentsResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.SearchDocumentsResult"]
        # Map well-known HTTP failures onto azure-core exception types; callers may extend the map.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # Flatten the optional parameter groups into individual locals; None means "omit from the
        # query string" below.
        _include_total_result_count = None
        _facets = None
        _filter = None
        _highlight_fields = None
        _highlight_post_tag = None
        _highlight_pre_tag = None
        _minimum_coverage = None
        _order_by = None
        _query_type = None
        _scoring_parameters = None
        _scoring_profile = None
        _search_fields = None
        _search_mode = None
        _scoring_statistics = None
        _session_id = None
        _select = None
        _skip = None
        _top = None
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        if search_options is not None:
            _include_total_result_count = search_options.include_total_result_count
            _facets = search_options.facets
            _filter = search_options.filter
            _highlight_fields = search_options.highlight_fields
            _highlight_post_tag = search_options.highlight_post_tag
            _highlight_pre_tag = search_options.highlight_pre_tag
            _minimum_coverage = search_options.minimum_coverage
            _order_by = search_options.order_by
            _query_type = search_options.query_type
            _scoring_parameters = search_options.scoring_parameters
            _scoring_profile = search_options.scoring_profile
            _search_fields = search_options.search_fields
            _search_mode = search_options.search_mode
            _scoring_statistics = search_options.scoring_statistics
            _session_id = search_options.session_id
            _select = search_options.select
            _skip = search_options.skip
            _top = search_options.top
        api_version = "2020-06-30"
        accept = "application/json"

        # Construct URL from the configured endpoint and index name.
        url = self.search_get.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexName': self._serialize.url("self._config.index_name", self._config.index_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters. List-valued options use one of two wire formats:
        # - facet / scoringParameter are repeated query parameters (one entry per value), hence
        #   the list of individually serialized items;
        # - highlight / $orderby / searchFields / $select are comma-joined via div=','.
        query_parameters = {}  # type: Dict[str, Any]
        if search_text is not None:
            query_parameters['search'] = self._serialize.query("search_text", search_text, 'str')
        if _include_total_result_count is not None:
            query_parameters['$count'] = self._serialize.query("include_total_result_count", _include_total_result_count, 'bool')
        if _facets is not None:
            query_parameters['facet'] = [self._serialize.query("facets", q, 'str') if q is not None else '' for q in _facets]
        if _filter is not None:
            query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str')
        if _highlight_fields is not None:
            query_parameters['highlight'] = self._serialize.query("highlight_fields", _highlight_fields, '[str]', div=',')
        if _highlight_post_tag is not None:
            query_parameters['highlightPostTag'] = self._serialize.query("highlight_post_tag", _highlight_post_tag, 'str')
        if _highlight_pre_tag is not None:
            query_parameters['highlightPreTag'] = self._serialize.query("highlight_pre_tag", _highlight_pre_tag, 'str')
        if _minimum_coverage is not None:
            query_parameters['minimumCoverage'] = self._serialize.query("minimum_coverage", _minimum_coverage, 'float')
        if _order_by is not None:
            query_parameters['$orderby'] = self._serialize.query("order_by", _order_by, '[str]', div=',')
        if _query_type is not None:
            query_parameters['queryType'] = self._serialize.query("query_type", _query_type, 'str')
        if _scoring_parameters is not None:
            query_parameters['scoringParameter'] = [self._serialize.query("scoring_parameters", q, 'str') if q is not None else '' for q in _scoring_parameters]
        if _scoring_profile is not None:
            query_parameters['scoringProfile'] = self._serialize.query("scoring_profile", _scoring_profile, 'str')
        if _search_fields is not None:
            query_parameters['searchFields'] = self._serialize.query("search_fields", _search_fields, '[str]', div=',')
        if _search_mode is not None:
            query_parameters['searchMode'] = self._serialize.query("search_mode", _search_mode, 'str')
        if _scoring_statistics is not None:
            query_parameters['scoringStatistics'] = self._serialize.query("scoring_statistics", _scoring_statistics, 'str')
        if _session_id is not None:
            query_parameters['sessionId'] = self._serialize.query("session_id", _session_id, 'str')
        if _select is not None:
            query_parameters['$select'] = self._serialize.query("select", _select, '[str]', div=',')
        if _skip is not None:
            query_parameters['$skip'] = self._serialize.query("skip", _skip, 'int')
        if _top is not None:
            query_parameters['$top'] = self._serialize.query("top", _top, 'int')
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # Non-2xx responses carry a SearchError payload with the service's diagnostic details.
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('SearchDocumentsResult', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    search_get.metadata = {'url': '/docs'}  # type: ignore
    def search_post(
        self,
        search_request,  # type: "models.SearchRequest"
        request_options=None,  # type: Optional["models.RequestOptions"]
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.SearchDocumentsResult"
        """Searches for documents in the index.

        POST variant of the search operation: all query options travel in the JSON body instead
        of the query string, avoiding URL-length limits.

        :param search_request: The definition of the Search request.
        :type search_request: ~azure.search.documents.models.SearchRequest
        :param request_options: Parameter group.
        :type request_options: ~azure.search.documents.models.RequestOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SearchDocumentsResult, or the result of cls(response)
        :rtype: ~azure.search.documents.models.SearchDocumentsResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.SearchDocumentsResult"]
        # Map well-known HTTP failures onto azure-core exception types; callers may extend the map.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # Flatten the optional RequestOptions parameter group.
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL from the configured endpoint and index name.
        url = self.search_post.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexName': self._serialize.url("self._config.index_name", self._config.index_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the SearchRequest model as the JSON request body.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(search_request, 'SearchRequest')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # Non-2xx responses carry a SearchError payload with the service's diagnostic details.
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('SearchDocumentsResult', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    search_post.metadata = {'url': '/docs/search.post.search'}  # type: ignore
    def get(
        self,
        key,  # type: str
        selected_fields=None,  # type: Optional[List[str]]
        request_options=None,  # type: Optional["models.RequestOptions"]
        **kwargs  # type: Any
    ):
        # type: (...) -> object
        """Retrieves a document from the index.

        :param key: The key of the document to retrieve.
        :type key: str
        :param selected_fields: List of field names to retrieve for the document; Any field not
         retrieved will be missing from the returned document.
        :type selected_fields: list[str]
        :param request_options: Parameter group.
        :type request_options: ~azure.search.documents.models.RequestOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: object, or the result of cls(response)
        :rtype: object
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[object]
        # Map well-known HTTP failures onto azure-core exception types; callers may extend the map.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # Flatten the optional RequestOptions parameter group.
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        accept = "application/json"

        # Construct URL: the document key is a path parameter in the OData-style /docs('{key}') route.
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexName': self._serialize.url("self._config.index_name", self._config.index_name, 'str'),
            'key': self._serialize.url("key", key, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters ($select restricts the returned fields; comma-joined list).
        query_parameters = {}  # type: Dict[str, Any]
        if selected_fields is not None:
            query_parameters['$select'] = self._serialize.query("selected_fields", selected_fields, '[str]', div=',')
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # Non-2xx responses carry a SearchError payload with the service's diagnostic details.
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)

        # Documents have a user-defined schema, so the payload is deserialized as a plain object.
        deserialized = self._deserialize('object', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/docs(\'{key}\')'}  # type: ignore
    def suggest_get(
        self,
        search_text,  # type: str
        suggester_name,  # type: str
        suggest_options=None,  # type: Optional["models.SuggestOptions"]
        request_options=None,  # type: Optional["models.RequestOptions"]
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.SuggestDocumentsResult"
        """Suggests documents in the index that match the given partial query text.

        :param search_text: The search text to use to suggest documents. Must be at least 1 character,
         and no more than 100 characters.
        :type search_text: str
        :param suggester_name: The name of the suggester as specified in the suggesters collection
         that's part of the index definition.
        :type suggester_name: str
        :param suggest_options: Parameter group.
        :type suggest_options: ~azure.search.documents.models.SuggestOptions
        :param request_options: Parameter group.
        :type request_options: ~azure.search.documents.models.RequestOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SuggestDocumentsResult, or the result of cls(response)
        :rtype: ~azure.search.documents.models.SuggestDocumentsResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.SuggestDocumentsResult"]
        # Map well-known HTTP failures onto azure-core exception types; callers may extend the map.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # Flatten the optional parameter groups into individual locals; None means "omit from the
        # query string" below.
        _filter = None
        _use_fuzzy_matching = None
        _highlight_post_tag = None
        _highlight_pre_tag = None
        _minimum_coverage = None
        _order_by = None
        _search_fields = None
        _select = None
        _top = None
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        if suggest_options is not None:
            _filter = suggest_options.filter
            _use_fuzzy_matching = suggest_options.use_fuzzy_matching
            _highlight_post_tag = suggest_options.highlight_post_tag
            _highlight_pre_tag = suggest_options.highlight_pre_tag
            _minimum_coverage = suggest_options.minimum_coverage
            _order_by = suggest_options.order_by
            _search_fields = suggest_options.search_fields
            _select = suggest_options.select
            _top = suggest_options.top
        api_version = "2020-06-30"
        accept = "application/json"

        # Construct URL from the configured endpoint and index name.
        url = self.suggest_get.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexName': self._serialize.url("self._config.index_name", self._config.index_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters. search and suggesterName are required; list-valued options
        # ($orderby, searchFields, $select) are comma-joined via div=','.
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['search'] = self._serialize.query("search_text", search_text, 'str')
        query_parameters['suggesterName'] = self._serialize.query("suggester_name", suggester_name, 'str')
        if _filter is not None:
            query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str')
        if _use_fuzzy_matching is not None:
            query_parameters['fuzzy'] = self._serialize.query("use_fuzzy_matching", _use_fuzzy_matching, 'bool')
        if _highlight_post_tag is not None:
            query_parameters['highlightPostTag'] = self._serialize.query("highlight_post_tag", _highlight_post_tag, 'str')
        if _highlight_pre_tag is not None:
            query_parameters['highlightPreTag'] = self._serialize.query("highlight_pre_tag", _highlight_pre_tag, 'str')
        if _minimum_coverage is not None:
            query_parameters['minimumCoverage'] = self._serialize.query("minimum_coverage", _minimum_coverage, 'float')
        if _order_by is not None:
            query_parameters['$orderby'] = self._serialize.query("order_by", _order_by, '[str]', div=',')
        if _search_fields is not None:
            query_parameters['searchFields'] = self._serialize.query("search_fields", _search_fields, '[str]', div=',')
        if _select is not None:
            query_parameters['$select'] = self._serialize.query("select", _select, '[str]', div=',')
        if _top is not None:
            query_parameters['$top'] = self._serialize.query("top", _top, 'int')
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # Non-2xx responses carry a SearchError payload with the service's diagnostic details.
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('SuggestDocumentsResult', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    suggest_get.metadata = {'url': '/docs/search.suggest'}  # type: ignore
    def suggest_post(
        self,
        suggest_request,  # type: "models.SuggestRequest"
        request_options=None,  # type: Optional["models.RequestOptions"]
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.SuggestDocumentsResult"
        """Suggests documents in the index that match the given partial query text.

        POST variant of the suggest operation: all options travel in the JSON body instead of the
        query string, avoiding URL-length limits.

        :param suggest_request: The Suggest request.
        :type suggest_request: ~azure.search.documents.models.SuggestRequest
        :param request_options: Parameter group.
        :type request_options: ~azure.search.documents.models.RequestOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SuggestDocumentsResult, or the result of cls(response)
        :rtype: ~azure.search.documents.models.SuggestDocumentsResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.SuggestDocumentsResult"]
        # Map well-known HTTP failures onto azure-core exception types; callers may extend the map.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # Flatten the optional RequestOptions parameter group.
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL from the configured endpoint and index name.
        url = self.suggest_post.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexName': self._serialize.url("self._config.index_name", self._config.index_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the SuggestRequest model as the JSON request body.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(suggest_request, 'SuggestRequest')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # Non-2xx responses carry a SearchError payload with the service's diagnostic details.
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('SuggestDocumentsResult', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    suggest_post.metadata = {'url': '/docs/search.post.suggest'}  # type: ignore
    def index(
        self,
        batch,  # type: "models.IndexBatch"
        request_options=None,  # type: Optional["models.RequestOptions"]
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.IndexDocumentsResult"
        """Sends a batch of document write actions to the index.

        :param batch: The batch of index actions.
        :type batch: ~azure.search.documents.models.IndexBatch
        :param request_options: Parameter group.
        :type request_options: ~azure.search.documents.models.RequestOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: IndexDocumentsResult, or the result of cls(response)
        :rtype: ~azure.search.documents.models.IndexDocumentsResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.IndexDocumentsResult"]
        # Map well-known HTTP failures onto azure-core exception types; callers may extend the map.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # Flatten the optional RequestOptions parameter group.
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL from the configured endpoint and index name.
        url = self.index.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexName': self._serialize.url("self._config.index_name", self._config.index_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the IndexBatch model as the JSON request body.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(batch, 'IndexBatch')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 207 (Multi-Status) is a valid outcome: some actions in the batch succeeded and some
        # failed; per-action results are reported in the IndexDocumentsResult either way.
        if response.status_code not in [200, 207]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)

        if response.status_code == 200:
            deserialized = self._deserialize('IndexDocumentsResult', pipeline_response)

        if response.status_code == 207:
            deserialized = self._deserialize('IndexDocumentsResult', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    index.metadata = {'url': '/docs/search.index'}  # type: ignore
    def autocomplete_get(
        self,
        search_text,  # type: str
        suggester_name,  # type: str
        request_options=None,  # type: Optional["models.RequestOptions"]
        autocomplete_options=None,  # type: Optional["models.AutocompleteOptions"]
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.AutocompleteResult"
        """Autocompletes incomplete query terms based on input text and matching terms in the index.

        :param search_text: The incomplete term which should be auto-completed.
        :type search_text: str
        :param suggester_name: The name of the suggester as specified in the suggesters collection
         that's part of the index definition.
        :type suggester_name: str
        :param request_options: Parameter group.
        :type request_options: ~azure.search.documents.models.RequestOptions
        :param autocomplete_options: Parameter group.
        :type autocomplete_options: ~azure.search.documents.models.AutocompleteOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: AutocompleteResult, or the result of cls(response)
        :rtype: ~azure.search.documents.models.AutocompleteResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.AutocompleteResult"]
        # Map well-known HTTP failures onto azure-core exception types; callers may extend the map.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # Flatten the optional parameter groups into individual locals; None means "omit from the
        # query string" below.
        _x_ms_client_request_id = None
        _autocomplete_mode = None
        _filter = None
        _use_fuzzy_matching = None
        _highlight_post_tag = None
        _highlight_pre_tag = None
        _minimum_coverage = None
        _search_fields = None
        _top = None
        if autocomplete_options is not None:
            _autocomplete_mode = autocomplete_options.autocomplete_mode
            _filter = autocomplete_options.filter
            _use_fuzzy_matching = autocomplete_options.use_fuzzy_matching
            _highlight_post_tag = autocomplete_options.highlight_post_tag
            _highlight_pre_tag = autocomplete_options.highlight_pre_tag
            _minimum_coverage = autocomplete_options.minimum_coverage
            _search_fields = autocomplete_options.search_fields
            _top = autocomplete_options.top
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        accept = "application/json"

        # Construct URL from the configured endpoint and index name.
        url = self.autocomplete_get.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexName': self._serialize.url("self._config.index_name", self._config.index_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters. search and suggesterName are required; searchFields is comma-joined.
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        query_parameters['search'] = self._serialize.query("search_text", search_text, 'str')
        query_parameters['suggesterName'] = self._serialize.query("suggester_name", suggester_name, 'str')
        if _autocomplete_mode is not None:
            query_parameters['autocompleteMode'] = self._serialize.query("autocomplete_mode", _autocomplete_mode, 'str')
        if _filter is not None:
            query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str')
        if _use_fuzzy_matching is not None:
            query_parameters['fuzzy'] = self._serialize.query("use_fuzzy_matching", _use_fuzzy_matching, 'bool')
        if _highlight_post_tag is not None:
            query_parameters['highlightPostTag'] = self._serialize.query("highlight_post_tag", _highlight_post_tag, 'str')
        if _highlight_pre_tag is not None:
            query_parameters['highlightPreTag'] = self._serialize.query("highlight_pre_tag", _highlight_pre_tag, 'str')
        if _minimum_coverage is not None:
            query_parameters['minimumCoverage'] = self._serialize.query("minimum_coverage", _minimum_coverage, 'float')
        if _search_fields is not None:
            query_parameters['searchFields'] = self._serialize.query("search_fields", _search_fields, '[str]', div=',')
        if _top is not None:
            query_parameters['$top'] = self._serialize.query("top", _top, 'int')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # Non-2xx responses carry a SearchError payload with the service's diagnostic details.
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('AutocompleteResult', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    autocomplete_get.metadata = {'url': '/docs/search.autocomplete'}  # type: ignore
    def autocomplete_post(
        self,
        autocomplete_request,  # type: "models.AutocompleteRequest"
        request_options=None,  # type: Optional["models.RequestOptions"]
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.AutocompleteResult"
        """Autocompletes incomplete query terms based on input text and matching terms in the index.

        POST variant of the autocomplete operation: the full request definition is
        sent as a JSON body instead of individual query parameters.

        :param autocomplete_request: The definition of the Autocomplete request.
        :type autocomplete_request: ~azure.search.documents.models.AutocompleteRequest
        :param request_options: Parameter group. Only ``x_ms_client_request_id`` is
            read from it here (forwarded as a tracing header).
        :type request_options: ~azure.search.documents.models.RequestOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: AutocompleteResult, or the result of cls(response)
        :rtype: ~azure.search.documents.models.AutocompleteResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.AutocompleteResult"]
        # Map auth/conflict status codes to the matching azure-core exceptions;
        # callers may extend/override the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        _x_ms_client_request_id = None
        if request_options is not None:
            _x_ms_client_request_id = request_options.x_ms_client_request_id
        api_version = "2020-06-30"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL (endpoint/index name come from the client configuration)
        url = self.autocomplete_post.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'indexName': self._serialize.url("self._config.index_name", self._config.index_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        if _x_ms_client_request_id is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("x_ms_client_request_id", _x_ms_client_request_id, 'str')
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # Serialize the request model into the JSON body
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(autocomplete_request, 'AutocompleteRequest')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # Any non-200 answer is surfaced as an HttpResponseError carrying the
        # service-provided SearchError model.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.SearchError, response)
            raise HttpResponseError(response=response, model=error)
        deserialized = self._deserialize('AutocompleteResult', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    autocomplete_post.metadata = {'url': '/docs/search.post.autocomplete'}  # type: ignore
| 50.461259 | 160 | 0.670161 |
e8788c9fc7223a38447cc88811a67ef9cd5dd611 | 218 | py | Python | World/Object/Unit/Pet/model.py | sundayz/idewave-core | 5bdb88892173c9c3e8c85f431cf9b5dbd9f23941 | [
"Apache-2.0"
] | 10 | 2019-06-29T19:24:52.000Z | 2021-02-21T22:45:57.000Z | World/Object/Unit/Pet/model.py | sundayz/idewave-core | 5bdb88892173c9c3e8c85f431cf9b5dbd9f23941 | [
"Apache-2.0"
] | 4 | 2019-08-15T07:03:36.000Z | 2021-06-02T13:01:25.000Z | World/Object/Unit/Pet/model.py | sundayz/idewave-core | 5bdb88892173c9c3e8c85f431cf9b5dbd9f23941 | [
"Apache-2.0"
] | 8 | 2019-06-30T22:47:48.000Z | 2021-02-20T19:21:30.000Z | from World.Object.Unit.model import Unit
from World.Object.Constants.HighGuid import HighGuid
class Pet(Unit):
    """Pet game entity: a :class:`Unit` tagged with the pet-specific high GUID."""
    def __init__(self) -> None:
        super().__init__()
        # HIGHGUID_PET presumably marks the object category within the packed
        # GUID — confirm against how HighGuid is consumed elsewhere.
        self.high_guid = HighGuid.HIGHGUID_PET.value
| 21.8 | 52 | 0.724771 |
71170500c123e08d326c6b1f5370d9c4000903df | 8,358 | py | Python | tests/test_models_albert.py | liuzh91/gluon-nlp | 189bbdcc56d8e58aa908963949687b99ff9a3cff | [
"Apache-2.0"
] | null | null | null | tests/test_models_albert.py | liuzh91/gluon-nlp | 189bbdcc56d8e58aa908963949687b99ff9a3cff | [
"Apache-2.0"
] | null | null | null | tests/test_models_albert.py | liuzh91/gluon-nlp | 189bbdcc56d8e58aa908963949687b99ff9a3cff | [
"Apache-2.0"
] | null | null | null | import pytest
import numpy as np
from numpy.testing import assert_allclose
import mxnet as mx
import tempfile
from gluonnlp.models.albert import AlbertModel, AlbertForMLM, AlbertForPretrain,\
list_pretrained_albert, get_pretrained_albert
mx.npx.set_np()
def get_test_cfg():
vocab_size = 500
num_token_types = 3
num_layers = 3
num_heads = 2
units = 64
hidden_size = 96
hidden_dropout_prob = 0.0
attention_dropout_prob = 0.0
cfg = AlbertModel.get_cfg().clone()
cfg.defrost()
cfg.MODEL.vocab_size = vocab_size
cfg.MODEL.num_token_types = num_token_types
cfg.MODEL.units = units
cfg.MODEL.hidden_size = hidden_size
cfg.MODEL.num_heads = num_heads
cfg.MODEL.num_layers = num_layers
cfg.MODEL.hidden_dropout_prob = hidden_dropout_prob
cfg.MODEL.attention_dropout_prob = attention_dropout_prob
return cfg
@pytest.mark.parametrize('static_alloc,static_shape', [(False, False),
                                                       (True, True)])
@pytest.mark.parametrize('compute_layout', ['auto', 'NT', 'TN'])
def test_albert_backbone(static_alloc, static_shape, compute_layout):
    """The backbone must give the same outputs in NT and TN layouts and must
    zero out (mask) every position past ``valid_length``."""
    batch_size = 3
    cfg = get_test_cfg()
    cfg.defrost()
    cfg.MODEL.compute_layout = compute_layout
    cfg.freeze()
    model = AlbertModel.from_cfg(cfg, use_pooler=True)
    model.initialize()
    model.hybridize(static_alloc=static_alloc, static_shape=static_shape)
    # A second model in time-major (TN) layout sharing the same parameters,
    # so both must produce numerically matching outputs.
    cfg_tn = cfg.clone()
    cfg_tn.defrost()
    cfg_tn.MODEL.layout = 'TN'
    cfg_tn.freeze()
    model_tn = AlbertModel.from_cfg(cfg_tn, use_pooler=True)
    model_tn.share_parameters(model.collect_params())
    model_tn.hybridize(static_alloc=static_alloc, static_shape=static_shape)
    for seq_length in [64, 96]:
        valid_length = mx.np.random.randint(seq_length // 2, seq_length, (batch_size,))
        inputs = mx.np.random.randint(0, cfg.MODEL.vocab_size, (batch_size, seq_length))
        token_types = mx.np.random.randint(0, cfg.MODEL.num_token_types, (batch_size, seq_length))
        contextual_embedding, pooled_out = model(inputs, token_types, valid_length)
        contextual_embedding_tn, pooled_out_tn = model_tn(inputs.T, token_types.T, valid_length)
        # Verify layout: TN output transposed back must equal the NT output
        assert_allclose(np.swapaxes(contextual_embedding_tn.asnumpy(), 0, 1),
                        contextual_embedding.asnumpy(), 1E-4, 1E-4)
        assert_allclose(pooled_out_tn.asnumpy(), pooled_out.asnumpy(), 1E-4, 1E-4)
        assert contextual_embedding.shape == (batch_size, seq_length, cfg.MODEL.units)
        assert pooled_out.shape == (batch_size, cfg.MODEL.units)
        # Ensure the embeddings that exceed valid_length are masked
        contextual_embedding_np = contextual_embedding.asnumpy()
        pooled_out_np = pooled_out.asnumpy()
        for i in range(batch_size):
            ele_valid_length = valid_length[i].asnumpy()
            assert_allclose(contextual_embedding_np[i, ele_valid_length:],
                            np.zeros_like(contextual_embedding_np[i, ele_valid_length:]),
                            1E-5, 1E-5)
        # Ensure that the content are correctly masked: appending extra tokens
        # past valid_length must not change the outputs for the valid prefix.
        new_inputs = mx.np.concatenate([inputs, inputs[:, :5]], axis=-1)
        new_token_types = mx.np.concatenate([token_types, token_types[:, :5]], axis=-1)
        new_contextual_embedding, new_pooled_out = \
            model(new_inputs, new_token_types, valid_length)
        new_contextual_embedding_np = new_contextual_embedding.asnumpy()
        new_pooled_out_np = new_pooled_out.asnumpy()
        for i in range(batch_size):
            ele_valid_length = valid_length[i].asnumpy()
            assert_allclose(new_contextual_embedding_np[i, :ele_valid_length],
                            contextual_embedding_np[i, :ele_valid_length], 1E-5, 1E-5)
        assert_allclose(new_pooled_out_np, pooled_out_np, 1E-4, 1E-4)
@pytest.mark.parametrize('compute_layout', ['auto', 'NT', 'TN'])
def test_albert_for_mlm_model(compute_layout):
    """The MLM head must return identical scores for NT and TN layout models
    that share parameters, and score shapes must match (batch, masks, vocab)."""
    batch_size = 3
    cfg = get_test_cfg()
    cfg.defrost()
    cfg.MODEL.compute_layout = compute_layout
    cfg.freeze()
    albert_mlm_model = AlbertForMLM(backbone_cfg=cfg)
    albert_mlm_model.initialize()
    albert_mlm_model.hybridize()
    # Parameter-sharing TN twin used for the layout-equivalence check
    cfg_tn = cfg.clone()
    cfg_tn.defrost()
    cfg_tn.MODEL.layout = 'TN'
    cfg_tn.freeze()
    albert_mlm_tn_model = AlbertForMLM(backbone_cfg=cfg_tn)
    albert_mlm_tn_model.share_parameters(albert_mlm_model.collect_params())
    albert_mlm_tn_model.hybridize()
    num_mask = 16
    seq_length = 64
    inputs = mx.np.random.randint(0, cfg.MODEL.vocab_size, (batch_size, seq_length))
    token_types = mx.np.random.randint(0, cfg.MODEL.num_token_types, (batch_size, seq_length))
    valid_length = mx.np.random.randint(seq_length // 2, seq_length, (batch_size,))
    masked_positions = mx.np.random.randint(0, seq_length // 2, (batch_size, num_mask))
    contextual_embeddings, pooled_out, mlm_scores = albert_mlm_model(inputs, token_types, valid_length, masked_positions)
    contextual_embeddings_tn, pooled_out_tn, mlm_scores_tn = albert_mlm_tn_model(inputs.T, token_types.T, valid_length, masked_positions)
    assert_allclose(np.swapaxes(contextual_embeddings_tn.asnumpy(), 0, 1),
                    contextual_embeddings.asnumpy(), 1E-4, 1E-4)
    assert_allclose(pooled_out_tn.asnumpy(), pooled_out.asnumpy(), 1E-4, 1E-4)
    assert_allclose(mlm_scores_tn.asnumpy(), mlm_scores.asnumpy(), 1E-4, 1E-4)
    assert mlm_scores.shape == (batch_size, num_mask, cfg.MODEL.vocab_size)
@pytest.mark.parametrize('compute_layout', ['auto', 'NT', 'TN'])
def test_albert_for_pretrain_model(compute_layout):
    """Pretrain head (MLM + sentence-order prediction) must be layout-invariant
    and emit correctly shaped mlm/sop scores."""
    batch_size = 3
    cfg = get_test_cfg()
    cfg.defrost()
    cfg.MODEL.compute_layout = compute_layout
    cfg.freeze()
    albert_pretrain_model = AlbertForPretrain(backbone_cfg=cfg)
    albert_pretrain_model.initialize()
    albert_pretrain_model.hybridize()
    # Parameter-sharing TN twin used for the layout-equivalence check
    cfg_tn = cfg.clone()
    cfg_tn.defrost()
    cfg_tn.MODEL.layout = 'TN'
    cfg_tn.freeze()
    albert_pretrain_model_tn = AlbertForPretrain(backbone_cfg=cfg_tn)
    albert_pretrain_model_tn.share_parameters(albert_pretrain_model.collect_params())
    albert_pretrain_model_tn.hybridize()
    num_mask = 16
    seq_length = 64
    inputs = mx.np.random.randint(0, cfg.MODEL.vocab_size, (batch_size, seq_length))
    token_types = mx.np.random.randint(0, cfg.MODEL.num_token_types, (batch_size, seq_length))
    valid_length = mx.np.random.randint(seq_length // 2, seq_length, (batch_size,))
    masked_positions = mx.np.random.randint(0, seq_length // 2, (batch_size, num_mask))
    contextual_embeddings, pooled_out, sop_score, mlm_scores =\
        albert_pretrain_model(inputs, token_types, valid_length, masked_positions)
    contextual_embeddings_tn, pooled_out_tn, sop_score_tn, mlm_scores_tn = \
        albert_pretrain_model_tn(inputs.T, token_types.T, valid_length, masked_positions)
    assert_allclose(np.swapaxes(contextual_embeddings_tn.asnumpy(), 0, 1),
                    contextual_embeddings.asnumpy(), 1E-4, 1E-4)
    assert_allclose(pooled_out_tn.asnumpy(), pooled_out.asnumpy(), 1E-4, 1E-4)
    assert_allclose(sop_score.asnumpy(), sop_score_tn.asnumpy(), 1E-4, 1E-4)
    assert_allclose(mlm_scores.asnumpy(), mlm_scores_tn.asnumpy(), 1E-4, 1E-4)
    assert mlm_scores.shape == (batch_size, num_mask, cfg.MODEL.vocab_size)
    # Sentence-order prediction is a binary classification per sample
    assert sop_score.shape == (batch_size, 2)
def test_list_pretrained_albert():
    """The registry of pretrained ALBERT model names must not be empty."""
    pretrained_names = list_pretrained_albert()
    assert len(pretrained_names) > 0
@pytest.mark.remote_required
@pytest.mark.parametrize('model_name', list_pretrained_albert())
def test_albert_get_pretrained(model_name):
    """Downloading a pretrained model must yield a loadable backbone and,
    when available, loadable MLM weights (network access required)."""
    assert len(list_pretrained_albert()) > 0
    with tempfile.TemporaryDirectory() as root:
        cfg, tokenizer, backbone_params_path, mlm_params_path =\
            get_pretrained_albert(model_name, load_backbone=True, load_mlm=True, root=root)
        # Config and tokenizer must agree on the vocabulary size
        assert cfg.MODEL.vocab_size == len(tokenizer.vocab)
        albert_model = AlbertModel.from_cfg(cfg)
        albert_model.load_parameters(backbone_params_path)
        albert_mlm_model = AlbertForMLM(cfg)
        if mlm_params_path is not None:
            albert_mlm_model.load_parameters(mlm_params_path)
        # Just load the backbone into a fresh MLM model (head stays uninitialized)
        albert_mlm_model = AlbertForMLM(cfg)
        albert_mlm_model.backbone_model.load_parameters(backbone_params_path)
| 47.220339 | 137 | 0.717755 |
5502fb25a1b152675ef52545696485dca9a83678 | 11,982 | py | Python | tests/apps/courses/test_templates_course_run_detail.py | Verisage/richie | 8a78d3bc6cde74d6252cbfc51a2d195e26be64fc | [
"MIT"
] | null | null | null | tests/apps/courses/test_templates_course_run_detail.py | Verisage/richie | 8a78d3bc6cde74d6252cbfc51a2d195e26be64fc | [
"MIT"
] | null | null | null | tests/apps/courses/test_templates_course_run_detail.py | Verisage/richie | 8a78d3bc6cde74d6252cbfc51a2d195e26be64fc | [
"MIT"
] | null | null | null | """
End-to-end tests for the course run detail view
"""
from datetime import datetime
from unittest import mock
from django.utils import timezone
import pytz
from cms.test_utils.testcases import CMSTestCase
from richie.apps.core.factories import UserFactory
from richie.apps.courses.factories import (
CategoryFactory,
CourseFactory,
CourseRunFactory,
OrganizationFactory,
)
from richie.apps.courses.models import CourseRun, CourseState
class CourseRunCMSTestCase(CMSTestCase):
    """
    End-to-end test suite validating the content and UX of the course run detail view.

    Note that related draft items (Person, Organization) are only displayed on a
    draft course page so admins can preview them, but draft items are hidden from
    the published page so regular users cannot see them.
    """
    def test_templates_course_run_detail_cms_published_content(self):
        """
        Validate that the important elements are displayed on a published course run page
        """
        categories = CategoryFactory.create_batch(4)
        organizations = OrganizationFactory.create_batch(4)
        course = CourseFactory(
            page_title="Very interesting course",
            fill_organizations=organizations,
            fill_categories=categories,
            should_publish=True,
        )
        course_run = CourseRunFactory(
            page_title="first session",
            page_parent=course.extended_object,
            resource_link="https://www.example.com/enroll",
            enrollment_start=datetime(2018, 10, 21, tzinfo=pytz.utc),
            enrollment_end=datetime(2019, 1, 18, tzinfo=pytz.utc),
            start=datetime(2018, 12, 10, tzinfo=pytz.utc),
            end=datetime(2019, 2, 14, tzinfo=pytz.utc),
            languages=["en", "fr"],
        )
        page = course_run.extended_object
        # Publish only 2 out of 4 categories and 2 out of 4 organizations
        categories[0].extended_object.publish("en")
        categories[1].extended_object.publish("en")
        organizations[0].extended_object.publish("en")
        organizations[1].extended_object.publish("en")
        # The unpublished objects may have been published and unpublished which puts them in a
        # status different from objects that have never been published.
        # We want to test both cases.
        categories[2].extended_object.publish("en")
        categories[2].extended_object.unpublish("en")
        organizations[2].extended_object.publish("en")
        organizations[2].extended_object.unpublish("en")
        # The page should not be visible before it is published
        url = page.get_absolute_url()
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)
        # Now publish the page and check its content
        page.publish("en")
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(
            response,
            "<title>First session - Very interesting course</title>",
            html=True,
        )
        self.assertContains(
            response,
            '<h1 class="course-detail__content__title">'
            "Very interesting course<br>first session</h1>",
            html=True,
        )
        # Only published categories should be present on the page
        for category in categories[:2]:
            self.assertContains(
                response,
                (
                    '<a class="category-plugin-tag" href="{:s}">'
                    '<div class="category-plugin-tag__title">{:s}</div></a>'
                ).format(
                    category.extended_object.get_absolute_url(),
                    category.extended_object.get_title(),
                ),
                html=True,
            )
        for category in categories[-2:]:
            self.assertNotContains(response, category.extended_object.get_title())
        # Public organizations should be in response content
        for organization in organizations[:2]:
            self.assertContains(
                response,
                '<div class="organization-glimpse__title">{title:s}</div>'.format(
                    title=organization.extended_object.get_title()
                ),
                html=True,
            )
        # Draft organizations should not be in response content
        for organization in organizations[-2:]:
            self.assertNotContains(
                response, organization.extended_object.get_title(), html=True
            )
        # The course run details should be on the page
        self.assertContains(
            response, "<dt>Enrollment starts</dt><dd>Oct. 21, 2018</dd>"
        )
        self.assertContains(response, "<dt>Enrollment ends</dt><dd>Jan. 18, 2019</dd>")
        self.assertContains(response, "<dt>Course starts</dt><dd>Dec. 10, 2018</dd>")
        self.assertContains(response, "<dt>Course ends</dt><dd>Feb. 14, 2019</dd>")
        self.assertContains(response, "<dt>Languages</dt><dd>English and french</dd>")
    def test_templates_course_run_detail_cms_draft_content(self):
        """
        A staff user should see a draft course run including its draft elements,
        each annotated with a "draft" CSS modifier.
        """
        user = UserFactory(is_staff=True, is_superuser=True)
        self.client.login(username=user.username, password="password")
        categories = CategoryFactory.create_batch(4)
        organizations = OrganizationFactory.create_batch(4)
        course = CourseFactory(
            page_title="Very interesting course",
            fill_organizations=organizations,
            fill_categories=categories,
            should_publish=True,
        )
        course_run = CourseRunFactory(
            page_title="first session",
            page_parent=course.extended_object,
            resource_link="https://www.example.com/enroll",
            enrollment_start=datetime(2018, 10, 21, tzinfo=pytz.utc),
            enrollment_end=datetime(2019, 1, 18, tzinfo=pytz.utc),
            start=datetime(2018, 12, 10, tzinfo=pytz.utc),
            end=datetime(2019, 2, 14, tzinfo=pytz.utc),
            languages=["en", "fr"],
        )
        page = course_run.extended_object
        # Publish only 2 out of 4 categories and 2 out of 4 organizations
        categories[0].extended_object.publish("en")
        categories[1].extended_object.publish("en")
        organizations[0].extended_object.publish("en")
        organizations[1].extended_object.publish("en")
        # The unpublished objects may have been published and unpublished which puts them in a
        # status different from objects that have never been published.
        # We want to test both cases.
        categories[2].extended_object.publish("en")
        categories[2].extended_object.unpublish("en")
        organizations[2].extended_object.publish("en")
        organizations[2].extended_object.unpublish("en")
        # The page should be visible as draft to the staff user
        url = page.get_absolute_url()
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(
            response,
            "<title>First session - Very interesting course</title>",
            html=True,
        )
        self.assertContains(
            response,
            '<h1 class="course-detail__content__title">'
            "Very interesting course<br>first session</h1>",
            html=True,
        )
        # Draft and public organizations should all be present on the page
        for organization in organizations:
            self.assertContains(
                response,
                '<div class="organization-glimpse__title">{title:s}</div>'.format(
                    title=organization.extended_object.get_title()
                ),
                html=True,
            )
        # Draft organizations should be annotated for styling
        self.assertContains(response, "organization-glimpse--draft", count=2)
        # The published categories should be present on the page
        for category in categories[:2]:
            self.assertContains(
                response,
                (
                    '<a class="category-plugin-tag" href="{:s}">'
                    '<div class="category-plugin-tag__title">{:s}</div></a>'
                ).format(
                    category.extended_object.get_absolute_url(),
                    category.extended_object.get_title(),
                ),
                html=True,
            )
        # Draft categories should also be present on the page with an annotation for styling
        for category in categories[-2:]:
            self.assertContains(
                response,
                (
                    '<a class="{element:s} {element:s}--draft" href="{url:s}">'
                    '<div class="category-plugin-tag__title">{title:s}</div></a>'
                ).format(
                    url=category.extended_object.get_absolute_url(),
                    element="category-plugin-tag",
                    title=category.extended_object.get_title(),
                ),
                html=True,
            )
        # The course run details should be on the page
        self.assertContains(
            response, "<dt>Enrollment starts</dt><dd>Oct. 21, 2018</dd>"
        )
        self.assertContains(response, "<dt>Enrollment ends</dt><dd>Jan. 18, 2019</dd>")
        self.assertContains(response, "<dt>Course starts</dt><dd>Dec. 10, 2018</dd>")
        self.assertContains(response, "<dt>Course ends</dt><dd>Feb. 14, 2019</dd>")
        self.assertContains(response, "<dt>Languages</dt><dd>English and french</dd>")
    def test_templates_course_run_detail_no_index(self):
        """
        A course run page should not be indexable by search engine robots.
        """
        course = CourseFactory(should_publish=True)
        course_run = CourseRunFactory(
            page_parent=course.extended_object, should_publish=True
        )
        url = course_run.extended_object.get_absolute_url()
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, '<meta name="robots" content="noindex">')
    def prepare_to_test_state(self, state):
        """
        Not a test.
        Create objects and mock the course run's `state` property to test its
        impact on template rendering. Returns the rendered response.
        """
        course = CourseFactory(should_publish=True)
        course_run = CourseRunFactory(
            page_parent=course.extended_object,
            resource_link="https://www.example.com/enroll",
            should_publish=True,
        )
        url = course_run.extended_object.get_absolute_url()
        with mock.patch.object(
            CourseRun, "state", new_callable=mock.PropertyMock, return_value=state
        ):
            response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        return response
    def test_templates_course_run_detail_state_with_cta(self):
        """A course run in a state with a call to action should include a link and the CTA."""
        response = self.prepare_to_test_state(CourseState(0, timezone.now()))
        self.assertContains(
            response,
            '<a class="course-detail__content__run__block__cta" '
            'href="https://www.example.com/enroll">Enroll now</a>',
            html=True,
        )
    def test_templates_course_run_detail_state_without_cta(self):
        """A course run in a state without a call to action should include a state button."""
        response = self.prepare_to_test_state(CourseState(6))
        self.assertContains(
            response,
            '<button class="course-detail__content__run__block__cta '
            'course-detail__content__run__block__cta--projected">To be scheduled</button>',
            html=True,
        )
| 40.208054 | 94 | 0.614505 |
f68643981530822059227a47bb3016d4498914e8 | 3,488 | py | Python | local_configs/11.2/b0_c0s32.py | wzpscott/SegformerDistillation | 6558757f5071251410e90270e197755860a6f41c | [
"DOC"
] | null | null | null | local_configs/11.2/b0_c0s32.py | wzpscott/SegformerDistillation | 6558757f5071251410e90270e197755860a6f41c | [
"DOC"
] | null | null | null | local_configs/11.2/b0_c0s32.py | wzpscott/SegformerDistillation | 6558757f5071251410e90270e197755860a6f41c | [
"DOC"
] | null | null | null | _base_ = [
    '../_base_/datasets/ade20k_repeat.py',
    '../_base_/default_runtime.py',
    '../_base_/schedules/schedule_160k_adamw.py'
]
norm_cfg = dict(type='SyncBN', requires_grad=True)
# Distillation transform hyper-parameters used below and in the work_dir name:
# c: channel group size, s: spatial kernel/stride size.
c = 0
s = 32
model = dict(
    type='SDModule',
    # Student: SegFormer MiT-B0 backbone with a 256-dim decode head
    cfg_s=dict(
        type='EncoderDecoder',
        pretrained='pretrained/mit_b0.pth',
        backbone=dict(
            type='mit_b0',
            style='pytorch'),
        decode_head=dict(
            type='SegFormerHead',
            in_channels=[32, 64, 160, 256],
            in_index=[0, 1, 2, 3],
            feature_strides=[4, 8, 16, 32],
            channels=128,
            dropout_ratio=0.1,
            num_classes=150,
            norm_cfg=norm_cfg,
            align_corners=False,
            decoder_params=dict(embed_dim=256),
            loss_decode=dict(type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0)),
    ),
    # Teacher: SegFormer MiT-B4 backbone with a 768-dim decode head
    cfg_t=dict(
        type='EncoderDecoder',
        backbone=dict(
            type='mit_b4',
            style='pytorch'),
        decode_head=dict(
            type='SegFormerHead',
            in_channels=[64, 128, 320, 512],
            in_index=[0, 1, 2, 3],
            feature_strides=[4, 8, 16, 32],
            channels=128,
            dropout_ratio=0.1,
            num_classes=150,
            norm_cfg=norm_cfg,
            align_corners=False,
            decoder_params=dict(embed_dim=768),
            loss_decode=dict(type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0))
    ),
    # KL-divergence distillation on the final logits, spatially pooled with
    # kernel/stride = s. The commented-out entry below is an alternative
    # channel-wise variant kept for experimentation.
    distillation = [
        {'student_layer':'decode_head.linear_pred',
        'teacher_layer':'decode_head.linear_pred',
        'loss_name':'KLDLoss',
        'loss_config':{
            'weight':1,
            'tau':1,
            'reshape_config':'logits',
            'mask_config':False,
            'transform_config':{'loss_type':'spatial','kernel_size':s,'stride':s},
            'ff_config':False,
            # 'earlystop_config':120000,
        },
        },
        # {'student_layer':'decode_head.linear_pred',
        # 'teacher_layer':'decode_head.linear_pred',
        # 'loss_name':'KLDLoss',
        # 'loss_config':{
        #     'weight':1,
        #     'tau':1,
        #     'reshape_config':'logits',
        #     'resize_config':{'mode':'bilinear','align_corners':False},
        #     'mask_config':False,
        #     'transform_config':{'loss_type':'channel','group_size':c},
        #     'ff_config':False,
        #     # 'shift_config':True,
        #     # 'earlystop_config':120000,
        # },
        # },
    ],
    t_pretrain = './pretrained/segformer.b4.512x512.ade.160k.pth',  # teacher's pretrained weights
    train_cfg=dict(),
    test_cfg=dict(mode='whole'),
)
optimizer = dict(_delete_=True, type='AdamW', lr=0.00006, betas=(0.9,0.999), weight_decay=0.01,
                 paramwise_cfg=dict(custom_keys={'pos_block': dict(decay_mult=0.),
                                                'norm': dict(decay_mult=0.),
                                                'head': dict(lr_mult=10.)
                                                }))
lr_config = dict(_delete_=True, policy='poly',
                 warmup='linear',
                 warmup_iters=1500,
                 warmup_ratio=1e-6,
                 power=1.0, min_lr=0.0, by_epoch=False)
data = dict(samples_per_gpu=2)
evaluation = dict(interval=2000, metric='mIoU')
work_dir = f'/apdcephfs/private_inchzhang/shared_info/11.2/b0_c{c}s{s}'
resume_from = work_dir+'/latest.pth'
| 35.232323 | 95 | 0.527523 |
ff21384752551fb894f5be2364c426f7649632ff | 1,288 | py | Python | mysqlconnection.py | Elizandtech/tennis_paradise | bf97dc2e5a357ff906480cc35e3f5da868c9ff57 | [
"MIT"
] | 1 | 2019-07-24T05:31:30.000Z | 2019-07-24T05:31:30.000Z | mysqlconnection.py | Elizandtech/tennis_paradise | bf97dc2e5a357ff906480cc35e3f5da868c9ff57 | [
"MIT"
] | 5 | 2019-07-24T14:55:09.000Z | 2019-07-24T17:46:06.000Z | mysqlconnection.py | Elizandtech/tennis_paradise | bf97dc2e5a357ff906480cc35e3f5da868c9ff57 | [
"MIT"
] | 1 | 2019-07-24T16:12:59.000Z | 2019-07-24T16:12:59.000Z | import pymysql.cursors
class MySQLConnection:
    """Thin wrapper around a single pymysql database connection.

    ``query_db`` logs the fully interpolated statement and dispatches on the
    statement type:

    * contains "select" -> list of row dicts
    * contains "insert" -> id of the last inserted row
    * anything else     -> True
    * on any error      -> False (error printed; callers test the return value)
    """
    def __init__(self, db):
        # Credentials are intentionally hard-coded for local development.
        connection = pymysql.connect(host = 'localhost',
                            user = 'root',
                            password = 'root',
                            db = db,
                            charset = 'utf8mb4',
                            cursorclass = pymysql.cursors.DictCursor,
                            autocommit = True)
        self.connection = connection
    def query_db(self, query, data=None):
        """Execute *query* with optional *data* parameters and return the result."""
        cursor = self.connection.cursor()
        try:
            # BUGFIX: the original code did `query = cursor.mogrify(query, data)`
            # and then `cursor.execute(query, data)`. mogrify() already
            # interpolates the data, so executing the result WITH data again
            # makes pymysql re-apply %-formatting and breaks on any value
            # containing a literal '%'. Use mogrify() for logging only and
            # let execute() do the (single) escaping pass.
            print("Running Query:\n", cursor.mogrify(query, data))
            cursor.execute(query, data)
            if query.lower().find("select") >= 0:
                result = cursor.fetchall()
                print("Query result:", result)
                return result
            else:
                self.connection.commit()
                if query.lower().find('insert') >= 0:
                    return cursor.lastrowid
                return True
        except Exception as e:
            # Preserve best-effort semantics: report and signal failure rather
            # than propagate, because callers check the falsy return value.
            print("ERROR:", e)
            return False
        finally:
            # The original leaked the cursor; always release it.
            cursor.close()
    def __del__(self):
        self.connection.close()
def connectToMySQL(db):
    """Factory helper: open a :class:`MySQLConnection` to database *db*."""
    return MySQLConnection(db)
| 34.810811 | 77 | 0.468944 |
afdd0e9768e7a838236b2f03be9a6131d27c199e | 1,938 | py | Python | webhook.py | xiangflight/tb2lark-notification | 00ad16cf6d4a703b83bbedc6e296f40355101f8c | [
"Apache-2.0"
] | null | null | null | webhook.py | xiangflight/tb2lark-notification | 00ad16cf6d4a703b83bbedc6e296f40355101f8c | [
"Apache-2.0"
] | null | null | null | webhook.py | xiangflight/tb2lark-notification | 00ad16cf6d4a703b83bbedc6e296f40355101f8c | [
"Apache-2.0"
] | null | null | null | from flask import Flask, request, jsonify
import requests
import json
app = Flask(__name__)
@app.route('/hook', methods=['POST'])
def web_hook():
    """Relay Teambition task webhooks to a Lark (Feishu) custom bot.

    Only ``task.create`` / ``task.update`` / ``task.remove`` events are
    forwarded; every other event is acknowledged with a success body so
    Teambition does not retry. Returns a JSON ``{"code", "status"}`` body.
    """
    import os
    # TODO(review): Teambition signs its requests; the signature is captured
    # here but not yet verified — validate it before trusting the payload.
    signature = request.headers.get('X-Signature', '')
    event = request.json
    event_type = event['event']
    # Human-readable (Chinese) labels for the supported task events.
    task_state_map = {'task.create': '创建任务', 'task.update': '更新任务', 'task.remove': '删除任务'}
    if event_type not in task_state_map:
        return jsonify({"code": 200, "status": 'success'})
    task_state = task_state_map[event_type]
    task_id = event['data']['task']['taskId']
    task_url = 'https://www.teambition.com/task/{}'.format(task_id)
    task_content = event['data']['task']['content']
    # Lark "post" (rich text) message: one line holding the event label plus
    # a clickable link to the task.
    payload = {
        "msg_type": "post",
        "content": {
            "post": {
                "zh_cn": {
                    "title": "Doraemon提醒您",
                    "content": [
                        [
                            {
                                "tag": "text",
                                "text": "{}({}):".format(task_state, task_content)
                            },
                            {
                                "tag": "a",
                                "text": task_url,
                                "href": task_url
                            }
                        ]
                    ]
                }
            }
        }
    }
    # BUGFIX: the original line was the template placeholder
    # `web_hook_url = ${url of your custom lark bot}`, which is not valid
    # Python. Read the bot URL from the environment so the secret stays out
    # of the codebase; missing configuration is reported as an error body.
    web_hook_url = os.environ.get('LARK_WEBHOOK_URL')
    if not web_hook_url:
        return jsonify({"code": 203, "status": 'error'})
    r = requests.post(web_hook_url, data=json.dumps(payload), timeout=10)
    # Lark replies with StatusCode == 0 on success.
    if r.json().get('StatusCode') == 0:
        return jsonify({"code": 200, "status": 'success'})
    return jsonify({"code": 203, "status": 'error'})
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8000)
| 30.28125 | 90 | 0.46646 |
8e48b184bccaa6d9588206ad78f04c09d01fb167 | 22,346 | py | Python | sphinx/util/typing.py | danieleades/sphinx | 1c98aea126919b218766371e1fdf308a1167e95f | [
"BSD-2-Clause"
] | null | null | null | sphinx/util/typing.py | danieleades/sphinx | 1c98aea126919b218766371e1fdf308a1167e95f | [
"BSD-2-Clause"
] | 1,662 | 2015-01-02T11:45:27.000Z | 2015-01-03T12:21:29.000Z | sphinx/util/typing.py | danieleades/sphinx | 1c98aea126919b218766371e1fdf308a1167e95f | [
"BSD-2-Clause"
] | null | null | null | """The composite types for Sphinx."""
import sys
import typing
from struct import Struct
from types import TracebackType
from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Type, TypeVar, Union
from docutils import nodes
from docutils.parsers.rst.states import Inliner
from sphinx.deprecation import RemovedInSphinx60Warning, deprecated_alias
# Compatibility shims -------------------------------------------------------
if sys.version_info > (3, 7):
    from typing import ForwardRef
else:
    # Python 3.6 has no public ForwardRef; wrap the private _ForwardRef with
    # the subset of the 3.7+ interface used by this module.
    from typing import _ForwardRef  # type: ignore
    class ForwardRef:
        """A pseudo ForwardRef class for py36."""
        def __init__(self, arg: Any, is_argument: bool = True) -> None:
            self.arg = arg
        def _evaluate(self, globalns: Dict, localns: Dict) -> Any:
            # Delegate evaluation of the forward reference to the private API.
            ref = _ForwardRef(self.arg)
            return ref._eval_type(globalns, localns)
try:
    from types import UnionType  # type: ignore # python 3.10 or above
except ImportError:
    # Sentinel: PEP 604 ``X | Y`` unions are unavailable before Python 3.10.
    UnionType = None
if False:
# For type annotation
from typing import Type # NOQA # for python3.5.1
# builtin classes that have incorrect __module__
INVALID_BUILTIN_CLASSES = {
Struct: 'struct.Struct', # Before Python 3.9
TracebackType: 'types.TracebackType',
}
def is_invalid_builtin_class(obj: Any) -> bool:
"""Check *obj* is an invalid built-in class."""
try:
return obj in INVALID_BUILTIN_CLASSES
except TypeError: # unhashable type
return False
# Text like nodes which are initialized with text and rawsource
TextlikeNode = Union[nodes.Text, nodes.TextElement]
# type of None
NoneType = type(None)
# path matcher
PathMatcher = Callable[[str], bool]
# common role functions
RoleFunction = Callable[[str, str, str, int, Inliner, Dict[str, Any], List[str]],
Tuple[List[nodes.Node], List[nodes.system_message]]]
# A option spec for directive
OptionSpec = Dict[str, Callable[[str], Any]]
# title getter functions for enumerable nodes (see sphinx.domains.std)
TitleGetter = Callable[[nodes.Node], str]
# inventory data on memory
InventoryItem = Tuple[str, str, str, str]
Inventory = Dict[str, Dict[str, InventoryItem]]
def get_type_hints(obj: Any, globalns: Dict = None, localns: Dict = None) -> Dict[str, Any]:
    """Return a dictionary containing type hints for a function, method, module or class object.

    This is a simple wrapper of `typing.get_type_hints()` that does not raise an error on
    runtime.
    """
    from sphinx.util.inspect import safe_getattr  # lazy loading
    try:
        return typing.get_type_hints(obj, globalns, localns)
    except (NameError, AttributeError, TypeError):
        # NameError: a ForwardRef could not be evaluated (maybe TYPE_CHECKING)
        # AttributeError: the ForwardRef is not runtime checkable
        # TypeError: an invalid object was given; still fall back to the raw
        #   __annotations__, which also covers PEP 604 unions on python <= 3.9
        return safe_getattr(obj, '__annotations__', {})
    except KeyError:
        # a broken class found (refs: https://github.com/sphinx-doc/sphinx/issues/8084)
        return {}
def is_system_TypeVar(typ: Any) -> bool:
    """Return True when *typ* is a TypeVar declared by the ``typing`` module itself."""
    return isinstance(typ, TypeVar) and getattr(typ, '__module__', '') == 'typing'
def restify(cls: Optional[Type], mode: str = 'fully-qualified-except-typing') -> str:
    """Convert python class to a reST reference.

    :param cls: The class/annotation object to render; may also be ``None``,
        ``Ellipsis`` or an already-rendered string.
    :param mode: Specify a method how annotations will be stringified.
        'fully-qualified-except-typing'
            Show the module name and qualified name of the annotation except
            the "typing" module.
        'smart'
            Show the name of the annotation.
    """
    from sphinx.ext.autodoc.mock import ismock, ismockmodule  # lazy loading
    from sphinx.util import inspect  # lazy loading
    # In 'smart' mode the module part is rendered with the "~" shorthand so
    # only the last component is displayed as link text.
    if mode == 'smart':
        modprefix = '~'
    else:
        modprefix = ''
    try:
        if cls is None or cls is NoneType:
            return ':py:obj:`None`'
        elif cls is Ellipsis:
            return '...'
        elif isinstance(cls, str):
            # Already a rendered reference (or a literal); pass through.
            return cls
        elif ismockmodule(cls):
            return ':py:class:`%s%s`' % (modprefix, cls.__name__)
        elif ismock(cls):
            return ':py:class:`%s%s.%s`' % (modprefix, cls.__module__, cls.__name__)
        elif is_invalid_builtin_class(cls):
            # Built-ins whose __module__ is wrong: use the curated dotted name.
            return ':py:class:`%s%s`' % (modprefix, INVALID_BUILTIN_CLASSES[cls])
        elif inspect.isNewType(cls):
            if sys.version_info > (3, 10):
                # newtypes have correct module info since Python 3.10+
                return ':py:class:`%s%s.%s`' % (modprefix, cls.__module__, cls.__name__)
            else:
                return ':py:class:`%s`' % cls.__name__
        elif UnionType and isinstance(cls, UnionType):
            # PEP 604 union (X | Y); a union with None renders as Optional[...]
            if len(cls.__args__) > 1 and None in cls.__args__:
                args = ' | '.join(restify(a, mode) for a in cls.__args__ if a)
                return 'Optional[%s]' % args
            else:
                return ' | '.join(restify(a, mode) for a in cls.__args__)
        elif cls.__module__ in ('__builtin__', 'builtins'):
            if hasattr(cls, '__args__'):
                # Parameterized builtin generic, e.g. list[int]
                return ':py:class:`%s`\\ [%s]' % (
                    cls.__name__,
                    ', '.join(restify(arg, mode) for arg in cls.__args__),
                )
            else:
                return ':py:class:`%s`' % cls.__name__
        else:
            if sys.version_info >= (3, 7):  # py37+
                return _restify_py37(cls, mode)
            else:
                return _restify_py36(cls, mode)
    except (AttributeError, TypeError):
        # Not a class-like object after all; fall back to a plain description.
        return inspect.object_description(cls)
def _restify_py37(cls: Optional[Type], mode: str = 'fully-qualified-except-typing') -> str:
    """Convert python class to a reST reference.

    Helper of :func:`restify` for the typing implementation of Python 3.7+.
    """
    from sphinx.util import inspect  # lazy loading

    if mode == 'smart':
        modprefix = '~'
    else:
        modprefix = ''
    if (inspect.isgenericalias(cls) and
            cls.__module__ == 'typing' and cls.__origin__ is Union):
        # Union
        if len(cls.__args__) > 1 and cls.__args__[-1] is NoneType:
            # a trailing None argument is presented as Optional[...]
            if len(cls.__args__) > 2:
                args = ', '.join(restify(a, mode) for a in cls.__args__[:-1])
                return ':py:obj:`~typing.Optional`\\ [:obj:`~typing.Union`\\ [%s]]' % args
            else:
                return ':py:obj:`~typing.Optional`\\ [%s]' % restify(cls.__args__[0], mode)
        else:
            args = ', '.join(restify(a, mode) for a in cls.__args__)
            return ':py:obj:`~typing.Union`\\ [%s]' % args
    elif inspect.isgenericalias(cls):
        # parameterized generic (e.g. List[int], Dict[str, int], Callable[..., int])
        if isinstance(cls.__origin__, typing._SpecialForm):
            text = restify(cls.__origin__, mode)  # type: ignore
        elif getattr(cls, '_name', None):
            if cls.__module__ == 'typing':
                text = ':py:class:`~%s.%s`' % (cls.__module__, cls._name)
            else:
                text = ':py:class:`%s%s.%s`' % (modprefix, cls.__module__, cls._name)
        else:
            text = restify(cls.__origin__, mode)

        origin = getattr(cls, '__origin__', None)
        if not hasattr(cls, '__args__'):
            # unparameterized alias: nothing to append
            pass
        elif all(is_system_TypeVar(a) for a in cls.__args__):
            # Suppress arguments if all system defined TypeVars (ex. Dict[KT, VT])
            pass
        elif cls.__module__ == 'typing' and cls._name == 'Callable':
            # Callable renders its argument list in extra brackets
            args = ', '.join(restify(a, mode) for a in cls.__args__[:-1])
            text += r"\ [[%s], %s]" % (args, restify(cls.__args__[-1], mode))
        elif cls.__module__ == 'typing' and getattr(origin, '_name', None) == 'Literal':
            # Literal arguments are plain values, not types
            text += r"\ [%s]" % ', '.join(repr(a) for a in cls.__args__)
        elif cls.__args__:
            text += r"\ [%s]" % ", ".join(restify(a, mode) for a in cls.__args__)

        return text
    elif isinstance(cls, typing._SpecialForm):
        return ':py:obj:`~%s.%s`' % (cls.__module__, cls._name)
    elif sys.version_info >= (3, 11) and cls is typing.Any:
        # handle bpo-46998
        return f':py:obj:`~{cls.__module__}.{cls.__name__}`'
    elif hasattr(cls, '__qualname__'):
        if cls.__module__ == 'typing':
            return ':py:class:`~%s.%s`' % (cls.__module__, cls.__qualname__)
        else:
            return ':py:class:`%s%s.%s`' % (modprefix, cls.__module__, cls.__qualname__)
    elif isinstance(cls, ForwardRef):
        return ':py:class:`%s`' % cls.__forward_arg__
    else:
        # not a class (ex. TypeVar)
        if cls.__module__ == 'typing':
            return ':py:obj:`~%s.%s`' % (cls.__module__, cls.__name__)
        else:
            return ':py:obj:`%s%s.%s`' % (modprefix, cls.__module__, cls.__name__)
def _restify_py36(cls: Optional[Type], mode: str = 'fully-qualified-except-typing') -> str:
    """Convert python class to a reST reference.

    Helper of :func:`restify` for the legacy typing internals of Python 3.6
    (``TupleMeta`` / ``GenericMeta``).
    """
    if mode == 'smart':
        modprefix = '~'
    else:
        modprefix = ''

    # work out the display name from whichever attribute py36 typing provides
    module = getattr(cls, '__module__', None)
    if module == 'typing':
        if getattr(cls, '_name', None):
            qualname = cls._name
        elif getattr(cls, '__qualname__', None):
            qualname = cls.__qualname__
        elif getattr(cls, '__forward_arg__', None):
            qualname = cls.__forward_arg__
        elif getattr(cls, '__origin__', None):
            qualname = stringify(cls.__origin__)  # ex. Union
        else:
            qualname = repr(cls).replace('typing.', '')
    elif hasattr(cls, '__qualname__'):
        qualname = '%s%s.%s' % (modprefix, module, cls.__qualname__)
    else:
        qualname = repr(cls)

    if (isinstance(cls, typing.TupleMeta) and  # type: ignore
            not hasattr(cls, '__tuple_params__')):
        # parameterized Tuple[...]
        if module == 'typing':
            reftext = ':py:class:`~typing.%s`' % qualname
        else:
            reftext = ':py:class:`%s%s`' % (modprefix, qualname)

        params = cls.__args__
        if params:
            param_str = ', '.join(restify(p, mode) for p in params)
            return reftext + '\\ [%s]' % param_str
        else:
            return reftext
    elif isinstance(cls, typing.GenericMeta):
        if module == 'typing':
            reftext = ':py:class:`~typing.%s`' % qualname
        else:
            reftext = ':py:class:`%s%s`' % (modprefix, qualname)

        if cls.__args__ is None or len(cls.__args__) <= 2:
            params = cls.__args__
        elif cls.__origin__ == Generator:
            # Generator keeps all three parameters
            params = cls.__args__
        else:  # typing.Callable
            args = ', '.join(restify(arg, mode) for arg in cls.__args__[:-1])
            result = restify(cls.__args__[-1], mode)
            return reftext + '\\ [[%s], %s]' % (args, result)

        if params:
            param_str = ', '.join(restify(p, mode) for p in params)
            return reftext + '\\ [%s]' % (param_str)
        else:
            return reftext
    elif (hasattr(cls, '__origin__') and
            cls.__origin__ is typing.Union):
        params = cls.__args__
        if params is not None:
            if len(params) > 1 and params[-1] is NoneType:
                # trailing None renders as Optional[...]
                if len(params) > 2:
                    param_str = ", ".join(restify(p, mode) for p in params[:-1])
                    return (':py:obj:`~typing.Optional`\\ '
                            '[:py:obj:`~typing.Union`\\ [%s]]' % param_str)
                else:
                    return ':py:obj:`~typing.Optional`\\ [%s]' % restify(params[0], mode)
            else:
                param_str = ', '.join(restify(p, mode) for p in params)
                return ':py:obj:`~typing.Union`\\ [%s]' % param_str
        else:
            return ':py:obj:`Union`'
    elif hasattr(cls, '__qualname__'):
        if cls.__module__ == 'typing':
            return ':py:class:`~%s.%s`' % (cls.__module__, cls.__qualname__)
        else:
            return ':py:class:`%s%s.%s`' % (modprefix, cls.__module__, cls.__qualname__)
    elif hasattr(cls, '_name'):
        # SpecialForm
        if cls.__module__ == 'typing':
            return ':py:obj:`~%s.%s`' % (cls.__module__, cls._name)
        else:
            return ':py:obj:`%s%s.%s`' % (modprefix, cls.__module__, cls._name)
    elif hasattr(cls, '__name__'):
        # not a class (ex. TypeVar)
        if cls.__module__ == 'typing':
            return ':py:obj:`~%s.%s`' % (cls.__module__, cls.__name__)
        else:
            return ':py:obj:`%s%s.%s`' % (modprefix, cls.__module__, cls.__name__)
    else:
        # others (ex. Any)
        if cls.__module__ == 'typing':
            return ':py:obj:`~%s.%s`' % (cls.__module__, qualname)
        else:
            return ':py:obj:`%s%s.%s`' % (modprefix, cls.__module__, qualname)
def stringify(annotation: Any, mode: str = 'fully-qualified-except-typing') -> str:
    """Stringify type annotation object.

    :param annotation: the annotation to render (class, string, typing
        construct, mocked object, ...).
    :param mode: Specify a method how annotations will be stringified.

        'fully-qualified-except-typing'
            Show the module name and qualified name of the annotation except
            the "typing" module.
        'smart'
            Show the name of the annotation.
        'fully-qualified'
            Show the module name and qualified name of the annotation.
    """
    from sphinx.ext.autodoc.mock import ismock, ismockmodule  # lazy loading
    from sphinx.util import inspect  # lazy loading

    if mode == 'smart':
        modprefix = '~'
    else:
        modprefix = ''

    if isinstance(annotation, str):
        if annotation.startswith("'") and annotation.endswith("'"):
            # might be a double Forward-ref'ed type. Go unquoting.
            return annotation[1:-1]
        else:
            return annotation
    elif isinstance(annotation, TypeVar):
        if (annotation.__module__ == 'typing' and
                mode in ('fully-qualified-except-typing', 'smart')):
            return annotation.__name__
        else:
            return modprefix + '.'.join([annotation.__module__, annotation.__name__])
    elif inspect.isNewType(annotation):
        if sys.version_info > (3, 10):
            # newtypes have correct module info since Python 3.10+
            return modprefix + '%s.%s' % (annotation.__module__, annotation.__name__)
        else:
            return annotation.__name__
    elif not annotation:
        # falsy values (None, 0, empty tuple, ...) are rendered via repr()
        return repr(annotation)
    elif annotation is NoneType:
        return 'None'
    elif ismockmodule(annotation):
        return modprefix + annotation.__name__
    elif ismock(annotation):
        return modprefix + '%s.%s' % (annotation.__module__, annotation.__name__)
    elif is_invalid_builtin_class(annotation):
        return modprefix + INVALID_BUILTIN_CLASSES[annotation]
    elif str(annotation).startswith('typing.Annotated'):  # for py310+
        # fall through: Annotated is unwrapped by the version-specific helper
        pass
    elif (getattr(annotation, '__module__', None) == 'builtins' and
            getattr(annotation, '__qualname__', None)):
        if hasattr(annotation, '__args__'):  # PEP 585 generic
            return repr(annotation)
        else:
            return annotation.__qualname__
    elif annotation is Ellipsis:
        return '...'

    # everything else is delegated to the version-specific helper
    if sys.version_info >= (3, 7):  # py37+
        return _stringify_py37(annotation, mode)
    else:
        return _stringify_py36(annotation, mode)
def _stringify_py37(annotation: Any, mode: str = 'fully-qualified-except-typing') -> str:
    """stringify() for py37+.

    Helper of :func:`stringify` for the typing implementation of Python 3.7+.
    """
    # determine the base name (qualname) and the module prefix to prepend
    module = getattr(annotation, '__module__', None)
    modprefix = ''
    if module == 'typing' and getattr(annotation, '__forward_arg__', None):
        qualname = annotation.__forward_arg__
    elif module == 'typing':
        if getattr(annotation, '_name', None):
            qualname = annotation._name
        elif getattr(annotation, '__qualname__', None):
            qualname = annotation.__qualname__
        else:
            qualname = stringify(annotation.__origin__).replace('typing.', '')  # ex. Union

        if mode == 'smart':
            modprefix = '~%s.' % module
        elif mode == 'fully-qualified':
            modprefix = '%s.' % module
    elif hasattr(annotation, '__qualname__'):
        if mode == 'smart':
            modprefix = '~%s.' % module
        else:
            modprefix = '%s.' % module
        qualname = annotation.__qualname__
    elif hasattr(annotation, '__origin__'):
        # instantiated generic provided by a user
        qualname = stringify(annotation.__origin__, mode)
    elif UnionType and isinstance(annotation, UnionType):  # types.Union (for py3.10+)
        qualname = 'types.Union'
    else:
        # we weren't able to extract the base type, appending arguments would
        # only make them appear twice
        return repr(annotation)

    # append the generic arguments, with per-construct formatting
    if getattr(annotation, '__args__', None):
        if not isinstance(annotation.__args__, (list, tuple)):
            # broken __args__ found
            pass
        elif qualname in ('Optional', 'Union'):
            if len(annotation.__args__) > 1 and annotation.__args__[-1] is NoneType:
                # a trailing None argument renders as Optional[...]
                if len(annotation.__args__) > 2:
                    args = ', '.join(stringify(a, mode) for a in annotation.__args__[:-1])
                    return '%sOptional[%sUnion[%s]]' % (modprefix, modprefix, args)
                else:
                    return '%sOptional[%s]' % (modprefix,
                                               stringify(annotation.__args__[0], mode))
            else:
                args = ', '.join(stringify(a, mode) for a in annotation.__args__)
                return '%sUnion[%s]' % (modprefix, args)
        elif qualname == 'types.Union':
            # PEP 604 unions use the "X | Y" syntax
            if len(annotation.__args__) > 1 and None in annotation.__args__:
                args = ' | '.join(stringify(a) for a in annotation.__args__ if a)
                return '%sOptional[%s]' % (modprefix, args)
            else:
                return ' | '.join(stringify(a) for a in annotation.__args__)
        elif qualname == 'Callable':
            args = ', '.join(stringify(a, mode) for a in annotation.__args__[:-1])
            returns = stringify(annotation.__args__[-1], mode)
            return '%s%s[[%s], %s]' % (modprefix, qualname, args, returns)
        elif qualname == 'Literal':
            # Literal arguments are plain values, not types
            args = ', '.join(repr(a) for a in annotation.__args__)
            return '%s%s[%s]' % (modprefix, qualname, args)
        elif str(annotation).startswith('typing.Annotated'):  # for py39+
            # Annotated metadata is dropped; only the wrapped type is shown
            return stringify(annotation.__args__[0], mode)
        elif all(is_system_TypeVar(a) for a in annotation.__args__):
            # Suppress arguments if all system defined TypeVars (ex. Dict[KT, VT])
            return modprefix + qualname
        else:
            args = ', '.join(stringify(a, mode) for a in annotation.__args__)
            return '%s%s[%s]' % (modprefix, qualname, args)

    return modprefix + qualname
def _stringify_py36(annotation: Any, mode: str = 'fully-qualified-except-typing') -> str:
    """stringify() for py36.

    Helper of :func:`stringify` for the legacy typing internals of Python 3.6
    (``TupleMeta`` / ``GenericMeta``).
    """
    # determine the base name (qualname) and the module prefix to prepend
    module = getattr(annotation, '__module__', None)
    modprefix = ''
    if module == 'typing' and getattr(annotation, '__forward_arg__', None):
        qualname = annotation.__forward_arg__
    elif module == 'typing':
        if getattr(annotation, '_name', None):
            qualname = annotation._name
        elif getattr(annotation, '__qualname__', None):
            qualname = annotation.__qualname__
        elif getattr(annotation, '__origin__', None):
            qualname = stringify(annotation.__origin__)  # ex. Union
        else:
            qualname = repr(annotation).replace('typing.', '')

        if mode == 'smart':
            modprefix = '~%s.' % module
        elif mode == 'fully-qualified':
            modprefix = '%s.' % module
    elif hasattr(annotation, '__qualname__'):
        if mode == 'smart':
            modprefix = '~%s.' % module
        else:
            modprefix = '%s.' % module
        qualname = annotation.__qualname__
    else:
        qualname = repr(annotation)

    if (isinstance(annotation, typing.TupleMeta) and  # type: ignore
            not hasattr(annotation, '__tuple_params__')):  # for Python 3.6
        # parameterized Tuple[...]
        params = annotation.__args__
        if params:
            param_str = ', '.join(stringify(p, mode) for p in params)
            return '%s%s[%s]' % (modprefix, qualname, param_str)
        else:
            return modprefix + qualname
    elif isinstance(annotation, typing.GenericMeta):
        params = None
        if annotation.__args__ is None or len(annotation.__args__) <= 2:  # type: ignore  # NOQA
            params = annotation.__args__  # type: ignore
        elif annotation.__origin__ == Generator:  # type: ignore
            # Generator keeps all three parameters
            params = annotation.__args__  # type: ignore
        else:  # typing.Callable
            args = ', '.join(stringify(arg, mode) for arg
                             in annotation.__args__[:-1])  # type: ignore
            result = stringify(annotation.__args__[-1])  # type: ignore
            return '%s%s[[%s], %s]' % (modprefix, qualname, args, result)
        if params is not None:
            param_str = ', '.join(stringify(p, mode) for p in params)
            return '%s%s[%s]' % (modprefix, qualname, param_str)
    elif (hasattr(annotation, '__origin__') and
            annotation.__origin__ is typing.Union):
        params = annotation.__args__
        if params is not None:
            if len(params) > 1 and params[-1] is NoneType:
                # trailing None renders as Optional[...]
                if len(params) > 2:
                    param_str = ", ".join(stringify(p, mode) for p in params[:-1])
                    return '%sOptional[%sUnion[%s]]' % (modprefix, modprefix, param_str)
                else:
                    return '%sOptional[%s]' % (modprefix, stringify(params[0], mode))
            else:
                param_str = ', '.join(stringify(p, mode) for p in params)
                return '%sUnion[%s]' % (modprefix, param_str)

    return modprefix + qualname
# Keep the old "DirectiveOption" name importable from sphinx.util.typing
# (with a deprecation warning) until it is removed in Sphinx 6.0.
deprecated_alias('sphinx.util.typing',
                 {
                     'DirectiveOption': Callable[[str], Any],
                 },
                 RemovedInSphinx60Warning)
| 40.92674 | 96 | 0.579925 |
8831015b63c7713127a53e0f09c6877448c3167a | 458 | py | Python | 2015/17/part1.py | timofurrer/aoc-2020 | 446b688a57601d9891f520e43b7f822c373a6ff4 | [
"MIT"
] | null | null | null | 2015/17/part1.py | timofurrer/aoc-2020 | 446b688a57601d9891f520e43b7f822c373a6ff4 | [
"MIT"
] | null | null | null | 2015/17/part1.py | timofurrer/aoc-2020 | 446b688a57601d9891f520e43b7f822c373a6ff4 | [
"MIT"
] | null | null | null | from pathlib import Path
from pathlib import Path
import itertools

# Exact amount of eggnog (litres) the containers must hold.
TOTAL_EGGNOG = 150

with (Path(__file__).parent / "input.txt").open() as puzzle_input_file:
    puzzle_input_raw = puzzle_input_file.read()

# One container size per input line.
containers = [int(x) for x in puzzle_input_raw.splitlines()]

# Consider every combination size.  The original started at size 3, which
# silently skips valid 1- and 2-container fills on inputs where those exist.
all_combinations = itertools.chain.from_iterable(
    itertools.combinations(containers, size)
    for size in range(1, len(containers) + 1)
)
# Count the combinations that hold exactly the required amount.
using_all_eggnog = sum(sum(combo) == TOTAL_EGGNOG for combo in all_combinations)
print(using_all_eggnog)
c4707f77c85d5637a7753cbc10d78b3dc565f674 | 3,937 | py | Python | tests/test_workload.py | dogfish182/tf-restore-helper | 51b5c45d5c9e012bcce33db3f4c0355010113f55 | [
"MIT"
] | null | null | null | tests/test_workload.py | dogfish182/tf-restore-helper | 51b5c45d5c9e012bcce33db3f4c0355010113f55 | [
"MIT"
] | 191 | 2021-05-03T18:29:39.000Z | 2022-03-29T10:18:33.000Z | tests/test_workload.py | dogfish182/tf-restore-helper | 51b5c45d5c9e012bcce33db3f4c0355010113f55 | [
"MIT"
] | null | null | null | """Test cases for the Workload object."""
import json
import logging
import os
from os.path import abspath
from os.path import dirname
from os.path import join
from typing import Any
from typing import Dict
from typing import List
from unittest.mock import MagicMock
from unittest.mock import patch
import boto3 # type: ignore
import pytest
from moto import mock_ec2 # type: ignore
from tf_restore_helper import Workload
@pytest.fixture
def plan() -> str:
    """Return the terraform plan fixture file as a JSON string."""
    asset = join(dirname(abspath(__file__)), "assets/plan.json")
    with open(asset) as handle:
        contents = handle.read()
    return contents
@pytest.fixture
def volumes() -> List[Dict[Any, Any]]:
    """Return the AWS EBS volume fixtures parsed from JSON."""
    asset = join(dirname(abspath(__file__)), "assets/volumes.json")
    with open(asset) as handle:
        parsed: List[Dict[Any, Any]] = json.loads(handle.read())
    return parsed
@pytest.fixture(scope="function")
def aws_credentials() -> None:
    """Mocked AWS Credentials for moto."""
    for variable in (
        "AWS_ACCESS_KEY_ID",
        "AWS_SECRET_ACCESS_KEY",
        "AWS_SECURITY_TOKEN",
        "AWS_SESSION_TOKEN",
    ):
        os.environ[variable] = "testing"  # noqa
@pytest.fixture(scope="function")
def ec2(aws_credentials: None) -> boto3.client:
    """Returns a mocked ec2 boto client."""
    # Depends on aws_credentials so the fake keys are exported before boto3
    # reads them; mock_ec2 intercepts EC2 API calls for the lifetime of the
    # yielded client.
    with mock_ec2():
        yield boto3.client("ec2", region_name="eu-west-1")
@pytest.fixture
def workload(plan: str, ec2: boto3.client) -> Workload:
    """Build a Workload from the plan fixture and the mocked EC2 client."""
    return Workload(plan, ec2)
def test_logger_object(workload: Workload) -> None:
    """The workload exposes a standard-library logger."""
    logger = workload.logger
    assert isinstance(logger, logging.Logger)
@patch.object(Workload, "cloud_volumes")
def test_cloud_volumes(
    inp: MagicMock, workload: Workload, volumes: List[Dict[Any, Any]]
) -> None:
    """Test patched volumes attribute.

    ``@patch.object`` injects the MagicMock as the first positional argument,
    so it must be consumed by a dedicated ``inp`` parameter (as the other
    patched tests in this module do) — otherwise ``workload`` receives the
    mock instead of the fixture.
    """
    workload.cloud_volumes = volumes  # type: ignore
    # local renamed from "volumes" to avoid shadowing the fixture argument
    checks = [
        volume.get("VolumeId", "").startswith("vol-")
        for volume in workload.cloud_volumes
    ]
    assert checks
    assert all(checks)
def test_tf_ebs_volumes(workload: Workload) -> None:
    """Test volumes are correctly collected."""
    prefixed = []
    for volume in workload.tf_ebs_volumes:
        prefixed.append(volume.get("values", {}).get("id").startswith("vol-"))
    assert prefixed
    assert all(prefixed)
def test_flattened_plan(workload: Workload) -> None:
    """Test json plan is flattened correctly."""
    keys = set(workload.flattened_plan.keys())
    assert {"providers", "resources"} <= keys
@patch.object(Workload, "cloud_volumes")
def test_volume_attachment_removal(
    inp: MagicMock, workload: Workload, volumes: List[Dict[Any, Any]]
) -> None:
    """Tests attachment removal.

    Checks both outcomes of ``_should_remove_attachment`` against the
    volume fixtures.  (A leftover debug ``print`` of the volume list was
    removed.)
    """
    workload.cloud_volumes = volumes  # type: ignore
    assert workload._should_remove_attachment("vol-0000000000000001") is False
    assert workload._should_remove_attachment("vol-0000000000000002") is True
@patch.object(Workload, "cloud_volumes")
def test_terraform_update_data(
    inp: MagicMock, workload: Workload, volumes: List[Dict[Any, Any]]
) -> None:
    """Tests terraform update data."""
    workload.cloud_volumes = volumes  # type: ignore
    entry = workload.terraform_update_data[0]
    assert entry.new_volume_id == "vol-0000000000000001"
    assert entry.device_name == "/dev/sdf"
    assert entry.instance_id == "i-0e00000000000078"
    assert (
        entry.terraform_attachment_address
        == "aws_volume_attachment.test-instance-volume-1"
    )
    assert (
        entry.terraform_ebs_volume_address
        == "aws_ebs_volume.test-instance-volume-1"
    )
cc06c8bcead588a0dff27450085b089185cebe49 | 2,536 | py | Python | kubernetes/test/test_batch_v2alpha1_api.py | kevingessner/python | 3f4d09d260cf0839fae8173852c69e0419188454 | [
"Apache-2.0"
] | null | null | null | kubernetes/test/test_batch_v2alpha1_api.py | kevingessner/python | 3f4d09d260cf0839fae8173852c69e0419188454 | [
"Apache-2.0"
] | null | null | null | kubernetes/test/test_batch_v2alpha1_api.py | kevingessner/python | 3f4d09d260cf0839fae8173852c69e0419188454 | [
"Apache-2.0"
] | 1 | 2018-07-19T16:37:20.000Z | 2018-07-19T16:37:20.000Z | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.9.3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.apis.batch_v2alpha1_api import BatchV2alpha1Api
class TestBatchV2alpha1Api(unittest.TestCase):
    """Unit test stubs for the generated BatchV2alpha1Api client."""

    def setUp(self):
        """Build a fresh BatchV2alpha1Api client before each test."""
        self.api = kubernetes.client.apis.batch_v2alpha1_api.BatchV2alpha1Api()

    def tearDown(self):
        """No per-test cleanup is required."""

    def test_create_namespaced_cron_job(self):
        """Stub for create_namespaced_cron_job."""

    def test_delete_collection_namespaced_cron_job(self):
        """Stub for delete_collection_namespaced_cron_job."""

    def test_delete_namespaced_cron_job(self):
        """Stub for delete_namespaced_cron_job."""

    def test_get_api_resources(self):
        """Stub for get_api_resources."""

    def test_list_cron_job_for_all_namespaces(self):
        """Stub for list_cron_job_for_all_namespaces."""

    def test_list_namespaced_cron_job(self):
        """Stub for list_namespaced_cron_job."""

    def test_patch_namespaced_cron_job(self):
        """Stub for patch_namespaced_cron_job."""

    def test_patch_namespaced_cron_job_status(self):
        """Stub for patch_namespaced_cron_job_status."""

    def test_read_namespaced_cron_job(self):
        """Stub for read_namespaced_cron_job."""

    def test_read_namespaced_cron_job_status(self):
        """Stub for read_namespaced_cron_job_status."""

    def test_replace_namespaced_cron_job(self):
        """Stub for replace_namespaced_cron_job."""

    def test_replace_namespaced_cron_job_status(self):
        """Stub for replace_namespaced_cron_job_status."""
# Allow running this test module directly via the unittest CLI.
if __name__ == '__main__':
    unittest.main()
| 19.067669 | 105 | 0.614353 |
bf63acf7cd43858d169271e4bbcc5878a1f70620 | 6,303 | py | Python | ajax_forms/tests/settings.py | chrisspen/django-ajax-forms | da5031192a46aaa2fe3de5d6984b376755c9e429 | [
"BSD-3-Clause"
] | null | null | null | ajax_forms/tests/settings.py | chrisspen/django-ajax-forms | da5031192a46aaa2fe3de5d6984b376755c9e429 | [
"BSD-3-Clause"
] | 29 | 2017-09-30T09:45:56.000Z | 2021-03-01T14:40:23.000Z | ajax_forms/tests/settings.py | chrisspen/django-ajax-forms | da5031192a46aaa2fe3de5d6984b376755c9e429 | [
"BSD-3-Clause"
] | 9 | 2017-05-12T11:48:47.000Z | 2018-07-13T12:07:58.000Z | # Django settings for ajax_forms project.
import os
#import sys
#sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
#PROJECT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))
PROJECT_DIR = os.path.dirname(__file__)
DEBUG = True
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'example_project.db', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
LANGUAGES = (
('en-us', 'English'),
)
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = False
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '-mb^wb!5r&*gkhzssa0%msfg!$y*chd6$9$#ozh4nlz3ek&fb1'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'ajax_forms.tests.urls'
# Python dotted path to the WSGI application used by Django's runserver.
#WSGI_APPLICATION = 'example_project.wsgi.application'
# NOTE: removed a leftover debug `print('PROJECT_DIR:', PROJECT_DIR)` here —
# a Django settings module must not write to stdout at import time.
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'ajax_forms',
'ajax_forms.tests',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
# Required in Django>=1.10.
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
'%s/templates' % PROJECT_DIR,
'%s/../templates' % PROJECT_DIR,
'%s/../static' % PROJECT_DIR,
],
# 'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
'debug': DEBUG,
},
},
]
| 34.255435 | 108 | 0.655402 |
7d7bef87434f0f2e6b2ea6d2bf4d7f5703ba124b | 11,455 | py | Python | salt/modules/zk_concurrency.py | ifraixedes/saltstack-salt | b54becb8b43cc9b7c00b2c0bc637ac534dc62896 | [
"Apache-2.0"
] | 9,425 | 2015-01-01T05:59:24.000Z | 2022-03-31T20:44:05.000Z | salt/modules/zk_concurrency.py | ifraixedes/saltstack-salt | b54becb8b43cc9b7c00b2c0bc637ac534dc62896 | [
"Apache-2.0"
] | 33,507 | 2015-01-01T00:19:56.000Z | 2022-03-31T23:48:20.000Z | salt/modules/zk_concurrency.py | ifraixedes/saltstack-salt | b54becb8b43cc9b7c00b2c0bc637ac534dc62896 | [
"Apache-2.0"
] | 5,810 | 2015-01-01T19:11:45.000Z | 2022-03-31T02:37:20.000Z | """
Concurrency controls in zookeeper
=========================================================================
:depends: kazoo
:configuration: See :py:mod:`salt.modules.zookeeper` for setup instructions.
This module allows you to acquire and release a slot. This is primarily useful
for ensureing that no more than N hosts take a specific action at once. This can
also be used to coordinate between masters.
"""
import logging
import sys
try:
import kazoo.client
from kazoo.retry import ForceRetryError
import kazoo.recipe.lock
import kazoo.recipe.barrier
import kazoo.recipe.party
from kazoo.exceptions import CancelledError
from kazoo.exceptions import NoNodeError
from socket import gethostname
# TODO: use the kazoo one, waiting for pull req:
# https://github.com/python-zk/kazoo/pull/206
class _Semaphore(kazoo.recipe.lock.Semaphore):
    """A kazoo Semaphore that also supports non-ephemeral (persistent) leases.

    Upstream kazoo only creates ephemeral lease znodes; this subclass adds an
    ``ephemeral_lease`` flag so a lease can survive a lost session.
    (Stop-gap until the kazoo pull request referenced above is merged.)
    """

    def __init__(
        self,
        client,
        path,
        identifier=None,
        max_leases=1,
        ephemeral_lease=True,
    ):
        # default the lease identifier to this minion's hostname
        identifier = identifier or gethostname()
        kazoo.recipe.lock.Semaphore.__init__(
            self, client, path, identifier=identifier, max_leases=max_leases
        )
        self.ephemeral_lease = ephemeral_lease

        # if its not ephemeral, make sure we didn't already grab it
        if not self.ephemeral_lease:
            try:
                for child in self.client.get_children(self.path):
                    try:
                        data, stat = self.client.get(self.path + "/" + child)
                        if identifier == data.decode("utf-8"):
                            # an earlier run already holds a lease under our
                            # identifier: adopt it instead of taking another
                            self.create_path = self.path + "/" + child
                            self.is_acquired = True
                            break
                    except NoNodeError:  # pragma: nocover
                        pass
            except NoNodeError:  # pragma: nocover
                pass

    def _get_lease(self, data=None):
        """Try to take one lease znode; return whether we now hold one."""
        # Make sure the session is still valid
        if self._session_expired:
            raise ForceRetryError("Retry on session loss at top")

        # Make sure that the request hasn't been canceled
        if self.cancelled:
            raise CancelledError("Semaphore cancelled")

        # Get a list of the current potential lock holders. If they change,
        # notify our wake_event object. This is used to unblock a blocking
        # self._inner_acquire call.
        children = self.client.get_children(self.path, self._watch_lease_change)

        # If there are leases available, acquire one
        if len(children) < self.max_leases:
            self.client.create(
                self.create_path, self.data, ephemeral=self.ephemeral_lease
            )

        # Check if our acquisition was successful or not. Update our state.
        if self.client.exists(self.create_path):
            self.is_acquired = True
        else:
            self.is_acquired = False

        # Return current state
        return self.is_acquired
HAS_DEPS = True
except ImportError:
HAS_DEPS = False
__virtualname__ = "zk_concurrency"
def __virtual__():
    """Only load this module when kazoo is importable; prime the lock cache."""
    if HAS_DEPS:
        __context__["semaphore_map"] = {}
        return __virtualname__
    return (False, "Module zk_concurrency: dependencies failed")
def _get_zk_conn(profile=None, **connection_args):
    """Return a started (and cached) :class:`kazoo.client.KazooClient`.

    Settings are looked up in *connection_args* first, then under
    ``zookeeper:<profile>`` (or plain ``zookeeper``) in the minion config.
    Clients are cached in ``__context__['zkconnection']`` keyed by profile
    name (or host string) so repeated calls reuse a single connection.
    """
    if profile:
        prefix = "zookeeper:" + profile
    else:
        prefix = "zookeeper"

    def get(key, default=None):
        """
        look in connection_args first, then default to config file
        """
        return connection_args.get(key) or __salt__["config.get"](
            ":".join([prefix, key]), default
        )

    hosts = get("hosts", "127.0.0.1:2181")
    scheme = get("scheme", None)
    username = get("username", None)
    password = get("password", None)
    default_acl = get("default_acl", None)

    if isinstance(hosts, list):
        # kazoo expects a comma-separated connect string
        hosts = ",".join(hosts)

    if username is not None and password is not None and scheme is None:
        # credentials without an explicit scheme imply digest auth
        scheme = "digest"

    auth_data = None
    if scheme and username and password:
        auth_data = [(scheme, ":".join([username, password]))]

    if default_acl is not None:
        # normalize to a list of ACL objects built by the zookeeper module
        if isinstance(default_acl, list):
            default_acl = [
                __salt__["zookeeper.make_digest_acl"](**acl) for acl in default_acl
            ]
        else:
            default_acl = [__salt__["zookeeper.make_digest_acl"](**default_acl)]

    # create the client at most once per profile/hosts and keep it in __context__
    __context__.setdefault("zkconnection", {}).setdefault(
        profile or hosts,
        kazoo.client.KazooClient(
            hosts=hosts, default_acl=default_acl, auth_data=auth_data
        ),
    )

    if not __context__["zkconnection"][profile or hosts].connected:
        __context__["zkconnection"][profile or hosts].start()

    return __context__["zkconnection"][profile or hosts]
def lock_holders(
    path,
    zk_hosts=None,
    identifier=None,
    max_concurrency=1,
    timeout=None,
    ephemeral_lease=False,
    profile=None,
    scheme=None,
    username=None,
    password=None,
    default_acl=None,
):
    """
    Return an un-ordered list of lock holders

    path
        The path in zookeeper where the lock is

    zk_hosts
        zookeeper connect string

    identifier
        Name to identify this minion, if unspecified defaults to hostname

    max_concurrency
        Maximum number of lock holders

    timeout
        timeout to wait for the lock. A None timeout will block forever

    ephemeral_lease
        Whether the locks in zookeper should be ephemeral

    Example:

    .. code-block:: bash

        salt minion zk_concurrency.lock_holders /lock/path host1:1234,host2:1234
    """
    zk = _get_zk_conn(
        profile=profile,
        hosts=zk_hosts,
        scheme=scheme,
        username=username,
        password=password,
        default_acl=default_acl,
    )
    # Reuse (or lazily create) the semaphore cached for this path.
    if path not in __context__["semaphore_map"]:
        __context__["semaphore_map"][path] = _Semaphore(
            zk,
            path,
            identifier,
            max_leases=max_concurrency,
            ephemeral_lease=ephemeral_lease,
        )
    return __context__["semaphore_map"][path].lease_holders()
def lock(
    path,
    zk_hosts=None,
    identifier=None,
    max_concurrency=1,
    timeout=None,
    ephemeral_lease=False,
    force=False,  # forcibly get the lock regardless of open slots
    profile=None,
    scheme=None,
    username=None,
    password=None,
    default_acl=None,
):
    """
    Get lock (with optional timeout)

    path
        The path in zookeeper where the lock is

    zk_hosts
        zookeeper connect string

    identifier
        Name to identify this minion, if unspecified defaults to the hostname

    max_concurrency
        Maximum number of lock holders

    timeout
        timeout to wait for the lock. A None timeout will block forever

    ephemeral_lease
        Whether the locks in zookeper should be ephemeral

    force
        Forcibly acquire the lock regardless of available slots

    Example:

    .. code-block:: bash

        salt minion zk_concurrency.lock /lock/path host1:1234,host2:1234
    """
    zk = _get_zk_conn(
        profile=profile,
        hosts=zk_hosts,
        scheme=scheme,
        username=username,
        password=password,
        default_acl=default_acl,
    )
    # Reuse (or lazily create) the semaphore cached for this path.
    if path not in __context__["semaphore_map"]:
        __context__["semaphore_map"][path] = _Semaphore(
            zk,
            path,
            identifier,
            max_leases=max_concurrency,
            ephemeral_lease=ephemeral_lease,
        )
    # forcibly get the lock regardless of max_concurrency
    if force:
        __context__["semaphore_map"][path].assured_path = True
        # sys.maxint was removed in Python 3; sys.maxsize is the equivalent
        # "effectively unlimited" lease count.
        __context__["semaphore_map"][path].max_leases = sys.maxsize
    # block waiting for lock acquisition
    if timeout:
        logging.info("Acquiring lock %s with timeout=%s", path, timeout)
        __context__["semaphore_map"][path].acquire(timeout=timeout)
    else:
        logging.info("Acquiring lock %s with no timeout", path)
        __context__["semaphore_map"][path].acquire()
    return __context__["semaphore_map"][path].is_acquired
def unlock(
    path,
    zk_hosts=None,  # in case you need to unlock without having run lock (failed execution for example)
    identifier=None,
    max_concurrency=1,
    ephemeral_lease=False,
    scheme=None,
    profile=None,
    username=None,
    password=None,
    default_acl=None,
):
    """
    Remove lease from semaphore

    path
        The path in zookeeper where the lock is

    zk_hosts
        zookeeper connect string

    identifier
        Name to identify this minion, if unspecified defaults to hostname

    max_concurrency
        Maximum number of lock holders

    timeout
        timeout to wait for the lock. A None timeout will block forever

    ephemeral_lease
        Whether the locks in zookeper should be ephemeral

    Example:

    .. code-block:: bash

        salt minion zk_concurrency.unlock /lock/path host1:1234,host2:1234
    """
    # if someone passed in zk_hosts, and the path isn't in __context__['semaphore_map'], lets
    # see if we can find it
    zk = _get_zk_conn(
        profile=profile,
        hosts=zk_hosts,
        scheme=scheme,
        username=username,
        password=password,
        default_acl=default_acl,
    )
    if path not in __context__["semaphore_map"]:
        __context__["semaphore_map"][path] = _Semaphore(
            zk,
            path,
            identifier,
            max_leases=max_concurrency,
            ephemeral_lease=ephemeral_lease,
        )
    # NOTE(review): the branch above guarantees the path is now present, so
    # this condition is always true and the error branch below is unreachable.
    if path in __context__["semaphore_map"]:
        __context__["semaphore_map"][path].release()
        del __context__["semaphore_map"][path]
        return True
    else:
        logging.error("Unable to find lease for path %s", path)
        return False
def party_members(
    path,
    zk_hosts=None,
    min_nodes=1,
    blocking=False,
    profile=None,
    scheme=None,
    username=None,
    password=None,
    default_acl=None,
):
    """
    Get the List of identifiers in a particular party, optionally waiting for the
    specified minimum number of nodes (min_nodes) to appear

    path
        The path in zookeeper where the lock is

    zk_hosts
        zookeeper connect string

    min_nodes
        The minimum number of nodes expected to be present in the party

    blocking
        The boolean indicating if we need to block until min_nodes are available

    Example:

    .. code-block:: bash

        salt minion zk_concurrency.party_members /lock/path host1:1234,host2:1234
        salt minion zk_concurrency.party_members /lock/path host1:1234,host2:1234 min_nodes=3 blocking=True
    """
    zk = _get_zk_conn(
        profile=profile,
        hosts=zk_hosts,
        scheme=scheme,
        username=username,
        password=password,
        default_acl=default_acl,
    )
    party = kazoo.recipe.party.ShallowParty(zk, path)
    if blocking:
        # Wait (via a double barrier) until at least min_nodes have joined,
        # then re-read the party membership.
        barrier = kazoo.recipe.barrier.DoubleBarrier(zk, path, min_nodes)
        barrier.enter()
        party = kazoo.recipe.party.ShallowParty(zk, path)
        barrier.leave()
    return list(party)
| 28.07598 | 107 | 0.619293 |
19920ed4a82575f3f29c2876a67bb711cb979fd2 | 299 | py | Python | manage.py | zbidwell/heltour | 3895142695096a81cc65c3fefb7d4501ed796f46 | [
"MIT"
] | 41 | 2016-08-17T19:58:42.000Z | 2021-11-08T10:52:07.000Z | manage.py | zbidwell/heltour | 3895142695096a81cc65c3fefb7d4501ed796f46 | [
"MIT"
] | 257 | 2016-08-17T22:29:05.000Z | 2022-01-13T00:42:05.000Z | manage.py | zbidwell/heltour | 3895142695096a81cc65c3fefb7d4501ed796f46 | [
"MIT"
] | 31 | 2016-09-23T23:36:14.000Z | 2022-01-14T17:05:08.000Z | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Default to the LIVE environment/settings unless already set in the shell.
    os.environ.setdefault("HELTOUR_ENV", "LIVE")
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "heltour.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| 24.916667 | 71 | 0.759197 |
48c9c3bf9374f207390eb41d7763a4f430968d82 | 333 | py | Python | python/fedml/cross_silo/hierarchical/communication_manager.py | ray-ruisun/FedML | 24ff30d636bb70f64e94e9ca205375033597d3dd | [
"Apache-2.0"
] | null | null | null | python/fedml/cross_silo/hierarchical/communication_manager.py | ray-ruisun/FedML | 24ff30d636bb70f64e94e9ca205375033597d3dd | [
"Apache-2.0"
] | null | null | null | python/fedml/cross_silo/hierarchical/communication_manager.py | ray-ruisun/FedML | 24ff30d636bb70f64e94e9ca205375033597d3dd | [
"Apache-2.0"
] | null | null | null | from ...core.distributed.client.client_manager import ClientManager
class CommunicationManager(ClientManager):
    """Thin ClientManager wrapper that registers no message handlers of its own."""
    def __init__(self, args, comm, rank, size, backend):
        super().__init__(args, comm, rank, size, backend)
    def register_message_receive_handlers(self):
        # Intentionally empty: this manager only drives the base run loop.
        pass
    def run(self):
        super().run()
| 25.615385 | 67 | 0.696697 |
17fb060ee81671ad05da2395a4ae35699161c3c8 | 5,431 | py | Python | examples/server_example.py | christian-oudard/AIOPyFix | cb9e9372075e6119c0ef363d2095b3b6fddafcf3 | [
"CC0-1.0"
] | null | null | null | examples/server_example.py | christian-oudard/AIOPyFix | cb9e9372075e6119c0ef363d2095b3b6fddafcf3 | [
"CC0-1.0"
] | null | null | null | examples/server_example.py | christian-oudard/AIOPyFix | cb9e9372075e6119c0ef363d2095b3b6fddafcf3 | [
"CC0-1.0"
] | null | null | null | import asyncio
from enum import Enum
import logging
from aiopyfix.connection import ConnectionState, MessageDirection
from aiopyfix.engine import FIXEngine
from aiopyfix.message import FIXMessage
from aiopyfix.server_connection import FIXServer
# Order side as carried in the FIX Side field (see onNewOrder): 1 = buy, 2 = sell.
Side = Enum('Side', [('buy', 1), ('sell', 2)])
class Server(FIXEngine):
    """Minimal FIX 4.4 acceptor that acks every NewOrderSingle with an ExecutionReport."""
    def __init__(self):
        FIXEngine.__init__(self, "server_example.store")
        # create a FIX Server using the FIX 4.4 standard
        self.server = FIXServer(self, "aiopyfix.FIX44")
        # we register some listeners since we want to know when the connection goes up or down
        self.server.addConnectionListener(self.onConnect, ConnectionState.CONNECTED)
        self.server.addConnectionListener(self.onDisconnect, ConnectionState.DISCONNECTED)
    # start our event listener indefinitely
    async def start(self, host, port, loop):
        await self.server.start(host, port, loop)
    def validateSession(self, targetCompId, senderCompId):
        # Accept every login attempt; a real server would authenticate here.
        logging.info("Received login request for %s / %s" % (senderCompId, targetCompId))
        return True
    async def onConnect(self, session):
        logging.info("Accepted new connection from %s" % (session.address(),))
        # register to receive message notifications on the session which has just been created
        session.addMessageHandler(self.onLogin, MessageDirection.OUTBOUND, self.server.protocol.msgtype.LOGON)
        session.addMessageHandler(self.onNewOrder, MessageDirection.INBOUND,
                                  self.server.protocol.msgtype.NEWORDERSINGLE)
    async def onDisconnect(self, session):
        logging.info("%s has disconnected" % (session.address(),))
        # we need to clean up our handlers, since this session is disconnected now
        session.removeMessageHandler(self.onLogin, MessageDirection.OUTBOUND, self.server.protocol.msgtype.LOGON)
        session.removeMessageHandler(self.onNewOrder, MessageDirection.INBOUND,
                                     self.server.protocol.msgtype.NEWORDERSINGLE)
    async def onLogin(self, connectionHandler, msg):
        # Log outbound logons for visibility only.
        codec = connectionHandler.codec
        logging.info("[" + msg[codec.protocol.fixtags.SenderCompID] + "] <---- " + codec.protocol.msgtype.msgTypeToName(
            msg[codec.protocol.fixtags.MsgType]))
    async def onNewOrder(self, connectionHandler, request):
        # Echo the order back as an acknowledging ExecutionReport; on any
        # failure reply with a rejected report (OrdStatus/ExecType = "4").
        codec = connectionHandler.codec
        try:
            side = Side(int(request.getField(codec.protocol.fixtags.Side)))
            logging.debug("<--- [%s] %s: %s %s %s@%s" % (
                codec.protocol.msgtype.msgTypeToName(request.getField(codec.protocol.fixtags.MsgType)),
                request.getField(codec.protocol.fixtags.ClOrdID), request.getField(codec.protocol.fixtags.Symbol),
                side.name, request.getField(codec.protocol.fixtags.OrderQty),
                request.getField(codec.protocol.fixtags.Price)))
            # respond with an ExecutionReport Ack
            msg = FIXMessage(codec.protocol.msgtype.EXECUTIONREPORT)
            msg.setField(codec.protocol.fixtags.Price, request.getField(codec.protocol.fixtags.Price))
            msg.setField(codec.protocol.fixtags.OrderQty, request.getField(codec.protocol.fixtags.OrderQty))
            # NOTE(review): Symbol is first set from OrderQty here and then
            # overwritten with the real Symbol two lines below — the first
            # assignment looks like a leftover; confirm before removing.
            msg.setField(codec.protocol.fixtags.Symbol, request.getField(codec.protocol.fixtags.OrderQty))
            msg.setField(codec.protocol.fixtags.SecurityID, "GB00BH4HKS39")
            msg.setField(codec.protocol.fixtags.SecurityIDSource, "4")
            msg.setField(codec.protocol.fixtags.Symbol, request.getField(codec.protocol.fixtags.Symbol))
            msg.setField(codec.protocol.fixtags.Account, request.getField(codec.protocol.fixtags.Account))
            msg.setField(codec.protocol.fixtags.HandlInst, "1")
            msg.setField(codec.protocol.fixtags.OrdStatus, "0")
            msg.setField(codec.protocol.fixtags.ExecType, "0")
            msg.setField(codec.protocol.fixtags.LeavesQty, "0")
            msg.setField(codec.protocol.fixtags.Side, request.getField(codec.protocol.fixtags.Side))
            msg.setField(codec.protocol.fixtags.ClOrdID, request.getField(codec.protocol.fixtags.ClOrdID))
            msg.setField(codec.protocol.fixtags.Currency, request.getField(codec.protocol.fixtags.Currency))
            await connectionHandler.sendMsg(msg)
            logging.debug("---> [%s] %s: %s %s %s@%s" % (
                codec.protocol.msgtype.msgTypeToName(msg.msgType), msg.getField(codec.protocol.fixtags.ClOrdID),
                request.getField(codec.protocol.fixtags.Symbol), side.name,
                request.getField(codec.protocol.fixtags.OrderQty), request.getField(codec.protocol.fixtags.Price)))
        except Exception as e:
            msg = FIXMessage(codec.protocol.msgtype.EXECUTIONREPORT)
            msg.setField(codec.protocol.fixtags.OrdStatus, "4")
            msg.setField(codec.protocol.fixtags.ExecType, "4")
            msg.setField(codec.protocol.fixtags.LeavesQty, "0")
            msg.setField(codec.protocol.fixtags.Text, str(e))
            msg.setField(codec.protocol.fixtags.ClOrdID, request.getField(codec.protocol.fixtags.ClOrdID))
            await connectionHandler.sendMsg(msg)
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    loop = asyncio.get_event_loop()
    server = Server()
    # Bind on all interfaces, port 9898, then serve until interrupted.
    loop.run_until_complete(server.start('', 9898, loop))
    loop.run_forever()
| 53.245098 | 120 | 0.692874 |
a80b5e663077a9d52ebcbb1ab4ad5709b8b389ef | 3,673 | py | Python | cgeexample.py | tirimid/cgengine | 5df905f094dd867bed256cb6cccee2e02a5eb849 | [
"MIT"
] | null | null | null | cgeexample.py | tirimid/cgengine | 5df905f094dd867bed256cb6cccee2e02a5eb849 | [
"MIT"
] | null | null | null | cgeexample.py | tirimid/cgengine | 5df905f094dd867bed256cb6cccee2e02a5eb849 | [
"MIT"
] | 1 | 2022-01-23T12:03:12.000Z | 2022-01-23T12:03:12.000Z | # MIT License
#
# Copyright (c) 2021 tirimid
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from cgengine import *
# construct the game and managers
drawMan = DrawManager(110, 28, '.')
gameMan = GameManager(drawMan)
game = Game(gameMan)
# rr sits entirely off-screen (negative coords); presumably it only reserves
# draw-manager slots 0 and 1 — see the comment below.
rr = RenderRect(Vec2(-2, -2), Vec2(-1, -1), '!')
# MUST BE USED, the unregister function is used twice before registering anything.
# since index 0 and 1 are unregistered, 2 objects must be added here.
# if index 0, 1, 2, and 3 are unregistered, then this needs to be done 4 times.
drawMan.registerRect(rr)
drawMan.registerRect(rr)
class Bullet:
    """Bouncing projectile that re-registers its render/collider rects each frame."""
    def __init__(self, index):
        # index is the slot in the draw manager this bullet occupies.
        self.pos = Vec2(7, 6)
        self.vel = Vec2(1, 0.5)
        self.size = Vec2(3, 2)
        self.index = index
        self.render = RenderRect(Vec2(0, 0), Vec2(0, 0), '*')
        self.collider = ColliderRect(Vec2(0, 0), Vec2(0, 0))
    def update(self, drawMan):
        """Advance one frame: move, bounce off the screen edges, re-register rects."""
        # bullet movement
        self.pos.x += self.vel.x
        self.pos.y += self.vel.y
        # Bounce off the horizontal and vertical bounds of the play field.
        if (self.pos.x >= 106):
            self.vel.x = -1
        elif (self.pos.x <= 0):
            self.vel.x = 1
        if (self.pos.y >= 25):
            self.vel.y = -0.5
        elif (self.pos.y <= 0):
            self.vel.y = 0.5
        # dynamic object unregister and register
        self.render = RenderRect(self.pos, Vec2(self.pos.x + 3, self.pos.y + 2), '*')
        self.render.setOutlineChar('/')
        self.collider = ColliderRect(self.pos, Vec2(self.pos.x + 3, self.pos.y + 2))
        drawMan.unregisterRect(self.index)
        drawMan.registerRectId(self.render, self.index)
# construct a bullet
b0 = Bullet(1)
# construct the player
pPos = Vec2(0, 0)
pSize = Vec2(4, 3)
pRect = RenderRect(Vec2(0, 0), Vec2(0, 0), '#')
pCol = ColliderRect(Vec2(0, 0), Vec2(0, 0))
running = True
# Main game loop: WASD moves the player, 'k' quits, touching the bullet ends the game.
while running:
    # movement
    if (isKeyPressed('d')):
        pPos.x += 1
    if (isKeyPressed('a')):
        pPos.x -= 1
    if (isKeyPressed('s')):
        pPos.y += 0.5
    if (isKeyPressed('w')):
        pPos.y -= 0.5
    if (isKeyPressed('k')):
        running = False
    # unregister and register the player as it is dynamic
    pRect = RenderRect(pPos, Vec2(pPos.x + pSize.x, pPos.y + pSize.y), '#')
    pRect.setOutlineChar('=')
    pCol = ColliderRect(pPos, Vec2(pPos.x + pSize.x, pPos.y + pSize.y))
    drawMan.unregisterRect(0)
    drawMan.registerRectId(pRect, 0)
    b0.update(drawMan)
    if (pCol.isCollidingWith(b0.collider)):
        running = False
    game.update()
    sleeps(0.03)
| 31.93913 | 86 | 0.622924 |
45401c7d152dd8fee93f34af829c2b2fabba104e | 247 | py | Python | tests/example_projects/broken_fizzbuzz/example_tests/test_broken.py | DrSerpent/drserpent | 7b99a76a4633f76819b5da25f31dff57807cb35c | [
"MIT"
] | 4 | 2018-05-12T07:36:21.000Z | 2018-07-27T10:57:56.000Z | tests/example_projects/broken_fizzbuzz/example_tests/test_broken.py | DrSerpent/DrSerpent | 7b99a76a4633f76819b5da25f31dff57807cb35c | [
"MIT"
] | null | null | null | tests/example_projects/broken_fizzbuzz/example_tests/test_broken.py | DrSerpent/DrSerpent | 7b99a76a4633f76819b5da25f31dff57807cb35c | [
"MIT"
] | 1 | 2018-05-07T11:17:56.000Z | 2018-05-07T11:17:56.000Z | import os, sys
from broken_context import *
def test_fail():
    """Example test that is expected to fail (asserts a deliberately wrong value)."""
    return Expect(Fizzbuzz.run(3)).to_equal('HEY')
def test_error():
raise Exception('this failed deliberately')
def test_no_return():
    """Example test that builds an expectation but never returns it."""
    Expect(Fizzbuzz.run(5)).to_equal('YES')
| 20.583333 | 50 | 0.716599 |
dfb0ef73f0b2cfc809bde860232b10bb7dbcc991 | 33,752 | py | Python | evaluation_functions.py | sitnarf/echo-clustering | be5bd26d271da1bc468c02f822503a420aa53211 | [
"MIT"
] | null | null | null | evaluation_functions.py | sitnarf/echo-clustering | be5bd26d271da1bc468c02f822503a420aa53211 | [
"MIT"
] | null | null | null | evaluation_functions.py | sitnarf/echo-clustering | be5bd26d271da1bc468c02f822503a420aa53211 | [
"MIT"
] | null | null | null | import logging
from dataclasses import dataclass
from functools import partial, reduce
from multiprocessing.pool import Pool
from statistics import mean, stdev, StatisticsError
# noinspection Mypy
from typing import Iterable, Optional, Any, Dict, Union, TypedDict, Callable, Tuple, TypeVar, Mapping
from typing import List
import numpy as np
import pandas
import tabulate
from PyALE import ale
from functional_pipeline import pipeline, flatten
from matplotlib import pyplot
from numpy import NaN, float64
from pandas import DataFrame, concat
from pandas import Series
from pandas.core.indexing import IndexingError
from scipy.interpolate import interp1d
from six import moves
from sklearn import clone
from sklearn.inspection import plot_partial_dependence
from sklearn.metrics import precision_score, accuracy_score, roc_auc_score, \
precision_recall_curve, roc_curve, f1_score, average_precision_score, balanced_accuracy_score, \
explained_variance_score, mean_absolute_error, r2_score, brier_score_loss
from sklearn.model_selection import StratifiedKFold
from sklearn.tree import export_text
from toolz import curry, identity
from toolz.curried import get, pluck, map, filter, valmap
from cache import memory
from utils import object2dict, data_subset_iloc, empty_dict
from custom_types import Estimator, ClassificationMetrics, ClassificationMetricsWithSTD, ValueWithStatistics, \
ClassificationMetricsWithStatistics, \
DataStructure, ConfusionMatrix, ConfusionMatrixWithStatistics, Method, RegressionMetrics, RegressionMetricsWithSTD, \
RegressionMetricsWithStatistics
from formatting import dict_to_table_horizontal, format_method, tabulate_formatted, format_structure, format_decimal, \
format_metric_short
from functional import flatten, statements, find_index_right
from functional import pass_args, mapl, pipe, decorate_unpack, find_index, unzip, add_index
from statistics_functions import confidence_interval, get_repeated_cv_corrected_dof
from utils import get_object_attributes, ll, get_log_level, log, Timer
DEFAULT_THRESHOLD = 0.5
T1 = TypeVar('T1')
@dataclass
class ModelExecutionResult(DataStructure):
    """Per-fold outputs of one model execution: predictions, scores, importances."""
    y_train_predicts: List[Series]
    y_predicts: List[Series]
    y_train_scores: List[Series]
    y_scores: List[Series]
    feature_importance: Series
    models: List[Any]
    elapsed: float  # total elapsed time reported for the run
class FoldModelExecutionResult(TypedDict):
    """Outputs of a single CV fold, keyed for dict-style access."""
    y_predict_probabilities: Series
    y_predict: Series
    y_train_predict: Series
    y_train_predict_probabilities: Series
    feature_importance: Series
    model: Estimator
    elapsed: float
@dataclass
class ModelExecutionResultVector(DataStructure):
    """Single-vector (joined) counterpart of ModelExecutionResult."""
    y_train_predict: Series
    y_predict: Series
    y_train_score: Series
    y_score: Series
    feature_importance: Series
    model: Any
    elapsed: float
@dataclass
class ModelCVResult(DataStructure):
    """Per-fold outputs of one cross-validation run; merged by the join_* helpers."""
    y_train_predicts: List[Series]
    y_predicts: List[Series]
    y_train_scores: List[DataFrame]
    y_scores: List[DataFrame]
    feature_importance: List[Series]
    models: List[Any]
    elapsed: float
class ModelResult(TypedDict):
    """Fold outputs joined into single frames/series (see join_folds_cv_result)."""
    y_test_score: DataFrame
    y_test_predict: Series
    y_train_predict: Series
    y_train_score: DataFrame
    feature_importance: Union[Series, DataFrame]
    model: Estimator
    elapsed: float
def compute_regression_metrics(y_score, y_true) -> RegressionMetrics:
    """Explained variance, R² and MAE, aligning y_true to y_score's index."""
    y_true_masked = y_true.loc[y_score.index]
    return RegressionMetrics(
        explained_variance=explained_variance_score(y_true_masked, y_score),
        r2=r2_score(y_true_masked, y_score),
        mean_absolute_error=mean_absolute_error(y_true_masked, y_score),
    )
def compute_classification_metrics(
    y_score,
    y_true,
    threshold: float = DEFAULT_THRESHOLD,
    ignore_warning: bool = False
) -> ClassificationMetrics:
    """Threshold-dependent classification metrics for one score vector.

    Negative scores are clamped to 0 before thresholding, and y_true is
    aligned to y_score's index, so y_score may cover only a subset of samples.
    ``ignore_warning`` silences the zero-division warning of precision_score.
    """
    y_score_normalized = y_score.copy()
    y_score_normalized[y_score_normalized < 0] = 0
    y_predict = y_score_normalized >= threshold
    y_true_masked = y_true.loc[y_predict.index]
    roc = roc_curve(y_true_masked, y_score_normalized)
    # Read TPR/FPR off the ROC curve at the operating threshold
    # (get_roc_point_by_threshold is defined elsewhere in this module).
    fpr, tpr = get_roc_point_by_threshold(threshold, *roc)
    npv = get_metrics_from_confusion_matrix(
        get_confusion_from_threshold(y_true_masked, y_score_normalized, threshold)
    ).npv
    precision = precision_score(
        y_true_masked, y_predict, **({
            'zero_division': 0
        } if ignore_warning else {})
    )
    return ClassificationMetrics(
        recall=tpr,
        precision=precision,
        balanced_accuracy=balanced_accuracy_score(y_true_masked, y_predict),
        f1=f1_score(y_true_masked, y_predict),
        tnr=1 - fpr,
        fpr=fpr,
        fnr=1 - tpr,
        average_precision=average_precision_score(y_true_masked, y_score_normalized),
        accuracy=accuracy_score(y_true_masked, y_predict),
        roc_auc=roc_auc_score(y_true_masked, y_score_normalized),
        npv=npv,
        brier_score=brier_score_loss(y_true_masked, y_score_normalized)
    )
def compute_classification_metrics_folds(
    y_scores: List[Series],
    y: Series,
    threshold: float = DEFAULT_THRESHOLD,
) -> Optional[ClassificationMetricsWithSTD]:
    """Per-fold classification metrics averaged into (mean, std) pairs; None for no folds."""
    return pipeline(
        y_scores,
        [
            map(
                # get_1_class_y_score (defined elsewhere) presumably extracts the
                # positive-class score column from a fold's score frame.
                lambda y_score:
                compute_classification_metrics(get_1_class_y_score(y_score), y, threshold)
            ),
            list,
            average_list_dicts_metric_std,
        ],
    )
def compute_regression_metrics_folds(
    y_scores: List[Series],
    y: Series,
) -> Optional[ClassificationMetricsWithSTD]:
    """Per-fold regression metrics averaged into (mean, std) pairs.

    NOTE(review): the return annotation says ClassificationMetricsWithSTD but
    the pipeline actually builds RegressionMetrics — confirm and fix upstream.
    """
    return pipeline(
        y_scores,
        [
            map(lambda y_score: compute_regression_metrics(y_score, y)),
            list,
            average_list_dicts,
            lambda item: RegressionMetrics(**item),
        ],
    )
def average_list_dicts(metrics: List[Dict]) -> Optional[Dict]:
    """Aggregate a list of metric records into per-key (mean, std) tuples.

    Records may be plain mappings or objects with a ``__dict__``; the key set
    of the first record defines which fields are aggregated.  Returns ``None``
    for an empty input; the std of a single-element sample is reported as 0.
    """
    if not metrics:
        return None
    first = metrics[0]
    field_names = first.__dict__.keys() if hasattr(first, '__dict__') else first.keys()

    def _extract(record, field):
        # Support both attribute-style and mapping-style records.
        return getattr(record, field) if hasattr(record, field) else record[field]

    aggregated = {}
    for field in field_names:
        samples = [_extract(record, field) for record in metrics]
        try:
            spread = stdev(samples)
        except StatisticsError:
            spread = 0
        aggregated[field] = (mean(samples), spread)
    return aggregated
def average_list_dicts_metric_std(metrics: Union[Any]) -> Optional[ClassificationMetricsWithSTD]:
    """Aggregate metric records and wrap the (mean, std) dict in ClassificationMetricsWithSTD.

    Returns None when there is nothing to aggregate.
    """
    averaged = average_list_dicts(metrics)
    return ClassificationMetricsWithSTD(**averaged) if averaged else None
def execute_model_predict_proba(classifier, X):
    """Default probability callback: delegate to the estimator's predict_proba."""
    return classifier.predict_proba(X)
def cross_validate_model(
    X,
    y,
    classifier,
    cv=10,
    fast=False,
    reduce=False,
    parallel=True,
    predict_proba=None,
    n_jobs=12,
    return_model: bool = True,
    fit_kwargs: Dict = None,
) -> ModelCVResult:
    """Cross-validate ``classifier`` on (X, y).

    With ``fast=False`` a stratified ``cv``-fold split is used; with
    ``fast=True`` a single train/test split of size 1/cv is evaluated,
    optionally on just the first 100 rows when ``reduce`` is set.
    (``reduce`` here is a flag parameter and shadows functools.reduce.)
    """
    predict_proba = predict_proba or execute_model_predict_proba
    if not fast:
        cv = StratifiedKFold(n_splits=cv)
        sets = cv.split(X, y)
    else:
        if reduce:
            X = X[:100]
            y = y[:100]
        amount = round(len(X) * (1 / cv))
        # NOTE(review): these are index *labels*, not positions — confirm the
        # iloc-based subsetting downstream expects this.
        sets = [(X.index[:-amount], X.index[-amount:])]
    # TODO: dirty hack
    try:
        classifier.set_params(onehot=None)
    except ValueError:
        pass
    return cross_validate_model_sets(
        classifier,
        X,
        y,
        sets,
        predict_proba,
        parallel,
        n_jobs,
        return_model,
        fit_kwargs=fit_kwargs,
    )
cross_validate_model_cached = memory.cache(cross_validate_model)
class WorkerInput(TypedDict):
    """Arguments shipped to cross_validate_model_fold for one fold (picklable for Pool)."""
    X_train: DataFrame
    y_train: Series
    X_test: DataFrame
    classifier: Estimator
    predict_proba: Callable[[Estimator, DataFrame], Series]
    feature_names: Optional[List[str]]
    return_model: bool
    fit_kwargs: Mapping
def cross_validate_model_sets(
    classifier,
    X,
    y,
    sets,
    predict_proba=execute_model_predict_proba,
    parallel=True,
    n_jobs=12,
    return_model: bool = True,
    filter_X_test: Callable[[DataFrame], DataFrame] = identity,
    feature_names: Optional[List[str]] = None,
    fit_kwargs: Mapping = empty_dict,
) -> ModelCVResult:
    """Run one fold per (train, test) index pair in ``sets`` and merge fold outputs."""
    worker_input: List[WorkerInput] = [
        WorkerInput(
            X_train=data_subset_iloc(X, train),
            y_train=data_subset_iloc(y, train),
            X_test=filter_X_test(X.iloc[test]),
            classifier=classifier,
            predict_proba=predict_proba,
            return_model=return_model,
            feature_names=feature_names,
            fit_kwargs=fit_kwargs,
        ) for (train, test) in sets
    ]
    # Fan the folds out over processes unless the caller asked for serial runs.
    if parallel:
        with Pool(min(len(worker_input), n_jobs)) as p:
            result = p.map(cross_validate_model_fold, worker_input)
    else:
        result = list(map(cross_validate_model_fold, worker_input))
    return result_from_fold_results(result)
cross_validate_model_sets_cached = memory.cache(cross_validate_model_sets, ignore=['n_jobs'])
def cross_validate_model_sets_args(
    get_x_y, n_jobs=12, parallel=True, *args, **kwargs
) -> ModelCVResult:
    """Variant of cross_validate_model_sets taking a data-loading callable (cache-friendly)."""
    X, y = get_x_y()
    return cross_validate_model_sets(X=X, y=y, n_jobs=n_jobs, parallel=parallel, *args, **kwargs)
cross_validate_model_sets_args_cached = memory.cache(
cross_validate_model_sets_args, ignore=['n_jobs', 'parallel']
)
def cross_validate_model_fold(chunk_input: WorkerInput) -> ModelResult:
    """Train and evaluate the classifier on a single cross-validation fold.

    Fits ``chunk_input['classifier']`` on the fold's training split, then
    collects class predictions and (when available) predicted probabilities
    for the train and test splits, plus per-feature importances when the
    final estimator exposes ``feature_importances_``.  Returns a ModelResult
    holding the fitted model (or None when ``return_model`` is falsy) and the
    fold's elapsed CPU time.
    """
    log("Execution fold", level=2)
    timer = Timer()
    classifier = chunk_input['classifier']
    X_train = chunk_input['X_train']
    y_train = chunk_input['y_train']
    X_test = chunk_input['X_test']
    return_model = chunk_input['return_model']
    if get_log_level() == 1:
        print(".")
    # Fall back to the training frame's columns when no explicit feature
    # names were supplied (the key may be absent or None).
    feature_names = chunk_input.get('feature_names')
    if feature_names is None:
        feature_names = list(X_train.columns)
    # 'fit_kwargs' is optional: tolerate callers that did not provide the key.
    fit_kwargs = chunk_input.get('fit_kwargs') or {}
    classifier.fit(X_train, y_train, **fit_kwargs)
    y_predict = Series(classifier.predict(X_test), index=X_test.index)
    y_train_predict = Series(classifier.predict(X_train), index=X_train.index)
    try:
        y_predict_probabilities_raw = classifier.predict_proba(X_test)
        y_train_predict_probabilities_raw = classifier.predict_proba(X_train)
    except AttributeError:
        # Estimator has no predict_proba: reuse the hard predictions as scores.
        y_predict_probabilities = y_predict
        y_train_predict_probabilities = y_train_predict
    else:
        probability_columns = [
            f'y_predict_probabilities_{i}' for i in range(y_predict_probabilities_raw.shape[1])
        ]
        y_predict_probabilities = DataFrame(
            y_predict_probabilities_raw, index=X_test.index, columns=probability_columns
        )
        y_train_predict_probabilities = DataFrame(
            y_train_predict_probabilities_raw, index=X_train.index, columns=probability_columns
        )
    # Regressor-style float outputs are clipped to [0, 1] and rounded to a
    # class label.  (np.float was removed in NumPy 1.24; test the dtype kind
    # instead of comparing against the deleted alias.)
    if np.issubdtype(y_predict.dtype, np.floating):
        y_predict = y_predict \
            .map(lambda v: 0 if v < 0 else v) \
            .map(lambda v: 1 if v > 1 else v) \
            .map(lambda v: round(v))
    try:
        feature_importance = Series(
            classifier[-1].feature_importances_,
            index=feature_names,
        )
    except (TypeError, AttributeError):
        try:
            classifier[-1].coef_
        except AttributeError:
            feature_importance = None
            logging.debug("No feature importance in the result")
        else:
            feature_importance = None
            # feature_importance = Series(classifier[-1].coef_[0], index=feature_names)
    if not return_model:
        try:
            # Free the XGBoost booster explicitly to cap memory across folds.
            classifier[-1].get_booster().__del__()
        except AttributeError:
            pass
    return ModelResult(
        y_test_score=y_predict_probabilities,
        y_test_predict=y_predict,
        y_train_predict=y_train_predict,
        y_train_score=y_train_predict_probabilities,
        feature_importance=feature_importance,
        model=classifier[-1] if return_model else None,
        elapsed=timer.elapsed_cpu()
    )
cross_validate_model_fold_cached = memory.cache(cross_validate_model_fold)
def cross_validate_model_fold_args(
    classifier, get_x_y, train_index, test_index, return_model: bool = True
) -> ModelResult:
    """Run a single fold, loading the data via ``get_x_y`` (cache-friendly wrapper).

    The original implementation omitted the required ``feature_names`` and
    ``fit_kwargs`` WorkerInput keys, which made the worker raise KeyError when
    it read ``chunk_input['fit_kwargs']`` — supply explicit defaults instead.
    """
    X, y = get_x_y()
    return cross_validate_model_fold(
        WorkerInput(
            classifier=classifier,
            X_train=X.iloc[train_index],
            y_train=y.iloc[train_index],
            X_test=X.iloc[test_index],
            return_model=return_model,
            predict_proba=None,
            feature_names=None,
            fit_kwargs={},
        )
    )
cross_validate_model_fold_args_cached = memory.cache(cross_validate_model_fold_args)
def result_from_fold_results(results: Iterable[ModelResult]) -> ModelCVResult:
    """Collect per-fold ModelResult dicts into a single ModelCVResult.

    Every list-valued field gathers one entry per fold (fold order preserved);
    ``elapsed`` is the maximum fold time, i.e. the parallel wall-clock cost.
    """
    results = list(results)

    def _column(key):
        # Pull one field out of every fold result, preserving fold order.
        return [fold_result[key] for fold_result in results]

    return ModelCVResult(
        feature_importance=_column('feature_importance'),
        y_scores=_column('y_test_score'),
        y_train_scores=_column('y_train_score'),
        y_predicts=_column('y_test_predict'),
        y_train_predicts=_column('y_train_predict'),
        models=_column('model'),
        elapsed=max(_column('elapsed')),
    )
def join_repeats_and_folds_cv_results(results: List[ModelCVResult]) -> ModelResult:
    """Flatten repeated-CV output: concatenate the repeats, then join their folds."""
    return ModelResult(**pipe(
        results,
        join_repeats_cv_results,
        join_folds_cv_result,
    ))
def join_repeats_cv_results(results: List[ModelCVResult]) -> ModelCVResult:
    """Concatenate the per-fold outputs of several repeated CV runs.

    Every list-valued field is concatenated across repeats and the elapsed
    times are summed, yielding one result that looks like a single CV run
    with repeats x folds folds.
    """
    _fields = (
        'y_train_predicts',
        'y_predicts',
        'y_train_scores',
        'y_scores',
        'feature_importance',
        'models',
        'elapsed',
    )

    def _merge(accumulated, current):
        # '+' concatenates the list fields and sums the float 'elapsed'.
        return ModelCVResult(
            **{field: accumulated[field] + current[field] for field in _fields}
        )

    return reduce(_merge, results)
def get_feature_importance_from_cv_result(result: ModelCVResult) -> DataFrame:
    """Summarise per-fold feature importances as a mean/std table.

    Each fold's importance Series becomes one row; the returned frame reports
    the column-wise mean and standard deviation, sorted by descending mean.
    """
    per_fold = pandas.concat(result['feature_importance'], axis=1).transpose()
    summary = DataFrame({
        'mean': per_fold.mean(),
        'std': per_fold.std(),
    })
    return summary.sort_values(by='mean', ascending=False, inplace=False)
def join_folds_cv_result(result: ModelCVResult) -> ModelResult:
    """Concatenate per-fold predictions/scores into single, index-sorted frames."""
    return ModelResult(
        # Folds without importances (None) would break concat, hence the guard.
        feature_importance=get_feature_importance_from_cv_result(result)
        if result['feature_importance'][0] is not None else None,
        y_test_score=pandas.concat(result['y_scores']).sort_index(),
        y_test_predict=pandas.concat(result['y_predicts']).sort_index(),
        y_train_predict=pandas.concat(result['y_train_predicts']).sort_index(),
        y_train_score=pandas.concat(result['y_train_scores']).sort_index(),
        # NOTE(review): 'models' is not declared on the ModelResult TypedDict —
        # it works at runtime (plain dict) but the annotation should be extended.
        models=list(flatten(result['models'])),
        elapsed=result['elapsed'],
    )
def get_result_vector_from_result(result: ModelCVResult) -> ModelResult:
    """Join folds and, when a second score column exists, keep only that column."""
    result_single = join_folds_cv_result(result)
    try:
        single_vector_y_test_score = result_single['y_test_score'].iloc[:, 1]
    except IndexingError:
        # Score is already a single vector (no second column) — keep it as is.
        pass
    else:
        result_single['y_test_score'] = single_vector_y_test_score
    return result_single
def get_full_vector_result_comparison(y, result: ModelResult) -> DataFrame:
    """Side-by-side table of true labels, predicted classes and scores, sorted for inspection."""
    return pandas.concat(
        [
            pandas.DataFrame(
                {
                    'actual': y[result['y_test_predict'].index],
                    'predict_class': result['y_test_predict'],
                }
            ), result['y_test_score']
        ],
        axis=1,
    ).sort_values(['actual', 'predict_class'], ascending=False)
@dataclass
class ModelResultCurves:
    """ROC and precision-recall curves resampled onto a shared x-grid."""
    curve_horizontal: List[float]
    curve_vertical_recall_precision: List[float]
    curve_vertical_roc: List[float]
@dataclass
class ModelResultCurvesStd(ModelResultCurves):
    """Mean curves plus per-point standard deviations across CV folds."""
    # Std of the interpolated precision values at each grid point.
    curve_vertical_recall_precision_std: List[float]
    # Std of the interpolated TPR values at each grid point.
    curve_vertical_roc_std: List[float]
curves_interpolate_default = 100
def compute_curves(
    y_score: Series, y_true: Series, interpolate=curves_interpolate_default
) -> ModelResultCurves:
    """Interpolate the ROC and recall–precision curves onto a uniform grid.

    Only the labels whose index appears in ``y_score`` are evaluated, so a
    fold's score vector can be compared against the full label series.
    """
    grid = np.linspace(0, 1, interpolate)
    matching_labels = y_true[y_score.index]

    fpr, tpr, _ = roc_curve(matching_labels, y_score)
    roc_on_grid = interp1d(fpr, tpr)(grid)

    precision, recall, _ = precision_recall_curve(matching_labels, y_score)
    # precision_recall_curve returns recall in decreasing order, hence
    # assume_sorted=False.
    precision_on_grid = interp1d(recall, precision, assume_sorted=False)(grid)

    return ModelResultCurves(grid, precision_on_grid, roc_on_grid)
def compute_curves_folds(
    y_score_folds: List[Series],
    y_true: Series,
    interpolate=curves_interpolate_default
) -> ModelResultCurvesStd:
    """Interpolate per-fold ROC / recall–precision curves and aggregate them.

    Args:
        y_score_folds: Per-fold predicted scores (each indexed into ``y_true``).
        y_true: Ground-truth labels for the whole data set.
        interpolate: Number of points on the shared x-axis grid.

    Returns:
        ModelResultCurvesStd with the shared x-axis, the point-wise mean
        curves and the point-wise standard deviations across folds.
    """
    curves_folds = []
    curve_horizontal: List[float] = []
    for y_score in y_score_folds:
        curves = compute_curves(y_score, y_true, interpolate)
        # Every fold is interpolated onto the same grid, so keeping the last
        # fold's grid is equivalent to keeping any of them.
        curve_horizontal = curves.curve_horizontal
        curves_folds.append(curves)
    # BUG FIX: zip() returns a one-shot iterator.  The original code consumed
    # it while computing the means and then computed the standard deviations
    # from the already-exhausted iterator, producing empty std lists.
    # Materialising the point-wise groups once lets both statistics see them.
    curve_vertical_recall_precision_aligned = list(
        zip(*map(lambda i: i.curve_vertical_recall_precision, curves_folds))
    )
    curve_vertical_recall_precision_mean = list(map(mean, curve_vertical_recall_precision_aligned))
    curve_vertical_recall_precision_std = list(map(stdev, curve_vertical_recall_precision_aligned))
    curve_vertical_roc_aligned = list(zip(*map(lambda i: i.curve_vertical_roc, curves_folds)))
    curve_vertical_roc_mean = list(map(mean, curve_vertical_roc_aligned))
    curve_vertical_roc_std = list(map(stdev, curve_vertical_roc_aligned))
    return ModelResultCurvesStd(
        curve_horizontal=curve_horizontal,
        curve_vertical_recall_precision=curve_vertical_recall_precision_mean,
        curve_vertical_recall_precision_std=curve_vertical_recall_precision_std,
        curve_vertical_roc=curve_vertical_roc_mean,
        curve_vertical_roc_std=curve_vertical_roc_std,
    )
def compute_regression_metrics_from_result(
    y: Series,
    result: ModelCVResult,
) -> Optional[List[RegressionMetrics]]:
    """Compute regression metrics separately for each CV fold in *result*."""
    return [
        compute_regression_metrics(fold_scores, y)
        for fold_scores in result['y_scores']
    ]
def compute_classification_metrics_from_result(
    y: Series,
    result: ModelCVResult,
    target_variable: str = 'y_scores',
    threshold: float = DEFAULT_THRESHOLD,
    ignore_warning: bool = False,
) -> Optional[List[ClassificationMetrics]]:
    """Classification metrics for every fold stored under *target_variable*.

    Each fold's scores are first reduced to the positive-class column and
    then evaluated at *threshold*.
    """
    metrics_per_fold = []
    for fold_scores in result[target_variable]:
        positive_scores = get_1_class_y_score(fold_scores)
        metrics_per_fold.append(
            compute_classification_metrics(
                positive_scores, y, threshold=threshold, ignore_warning=ignore_warning
            )
        )
    return metrics_per_fold
def get_classification_metrics(
    y: Series,
    result: ModelCVResult,
) -> Optional[ClassificationMetricsWithSTD]:
    """Fold-wise classification metrics (mean ± std) for one CV result."""
    fold_scores = result['y_scores']
    return compute_classification_metrics_folds(fold_scores, y)
@curry
def get_regression_metrics(
    y: Series,
    result: ModelCVResult,
) -> Optional[ClassificationMetricsWithSTD]:
    """Fold-wise regression metrics for one CV result (curried on *y*).

    NOTE(review): the return annotation says ``ClassificationMetricsWithSTD``
    while the body computes *regression* metrics — likely a copy-paste slip;
    confirm the intended regression type before relying on the annotation.
    """
    return compute_regression_metrics_folds(result['y_scores'], y)
@curry
def report_cross_validation(y: Series, result: ModelCVResult) -> None:
    """Print a horizontal metrics table for *result*; silent when empty."""
    computed_metrics = get_classification_metrics(y, result)
    if not computed_metrics:
        return
    print(dict_to_table_horizontal(computed_metrics))
def compute_ci_for_metrics_collection(metrics: List[ClassificationMetrics]) -> Dict:
    """Summarise a collection of per-fold metric objects attribute by attribute.

    For every attribute found on the first metric object, the values across
    the collection are reduced to a ``ValueWithStatistics`` (mean, std,
    confidence interval).

    NOTE(review): assumes ``confidence_interval`` returns (mean, ci, std) in
    the order consumed by ``pass_args`` — verify against its definition.
    """
    attributes = get_object_attributes(metrics[0])
    metrics_with_ci_dict = {
        attribute: pass_args(
            confidence_interval(list(pluck(attribute, metrics))),
            lambda m, ci, std: ValueWithStatistics(m, std, ci),
        )
        for attribute in attributes
    }
    return metrics_with_ci_dict
def get_best_threshold_from_roc(
    tps: np.array,
    fps: np.array,
    thresholds: np.array,
) -> Tuple[float, int]:
    """Pick the threshold maximising the Youden-style separation |TPR - FPR|.

    Returns the winning threshold together with its index into the arrays.
    """
    separation = np.abs(tps - fps)
    best_index = np.argmax(separation)
    return thresholds[best_index], best_index
def get_best_threshold_from_results(y_true: Series, results: List[ModelCVResult]) -> float:
    """Best decision threshold on the threshold-averaged ROC of *results*."""
    averaged_fpr, averaged_tpr, averaged_thresholds = compute_threshold_averaged_roc(
        y_true, results
    )
    best_threshold, _ = get_best_threshold_from_roc(
        averaged_tpr, averaged_fpr, averaged_thresholds
    )
    return best_threshold
def compute_classification_metrics_from_results_with_statistics(
    y_true: Series,
    results: List[ModelCVResult],
    threshold: Optional[float] = None,
    target_variable: str = 'y_scores',
    ignore_warning: bool = False,
) -> ClassificationMetricsWithStatistics:
    """Pool per-fold classification metrics over several CV results.

    When *threshold* is None, the best threshold of the threshold-averaged
    ROC over all *results* is used for every fold.

    NOTE(review): ``pipeline`` presumably applies the listed steps left to
    right — per-result fold metrics, flattened across results, then
    summarised per attribute (mean/std/CI); confirm against its definition.
    """
    chosen_threshold = threshold if threshold is not None else get_best_threshold_from_results(
        y_true, results
    )
    return pipeline(
        results,
        [
            partial(
                mapl,
                partial(
                    compute_classification_metrics_from_result,
                    y_true,
                    threshold=chosen_threshold,
                    target_variable=target_variable,
                    ignore_warning=ignore_warning,
                )
            ),
            flatten,
            list,
            compute_ci_for_metrics_collection,
        ],
    )
def compute_regression_metrics_from_results_with_statistics(
    y_true: Series,
    results: List[ModelCVResult],
) -> RegressionMetricsWithStatistics:
    """Pool per-fold regression metrics over several CV results, with CIs.

    NOTE(review): ``pipeline`` presumably applies the steps left to right —
    per-result fold metrics, flattened, CI-summarised, then wrapped into the
    typed result object; confirm against the pipeline helper.
    """
    return pipeline(
        results,
        [
            partial(mapl, partial(compute_regression_metrics_from_result, y_true)), flatten, list,
            compute_ci_for_metrics_collection, lambda item: RegressionMetricsWithStatistics(**item)
        ],
    )
def compute_metrics_from_result_ci(
    y_true: Series,
    result: ModelCVResult,
    threshold: Optional[float] = None
) -> ClassificationMetricsWithStatistics:
    """Per-fold classification metrics of a single CV result, with CIs.

    When *threshold* is omitted, the best threshold on the result's own
    threshold-averaged ROC curve is used.
    """
    chosen_threshold = threshold if threshold is not None else get_best_threshold_from_results(
        y_true, [result]
    )
    return pipeline(
        result,
        [
            # BUG FIX: compute_classification_metrics_from_result is a plain
            # function (it is not @curry-decorated), so calling it here with
            # only y_true raised TypeError for the missing `result` argument.
            # Bind the leading arguments with partial instead, exactly as the
            # sibling *_with_statistics helper above already does.
            partial(
                compute_classification_metrics_from_result,
                y_true,
                threshold=chosen_threshold,
            ),
            compute_ci_for_metrics_collection,
        ],
    )
def get_roc_point_by_threshold(
    threshold: float,
    fpr: np.array,
    tpr: np.array,
    thresholds: np.array,
) -> Tuple[float, float]:
    """Linearly interpolate the (FPR, TPR) point at *threshold* on a ROC curve.

    *thresholds* is assumed sorted in decreasing order (as ``roc_curve``
    returns it); the point is interpolated between the last threshold that is
    still >= *threshold* and its successor, clamped at the end of the curve.
    """
    upper = find_index(lambda _index: _index >= threshold, thresholds, reverse=True)
    lower = upper + 1 if upper != len(thresholds) - 1 else upper

    upper_threshold = thresholds[upper]
    lower_threshold = thresholds[lower]
    if upper_threshold == lower_threshold:
        weight = 1
    else:
        weight = (threshold - lower_threshold) / (upper_threshold - lower_threshold)

    interpolated_fpr = fpr[lower] * (1 - weight) + fpr[upper] * weight
    interpolated_tpr = tpr[lower] * (1 - weight) + tpr[upper] * weight
    return (interpolated_fpr, interpolated_tpr)
def compute_threshold_averaged_roc(
    y_true: Series, results: List[ModelCVResult]
) -> Tuple[np.array, np.array, np.array]:
    """Average the ROC curves of every fold of every result by threshold.

    Each fold's ROC is evaluated (via interpolation) at the union of all
    thresholds, the per-threshold points are averaged across folds, and
    points that would make the FPR axis non-increasing are dropped so the
    returned (fpr, tpr, thresholds) triple forms a monotone ROC curve.
    """
    def roc_curve_for_fold(y_score):
        # Restrict the labels to the fold's rows before computing its ROC.
        _fpr, _tpr, thresholds = roc_curve(y_true.loc[y_score.index], get_1_class_y_score(y_score))
        return _fpr, _tpr, thresholds
    roc_curves = list(
        flatten(
            [[roc_curve_for_fold(y_score) for y_score in result['y_scores']] for result in results]
        )
    )
    # Union of every threshold seen on any fold, scanned from high to low.
    all_thresholds = sorted(list(flatten([roc[2] for roc in roc_curves])), reverse=True)
    def get_merged_roc_point(
        _roc_curves: List[Tuple[np.array, np.array, np.array]], threshold: float
    ) -> Tuple[float, float]:
        # roc_curve can emit a sentinel threshold above 1 for the (0, 0)
        # point; clamp so interpolation stays inside the curve.
        if threshold > 1:
            threshold = 1
        merged_fpr, merged_tpr = pipe(
            _roc_curves,
            map(lambda curve: get_roc_point_by_threshold(threshold, *curve)),
            list,
            partial(np.mean, axis=0),
        )
        return merged_fpr, merged_tpr
    merged_curve = [get_merged_roc_point(roc_curves, threshold) for threshold in all_thresholds]
    fpr, tpr = list(unzip(merged_curve))
    # Mark every index whose FPR does not strictly increase toward the next
    # point; dropping them keeps the averaged curve monotone in FPR.
    indexes_to_delete = []
    for index, _ in enumerate(all_thresholds):
        try:
            if fpr[index] == fpr[index + 1] or fpr[index + 1] < fpr[index]:
                indexes_to_delete.append(index)
        except IndexError:
            pass
    def remove_indexes(iterable: Iterable, indexes: List[int]) -> Iterable:
        # Keep only positions not listed in *indexes* (curried map/filter).
        return pipe(
            iterable,
            add_index,
            filter(decorate_unpack(lambda i, _: i not in indexes)),
            map(get(1)),
            list,
        )
    return (
        np.array(remove_indexes(fpr, indexes_to_delete)),
        np.array(remove_indexes(tpr, indexes_to_delete)),
        np.array(remove_indexes(all_thresholds, indexes_to_delete)),
    )
def get_1_class_y_score(y_score: Union[DataFrame, Series]) -> Series:
    """Reduce a two-column score frame to its positive-class column.

    A ``Series`` is assumed to already hold the positive-class scores and is
    returned unchanged; a ``DataFrame`` is reduced to its second column.
    """
    return y_score if isinstance(y_score, Series) else y_score.iloc[:, 1]
class ConfusionMetrics(DataStructure):
    """Derived rates of a binary confusion matrix."""
    recall: float
    precision: float
    f1: float
    fpr: float
    tnr: float
    fnr: float
    npv: float

    def __init__(self, recall, precision, fpr, tnr, fnr, npv):
        self.recall = recall
        self.precision = precision
        self.fpr = fpr
        self.tnr = tnr
        self.fnr = fnr
        self.npv = npv
        # F1 is the harmonic mean of precision and recall; degenerate inputs
        # (either rate equal to zero) are mapped to an F1 of zero.
        try:
            self.f1 = 2 / ((1 / precision) + (1 / recall))
        except ZeroDivisionError:
            self.f1 = 0
def get_metrics_from_confusion_matrix(confusion_matrix) -> ConfusionMetrics:
    """Derive precision, recall, FPR, TNR, FNR and NPV from raw counts.

    An undefined precision or recall (empty denominator) becomes NaN; an
    undefined NPV becomes 0, matching the original conventions.
    """
    tp, fp = confusion_matrix.tp, confusion_matrix.fp
    tn, fn = confusion_matrix.tn, confusion_matrix.fn

    try:
        npv = tn / (tn + fn)
    except ZeroDivisionError:
        npv = 0

    precision = tp / (tp + fp) if (tp + fp) > 0 else NaN
    recall = tp / (tp + fn) if (tp + fn) > 0 else NaN

    return ConfusionMetrics(
        precision=precision,
        recall=recall,
        fpr=fp / (fp + tn),
        tnr=tn / (fp + tn),
        fnr=fn / (fn + tp),
        npv=npv,
    )
def get_confusion_from_threshold(
    y: Series, scores: Series, threshold: float = 0.5
) -> ConfusionMatrix:
    """Count TP/FP/TN/FN of thresholded *scores* against binary labels *y*.

    A score >= *threshold* counts as a positive prediction.  Labels other
    than 0 and 1 are skipped, and a score that satisfies neither comparison
    (e.g. NaN) is skipped as well, matching the original branch structure.
    """
    true_positive = false_positive = 0
    true_negative = false_negative = 0

    for index, score in scores.items():
        label = y.loc[index]
        if score < threshold:
            if label == 1:
                false_negative += 1
            elif label == 0:
                true_negative += 1
        elif score >= threshold:
            if label == 1:
                true_positive += 1
            elif label == 0:
                false_positive += 1

    return ConfusionMatrix(
        fn=false_negative,
        tn=true_negative,
        tp=true_positive,
        fp=false_positive,
    )
@dataclass
class PRPoint(DataStructure):
    """One point on a precision–recall curve, keyed by decision threshold."""
    # Decision threshold this point corresponds to.
    threshold: float
    # Optional derived rates at this threshold (None when not computed).
    metrics: Optional[ConfusionMetrics]
@dataclass
class ROCPoint(DataStructure):
    """One (TPR, TNR) point on a ROC curve, optionally with its threshold."""
    # True-positive rate (sensitivity) at this point.
    tpr: float
    # True-negative rate (specificity) at this point.
    tnr: float
    # Decision threshold; defaults to 0 when unknown.
    threshold: Optional[float] = 0
def get_metrics_from_roc(curve, tpr_threshold=0.8) -> ROCPoint:
    """TNR achieved at a fixed TPR, read off an (fpr, tpr, ...) ROC curve."""
    fpr_at_tpr = interp1d(curve[1], curve[0], assume_sorted=False, kind='linear')
    fpr_value = fpr_at_tpr(tpr_threshold).flat[0]
    return ROCPoint(tpr=tpr_threshold, tnr=1 - fpr_value)
def get_metrics_from_pr(curve, tpr_threshold=0.8) -> PRPoint:
    """Decision threshold at which the recall axis first reaches *tpr_threshold*.

    NOTE(review): relies on the project helper ``find_index_right`` scanning
    ``curve[1]`` (the recall values) from the right; confirm its contract.
    """
    index, tpr_final = find_index_right(lambda _, tpr: tpr >= tpr_threshold, curve[1])
    return PRPoint(threshold=float64(curve[2][index]), metrics=None)
def get_end_to_end_metrics_table(
    y_true, results_for_methods_optimized, results_for_methods_default
):
    """Per-method metric summaries for the optimized and default runs."""
    def summarise(results_for_methods):
        # Map every method's CV results to pooled metrics with statistics.
        return valmap(
            lambda method_results: compute_classification_metrics_from_results_with_statistics(
                y_true, method_results
            ),
            results_for_methods,
        )

    return summarise(results_for_methods_optimized), summarise(results_for_methods_default)
def get_si_k_evaluation(
    X_all, range_n_clusters, protocol, features_for_k: List[Union[List[str], str]]
):
    """Silhouette ("si") score for every cluster count in *range_n_clusters*.

    *features_for_k* is either one flat feature list shared across all k, or
    a per-k container of feature lists where a ``None`` entry skips that k.
    """
    uses_shared_features = not any(isinstance(item, list) for item in features_for_k)

    def silhouette_for(_k: int) -> Optional[float]:
        if uses_shared_features:
            feature_frame = X_all[features_for_k]
        else:
            selected_features = features_for_k[_k]
            if selected_features is None:
                # No feature selection available for this cluster count.
                return None
            feature_frame = X_all[selected_features]
        predicted_clusters = protocol.algorithm(feature_frame, _k)
        internal_metrics = protocol.measure_internal_metrics(feature_frame, predicted_clusters)
        return internal_metrics["si"]

    return [silhouette_for(k) for k in range_n_clusters]
def compare_and_format_results(
    y_true: Series,
    results_for_methods: Dict[str, List[ModelCVResult]],
    include: Tuple[str] = (
        'balanced_accuracy', 'roc_auc', 'recall', 'fpr', 'f1', 'average_precision'
    ),
) -> str:
    """Render a comparison table of methods over the metrics in *include*.

    Each row holds a method's mean metric values, sorted by the first metric
    descending; next to every value the difference to the column's best value
    is printed, so the leader shows 0 and everyone else a negative delta.
    """
    metrics_for_methods = valmap(
        lambda r: compute_classification_metrics_from_results_with_statistics(y_true, r),
        results_for_methods
    )
    def get_line(method: str, metrics: ClassificationMetricsWithStatistics):
        # One table row: formatted method name followed by the mean values.
        return [format_method(method), *[metrics[metric].mean for metric in include]]
    lines = sorted(
        [get_line(method, metrics) for method, metrics in metrics_for_methods.items()],
        key=get(1),
        reverse=True,
    )
    # Column-wise maxima (None for the method-name column) used as baselines.
    max_by_column = [
        None if index == 0 else max(pluck(index, lines)) for index in range(len(include) + 1)
    ]
    # Interleave every value with its distance from the column maximum.
    lines_with_differences = [
        list(
            flatten(
                [
                    item if item_index == 0 else [item, item - max_by_column[item_index]]
                    for item_index, item in enumerate(line)
                ]
            )
        )
        for line in lines
    ]
    # Header: short metric names, each followed by an empty delta column.
    return tabulate_formatted(
        format_structure(
            format_decimal,
            [
                ['', *flatten(map(lambda metric: [format_metric_short(metric), ''], include))],
                *lines_with_differences,
            ],
        )
    )
def get_list_of_scores_from_repeated_cv_results(
    repeated_cv_results: List[ModelCVResult]
) -> List[Series]:
    """Collect every fold's score series across a list of repeated CV runs."""
    scores_per_repeat = (repeat['y_scores'] for repeat in repeated_cv_results)
    return list(flatten(scores_per_repeat))
def average_list_of_confusion_matrices(
    matrices: List[ConfusionMatrix]
) -> ConfusionMatrixWithStatistics:
    """Element-wise mean/std of a list of confusion matrices.

    NOTE(review): assumes ``average_list_dicts`` yields a (mean, std) pair
    per key — confirm against its definition.  No confidence interval is
    computed (``ci=None``).
    """
    return pipe(
        matrices,
        partial(map, object2dict),
        list,
        average_list_dicts,
        partial(valmap, lambda value: ValueWithStatistics(mean=value[0], std=value[1], ci=None)),
        lambda matrix: ConfusionMatrixWithStatistics(**matrix),
    )
def partial_dependency_analysis(
    method: Method, X: DataFrame, y: Series, features: List = None
) -> None:
    """Fit *method*'s pipeline on (X, y) and plot partial dependence.

    Defaults to analysing every column of *X* when *features* is omitted.
    """
    selected_features = list(X.columns) if features is None else features
    fitted_pipeline = method.get_pipeline()
    fitted_pipeline.fit_transform(X, y)
    plot_partial_dependence(fitted_pipeline, X, selected_features, target=y)
def ale_analysis(model: Estimator, X: DataFrame, features: List = None, **ale_args) -> None:
    """Show one accumulated-local-effects (ALE) plot per analysed feature.

    Defaults to every column of *X*; extra keyword arguments are forwarded
    to the ``ale`` plotting helper.
    """
    selected_features = features if features is not None else list(X.columns)
    for feature in selected_features:
        print(feature)
        ale(
            X=X,
            model=model,
            feature=[feature],
            include_CI=True,
            C=0.95,
            **ale_args,
        )
        pyplot.show()
def visualise_trees(models, columns):
    """Tabulate text dumps of every tree of every fold's ensemble side by side.

    The first table column numbers the estimators; each further column holds
    one fold's trees rendered with sklearn's ``export_text``.
    """
    feature_names = list(columns)
    estimator_count = len(models[0][-1].estimators_)

    def render_fold(model):
        # One text dump per estimator of this fold's fitted ensemble.
        return [
            export_text(estimator, show_weights=True, feature_names=feature_names) + '\n'
            for estimator in model[-1].estimators_
        ]

    table_columns = [list(range(1, estimator_count + 1))]
    table_columns += [render_fold(model) for model in models]

    rows = list(map(list, moves.zip_longest(*table_columns, fillvalue='-')))
    return tabulate.tabulate(rows, [f'fold {i}' for i in range(1, 11)])
def get_roc_from_fold(y, result):
    """ROC curve of a single fold, using the positive-class probability."""
    positive_scores = result['y_predict_probabilities'].iloc[:, 1]
    matching_labels = y.loc[positive_scores.index]
    return roc_curve(matching_labels, positive_scores)
def get_pr_from_fold(y, result):
    """Precision–recall curve of a single fold (positive-class probability)."""
    positive_scores = result['y_predict_probabilities'].iloc[:, 1]
    matching_labels = y.loc[positive_scores.index]
    return precision_recall_curve(matching_labels, positive_scores)
def get_train_test_sampling(X: DataFrame, fraction=0.8) -> List[Tuple[List[int], List[int]]]:
    """Build a single deterministic train/test split of positional indices.

    The first ``fraction`` of the rows (by position) become the training
    indices and the remainder the test indices.
    """
    split_point = round(fraction * len(X))
    train_indices = list(range(split_point))
    test_indices = list(range(split_point, len(X)))
    return [(train_indices, test_indices)]
| 30.407207 | 121 | 0.66085 |
f9f7b7019917580dc1fc470d944c36c3c5e83ab0 | 123 | py | Python | abc/abc142/abc142c-3.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | 1 | 2019-08-21T00:49:34.000Z | 2019-08-21T00:49:34.000Z | abc/abc142/abc142c-3.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | null | null | null | abc/abc142/abc142c-3.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | null | null | null | N = int(input())
A = list(map(int, input().split()))
rev = [0] * N
for i in range(N):
rev[A[i] - 1] = i+1
print(*rev)
| 15.375 | 35 | 0.512195 |
7053ccf93baa6aa6b7a6ad86852971c38817e18a | 4,011 | py | Python | pixelart_image.py | blobbeliblob/pixelart_converter | 2347796d287bdcd370f6ae98026229b5f8da2d64 | [
"MIT"
] | null | null | null | pixelart_image.py | blobbeliblob/pixelart_converter | 2347796d287bdcd370f6ae98026229b5f8da2d64 | [
"MIT"
] | null | null | null | pixelart_image.py | blobbeliblob/pixelart_converter | 2347796d287bdcd370f6ae98026229b5f8da2d64 | [
"MIT"
] | null | null | null | '''
author: Camilo Hernández
This script takes an image file and converts it into pixelart based on the given initial parameters
file_name # this should point to the name of the image file (without the file extension)
file_format # this should be the type of file
new_file_appendix # this should be what is added to the file name on the new image
pixel_size # this should be the size of a square in the pixelart, i.e. how many pixels wide and high it is
color_palette_src # this should point to a file containing data on the colors used in the pixelart image
'''
import os
import sys
from PIL import Image
#returns color closest to the given color using Euclidean distance
def pixel_color(pixel, palette):
closest_color = palette[0]
minimum_distance = (closest_color[0]-pixel[0])**2+(closest_color[1]-pixel[1])**2+(closest_color[2]-pixel[2])**2
for color in palette: #color[0]=R, color[1]=G, color[2]=B
distance = (color[0]-pixel[0])**2+(color[1]-pixel[1])**2+(color[2]-pixel[2])**2
if distance < minimum_distance:
minimum_distance = distance
closest_color = color
return closest_color
#converts the given image into pixelart
def pixelart(img, size, color_palette_src, fast=True):
	"""Convert *img* to pixel art drawn from the palette file *color_palette_src*.

	The image is cropped down to a multiple of *size*, then every size x size
	block is replaced by the palette color closest to the block's average
	color.  With fast=False every pixel is first snapped to the palette
	before the block averaging, which is slower and can shift which palette
	color a block ends up with.  On any palette-loading error the original
	image is returned unmodified.
	"""
	#get the color palette (one "R,G,B" line per color)
	try:
		print("Opening palette...")
		color_file = open(color_palette_src, "r")
		color_data = color_file.readlines()
		palette = []
		for color in color_data:
			c = color.split(",")
			palette.append((int(c[0]), int(c[1]), int(c[2])))
		print("Successful!\n")
	except Exception as e:
		print("Failed!")
		print("Error: " + str(e))
		print("Image returned without modification.\n")
		return img	#return the image non-modified
	img = img.convert('RGB')	#convert the image to rgb
	width, height = img.size	#get dimensions
	img = img.crop((0, 0, width - width % size, height - height % size))	#crop the image to fit the pixel size
	width, height = img.size	#get new dimensions
	pixels = img.load()	#get pixels (mutable pixel-access object)
	print("Processing pixels...")
	if not fast:
		#slow mode: snap every single pixel to the palette first
		for x in range(width):
			for y in range(height):
				pixels[x, y] = pixel_color(pixels[x, y], palette)
				print("Progress: " + str(x + 1) + " / " + str(width), end="\r")
	#replace each size x size block by the palette color of its average
	for x in range(0, width, size):
		for y in range(0, height, size):
			r, g, b = 0, 0, 0
			for i in range(x, x+size):
				for j in range(y, y+size):
					r, g, b = r + pixels[i, j][0], g + pixels[i, j][1], b + pixels[i, j][2]
			r, g, b = r/(size**2), g/(size**2), b/(size**2)
			color = pixel_color((r, g, b), palette)
			for i in range(x, x+size):
				for j in range(y, y+size):
					pixels[i, j] = color
			print("Progress: " + str(x + 1) + " / " + str(width), end="\r")
	print("Pixels processed!\n")
	return img
def main():
	"""Load the configured image, convert it to pixel art and save the result.

	All configuration is hard-coded below.  Each phase (open, convert, save)
	prints its own status and failures in open/save abort the program.
	"""
	#initial data
	file_name = "test"	#name of the image to be converted
	file_format = "jpg"	#format of the image to be converted
	new_file_appendix = "_pixelart"	#added to the name of the new file, leave as empty string if source file should be replaced
	src = file_name + "." + file_format
	pixel_size = 8	#how many pixels wide and high should a "pixel" be in the new image
	color_palette_src = "palette_16"	#name of the file containing color palette, each color should be on a new line, rgb values should be comma separated, no whitespace
	#open the image
	try:
		print("Opening image...")
		img = Image.open(src)
		print("Successful!\n")
	except Exception as e:
		print("Failed!")
		print("Error: " + str(e) + "\n")
		print("Program will terminate.")
		return	#end the program
	#process the image (on conversion failure the original image is saved)
	try:
		print("Converting...\n")
		img = pixelart(img, pixel_size, color_palette_src)
		print("Conversion successful!\n")
	except Exception as e:
		print("Conversion failed!")
		print("Error: " + str(e) + "\n")
	#save the image
	try:
		print("Saving image...")
		new_src = file_name + new_file_appendix + "." + file_format
		img.save(new_src)
		print("Successful!\n")
	except Exception as e:
		print("Failed!")
		print("Error: " + str(e) + "\n")
		print("Program will terminate.")
		return	#end the program
main()
#end of program
| 35.495575 | 165 | 0.68013 |
a68aee7aa8f896eeae0901d1e0144b6f435c5d8b | 2,234 | py | Python | python/paddle/tests/test_progressbar.py | Li-fAngyU/Paddle | e548f65f96697830035a28f9070b40829408ccdb | [
"Apache-2.0"
] | 8 | 2016-08-15T07:02:27.000Z | 2016-08-24T09:34:00.000Z | python/paddle/tests/test_progressbar.py | Li-fAngyU/Paddle | e548f65f96697830035a28f9070b40829408ccdb | [
"Apache-2.0"
] | 1 | 2022-01-28T07:23:22.000Z | 2022-01-28T07:23:22.000Z | python/paddle/tests/test_progressbar.py | Li-fAngyU/Paddle | e548f65f96697830035a28f9070b40829408ccdb | [
"Apache-2.0"
] | 1 | 2022-03-02T11:36:03.000Z | 2022-03-02T11:36:03.000Z | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import unittest
import random
import time
from paddle.hapi.progressbar import ProgressBar
class TestProgressBar(unittest.TestCase):
    """Smoke tests for paddle.hapi.progressbar.ProgressBar (no output checks)."""
    def prog_bar(self, num, epoch, width, verbose=1):
        """Drive a ProgressBar through normal and edge-case updates.

        Runs *epoch* passes of *num* steps with drifting metric values, then
        feeds scalar/str/numpy metric payloads and pokes private state
        (_dynamic_display, _num) to cover the formatting branches.  Passing
        means simply that nothing raises.

        NOTE(review): *width* is accepted but never used in the body.
        """
        for epoch in range(epoch):
            progbar = ProgressBar(num, verbose=verbose)
            values = [
                ['loss', 50.341673],
                ['acc', 0.00256],
            ]
            for step in range(1, num + 1):
                values[0][1] -= random.random() * 0.1
                values[1][1] += random.random() * 0.1
                if step % 10 == 0:
                    progbar.update(step, values)
                time.sleep(0.002)
            # Flush the final state of this epoch's bar.
            progbar.update(step, values)
        # Exercise the metric-formatting branches with unusual payloads.
        progbar.update(1, [['loss', int(1)]])
        progbar.update(1, [['loss', 'INF']])
        progbar.update(1, [['loss', 1e-4]])
        progbar.update(1, [['loss', np.array([1.])]])
        progbar.update(1, [['loss', np.array([1e-4])]])
        progbar.update(1, [['loss', np.array([1]).astype(np.uint16)]])
        progbar.start()
        # Poke private state to hit the static-display and unknown-total paths.
        progbar.update(0, values)
        progbar._dynamic_display = False
        progbar.update(1e4, values)
        progbar._num = None
        progbar.update(0, values)
        progbar._num = 1
        progbar.update(1 + 1e-4, values)
    def test1(self):
        """One epoch, default verbosity."""
        self.prog_bar(50, 1, 30)
    def test2(self):
        """Two epochs, default verbosity."""
        self.prog_bar(50, 2, 30)
    def test4(self):
        """Two epochs with verbose=2 output mode."""
        self.prog_bar(50, 2, 30, verbose=2)
    def test_errors(self):
        """A negative step total must be rejected with TypeError."""
        with self.assertRaises(TypeError):
            ProgressBar(-1)
if __name__ == '__main__':
    unittest.main()
ca371e8e4877886b139363b3b510e9fc80053593 | 13,537 | py | Python | sympy/functions/elementary/tests/test_hyperbolic.py | pernici/sympy | 5e6e3b71da777f5b85b8ca2d16f33ed020cf8a41 | [
"BSD-3-Clause"
] | 2 | 2015-11-13T16:40:57.000Z | 2017-09-15T15:37:19.000Z | sympy/functions/elementary/tests/test_hyperbolic.py | jegerjensen/sympy | 3a43310f1957a21a6f095fe2801cc05b5268a2c7 | [
"BSD-3-Clause"
] | 1 | 2016-06-13T01:29:51.000Z | 2016-06-14T00:38:27.000Z | sympy/functions/elementary/tests/test_hyperbolic.py | jegerjensen/sympy | 3a43310f1957a21a6f095fe2801cc05b5268a2c7 | [
"BSD-3-Clause"
] | null | null | null | from sympy import symbols, Symbol, sinh, nan, oo, zoo, pi, asinh, acosh, log, sqrt, \
coth, I, cot, E, tanh, tan, cosh, cos, S, sin, Rational, atanh, acoth, \
Integer, O, exp
from sympy.utilities.pytest import XFAIL
def test_sinh():
    """sinh at special points, parities, and imaginary arguments (sinh(ix)=i*sin(x))."""
    x, y = symbols('x,y')
    k = Symbol('k', integer=True)
    assert sinh(nan) == nan
    assert sinh(zoo) == nan
    assert sinh(oo) == oo
    assert sinh(-oo) == -oo
    assert sinh(0) == 0
    assert sinh(1) == sinh(1)
    assert sinh(-1) == -sinh(1)
    assert sinh(x) == sinh(x)
    assert sinh(-x) == -sinh(x)
    assert sinh(pi) == sinh(pi)
    assert sinh(-pi) == -sinh(pi)
    assert sinh(2**1024 * E) == sinh(2**1024 * E)
    assert sinh(-2**1024 * E) == -sinh(2**1024 * E)
    assert sinh(pi*I) == 0
    assert sinh(-pi*I) == 0
    assert sinh(2*pi*I) == 0
    assert sinh(-2*pi*I) == 0
    assert sinh(-3*10**73*pi*I) == 0
    assert sinh(7*10**103*pi*I) == 0
    assert sinh(pi*I/2) == I
    assert sinh(-pi*I/2) == -I
    assert sinh(5*pi*I/2) == I
    assert sinh(7*pi*I/2) == -I
    assert sinh(pi*I/3) == S.Half*sqrt(3)*I
    assert sinh(-2*pi*I/3) == -S.Half*sqrt(3)*I
    assert sinh(pi*I/4) == S.Half*sqrt(2)*I
    assert sinh(-pi*I/4) == -S.Half*sqrt(2)*I
    assert sinh(17*pi*I/4) == S.Half*sqrt(2)*I
    assert sinh(-3*pi*I/4) == -S.Half*sqrt(2)*I
    assert sinh(pi*I/6) == S.Half*I
    assert sinh(-pi*I/6) == -S.Half*I
    assert sinh(7*pi*I/6) == -S.Half*I
    assert sinh(-5*pi*I/6) == -S.Half*I
    assert sinh(pi*I/105) == sin(pi/105)*I
    assert sinh(-pi*I/105) == -sin(pi/105)*I
    assert sinh(2 + 3*I) == sinh(2 + 3*I)
    assert sinh(x*I) == sin(x)*I
    assert sinh(k*pi*I) == 0
    assert sinh(17*k*pi*I) == 0
    assert sinh(k*pi*I/2) == sin(k*pi/2)*I
def test_cosh():
    """cosh at special points, even parity, and imaginary arguments (cosh(ix)=cos(x))."""
    x, y = symbols('x,y')
    k = Symbol('k', integer=True)
    assert cosh(nan) == nan
    assert cosh(zoo) == nan
    assert cosh(oo) == oo
    assert cosh(-oo) == oo
    assert cosh(0) == 1
    assert cosh(1) == cosh(1)
    assert cosh(-1) == cosh(1)
    assert cosh(x) == cosh(x)
    assert cosh(-x) == cosh(x)
    assert cosh(pi*I) == cos(pi)
    assert cosh(-pi*I) == cos(pi)
    assert cosh(2**1024 * E) == cosh(2**1024 * E)
    assert cosh(-2**1024 * E) == cosh(2**1024 * E)
    assert cosh(pi*I/2) == 0
    assert cosh(-pi*I/2) == 0
    assert cosh(pi*I/2) == 0
    assert cosh(-pi*I/2) == 0
    assert cosh((-3*10**73+1)*pi*I/2) == 0
    assert cosh((7*10**103+1)*pi*I/2) == 0
    assert cosh(pi*I) == -1
    assert cosh(-pi*I) == -1
    assert cosh(5*pi*I) == -1
    assert cosh(8*pi*I) == 1
    assert cosh(pi*I/3) == S.Half
    assert cosh(-2*pi*I/3) == -S.Half
    assert cosh(pi*I/4) == S.Half*sqrt(2)
    assert cosh(-pi*I/4) == S.Half*sqrt(2)
    assert cosh(11*pi*I/4) == -S.Half*sqrt(2)
    assert cosh(-3*pi*I/4) == -S.Half*sqrt(2)
    assert cosh(pi*I/6) == S.Half*sqrt(3)
    assert cosh(-pi*I/6) == S.Half*sqrt(3)
    assert cosh(7*pi*I/6) == -S.Half*sqrt(3)
    assert cosh(-5*pi*I/6) == -S.Half*sqrt(3)
    assert cosh(pi*I/105) == cos(pi/105)
    assert cosh(-pi*I/105) == cos(pi/105)
    assert cosh(2 + 3*I) == cosh(2 + 3*I)
    assert cosh(x*I) == cos(x)
    assert cosh(k*pi*I) == cos(k*pi)
    assert cosh(17*k*pi*I) == cos(17*k*pi)
    assert cosh(k*pi) == cosh(k*pi)
def test_tanh():
    """tanh at special points, odd parity, and imaginary arguments (tanh(ix)=i*tan(x))."""
    x, y = symbols('x,y')
    k = Symbol('k', integer=True)
    assert tanh(nan) == nan
    assert tanh(zoo) == nan
    assert tanh(oo) == 1
    assert tanh(-oo) == -1
    assert tanh(0) == 0
    assert tanh(1) == tanh(1)
    assert tanh(-1) == -tanh(1)
    assert tanh(x) == tanh(x)
    assert tanh(-x) == -tanh(x)
    assert tanh(pi) == tanh(pi)
    assert tanh(-pi) == -tanh(pi)
    assert tanh(2**1024 * E) == tanh(2**1024 * E)
    assert tanh(-2**1024 * E) == -tanh(2**1024 * E)
    assert tanh(pi*I) == 0
    assert tanh(-pi*I) == 0
    assert tanh(2*pi*I) == 0
    assert tanh(-2*pi*I) == 0
    assert tanh(-3*10**73*pi*I) == 0
    assert tanh(7*10**103*pi*I) == 0
    assert tanh(pi*I/2) == tanh(pi*I/2)
    assert tanh(-pi*I/2) == -tanh(pi*I/2)
    assert tanh(5*pi*I/2) == tanh(5*pi*I/2)
    assert tanh(7*pi*I/2) == tanh(7*pi*I/2)
    assert tanh(pi*I/3) == sqrt(3)*I
    assert tanh(-2*pi*I/3) == sqrt(3)*I
    assert tanh(pi*I/4) == I
    assert tanh(-pi*I/4) == -I
    assert tanh(17*pi*I/4) == I
    assert tanh(-3*pi*I/4) == I
    assert tanh(pi*I/6) == I/sqrt(3)
    assert tanh(-pi*I/6) == -I/sqrt(3)
    assert tanh(7*pi*I/6) == I/sqrt(3)
    assert tanh(-5*pi*I/6) == I/sqrt(3)
    assert tanh(pi*I/105) == tan(pi/105)*I
    assert tanh(-pi*I/105) == -tan(pi/105)*I
    assert tanh(2 + 3*I) == tanh(2 + 3*I)
    assert tanh(x*I) == tan(x)*I
    assert tanh(k*pi*I) == 0
    assert tanh(17*k*pi*I) == 0
    assert tanh(k*pi*I/2) == tan(k*pi/2)*I
def test_coth():
    """coth at special points, odd parity, and imaginary arguments (coth(ix)=-i*cot(x))."""
    x, y = symbols('x,y')
    k = Symbol('k', integer=True)
    assert coth(nan) == nan
    assert coth(zoo) == nan
    assert coth(oo) == 1
    assert coth(-oo) == -1
    assert coth(0) == coth(0)
    assert coth(1) == coth(1)
    assert coth(-1) == -coth(1)
    assert coth(x) == coth(x)
    assert coth(-x) == -coth(x)
    assert coth(pi*I) == -I*cot(pi)
    assert coth(-pi*I) == cot(pi)*I
    assert coth(2**1024 * E) == coth(2**1024 * E)
    assert coth(-2**1024 * E) == -coth(2**1024 * E)
    assert coth(pi*I) == -I*cot(pi)
    assert coth(-pi*I) == I*cot(pi)
    assert coth(2*pi*I) == -I*cot(2*pi)
    assert coth(-2*pi*I) == I*cot(2*pi)
    assert coth(-3*10**73*pi*I) == I*cot(3*10**73*pi)
    assert coth(7*10**103*pi*I) == -I*cot(7*10**103*pi)
    assert coth(pi*I/2) == 0
    assert coth(-pi*I/2) == 0
    assert coth(5*pi*I/2) == 0
    assert coth(7*pi*I/2) == 0
    assert coth(pi*I/3) == -I/sqrt(3)
    assert coth(-2*pi*I/3) == -I/sqrt(3)
    assert coth(pi*I/4) == -I
    assert coth(-pi*I/4) == I
    assert coth(17*pi*I/4) == -I
    assert coth(-3*pi*I/4) == -I
    assert coth(pi*I/6) == -sqrt(3)*I
    assert coth(-pi*I/6) == sqrt(3)*I
    assert coth(7*pi*I/6) == -sqrt(3)*I
    assert coth(-5*pi*I/6) == -sqrt(3)*I
    assert coth(pi*I/105) == -cot(pi/105)*I
    assert coth(-pi*I/105) == cot(pi/105)*I
    assert coth(2 + 3*I) == coth(2 + 3*I)
    assert coth(x*I) == -cot(x)*I
    assert coth(k*pi*I) == -cot(k*pi)*I
    assert coth(17*k*pi*I) == -cot(17*k*pi)*I
    assert coth(k*pi*I) == -cot(k*pi)*I
def test_asinh():
    """asinh at special points, odd parity, infinities and exact imaginary values."""
    x, y = symbols('x,y')
    assert asinh(x) == asinh(x)
    assert asinh(-x) == -asinh(x)
    assert asinh(nan) == nan
    assert asinh( 0) == 0
    assert asinh(+1) == log(sqrt(2)+1)
    assert asinh(-1) == log(sqrt(2)-1)
    assert asinh(I) == pi*I/2
    assert asinh(-I) == -pi*I/2
    assert asinh(I/2) == pi*I/6
    assert asinh(-I/2) == -pi*I/6
    assert asinh(oo) == oo
    assert asinh(-oo) == -oo
    assert asinh(I*oo) == oo
    assert asinh(-I *oo) == -oo
    assert asinh(zoo) == zoo
    assert asinh(I *(sqrt(3) - 1)/(2**(S(3)/2))) == pi*I/12
    assert asinh(-I *(sqrt(3) - 1)/(2**(S(3)/2))) == -pi*I/12
    assert asinh(I*(sqrt(5)-1)/4) == pi*I/10
    assert asinh(-I*(sqrt(5)-1)/4) == -pi*I/10
    assert asinh(I*(sqrt(5)+1)/4) == 3*pi*I/10
    assert asinh(-I*(sqrt(5)+1)/4) == -3*pi*I/10
def test_asinh_series():
    """Taylor series of asinh around 0 and its individual taylor_term values."""
    x = Symbol('x')
    assert asinh(x).series(x, 0, 8) == \
        x - x**3/6 + 3*x**5/40 - 5*x**7/112 + O(x**8)
    t5 = asinh(x).taylor_term(5, x)
    assert t5 == 3*x**5/40
    assert asinh(x).taylor_term(7, x, t5, 0) == -5*x**7/112
def test_acosh():
    """acosh at exact special points (real and complex branch values)."""
    # TODO please write more tests -- see #652
    # From http://functions.wolfram.com/ElementaryFunctions/ArcCosh/03/01/
    # at specific points
    assert acosh(1) == 0
    assert acosh(-1) == pi*I
    assert acosh(0) == I*pi/2
    assert acosh(Rational(1,2)) == I*pi/3
    assert acosh(Rational(-1,2)) == 2*pi*I/3
    assert acosh(zoo) == oo
    assert acosh(I) == log(I*(1+sqrt(2)))
    assert acosh(-I) == log(-I*(1+sqrt(2)))
    assert acosh((sqrt(3)-1)/(2*sqrt(2))) == 5*pi*I/12
    assert acosh(-(sqrt(3)-1)/(2*sqrt(2))) == 7*pi*I/12
    assert acosh(sqrt(2)/2) == I*pi/4
    assert acosh(-sqrt(2)/2) == 3*I*pi/4
    assert acosh(sqrt(3)/2) == I*pi/6
    assert acosh(-sqrt(3)/2) == 5*I*pi/6
    assert acosh(sqrt(2+sqrt(2))/2) == I*pi/8
    assert acosh(-sqrt(2+sqrt(2))/2) == 7*I*pi/8
    assert acosh(sqrt(2-sqrt(2))/2) == 3*I*pi/8
    assert acosh(-sqrt(2-sqrt(2))/2) == 5*I*pi/8
    assert acosh((1+sqrt(3))/(2*sqrt(2))) == I*pi/12
    assert acosh(-(1+sqrt(3))/(2*sqrt(2))) == 11*I*pi/12
    assert acosh((sqrt(5)+1)/4) == I*pi/5
    assert acosh(-(sqrt(5)+1)/4) == 4*I*pi/5
def test_acosh_infinities():
    """acosh maps every directed infinity to +oo."""
    assert acosh(oo) == oo
    assert acosh(-oo) == oo
    assert acosh(I*oo) == oo
    assert acosh(-I*oo) == oo
def test_acosh_series():
    """Taylor series of acosh around 0 and its individual taylor_term values."""
    x = Symbol('x')
    assert acosh(x).series(x, 0, 8) == \
        -I*x + pi*I/2 - I*x**3/6 - 3*I*x**5/40 - 5*I*x**7/112 + O(x**8)
    t5 = acosh(x).taylor_term(5, x)
    assert t5 == - 3*I*x**5/40
    assert acosh(x).taylor_term(7, x, t5, 0) == - 5*I*x**7/112
# TODO please write more tests -- see #652
def test_atanh():
    """atanh at special points, infinities, odd parity and exact imaginary values."""
    # TODO please write more tests -- see #652
    # From http://functions.wolfram.com/ElementaryFunctions/ArcTanh/03/01/
    # at specific points
    x = Symbol('x')
    #at specific points
    assert atanh(0) == 0
    assert atanh(I) == I*pi/4
    assert atanh(-I) == -I*pi/4
    assert atanh(1) == oo
    assert atanh(-1) == -oo
    # at infinites
    assert atanh(I*oo) == I*pi/2
    assert atanh(-I*oo) == -I*pi/2
    assert atanh(zoo) == nan
    #properties
    assert atanh(-x) == -atanh(x)
    assert atanh(I/sqrt(3)) == I*pi/6
    assert atanh(-I/sqrt(3)) == -I*pi/6
    assert atanh(I*sqrt(3)) == I*pi/3
    assert atanh(-I*sqrt(3)) == -I*pi/3
    assert atanh(I*(1+sqrt(2))) == 3*pi*I/8
    assert atanh(I*(sqrt(2)-1)) == pi*I/8
    assert atanh(I*(1-sqrt(2))) == -pi*I/8
    assert atanh(-I*(1+sqrt(2))) == -3*pi*I/8
    assert atanh(I*sqrt(5+2*sqrt(5))) == 2*I*pi/5
    assert atanh(-I*sqrt(5+2*sqrt(5))) == -2*I*pi/5
    assert atanh(I*(2-sqrt(3))) == pi*I/12
    assert atanh(I*(sqrt(3)-2)) == -pi*I/12
    assert atanh(oo) == -I*pi/2
def test_atanh_infinities():
    """atanh at real infinities (branch values +/- i*pi/2)."""
    assert atanh(oo) == -I*pi/2
    assert atanh(-oo) == I*pi/2
# TODO please write more tests -- see #652
def test_acoth():
    """acoth at special points, infinities, odd parity and exact imaginary values."""
    # TODO please write more tests -- see #652
    # From http://functions.wolfram.com/ElementaryFunctions/ArcCoth/03/01/
    # at specific points
    x = Symbol('x')
    #at specific points
    assert acoth(0) == I*pi/2
    assert acoth(I) == -I*pi/4
    assert acoth(-I) == I*pi/4
    assert acoth(1) == oo
    assert acoth(-1) == -oo
    # at infinites
    assert acoth(oo) == 0
    assert acoth(-oo) == 0
    assert acoth(I*oo) == 0
    assert acoth(-I*oo) == 0
    assert acoth(zoo) == 0
    #properties
    assert acoth(-x) == -acoth(x)
    assert acoth(I/sqrt(3)) == -I*pi/3
    assert acoth(-I/sqrt(3)) == I*pi/3
    assert acoth(I*sqrt(3)) == -I*pi/6
    assert acoth(-I*sqrt(3)) == I*pi/6
    assert acoth(I*(1+sqrt(2))) == -pi*I/8
    assert acoth(-I*(sqrt(2)+1)) == pi*I/8
    assert acoth(I*(1-sqrt(2))) == 3*pi*I/8
    assert acoth(I*(sqrt(2)-1)) == -3*pi*I/8
    assert acoth(I*sqrt(5+2*sqrt(5))) == -I*pi/10
    assert acoth(-I*sqrt(5+2*sqrt(5))) == I*pi/10
    assert acoth(I*(2+sqrt(3))) == -pi*I/12
    assert acoth(-I*(2+sqrt(3))) == pi*I/12
    assert acoth(I*(2-sqrt(3))) == -5*pi*I/12
    assert acoth(I*(sqrt(3)-2)) == 5*pi*I/12
def test_simplifications():
    """Compositions of hyperbolic functions with their inverses reduce to
    closed algebraic forms."""
    x = Symbol('x')
    cases = [
        (sinh(asinh(x)), x),
        (sinh(acosh(x)), sqrt(x - 1)*sqrt(x + 1)),
        (sinh(atanh(x)), x/sqrt(1 - x**2)),
        (sinh(acoth(x)), 1/(sqrt(x - 1)*sqrt(x + 1))),
        (cosh(asinh(x)), sqrt(1 + x**2)),
        (cosh(acosh(x)), x),
        (cosh(atanh(x)), 1/sqrt(1 - x**2)),
        (cosh(acoth(x)), x/(sqrt(x - 1)*sqrt(x + 1))),
        (tanh(asinh(x)), x/sqrt(1 + x**2)),
        (tanh(acosh(x)), sqrt(x - 1)*sqrt(x + 1)/x),
        (tanh(atanh(x)), x),
        (tanh(acoth(x)), 1/x),
        (coth(asinh(x)), sqrt(1 + x**2)/x),
        (coth(acosh(x)), x/(sqrt(x - 1)*sqrt(x + 1))),
        (coth(atanh(x)), 1/x),
        (coth(acoth(x)), x),
    ]
    for composed, expected in cases:
        assert composed == expected
def test_issue1037():
    """Regression test for issue 1037: cosh(asinh(3/2)) evaluates cleanly."""
    three_halves = Integer(3)/2
    assert cosh(asinh(three_halves)) == sqrt(Integer(13)/4)
def test_sinh_rewrite():
    """sinh can be rewritten in terms of exp, cosh, tanh and coth."""
    x = Symbol('x')
    assert sinh(x).rewrite(exp) == (exp(x) - exp(-x))/2
    assert sinh(x).rewrite(cosh) == -I*cosh(x + I*pi/2)
    # Half-angle substitutions for the tanh/coth rewrites.
    t_half = tanh(S.Half*x)
    assert sinh(x).rewrite(tanh) == 2*t_half/(1 - t_half**2)
    c_half = coth(S.Half*x)
    assert sinh(x).rewrite(coth) == 2*c_half/(c_half**2 - 1)
def test_cosh_rewrite():
    """cosh can be rewritten in terms of exp, sinh, tanh and coth."""
    x = Symbol('x')
    assert cosh(x).rewrite(exp) == (exp(x) + exp(-x))/2
    assert cosh(x).rewrite(sinh) == -I*sinh(x + I*pi/2)
    # Squared half-angle substitutions for the tanh/coth rewrites.
    t_half_sq = tanh(S.Half*x)**2
    assert cosh(x).rewrite(tanh) == (1 + t_half_sq)/(1 - t_half_sq)
    c_half_sq = coth(S.Half*x)**2
    assert cosh(x).rewrite(coth) == (c_half_sq + 1)/(c_half_sq - 1)
def test_tanh_rewrite():
    """tanh can be rewritten in terms of exp, sinh, cosh and coth."""
    x = Symbol('x')
    expectations = [
        (exp, (exp(x) - exp(-x))/(exp(x) + exp(-x))),
        (sinh, I*sinh(x)/sinh(I*pi/2 - x)),
        (cosh, I*cosh(I*pi/2 - x)/cosh(x)),
        (coth, 1/coth(x)),
    ]
    for target, expected in expectations:
        assert tanh(x).rewrite(target) == expected
def test_coth_rewrite():
    """coth can be rewritten in terms of exp, sinh, cosh and tanh."""
    x = Symbol('x')
    expectations = [
        (exp, (exp(x) + exp(-x))/(exp(x) - exp(-x))),
        (sinh, -I*sinh(I*pi/2 - x)/sinh(x)),
        (cosh, -I*cosh(x)/cosh(I*pi/2 - x)),
        (tanh, 1/tanh(x)),
    ]
    for target, expected in expectations:
        assert coth(x).rewrite(target) == expected
| 28.498947 | 85 | 0.543695 |
e858da065cdcb11412c30ebe0275ca6759b7678b | 89 | py | Python | screens/apps.py | neosergio/transit-advertising-screen-cms | 9c27d4d7ed9fe598c1c48ca96ee5d10f619c8683 | [
"Apache-2.0"
] | 1 | 2020-10-30T08:30:34.000Z | 2020-10-30T08:30:34.000Z | screens/apps.py | neosergio/transit-advertising-screen-cms | 9c27d4d7ed9fe598c1c48ca96ee5d10f619c8683 | [
"Apache-2.0"
] | 7 | 2020-06-05T23:08:42.000Z | 2022-02-10T12:08:54.000Z | screens/apps.py | neosergio/transit-advertising-screen-cms | 9c27d4d7ed9fe598c1c48ca96ee5d10f619c8683 | [
"Apache-2.0"
] | 1 | 2020-10-30T08:30:47.000Z | 2020-10-30T08:30:47.000Z | from django.apps import AppConfig
class ScreensConfig(AppConfig):
name = 'screens'
| 14.833333 | 33 | 0.752809 |
eec60c82bf5f6565a7b27311046565c69db4436d | 470 | py | Python | test_hermes/test_log.py | transifex/hermes | cbe9f6f3fc61bf2e1dcabc55ab6bdb4a67d49267 | [
"BSD-3-Clause"
] | 2 | 2015-07-07T22:48:51.000Z | 2015-12-11T10:09:12.000Z | test_hermes/test_log.py | transifex/hermes | cbe9f6f3fc61bf2e1dcabc55ab6bdb4a67d49267 | [
"BSD-3-Clause"
] | 1 | 2015-03-18T17:19:20.000Z | 2015-03-18T17:19:20.000Z | test_hermes/test_log.py | transifex/hermes | cbe9f6f3fc61bf2e1dcabc55ab6bdb4a67d49267 | [
"BSD-3-Clause"
] | 2 | 2018-10-23T17:00:59.000Z | 2021-03-18T12:16:35.000Z | from unittest import TestCase
from hermes import log as hermes_log
from multiprocessing import current_process
class LogTestCase(TestCase):
def test_get_logger_creates_logger_and_adds_handler_only_once(self):
logger = hermes_log.get_logger(current_process())
self.assertIsNotNone(logger)
self.assertEqual(len(logger.handlers), 1)
logger = hermes_log.get_logger(current_process())
self.assertEqual(len(logger.handlers), 1)
| 31.333333 | 72 | 0.761702 |
7a9c6fbbf43610638ea8ceb2b2ce0a0b1fef1a93 | 8,832 | py | Python | movo_common/movo_third_party/simple_grasping/scripts/pick_and_place.py | syc7446/kinova-movo | 28bec5bb61517f970071782a32ac58e92c67f0df | [
"BSD-3-Clause"
] | 1 | 2021-06-24T19:20:01.000Z | 2021-06-24T19:20:01.000Z | movo_common/movo_third_party/simple_grasping/scripts/pick_and_place.py | syc7446/kinova-movo | 28bec5bb61517f970071782a32ac58e92c67f0df | [
"BSD-3-Clause"
] | null | null | null | movo_common/movo_third_party/simple_grasping/scripts/pick_and_place.py | syc7446/kinova-movo | 28bec5bb61517f970071782a32ac58e92c67f0df | [
"BSD-3-Clause"
] | 1 | 2020-01-21T11:05:24.000Z | 2020-01-21T11:05:24.000Z | #!/usr/bin/env python
# Copyright 2013-2014, Unbounded Robotics, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Unbounded Robotics, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author: Michael Ferguson
import argparse
import copy
import math
import sys
import rospy
import actionlib
from moveit_python import *
from moveit_python.geometry import rotate_pose_msg_by_euler_angles
from grasp_msgs.msg import *
from moveit_msgs.msg import MoveItErrorCodes, PlaceLocation
# Joint names of the 7-DOF arm, in the order expected by MoveIt.
joint_names = ["shoulder_pan_joint", "shoulder_lift_joint", "upperarm_roll_joint",
               "elbow_flex_joint", "forearm_roll_joint", "wrist_flex_joint", "wrist_roll_joint"]
# Tucked "ready" arm configuration; one value (radians, presumably) per joint
# above -- TODO confirm units against the robot's URDF.
ready_pose = [-1.393150, -0.103543, 0, -1.608378, -0.458660, -0.1, -2.611218]
def move_to_ready(interface):
    """Drive the arm to the predefined ready pose, retrying once on failure.

    The second attempt uses a wider joint tolerance (0.02); its result is
    not checked (best-effort, matching the demo's original behavior).
    """
    outcome = interface.moveToJointPosition(joint_names, ready_pose)
    if outcome.error_code.val == 1:
        return
    rospy.sleep(1.0)
    rospy.logerr("Move arm to ready position failed, trying again...")
    interface.moveToJointPosition(joint_names, ready_pose, 0.02)
if __name__=="__main__":
parser = argparse.ArgumentParser(description="Simple demo of pick and place")
parser.add_argument("--objects", help="Just do object perception", action="store_true")
parser.add_argument("--all", help="Just do object perception, but insert all objects", action="store_true")
parser.add_argument("--once", help="Run once.", action="store_true")
parser.add_argument("--ready", help="Move the arm to the ready position.", action="store_true")
parser.add_argument("--plan", help="Only do planning, no execution", action="store_true")
parser.add_argument("--x", help="Recommended x offset, how far out an object should roughly be.", type=float, default=0.5)
args, unknown = parser.parse_known_args()
rospy.init_node("pick_and_place_demo")
move_group = MoveGroupInterface("arm", "base_link", plan_only = args.plan)
# if all we want to do is prepare the arm, do it now
if args.ready:
move_to_ready(move_group)
exit(0)
scene = PlanningSceneInterface("base_link")
pickplace = PickPlaceInterface("arm", "gripper", plan_only = args.plan, verbose = True)
rospy.loginfo("Connecting to basic_grasping_perception/find_objects...")
find_objects = actionlib.SimpleActionClient("basic_grasping_perception/find_objects", FindGraspableObjectsAction)
find_objects.wait_for_server()
rospy.loginfo("...connected")
while not rospy.is_shutdown():
goal = FindGraspableObjectsGoal()
goal.plan_grasps = True
find_objects.send_goal(goal)
find_objects.wait_for_result(rospy.Duration(5.0))
find_result = find_objects.get_result()
rospy.loginfo("Found %d objects" % len(find_result.objects))
# remove all previous objects
for name in scene.getKnownCollisionObjects():
scene.removeCollisionObject(name, False)
for name in scene.getKnownAttachedObjects():
scene.removeAttachedObject(name, False)
scene.waitForSync()
# clear colors
scene._colors = dict()
# insert objects, find the one to grasp
the_object = None
the_object_dist = 0.35
count = -1
for obj in find_result.objects:
count += 1
scene.addSolidPrimitive("object%d"%count,
obj.object.primitives[0],
obj.object.primitive_poses[0],
wait = False)
# object must have usable grasps
if len(obj.grasps) < 1:
continue
# choose object in front of robot
dx = obj.object.primitive_poses[0].position.x - args.x
dy = obj.object.primitive_poses[0].position.y
d = math.sqrt((dx * dx) + (dy * dy))
if d < the_object_dist:
the_object_dist = d
the_object = count
if the_object == None:
rospy.logerr("Nothing to grasp! try again...")
continue
# insert table
for obj in find_result.support_surfaces:
# extend surface to floor
height = obj.primitive_poses[0].position.z
obj.primitives[0].dimensions = [obj.primitives[0].dimensions[0],
2.0, # make table wider
obj.primitives[0].dimensions[2] + height]
obj.primitive_poses[0].position.z += -height/2.0
# add to scene
scene.addSolidPrimitive(obj.name,
obj.primitives[0],
obj.primitive_poses[0],
wait = False)
obj_name = "object%d"%the_object
# sync
scene.waitForSync()
# set color of object we are grabbing
scene.setColor(obj_name, 223.0/256.0, 90.0/256.0, 12.0/256.0) # orange
scene.setColor(find_result.objects[the_object].object.support_surface, 0, 0, 0) # black
scene.sendColors()
# exit now if we are just doing object update
if args.objects:
if args.once:
exit(0)
else:
continue
# get grasps (we checked that they exist above)
grasps = find_result.objects[the_object].grasps
support_surface = find_result.objects[the_object].object.support_surface
# call move_group to pick the object
rospy.loginfo("Beginning to pick.")
success, pick_result = pickplace.pick_with_retry(obj_name, grasps, support_name=support_surface, scene=scene)
if not success:
exit(-1)
# create a set of place locations for the cube
places = list()
l = PlaceLocation()
l.place_pose.pose = find_result.objects[the_object].object.primitive_poses[0]
l.place_pose.header.frame_id = find_result.objects[the_object].object.header.frame_id
# invert the y of the pose
l.place_pose.pose.position.y = -l.place_pose.pose.position.y
# copy the posture, approach and retreat from the grasp used
l.post_place_posture = pick_result.grasp.pre_grasp_posture
l.pre_place_approach = pick_result.grasp.pre_grasp_approach
l.post_place_retreat = pick_result.grasp.post_grasp_retreat
places.append(copy.deepcopy(l))
# create another several places, rotate each by 90 degrees in yaw direction
l.place_pose.pose = rotate_pose_msg_by_euler_angles(l.place_pose.pose, 0, 0, 1.57)
places.append(copy.deepcopy(l))
l.place_pose.pose = rotate_pose_msg_by_euler_angles(l.place_pose.pose, 0, 0, 1.57)
places.append(copy.deepcopy(l))
l.place_pose.pose = rotate_pose_msg_by_euler_angles(l.place_pose.pose, 0, 0, 1.57)
places.append(copy.deepcopy(l))
# drop it like it's hot
rospy.loginfo("Beginning to place.")
while not rospy.is_shutdown():
# can't fail here or moveit needs to be restarted
success, place_result = pickplace.place_with_retry(obj_name, places, support_name=support_surface, scene=scene)
if success:
break
# place arm back at side
move_to_ready(move_group)
rospy.loginfo("Ready...")
# rinse and repeat
if args.once:
exit(0)
| 42.873786 | 126 | 0.660666 |
dd48fbfb8571b262736fd6ec3ef747b4c7acc768 | 1,891 | py | Python | rastervision/data/label_store/chip_classification_geojson_store.py | carderne/raster-vision | 915fbcd3263d8f2193e65c2cd0eb53e050a47a01 | [
"Apache-2.0"
] | 3 | 2020-07-05T04:04:18.000Z | 2021-02-05T16:19:55.000Z | rastervision/data/label_store/chip_classification_geojson_store.py | carderne/raster-vision | 915fbcd3263d8f2193e65c2cd0eb53e050a47a01 | [
"Apache-2.0"
] | null | null | null | rastervision/data/label_store/chip_classification_geojson_store.py | carderne/raster-vision | 915fbcd3263d8f2193e65c2cd0eb53e050a47a01 | [
"Apache-2.0"
] | 1 | 2020-04-27T15:21:53.000Z | 2020-04-27T15:21:53.000Z | from rastervision.data.label import ChipClassificationLabels
from rastervision.data.label_store import LabelStore
from rastervision.data.label_store.utils import boxes_to_geojson
from rastervision.data.label_source import ChipClassificationLabelSource
from rastervision.data.vector_source import GeoJSONVectorSource
from rastervision.utils.files import json_to_file
class ChipClassificationGeoJSONStore(LabelStore):
"""A GeoJSON file with classification labels in it.
"""
def __init__(self, uri, crs_transformer, class_map):
"""Construct ClassificationLabelStore backed by a GeoJSON file.
Args:
uri: uri of GeoJSON file containing labels
crs_transformer: CRSTransformer to convert from map coords in label
in GeoJSON file to pixel coords.
class_map: ClassMap used to infer class_ids from class_name
(or label) field
"""
self.uri = uri
self.crs_transformer = crs_transformer
self.class_map = class_map
def save(self, labels):
"""Save labels to URI if writable.
Note that if the grid is inferred from polygons, only the grid will be
written, not the original polygons.
"""
boxes = labels.get_cells()
class_ids = labels.get_class_ids()
scores = list(labels.get_scores())
geojson = boxes_to_geojson(
boxes,
class_ids,
self.crs_transformer,
self.class_map,
scores=scores)
json_to_file(geojson, self.uri)
def get_labels(self):
vector_source = GeoJSONVectorSource(self.uri, self.crs_transformer)
source = ChipClassificationLabelSource(
vector_source, self.crs_transformer, self.class_map)
return source.get_labels()
def empty_labels(self):
return ChipClassificationLabels()
| 36.365385 | 79 | 0.684823 |
be18177c10ddd0dbdd5deadc20af44b5dbed48f5 | 824 | py | Python | 1. Python/Control Structures/3. Smallest Positive Number/smallese_positive.py | theparitoshkumar/Data-Structures-Algorithms-using-python | 445b9dee56bca637f21267114cc1686d333ea4c4 | [
"Apache-2.0"
] | 1 | 2021-12-05T18:02:15.000Z | 2021-12-05T18:02:15.000Z | 1. Python/Control Structures/3. Smallest Positive Number/smallese_positive.py | theparitoshkumar/Data-Structures-Algorithms-using-python | 445b9dee56bca637f21267114cc1686d333ea4c4 | [
"Apache-2.0"
] | null | null | null | 1. Python/Control Structures/3. Smallest Positive Number/smallese_positive.py | theparitoshkumar/Data-Structures-Algorithms-using-python | 445b9dee56bca637f21267114cc1686d333ea4c4 | [
"Apache-2.0"
] | null | null | null | """
In the following exercise you will finish writing smallest_positive which is a function that finds the smallest positive number in a list.
"""
def smallest_positive(in_list):
# TODO: Define a control structure that finds the smallest positive
# number in in_list and returns the correct smallest number.
smallest_pos = None
for num in in_list:
if num > 0:
# Note: we use a logical "or" in this solution to form
# the conditional statement, although this was
# not introduced above.
if smallest_pos == None or num < smallest_pos:
smallest_pos = num
return smallest_pos
# Demonstration / smoke tests (expected outputs noted inline).
print(smallest_positive([4, -6, 7, 2, -4, 10]))
# Correct output: 2
print(smallest_positive([.2, 5, 3, -.1, 7, 7, 6]))
# Correct output: 0.2
4aa983a4b81f0080c47d86182164f607ecfb6269 | 18,295 | py | Python | rllib/utils/exploration/curiosity.py | jamesliu/ray | 11ab412db1fa3603a3006e8ed414e80dd1f11c0c | [
"Apache-2.0"
] | 3 | 2020-12-12T05:10:44.000Z | 2021-04-12T21:52:47.000Z | rllib/utils/exploration/curiosity.py | jamesliu/ray | 11ab412db1fa3603a3006e8ed414e80dd1f11c0c | [
"Apache-2.0"
] | 227 | 2021-10-01T08:00:01.000Z | 2021-12-28T16:47:26.000Z | rllib/utils/exploration/curiosity.py | gramhagen/ray | c18caa4db36d466718bdbcb2229aa0b2dc03da1f | [
"Apache-2.0"
] | 1 | 2020-12-02T06:26:20.000Z | 2020-12-02T06:26:20.000Z | from gym.spaces import Discrete, MultiDiscrete, Space
import numpy as np
from typing import Optional, Tuple, Union
from ray.rllib.models.action_dist import ActionDistribution
from ray.rllib.models.catalog import ModelCatalog
from ray.rllib.models.modelv2 import ModelV2
from ray.rllib.models.tf.tf_action_dist import Categorical, MultiCategorical
from ray.rllib.models.torch.misc import SlimFC
from ray.rllib.models.torch.torch_action_dist import TorchCategorical, \
TorchMultiCategorical
from ray.rllib.models.utils import get_activation_fn
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.utils import NullContextManager
from ray.rllib.utils.annotations import override
from ray.rllib.utils.exploration.exploration import Exploration
from ray.rllib.utils.framework import try_import_tf, \
try_import_torch
from ray.rllib.utils.from_config import from_config
from ray.rllib.utils.tf_utils import get_placeholder, one_hot as tf_one_hot
from ray.rllib.utils.torch_utils import one_hot
from ray.rllib.utils.typing import FromConfigSpec, ModelConfigDict, TensorType
tf1, tf, tfv = try_import_tf()
torch, nn = try_import_torch()
F = None
if nn is not None:
F = nn.functional
class Curiosity(Exploration):
"""Implementation of:
[1] Curiosity-driven Exploration by Self-supervised Prediction
Pathak, Agrawal, Efros, and Darrell - UC Berkeley - ICML 2017.
https://arxiv.org/pdf/1705.05363.pdf
Learns a simplified model of the environment based on three networks:
1) Embedding observations into latent space ("feature" network).
2) Predicting the action, given two consecutive embedded observations
("inverse" network).
3) Predicting the next embedded obs, given an obs and action
("forward" network).
The less the agent is able to predict the actually observed next feature
vector, given obs and action (through the forwards network), the larger the
"intrinsic reward", which will be added to the extrinsic reward.
Therefore, if a state transition was unexpected, the agent becomes
"curious" and will further explore this transition leading to better
exploration in sparse rewards environments.
"""
def __init__(self,
action_space: Space,
*,
framework: str,
model: ModelV2,
feature_dim: int = 288,
feature_net_config: Optional[ModelConfigDict] = None,
inverse_net_hiddens: Tuple[int] = (256, ),
inverse_net_activation: str = "relu",
forward_net_hiddens: Tuple[int] = (256, ),
forward_net_activation: str = "relu",
beta: float = 0.2,
eta: float = 1.0,
lr: float = 1e-3,
sub_exploration: Optional[FromConfigSpec] = None,
**kwargs):
"""Initializes a Curiosity object.
Uses as defaults the hyperparameters described in [1].
Args:
feature_dim: The dimensionality of the feature (phi)
vectors.
feature_net_config: Optional model
configuration for the feature network, producing feature
vectors (phi) from observations. This can be used to configure
fcnet- or conv_net setups to properly process any observation
space.
inverse_net_hiddens: Tuple of the layer sizes of the
inverse (action predicting) NN head (on top of the feature
outputs for phi and phi').
inverse_net_activation: Activation specifier for the inverse
net.
forward_net_hiddens: Tuple of the layer sizes of the
forward (phi' predicting) NN head.
forward_net_activation: Activation specifier for the forward
net.
beta: Weight for the forward loss (over the inverse loss,
which gets weight=1.0-beta) in the common loss term.
eta: Weight for intrinsic rewards before being added to
extrinsic ones.
lr: The learning rate for the curiosity-specific
optimizer, optimizing feature-, inverse-, and forward nets.
sub_exploration: The config dict for
the underlying Exploration to use (e.g. epsilon-greedy for
DQN). If None, uses the FromSpecDict provided in the Policy's
default config.
"""
if not isinstance(action_space, (Discrete, MultiDiscrete)):
raise ValueError(
"Only (Multi)Discrete action spaces supported for Curiosity "
"so far!")
super().__init__(
action_space, model=model, framework=framework, **kwargs)
if self.policy_config["num_workers"] != 0:
raise ValueError(
"Curiosity exploration currently does not support parallelism."
" `num_workers` must be 0!")
self.feature_dim = feature_dim
if feature_net_config is None:
feature_net_config = self.policy_config["model"].copy()
self.feature_net_config = feature_net_config
self.inverse_net_hiddens = inverse_net_hiddens
self.inverse_net_activation = inverse_net_activation
self.forward_net_hiddens = forward_net_hiddens
self.forward_net_activation = forward_net_activation
self.action_dim = self.action_space.n if isinstance(
self.action_space, Discrete) else np.sum(self.action_space.nvec)
self.beta = beta
self.eta = eta
self.lr = lr
# TODO: (sven) if sub_exploration is None, use Trainer's default
# Exploration config.
if sub_exploration is None:
raise NotImplementedError
self.sub_exploration = sub_exploration
# Creates modules/layers inside the actual ModelV2.
self._curiosity_feature_net = ModelCatalog.get_model_v2(
self.model.obs_space,
self.action_space,
self.feature_dim,
model_config=self.feature_net_config,
framework=self.framework,
name="feature_net",
)
self._curiosity_inverse_fcnet = self._create_fc_net(
[2 * self.feature_dim] + list(self.inverse_net_hiddens) +
[self.action_dim],
self.inverse_net_activation,
name="inverse_net")
self._curiosity_forward_fcnet = self._create_fc_net(
[self.feature_dim + self.action_dim] + list(
self.forward_net_hiddens) + [self.feature_dim],
self.forward_net_activation,
name="forward_net")
# This is only used to select the correct action
self.exploration_submodule = from_config(
cls=Exploration,
config=self.sub_exploration,
action_space=self.action_space,
framework=self.framework,
policy_config=self.policy_config,
model=self.model,
num_workers=self.num_workers,
worker_index=self.worker_index,
)
@override(Exploration)
def get_exploration_action(self,
*,
action_distribution: ActionDistribution,
timestep: Union[int, TensorType],
explore: bool = True):
# Simply delegate to sub-Exploration module.
return self.exploration_submodule.get_exploration_action(
action_distribution=action_distribution,
timestep=timestep,
explore=explore)
@override(Exploration)
def get_exploration_optimizer(self, optimizers):
# Create, but don't add Adam for curiosity NN updating to the policy.
# If we added and returned it here, it would be used in the policy's
# update loop, which we don't want (curiosity updating happens inside
# `postprocess_trajectory`).
if self.framework == "torch":
feature_params = list(self._curiosity_feature_net.parameters())
inverse_params = list(self._curiosity_inverse_fcnet.parameters())
forward_params = list(self._curiosity_forward_fcnet.parameters())
# Now that the Policy's own optimizer(s) have been created (from
# the Model parameters (IMPORTANT: w/o(!) the curiosity params),
# we can add our curiosity sub-modules to the Policy's Model.
self.model._curiosity_feature_net = \
self._curiosity_feature_net.to(self.device)
self.model._curiosity_inverse_fcnet = \
self._curiosity_inverse_fcnet.to(self.device)
self.model._curiosity_forward_fcnet = \
self._curiosity_forward_fcnet.to(self.device)
self._optimizer = torch.optim.Adam(
forward_params + inverse_params + feature_params, lr=self.lr)
else:
self.model._curiosity_feature_net = self._curiosity_feature_net
self.model._curiosity_inverse_fcnet = self._curiosity_inverse_fcnet
self.model._curiosity_forward_fcnet = self._curiosity_forward_fcnet
# Feature net is a RLlib ModelV2, the other 2 are keras Models.
self._optimizer_var_list = \
self._curiosity_feature_net.base_model.variables + \
self._curiosity_inverse_fcnet.variables + \
self._curiosity_forward_fcnet.variables
self._optimizer = tf1.train.AdamOptimizer(learning_rate=self.lr)
# Create placeholders and initialize the loss.
if self.framework == "tf":
self._obs_ph = get_placeholder(
space=self.model.obs_space, name="_curiosity_obs")
self._next_obs_ph = get_placeholder(
space=self.model.obs_space, name="_curiosity_next_obs")
self._action_ph = get_placeholder(
space=self.model.action_space, name="_curiosity_action")
self._forward_l2_norm_sqared, self._update_op = \
self._postprocess_helper_tf(
self._obs_ph, self._next_obs_ph, self._action_ph)
return optimizers
@override(Exploration)
def postprocess_trajectory(self, policy, sample_batch, tf_sess=None):
"""Calculates phi values (obs, obs', and predicted obs') and ri.
Also calculates forward and inverse losses and updates the curiosity
module on the provided batch using our optimizer.
"""
if self.framework != "torch":
self._postprocess_tf(policy, sample_batch, tf_sess)
else:
self._postprocess_torch(policy, sample_batch)
def _postprocess_tf(self, policy, sample_batch, tf_sess):
# tf1 static-graph: Perform session call on our loss and update ops.
if self.framework == "tf":
forward_l2_norm_sqared, _ = tf_sess.run(
[self._forward_l2_norm_sqared, self._update_op],
feed_dict={
self._obs_ph: sample_batch[SampleBatch.OBS],
self._next_obs_ph: sample_batch[SampleBatch.NEXT_OBS],
self._action_ph: sample_batch[SampleBatch.ACTIONS],
})
# tf-eager: Perform model calls, loss calculations, and optimizer
# stepping on the fly.
else:
forward_l2_norm_sqared, _ = self._postprocess_helper_tf(
sample_batch[SampleBatch.OBS],
sample_batch[SampleBatch.NEXT_OBS],
sample_batch[SampleBatch.ACTIONS],
)
# Scale intrinsic reward by eta hyper-parameter.
sample_batch[SampleBatch.REWARDS] = \
sample_batch[SampleBatch.REWARDS] + \
self.eta * forward_l2_norm_sqared
return sample_batch
def _postprocess_helper_tf(self, obs, next_obs, actions):
with (tf.GradientTape()
if self.framework != "tf" else NullContextManager()) as tape:
# Push both observations through feature net to get both phis.
phis, _ = self.model._curiosity_feature_net({
SampleBatch.OBS: tf.concat([obs, next_obs], axis=0)
})
phi, next_phi = tf.split(phis, 2)
# Predict next phi with forward model.
predicted_next_phi = self.model._curiosity_forward_fcnet(
tf.concat(
[phi, tf_one_hot(actions, self.action_space)], axis=-1))
# Forward loss term (predicted phi', given phi and action vs
# actually observed phi').
forward_l2_norm_sqared = 0.5 * tf.reduce_sum(
tf.square(predicted_next_phi - next_phi), axis=-1)
forward_loss = tf.reduce_mean(forward_l2_norm_sqared)
# Inverse loss term (prediced action that led from phi to phi' vs
# actual action taken).
phi_cat_next_phi = tf.concat([phi, next_phi], axis=-1)
dist_inputs = self.model._curiosity_inverse_fcnet(phi_cat_next_phi)
action_dist = Categorical(dist_inputs, self.model) if \
isinstance(self.action_space, Discrete) else \
MultiCategorical(
dist_inputs, self.model, self.action_space.nvec)
# Neg log(p); p=probability of observed action given the inverse-NN
# predicted action distribution.
inverse_loss = -action_dist.logp(tf.convert_to_tensor(actions))
inverse_loss = tf.reduce_mean(inverse_loss)
# Calculate the ICM loss.
loss = (1.0 - self.beta) * inverse_loss + self.beta * forward_loss
# Step the optimizer.
if self.framework != "tf":
grads = tape.gradient(loss, self._optimizer_var_list)
grads_and_vars = [(g, v)
for g, v in zip(grads, self._optimizer_var_list)
if g is not None]
update_op = self._optimizer.apply_gradients(grads_and_vars)
else:
update_op = self._optimizer.minimize(
loss, var_list=self._optimizer_var_list)
# Return the squared l2 norm and the optimizer update op.
return forward_l2_norm_sqared, update_op
def _postprocess_torch(self, policy, sample_batch):
# Push both observations through feature net to get both phis.
phis, _ = self.model._curiosity_feature_net({
SampleBatch.OBS: torch.cat([
torch.from_numpy(sample_batch[SampleBatch.OBS]),
torch.from_numpy(sample_batch[SampleBatch.NEXT_OBS])
])
})
phi, next_phi = torch.chunk(phis, 2)
actions_tensor = torch.from_numpy(
sample_batch[SampleBatch.ACTIONS]).long().to(policy.device)
# Predict next phi with forward model.
predicted_next_phi = self.model._curiosity_forward_fcnet(
torch.cat(
[phi, one_hot(actions_tensor, self.action_space).float()],
dim=-1))
# Forward loss term (predicted phi', given phi and action vs actually
# observed phi').
forward_l2_norm_sqared = 0.5 * torch.sum(
torch.pow(predicted_next_phi - next_phi, 2.0), dim=-1)
forward_loss = torch.mean(forward_l2_norm_sqared)
# Scale intrinsic reward by eta hyper-parameter.
sample_batch[SampleBatch.REWARDS] = \
sample_batch[SampleBatch.REWARDS] + \
self.eta * forward_l2_norm_sqared.detach().cpu().numpy()
# Inverse loss term (prediced action that led from phi to phi' vs
# actual action taken).
phi_cat_next_phi = torch.cat([phi, next_phi], dim=-1)
dist_inputs = self.model._curiosity_inverse_fcnet(phi_cat_next_phi)
action_dist = TorchCategorical(dist_inputs, self.model) if \
isinstance(self.action_space, Discrete) else \
TorchMultiCategorical(
dist_inputs, self.model, self.action_space.nvec)
# Neg log(p); p=probability of observed action given the inverse-NN
# predicted action distribution.
inverse_loss = -action_dist.logp(actions_tensor)
inverse_loss = torch.mean(inverse_loss)
# Calculate the ICM loss.
loss = (1.0 - self.beta) * inverse_loss + self.beta * forward_loss
# Perform an optimizer step.
self._optimizer.zero_grad()
loss.backward()
self._optimizer.step()
# Return the postprocessed sample batch (with the corrected rewards).
return sample_batch
def _create_fc_net(self, layer_dims, activation, name=None):
"""Given a list of layer dimensions (incl. input-dim), creates FC-net.
Args:
layer_dims (Tuple[int]): Tuple of layer dims, including the input
dimension.
activation (str): An activation specifier string (e.g. "relu").
Examples:
If layer_dims is [4,8,6] we'll have a two layer net: 4->8 (8 nodes)
and 8->6 (6 nodes), where the second layer (6 nodes) does not have
an activation anymore. 4 is the input dimension.
"""
layers = [
tf.keras.layers.Input(
shape=(layer_dims[0], ), name="{}_in".format(name))
] if self.framework != "torch" else []
for i in range(len(layer_dims) - 1):
act = activation if i < len(layer_dims) - 2 else None
if self.framework == "torch":
layers.append(
SlimFC(
in_size=layer_dims[i],
out_size=layer_dims[i + 1],
initializer=torch.nn.init.xavier_uniform_,
activation_fn=act))
else:
layers.append(
tf.keras.layers.Dense(
units=layer_dims[i + 1],
activation=get_activation_fn(act),
name="{}_{}".format(name, i)))
if self.framework == "torch":
return nn.Sequential(*layers)
else:
return tf.keras.Sequential(layers)
| 45.397022 | 79 | 0.623558 |
393316a3d8ccfe5bf4d6123a037737cb7503f8fd | 1,447 | py | Python | library/webdriver_manager/chrome.py | batu1579/NCUT-library-auto-answer | d8b8a37548e3b58b6e030323b116450cc4c3178a | [
"MIT"
] | 1 | 2021-08-12T15:45:06.000Z | 2021-08-12T15:45:06.000Z | library/webdriver_manager/chrome.py | batu1579/NCUT-library-auto-answer | d8b8a37548e3b58b6e030323b116450cc4c3178a | [
"MIT"
] | null | null | null | library/webdriver_manager/chrome.py | batu1579/NCUT-library-auto-answer | d8b8a37548e3b58b6e030323b116450cc4c3178a | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# coding=utf-8
import logging
import os
from .driver import ChromeDriver
from .logger import log
from .manager import DriverManager
from .utils import ChromeType, os_type
class ChromeDriverManager(DriverManager):
def __init__(self, version="latest",
os_type=os_type(),
path=None,
name="chromedriver",
url="https://chromedriver.storage.googleapis.com",
latest_release_url="https://chromedriver.storage.googleapis.com/LATEST_RELEASE",
chrome_type=ChromeType.GOOGLE,
log_level=logging.INFO,
print_first_line=True,
cache_valid_range=1):
super().__init__(path, log_level=log_level, print_first_line=print_first_line,
cache_valid_range=cache_valid_range)
self.driver = ChromeDriver(name=name,
version=version,
os_type=os_type,
url=url,
latest_release_url=latest_release_url,
chrome_type=chrome_type)
def install(self):
log(f"Current {self.driver.chrome_type} version is {self.driver.browser_version}", first_line=True)
driver_path = self._get_driver_path(self.driver)
os.chmod(driver_path, 0o755)
return driver_path
| 37.102564 | 107 | 0.587422 |
8dc87648ab80cd40fb9305e1e131aa8adf587d72 | 14,778 | py | Python | DylMath.py | FrankWSamuelson/merge-sort | 524b3f355228f97e3b4ac8b10b993e8558a20cd9 | [
"CC0-1.0"
] | null | null | null | DylMath.py | FrankWSamuelson/merge-sort | 524b3f355228f97e3b4ac8b10b993e8558a20cd9 | [
"CC0-1.0"
] | null | null | null | DylMath.py | FrankWSamuelson/merge-sort | 524b3f355228f97e3b4ac8b10b993e8558a20cd9 | [
"CC0-1.0"
] | null | null | null | #!/usr/bin/python3.6
import ROC1
import numpy as np
np.set_printoptions(threshold=np.inf)
np.seterr(all="ignore")
from multiprocessing import Pool
from scipy.interpolate import interp1d
from scipy.stats import norm
try:
import matplotlib
matplotlib.use('QT4Agg')
import matplotlib.pyplot as plt
font: dict = {'size' : 56}
#matplotlib.rc('font', **font)
from matplotlib.collections import PatchCollection
from matplotlib.patches import Rectangle
except BaseException as e:
pass
from DylData import *
unbiasedMeanMatrixVar = ROC1.unbiasedMeanMatrixVar
def paramToParams(predicted: list, D0: list=None, D1: list=None) -> (list, list, list):
	"""Split a combined parameter into its three components.

	If ``predicted`` already bundles (predicted, D0, D1) as a 2d sequence,
	unpack it; otherwise pass the separately supplied D0/D1 through as-is.
	"""
	bundled = isinstance(predicted[0], (list, tuple))
	return (predicted[0], predicted[1], predicted[2]) if bundled else (predicted, D0, D1)
def auc(results: tuple, D0: list=None, D1: list=None) -> float:
	"""Return the area under an ROC curve via the trapezoid rule.

	``results`` may already be an ROC curve (a sequence of (x, y) points) or
	a prediction ordering; in the latter case the curve is generated first,
	which requires the D0/D1 truth vectors.
	"""
	if not isinstance(results[0], (list, tuple)):
		results = genROC(results, D0, D1)
	area = 0.0
	# trapezoid between each consecutive pair of curve points
	for (x1, y1), (x2, y2) in zip(results, results[1:]):
		area += 0.5 * (y1 + y2) * (x1 - x2)
	return abs(area)
def hanleyMcNeil(auc: float, n0: int, n1: int) -> float:
	"""Hanley & McNeil (1982) variance estimate for an AUC.

	``n0`` and ``n1`` are the numbers of non-diseased and diseased cases.
	"""
	a2 = auc * auc
	q1 = auc / (2. - auc)
	q2 = 2. * a2 / (1. + auc)
	numerator = auc - a2 + (n1 - 1.) * (q1 - a2) + (n0 - 1.) * (q2 - a2)
	return numerator / n0 / n1
def calcNLayers(arr: list) -> int:
	"""Number of merge-sort layers needed to sort ``arr``.

	``arr`` may be the collection itself or simply its length.
	"""
	length = arr if isinstance(arr, int) else len(arr)
	return np.ceil(np.log2(length))
def genSep(dist: str, auc: float) -> float:
	"""Separation parameter that produces the target AUC for ``dist``.

	Supports 'exponential' and 'normal' (binormal) distributions; any other
	name raises NotImplementedError.
	"""
	if dist == 'exponential':
		return abs(auc / (1 - auc))
	if dist == 'normal':
		# binormal model: sep = sqrt(2) * z(auc)
		return norm.ppf(auc) * (2 ** 0.5)
	raise NotImplementedError("Cannot gen sep for that distribution")
def MSE(sep: float, dist: str, ROC: list, rocEmpiric: list=None) -> (float, float, float):
	"""Returns the MSE of the given ROC with respect to:
	If sep and dist are not None: the true ROC from sep and dist
	If rocEmpiric is not None: the MSE between the Empiric and ROC
	If sep and dist are None, the first value returned is always 0
	The last value returned is always the AUC of the ROC"""
	# Resample every curve on a common fine FPF grid so curves of different
	# lengths can be compared point-wise.
	step: float = 10**-4
	fpf = np.arange(0, 1, step)
	# ROC may be {'x': ..., 'y': ...}, an (xs, ys) pair, or [(x, y), ...]
	if len(ROC) == 2:
		approx = interp1d(*((ROC['x'], ROC['y']) if isinstance(ROC, dict) else ROC))(fpf)
	else:
		approx = interp1d(*zip(*ROC))(fpf)
	# Analytic "true" curve: tpf = fpf**(1/sep) for the exponential model,
	# the binormal formula for the normal model.
	if dist == 'exponential':
		mseTrue: float = np.mean((approx - (fpf**(1/sep)))**2)
	elif dist == 'normal':
		mseTrue: float = np.mean((approx - (1-norm.cdf(norm.ppf(1-fpf) - sep)))**2)
	else:
		mseTrue: float = 0.0
	if rocEmpiric != None:
		if len(rocEmpiric) == 2:
			trueApprox = interp1d(rocEmpiric['x'], rocEmpiric['y'])
		else:
			trueApprox = interp1d(*zip(*rocEmpiric))
		mseEmpiric: float = np.mean((approx - (trueApprox(fpf)))**2)
	# AUC of the resampled curve: np.trapz over a unit-spaced grid, rescaled
	# by the step size.
	calcAUC: float = np.trapz(approx) / (1/step)
	return (mseTrue, calcAUC) if rocEmpiric == None else (mseTrue, mseEmpiric, calcAUC)
def genX0X1(predicted: tuple, D1: tuple=None, D0: tuple=None) -> (list, list):
	"""Generates x0 and x1 vectors out of the given parameters.
	D1 and D0 should never be smaller than the predicted array, but are often bigger.

	Returns numpy arrays of the members of ``predicted`` that belong to D0
	and to D1 respectively (filtered through genD0D1).
	"""
	# NOTE(review): the signature takes D1 before D0, the opposite of most
	# functions in this module — positional callers should double-check.
	predicted, D0, D1 = paramToParams(predicted, D0, D1)
	x0, x1 = genD0D1((D0, D1), predicted)
	return np.array(x0), np.array(x1)
def genD0D1(d0d1: list, arr: list) -> tuple:
	"""Filter ``arr`` into ground-truth membership lists.

	``d0d1`` is the pair (D0, D1) of truth collections.  Items of ``arr``
	found in neither collection are dropped; an item present in both counts
	as D0 (D0 membership is checked first).
	"""
	D0 = [item for item in arr if item in d0d1[0]]
	D1 = [item for item in arr if item not in d0d1[0] and item in d0d1[1]]
	return D0, D1
def genROC(predicted: tuple, D1: list=None, D0: list=None) -> list:
	"""Returns a list of collections of x,y coordinates in order of the threshold"""
	predicted, D0, D1 = paramToParams(predicted, D0, D1)
	# Positions (indices) within the predicted ordering serve as the scores:
	# x0 holds positions of non-diseased items, x1 of diseased items.
	x0 = list()
	x1 = list()
	for i, val in enumerate(predicted):
		if val in D1:
			x1.append(i)
		elif val in D0:
			x0.append(i)
	# ROC1.rocxy builds the empirical ROC from the two score vectors.
	roc = ROC1.rocxy(x1, x0)
	return list(zip(roc['x'], roc['y']))
def graphROC(predicted: tuple, D0: list=None, D1: list=None):
	"""Generates and graphs a single ROC curve and displays the results.

	Blocks on plt.show(); intended for interactive use only.
	"""
	predicted, D0, D1 = paramToParams(predicted, D0, D1)
	fig = plt.figure(figsize=(4,4))
	ax = fig.add_subplot(111)
	ax.plot(*zip(*genROC(predicted, D0, D1)))
	# chance diagonal for reference
	ax.plot((0,1),(0,1),c="r", linestyle="--")
	ax.set_ylim(top=1.1,bottom=-0.1)
	ax.set_xlim(left=-0.1,right=1.1)
	ax.set_title(f"AUC: {auc(predicted, D0, D1):.5f}")
	ax.set_xlabel("FPF")
	ax.set_ylabel("TPF")
	plt.show()
def graphROCs(arrays: list, withPatches: bool=False, withLine: bool=True, D0: list=None, D1: list=None):
	"""Graphs a collection of array predictions. Takes the arrays as they would come out of DylSort sorts.
	If withPatches, puts a color coded success matrix behind the line.
	If withLine, graphs the line.
	Returns the plt handle, does not display the results."""
	# lay the subplots out in a near-square grid
	rows: int = int(np.ceil(np.sqrt(len(arrays))))
	cols: int = int(np.ceil(len(arrays) / rows))
	fig, axes = plt.subplots(rows, cols, sharex=True, sharey=True, num="plots")
	fig.suptitle("ROC curves")
	if withLine:
		params: list = [(array, D0, D1) for array in arrays]
		# only parallelise curve generation for large inputs; Pool overhead
		# dominates below ~1024 elements
		if len(arrays[0]) < 1024:
			results: list = list(map(genROC, params))
		else:
			with Pool() as p:
				results: list = list(p.imap(genROC,params))
	for i, ax in enumerate(axes.flat if (rows * cols > 1) else [axes]):
		# the grid may have more cells than arrays; skip the extras
		if i >= len(arrays):
			continue
		ax.set(xlabel="False Positive Fraction", ylabel="True Positive Fraction")
		ax.label_outer()
		# chance diagonal
		ax.plot((0,1),(0,1),c='red', linestyle=":")
		if withLine:
			ax.plot(*zip(*results[i]), c='blue')
			ax.set_ylim(top=1.02, bottom=0)
			ax.set_xlim(left=-0.01, right=1)
			if not withPatches:
				ax.set_title(f"Iteration #{i} AUC: {auc(results[i]):.5f}")
		if withPatches:
			# draw the success matrix as green (correct) / red (incorrect)
			# unit rectangles behind the curve
			sm: np.ndarray = successMatrix(arrays[i], D0, D1)
			yes: list = []
			no: list = []
			yLen: int = len(D1)
			xLen: int = len(D0)
			for (y,x), value in np.ndenumerate(sm):
				if value:
					yes.append(Rectangle((x/xLen,y/yLen),1/xLen,1/yLen))
				else:
					no.append(Rectangle((x/xLen,y/yLen),1/xLen,1/yLen))
			patches = PatchCollection(no, facecolor = 'r', alpha=0.75, edgecolor='None')
			ax.add_collection(patches)
			patches = PatchCollection(yes, facecolor = 'g', alpha=0.75, edgecolor='None')
			ax.add_collection(patches)
			# fraction of correct cells equals the empirical AUC
			area = len(yes) / (len(yes) + len(no))
			ax.set_ylim(top=1, bottom=0)
			ax.set_xlim(left=0, right=1)
			ax.set_title(f"Iteration #{i} AUC: {area:.5f}")
		ax.set_aspect('equal', 'box')
	figManager = plt.get_current_fig_manager()
	# NOTE(review): showMaximized is Qt-backend specific — matches the
	# matplotlib.use('QT4Agg') call at the top of this file.
	figManager.window.showMaximized()
	#plt.show()
	return plt
def avROC(rocs: list) -> tuple:
	""" Averages ROC curves. Rocs parameter are ROC curves from genROC.

	The curves are rotated 45 degrees (u along the diagonal, v perpendicular
	to it), interpolated onto a shared u grid, averaged in v, then rotated
	back.  Returns (tpf, fpf) lists.
	"""
	#hard coded SeSp
	#e = 9*sys.float_info.epsilon
	# convert [(x1, y1), (x2, y2) ...] into np array for better arithmatic
	rocs: list = [np.array(roc) for roc in rocs]
	# rotate each curve: u = (x + y)/2 (diagonal), v = (y - x)/2 (offset)
	rotrocs: list = [{'u': tuple((roc[:,0] + roc[:,1])/2), 'v': tuple((roc[:,1]-roc[:,0])/2)} for roc in rocs]
	# shared u grid: union of every curve's u values
	stdA: list = list()
	for roc in rotrocs:
		stdA.extend(roc['u'])
	stdA: np.ndarray = np.array(sorted(set(stdA)))
	aprotrocs: np.ndarray = np.zeros((len(rotrocs), len(stdA)))
	for iRoc, roc in enumerate(rotrocs):
		inter = interp1d(roc['u'], roc['v'])
		for iU, u in enumerate(stdA):
			aprotrocs[iRoc][iU]: float = inter(u)
	# average the perpendicular offsets across curves
	ymean: np.ndarray = np.zeros((1, len(stdA)))
	for apro in aprotrocs:
		ymean += apro
	ymean /= len(aprotrocs)
	# rotate back: fpf = u - v, tpf = u + v
	fpout: np.ndarray = stdA - ymean
	tpout: np.ndarray = stdA + ymean
	ret = tpout.tolist(), fpout.tolist()
	# ymean has shape (1, n), so take the single row of each result
	return ret[0][0], ret[1][0]
def successMatrix(predicted: list, D0: list, D1: list):
	"""Build the success matrix for a predicted ordering.

	Cell [row, col] is 1 when the D0 item for that column appears earlier in
	``predicted`` than the D1 item for that row (rows/columns run over the
	reversed D1/D0 lists).  Raises EnvironmentError if any cell was left
	unfilled.
	"""
	position: dict = {val: predicted.index(val) for val in D0 + D1}
	arr: np.ndarray = np.full((len(D1), len(D0)), -1)
	for col, x in enumerate(reversed(D0)):
		for row, y in enumerate(reversed(D1)):
			arr[row, col] = position[x] < position[y]
	if -1 in arr:
		raise EnvironmentError("failed to create success matrix")
	return arr
def runStats(groups: list, params: list, comp) -> list:
	"""Runs stats on the groups provided.
	Params parameter must be: ((d0d1), dist, targetAUC, n, currLayer, len(mergers))

	Returns [average AUC, variance estimate, mean Hanley-McNeil variance,
	per-layer Hanley-McNeil projections, MSE vs the true curve, MSE vs the
	comparator's empirical curve].
	"""
	aucs, varOfSM, hanleyMcNeils, estimates = list(), list(), list(), list()
	d0d1, dist, targetAUC, n, *_ = params
	rocs: list = list()
	for group in groups:
		D0, D1 = genD0D1(d0d1, group)
		# skip groups that contain only one truth class
		if D0 and D1:
			rocs.append(genROC(group, D0, D1))
			sm: np.ndarray = successMatrix(group, D0, D1)
			auc: float = np.mean(sm)
			# x == x is False only for NaN
			if auc == auc:
				aucs.append(auc)
				hanleyMcNeils.append((len(D0), len(D1)))
			smVAR: float = unbiasedMeanMatrixVar(sm)
			if smVAR == smVAR and len(D0) > 3 and len(D1) > 3: # if not NaN
				varOfSM.append(smVAR)
	# drop any curves containing non-finite coordinates
	rocs: list = list(filter(lambda roc: np.min(np.isfinite(roc)), rocs))
	varOfAverageAUC = np.var(aucs, ddof=1) / len(aucs)
	aucs: np.ndarray = np.array(aucs)
	avgAUC: float = np.mean(aucs)
	# project the (N0, N1) sizes forward through the remaining merge layers
	estimateNs: list = [list()]
	for ns in hanleyMcNeils:
		estimateNs[0].append(ns)
	# while there are groups to 'merge'
	while len(estimateNs[-1]) != 1:
		# get the previous layer and sort by N0 + N1
		oldNs: list = sorted(estimateNs[-1], key=sum)
		# roughly the same code as mergers creation
		estimateNs.append(list())
		while oldNs:
			# NOTE(review): i is assigned but never changed, so
			# len(oldNs) - i == len(oldNs) here
			i: int = 0
			toMerge: list = list()
			segments: int = min(n, len(oldNs) - i)
			for _ in range(segments):
				toMerge.append(oldNs.pop(0))
			estimateNs[-1].append([sum((x[0] for x in toMerge)), sum((x[1] for x in toMerge))])
		estimateNs[-1].sort(key=sum)
		estimates.append(hanleyMcNeil(avgAUC, estimateNs[-1][-1][0], estimateNs[-1][-1][1]) / len(estimateNs[-1]))
	# replace the (N0, N1) tuples with their Hanley-McNeil variances in place
	for i, (N0, N1) in enumerate(hanleyMcNeils):
		hanleyMcNeils[i]: float = hanleyMcNeil(avgAUC, N0, N1)
	# fall back to the sample variance of the AUCs when no per-matrix
	# variance could be computed
	if len(varOfSM) == 0:
		varEstimate: float = float(varOfAverageAUC)
	else:
		varEstimate: float = (sum(varOfSM) / (len(varOfSM)**2))
	avgROC: tuple = avROC(rocs)
	empiricROC: tuple = comp.empiricROC()
	sep: float = genSep(dist, float(targetAUC)) # float in case it's a string
	stats: list = [avgAUC, varEstimate, sum(hanleyMcNeils) / len(hanleyMcNeils)**2, estimates, *MSE(sep, dist, avgROC, empiricROC)[:2]]
	return stats
if __name__ == "__main__":
	# Ad-hoc demo/test harness; select a scenario by editing `test` below.
	from DylSort import mergeSort
	test: int = 9
	if test == 1:
		#print(D0, D1)
		newData, D0, D1 = continuousScale("sampledata.csv")
		print(auc(genROC(newData)))
		arrays: list = [newData[:]]
		for _ in mergeSort(newData):
			arrays.append(newData[:])
		print(arrays)
		graphROCs(arrays)
	elif test == 3:
		predicted: list = [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 9, 11, 12, 13, 14, 15, 16, 17, 18, 19]
		# NOTE(review): aucSM is not defined in this module — running this
		# branch raises NameError.
		print(aucSM(successMatrix(predicted, [*range(10)], [*range(10,20)])))
	elif test == 4:
		arrays: list = [[0, 1, 4, 2, 5, 3, 6],
						[0, 1, 2, 4, 3, 5, 6],
						[0, 1, 2, 4, 3, 5, 6],
						[0, 1, 2, 3, 4, 5, 6]]
		graphROCs(arrays, D0=[0, 1, 2, 3], D1=[4, 5, 6])
	elif test == 5:
		graphROC([4, 1, 2, 3], [1, 2], [3, 4])
	elif test == 6:
		# full sort of simulated data, then plot with the success-matrix patches
		from DylSort import treeMergeSort
		from DylComp import Comparator
		from DylData import continuousScale
		import matplotlib
		font: dict = {'size' : 10}
		matplotlib.rc('font', **font)
		data, D0, D1 = continuousScale(128, 128)
		comp: Comparator = Comparator(data, rand=True, level=0, seed=15)
		for arr in treeMergeSort(data, comp=comp):
			pass
		D0.sort(key = comp.getLatentScore)
		D1.sort(key = comp.getLatentScore)
		roc: dict = ROC1.rocxy(comp.getLatentScore(D1), comp.getLatentScore(D0))
		graphROCs([arr], True, True, D0, D1)
	elif test == 7:
		# averaging several simple hand-made curves
		roc1: list = [[0, 0], [0, 1], [1, 1]]
		roc3 = roc2 = roc1
		roc4: list = [[0, 0], [0.5, 0], [0.5, 0.5], [1, 1]]
		avgROC: tuple = avROC([roc1, roc2, roc3, roc4])
		fig = plt.figure(figsize=(4,4))
		ax = fig.add_subplot(111)
		ax.plot(*zip(*roc1), 'm', label='chunk1', ls='-')
		ax.plot(*zip(*roc2), 'b', label='chunk2', ls='--')
		ax.plot(*zip(*roc3), 'g', label='chunk3', ls=':')
		ax.plot(*zip(*roc4), 'c', label='chunk4')
		ax.plot(*avgROC, 'orange', label='avg')
		ax.plot((0,1),(0,1),c="r", linestyle="--")
		ax.set_ylim(top=1.1,bottom=-0.1)
		ax.set_xlim(left=-0.1,right=1.1)
		ax.set_xlabel("FPF")
		ax.set_ylabel("TPF")
		ax.legend()
		plt.show()
	elif test == 8:
		# averaging two empirically-shaped curves
		roc1: list = [[0,0],[0,0.05],[0,0.1],[0,0.15],[0,0.2],[0,0.25],[0,0.3],[0,0.35],[0,0.4],[0,0.45],[0.1,0.45],[0.1,0.5],[0.1,0.55],[0.1,0.6],[0.1,0.65],[0.2,0.65],[0.3,0.65],[0.3,0.7],[0.4,0.7],[0.5,0.7],[0.5,0.75],[0.5,0.8],[0.5,0.85],[0.5,0.9],[0.5,0.95],[0.5,1],[0.6,1],[0.7,1],[0.8,1],[0.9,1],[1,1]]
		roc2: list = [[0,0],[0,0.1],[0,0.2],[0,0.3],[0,0.4],[0,0.5],[0.06666667,0.5],[0.13333333,0.5],[0.2,0.5],[0.26666667,0.5],[0.26666667,0.6],[0.26666667,0.7],[0.33333333,0.7],[0.4,0.7],[0.4,0.8],[0.4,0.9],[0.4,1],[0.46666667,1],[0.53333333,1],[0.6,1],[0.66666667,1],[0.73333333,1],[0.8,1],[0.86666667,1],[0.93333333,1],[1,1]]
		avgROC: tuple = avROC([roc1, roc2])
		fig = plt.figure(figsize=(4,4))
		ax = fig.add_subplot(111)
		ax.plot(*zip(*roc1), 'm', label='chunk1', ls=':', marker='o')
		ax.plot(*zip(*roc2), 'b', label='chunk2', ls='--', marker='o')
		ax.plot(*avgROC, 'orange', label='avg', marker='o')
		ax.legend()
		plt.show()
	elif test == 9:
		# per-layer MSE/AUC tracking of a full tree merge sort
		from DylSort import treeMergeSort
		from DylComp import Comparator
		import matplotlib.pyplot as plt
		from time import time
		t1: float = time()
		data, D0, D1 = continuousScale(2048, 2048)
		comp: Comparator = Comparator(data, rand=True)
		comp.genRand(len(D0), len(D1), 7.72, 'exponential')
		fig = plt.figure()
		for level, groups in enumerate(treeMergeSort(data, comp, combGroups=False)):
			rocs: list = list()
			for group in groups:
				roc: list = genROC(group, D0, D1)
				rocs.append(roc)
			avgROC:tuple = avROC(rocs)
			rocs: list = list(zip(*avgROC))
			rocs.reverse()
			mse: float = MSE(7.72, 'exponential', rocs)
			#print(*mse, auc(rocs))
			print(f"{mse[0]:03.3e}, {auc(rocs):0.3f}, {len(comp)}")
			ax = fig.add_subplot(3, 4, level + 1)
			ax.set_aspect('equal', 'box')
			approx = interp1d(*zip(*rocs), 'linear')
			ax.plot(list(np.arange(0, 1 - 10**-4, 10**-4)), [approx(fp) for fp in np.arange(0, 1 - 10**-4, 10**-4)])
			ax.plot(list(np.arange(0, 1 - 10**-4, 10**-4)), [fp**(1/7.72) for fp in np.arange(0, 1 - 10**-4, 10**-4)])
			ax.set(title=f"{mse[0]:03.6e}:{len(comp)}")
		#plt.subplots_adjust(hspace=0.25)
		plt.show()
		print(time() - t1)
| 38.584856 | 324 | 0.649817 |
d4eb842e9ecbd9f52f8f62a46f68a5b7323e1c80 | 466 | py | Python | examples/raven_django_example/urls.py | ucamhal/ravenpy | 64d2a94c160f2d81ba622858b2367d55e1912129 | [
"MIT"
] | 1 | 2019-04-26T23:58:13.000Z | 2019-04-26T23:58:13.000Z | examples/raven_django_example/urls.py | ucamhal/ravenpy | 64d2a94c160f2d81ba622858b2367d55e1912129 | [
"MIT"
] | null | null | null | examples/raven_django_example/urls.py | ucamhal/ravenpy | 64d2a94c160f2d81ba622858b2367d55e1912129 | [
"MIT"
] | null | null | null | from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
# URL routes: the public home page, a login-protected page, the Django admin,
# and the Raven (University of Cambridge SSO) login handler.
urlpatterns = patterns('',
    url(r'^$', 'raven_django_example.app.views.home', name='home'),
    url(r'^private/$', 'raven_django_example.app.views.private', name='private'),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^accounts/login/', 'raven.raven_django.views.raven_login'),
)
| 33.285714 | 81 | 0.716738 |
96c0bdf4b01d6cc46228b48a41a1b0b5335f8697 | 1,650 | py | Python | keras/mixed_precision/get_layer_policy_test.py | Halo9Pan/dive-keras | 7d4c5572fa3a9fc2542a1314d06c555f67575cb0 | [
"Apache-2.0"
] | 37,222 | 2017-12-13T00:52:55.000Z | 2022-03-31T22:34:35.000Z | keras/mixed_precision/get_layer_policy_test.py | amirsadafi/keras | f1e9c76675981ee6683f54a3ce569212d551d12d | [
"Apache-2.0"
] | 7,624 | 2017-12-13T01:03:40.000Z | 2022-03-31T23:57:24.000Z | keras/mixed_precision/get_layer_policy_test.py | amirsadafi/keras | f1e9c76675981ee6683f54a3ce569212d551d12d | [
"Apache-2.0"
] | 14,914 | 2017-12-13T02:30:46.000Z | 2022-03-30T14:49:16.000Z | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests the get_layer_policy function."""
import tensorflow.compat.v2 as tf
from keras.engine import base_layer_utils
from keras.layers import core
from keras.mixed_precision import get_layer_policy
from keras.mixed_precision import policy
class GetLayerPolicyTest(tf.test.TestCase):
  """Tests for get_layer_policy.get_layer_policy."""

  def test_get_layer_policy(self):
    # Default layer dtype resolves to the 'float32' policy.
    layer = core.Dense(4)
    self.assertEqual(get_layer_policy.get_layer_policy(layer).name, 'float32')

    # A Policy passed as dtype is returned as the very same instance.
    p = policy.Policy('mixed_float16')
    layer = core.Dense(4, dtype=p)
    self.assertIs(get_layer_policy.get_layer_policy(layer), p)

    # A dtype string is surfaced as a policy with that name.
    layer = core.Dense(4, dtype='float64')
    self.assertEqual(get_layer_policy.get_layer_policy(layer).name, 'float64')

  def test_error(self):
    # Non-layer arguments are rejected with a descriptive ValueError.
    with self.assertRaisesRegex(
        ValueError, 'get_policy can only be called on a layer, but got: 1'):
      get_layer_policy.get_layer_policy(1)
if __name__ == '__main__':
  # v2 dtype behavior is required for mixed-precision policies to apply.
  base_layer_utils.enable_v2_dtype_behavior()
  tf.test.main()
| 35.106383 | 80 | 0.72303 |
9f35d24ec08f85d2f9131f0ebb5c26c8b7d9609f | 8,461 | py | Python | modules/photons_canvas/animations/registered/color_cycle.py | Djelibeybi/photons | bc0aa91771d8e88fd3c691fb58f18cb876f292ec | [
"MIT"
] | 51 | 2020-07-03T08:34:48.000Z | 2022-03-16T10:56:08.000Z | modules/photons_canvas/animations/registered/color_cycle.py | delfick/photons | bc0aa91771d8e88fd3c691fb58f18cb876f292ec | [
"MIT"
] | 81 | 2020-07-03T08:13:59.000Z | 2022-03-31T23:02:54.000Z | modules/photons_canvas/animations/registered/color_cycle.py | Djelibeybi/photons | bc0aa91771d8e88fd3c691fb58f18cb876f292ec | [
"MIT"
] | 8 | 2020-07-24T23:48:20.000Z | 2021-05-24T17:20:16.000Z | from photons_canvas.animations import Animation, an_animation
from photons_canvas import point_helpers as php
from delfick_project.norms import dictobj, sb
from collections import defaultdict
import random
import math
changers = {}
class changer:
    """Class decorator that registers the decorated changer class in the
    module-level ``changers`` registry under the given name."""

    def __init__(self, name):
        self.name = name

    def __call__(self, kls):
        changers[self.name] = kls
        return kls
class Options(dictobj.Spec):
    # Which registered changer style to run (keys of the ``changers`` registry).
    changer = dictobj.Field(lambda: sb.string_choice_spec(list(changers)), default="vertical_morph")

    # Brightness and saturation applied to every colour the changers produce.
    brightness = dictobj.Field(sb.float_spec, default=0.5)
    saturation = dictobj.Field(sb.float_spec, default=1)
class Changer:
    """Base class for the colour-changing strategies driven by the animation.

    Subclasses override key()/color() (required) and optionally setup,
    new_device, change_iterations and progress.
    """

    def __init__(self, options):
        self.options = options
        self.setup()

    def setup(self):
        # subclass hook; default does nothing
        pass

    def _new_device(self, event, state):
        # reset the point->part cache whenever a new device appears
        self.point_to_part = {}
        return self.new_device(event, state)

    def new_device(self, event, state):
        pass

    def initial_iteration(self, animation):
        # initial tick cadence; change_iterations may retune it later
        animation.every = 1
        animation.duration = 1

    def change_iterations(self, animation):
        pass

    def progress(self, event, state):
        pass

    def key(self, point, state, canvas):
        raise NotImplementedError()

    def color(self, point, canvas, event, state):
        raise NotImplementedError()

    def part(self, point, canvas):
        # cache the first part found for this point to avoid repeated lookups
        part = self.point_to_part.get(point)
        if part is None:
            part = self.point_to_part[point] = list(canvas.point_to_parts[point])[0]
        return part

    def from_hue(self, hue):
        # (hue, saturation, brightness, kelvin) colour tuple
        return (hue, self.options.saturation, self.options.brightness, 3500)

    def from_mod_hue(self, hue):
        # as from_hue, but wraps the hue into [0, 360)
        return (hue % 360, self.options.saturation, self.options.brightness, 3500)
@changer("vertical_morph")
class VerticalMorph(Changer):
    """Approximation of the firmware MORPH effect (per the module docstring)."""

    def setup(self):
        # current hue offset, advanced on every progress tick
        self.i = random.randrange(0, 360)

    def new_device(self, event, state):
        num_parts = len(event.canvas.parts)
        # (divide-then-multiply flag, divisor) pairs mixing three gradients
        divs = ((1, num_parts), (0, num_parts * 2), (0, num_parts * 6))
        return {"divs": divs}

    def change_iterations(self, animation):
        animation.every = 0.2
        animation.duration = 0.3

    def progress(self, event, state):
        self.i = (self.i + 30) % 360

    def key(self, point, state, canvas):
        # colours depend only on the point and the current hue offset
        return (point, self.i)

    def color(self, point, canvas, event, state):
        vs = []
        col, row = point
        for divthentimes, div in state["divs"]:
            if divthentimes:
                col1 = col / div
                row1 = row * div
            else:
                col1 = col * div
                row1 = row / div
            vs.append(col1 + row1)

        # average the three gradient hues into one colour
        colors = []
        for v in vs:
            h = (v + self.i) % 360
            colors.append((h, self.options.saturation, self.options.brightness, 3500))

        return php.average_color(colors)
@changer("cycle")
class Cycle(Changer):
    """The entire canvas cycles through hues in unison."""

    def setup(self):
        self.i = random.randrange(0, 360)

    def change_iterations(self, animation):
        animation.every = 0.5
        animation.duration = 0.5

    def progress(self, event, state):
        self.i = (self.i + 20) % 360

    def key(self, point, state, canvas):
        return (point, self.i)

    def color(self, point, canvas, event, state):
        # same hue everywhere; only time varies it
        return self.from_hue(self.i)
@changer("cycle_parts")
class CycleParts(Changer):
    """Each panel cycles through hues at its own random speed."""

    def new_device(self, event, state):
        if state is None:
            state = {}

        # state maps part -> [current hue, per-tick hue increment]
        for part in event.canvas.parts:
            if part not in state:
                state[part] = [random.randrange(0, 360), random.randrange(5, 10)]

        return state

    def change_iterations(self, animation):
        animation.every = 0.2
        animation.duration = 0.2

    def progress(self, event, state):
        for p in state.values():
            p[0] = (p[0] + p[1]) % 360

    def key(self, point, state, canvas):
        # key on the owning part's (hue, speed) so the cache refreshes per tick
        return (point, tuple(state[self.part(point, canvas)]))

    def color(self, point, canvas, event, state):
        return self.from_hue(state[self.part(point, canvas)][0])
@changer("wave")
class Wave(Changer):
    """A wave of colour derived from each point's col * row product."""

    def setup(self):
        self.i = random.randrange(0, 360)

    def change_iterations(self, animation):
        animation.every = 2
        animation.duration = 2

    def progress(self, event, state):
        # unlike the other changers i grows unbounded here; from_mod_hue
        # wraps it into [0, 360) when colours are produced
        self.i = self.i + 200

    def key(self, point, state, canvas):
        return (point, self.i)

    def color(self, point, canvas, event, state):
        col, row = point
        return self.from_mod_hue(col * row + self.i)
@changer("patches")
class Patches(Changer):
    """Splits the canvas into size x size patches that cycle independently."""

    def setup(self):
        self.i = random.randrange(0, 360)
        self.size = 4
        # lazily-seeded hue per patch corner
        self.points = defaultdict(lambda: random.randrange(0, 360))

    def change_iterations(self, animation):
        animation.every = 2
        animation.duration = 2

    def progress(self, event, state):
        count = 1

        # extend the bounds by one patch so edge patches get corners too
        (left_x, right_x), (top_y, bottom_y), (width, height) = event.canvas.bounds
        bounds = (left_x - self.size, right_x), (top_y, bottom_y - self.size), (width, height)

        for point in php.Points.all_points(bounds):
            col, row = point
            if col % self.size == 0 and row % self.size == 0:
                # nudge the hue by a sine-perturbed random step per corner
                self.i = (self.i + math.sin(count * self.i) + random.randrange(0, 360)) % 360
                self.points[point] = self.i
                count += 1

    def key(self, point, state, canvas):
        # no caching key: colours are recomputed every tick
        return

    def color(self, point, canvas, event, state):
        col, row = point
        # snap the point to its patch's top-left corner
        col = col - (col % self.size)
        row = row - (row % self.size)
        return self.from_mod_hue(self.points[(col, row)])
class Layer:
    """Wraps a Changer, memoising colours by the changer's key in the
    event's shared ``colors`` cache."""

    def __init__(self, changer):
        self.event = None
        self.changer = changer

    def layer(self, point, canvas):
        key = self.changer.key(point, self.event.state["state"], canvas)
        if key is not None:
            c = self.event.state["colors"].get(key)
        # when key is None the colour is always recomputed (and stored
        # under the None key); otherwise cache misses are filled in
        if key is None or c is None:
            c = self.event.state["colors"][key] = self.changer.color(
                point, canvas, self.event, self.event.state["state"]
            )
        return c

    def next_layer(self, changer, event):
        # advance the changer one tick and hand back the layer callable
        self.event = event
        changer.progress(event, event.state["state"])
        return self.layer
@an_animation("color_cycle", Options)
class TileColorCycleAnimation(Animation):
    """
    Display pretty colour transitions on the tiles. This animation is a bit
    special in that it's many animations in one.

    Note that for simplification of the code, there are no options per animation.

    The following are the options:

    changer - the animation to run - default vertical_morph
        This is the style off the animation

        * vertical_morph
            The closest to a the MORPH firmware effect I could create.
            Maths is hard.
        * cycle
            The entire canvas cycles between colours.
        * cycle_parts
            Each panel will cycle between colours.
        * wave
            Such a pretty wave of colours.
        * patches
            Each panel is split into 4 patches, which will each cycle colours.

    brightness - float - default 0.5
        The brightness of the colours

    saturation - float - default 1
        The saturation of the colors

    This is a good set of options for this animation:
    https://gist.github.com/delfick/22e984ff9587401a255b175f4db6b309

    run with::

        lifx lan:animate -- file://instructions.json
    """

    # Per-(sorted parts, changer class) state cache so a device that
    # reconnects resumes where it left off.
    Cache = {}

    def setup(self):
        self.changer = changers[self.options.changer](self.options)
        self.changer.initial_iteration(self)
        self.layer = Layer(self.changer)
        # counts the first two ticks so the cadence can be switched once
        self.counter = 0

    async def process_event(self, event):
        if event.is_new_device:
            key = (tuple(sorted(event.canvas.parts)), self.changer.__class__)
            existing_state = (event.state or {}).get("state")
            event.state = TileColorCycleAnimation.Cache[key] = {
                "colors": {},
                "state": self.changer._new_device(event, existing_state),
            }

        elif event.is_tick:
            # after the first tick at the initial cadence, switch to the
            # changer's own cadence
            if self.counter == 1:
                self.changer.change_iterations(self)
                self.counter += 1
            elif self.counter == 0:
                self.counter += 1

            return self.layer.next_layer(self.changer, event)
| 27.924092 | 100 | 0.600756 |
a928c878dec9538acd23ade3ad869f6a4fa04362 | 415 | py | Python | graphene_django_app/wsgi.py | piash222/graphql_django_app | 00556b7b40010d59973da1483d19c8f5bab6ead6 | [
"MIT"
] | null | null | null | graphene_django_app/wsgi.py | piash222/graphql_django_app | 00556b7b40010d59973da1483d19c8f5bab6ead6 | [
"MIT"
] | 4 | 2021-03-19T01:00:13.000Z | 2021-09-22T18:46:54.000Z | graphene_django_app/wsgi.py | piash222/graphql_django_app | 00556b7b40010d59973da1483d19c8f5bab6ead6 | [
"MIT"
] | null | null | null | """
WSGI config for graphene_django_app project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project's settings before the WSGI app is created.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'graphene_django_app.settings')

# WSGI callable used by application servers (gunicorn, uwsgi, ...).
application = get_wsgi_application()
| 24.411765 | 79 | 0.79759 |
e7f73dabc777389832b0d2a8d5119668ad1ef85a | 1,055 | py | Python | aston/spectra/math.py | bovee/Aston | 315871346df72b3e8fcfa9943e8a3519e60299ff | [
"BSD-3-Clause"
] | 13 | 2015-07-16T19:02:50.000Z | 2022-03-29T08:17:51.000Z | aston/spectra/math.py | larsyunker/Aston | 315871346df72b3e8fcfa9943e8a3519e60299ff | [
"BSD-3-Clause"
] | 13 | 2015-12-16T05:17:35.000Z | 2021-06-07T10:45:53.000Z | aston/spectra/math.py | larsyunker/Aston | 315871346df72b3e8fcfa9943e8a3519e60299ff | [
"BSD-3-Clause"
] | 6 | 2017-03-24T20:18:26.000Z | 2021-12-01T04:16:59.000Z | import numpy as np
from scipy.sparse import dia_matrix
from scipy.sparse import lil_matrix
# TODO: implementations of Stein & Scott 1994 algorithms
def find_spectrum_match(spec, spec_lib, method='euclidian'):
    """
    Find spectrum in spec_lib most similar to spec.

    Returns (index of best match, score at that index).  For 'dot' higher
    scores are better, but argmin is taken — see the review note below.
    """
    # filter out any points with abundance below 1 %
    # spec[spec / np.sum(spec) < 0.01] = 0

    # normalise to the maximum intensity (not to unit sum)
    spec = spec / np.max(spec)

    if method == 'dot':
        # squared dot product of each library row with spec, normalised by
        # the product of squared norms (cosine-similarity style score)
        d1 = (spec_lib * lil_matrix(spec).T).sum(axis=1).A ** 2
        d2 = np.sum(spec ** 2) * spec_lib.multiply(spec_lib).sum(axis=1).A
        dist = d1 / d2
    elif method == 'euclidian':
        # st_spc = spectrum[np.newaxis, :].repeat(spec_lib.shape[0], axis=0)
        # diagonal matrix so spec_lib.dot(st_spc) scales each column by spec
        st_spc = dia_matrix((spec, [0]), shape=(len(spec), len(spec)))

        # calculate the residual sum of squares from spectrum to library:
        # sum(lib^2) - 2*sum(lib*spec) + sum(spec^2) == sum((lib - spec)^2)
        dist_sp = spec_lib.multiply(spec_lib) - 2 * spec_lib.dot(st_spc)
        dist = dist_sp.sum(axis=1).A + np.sum(spec ** 2)
    # NOTE(review): an unrecognised method leaves `dist` unbound (NameError),
    # and for 'dot' argmin returns the WORST cosine match — confirm intent.
    return (dist.argmin(), dist.min())
| 37.678571 | 76 | 0.637915 |
f1b327b26bf23d7d8fb39fcdab10f1aa2a8f6813 | 8,132 | py | Python | src/oscar/apps/basket/middleware.py | akiyoko/oscar_sandbox | b384f1c0b5f297fd4b84509a575f6766a48630a5 | [
"BSD-3-Clause"
] | 68 | 2016-11-06T05:07:57.000Z | 2021-12-17T09:17:38.000Z | src/oscar/apps/basket/middleware.py | akiyoko/oscar_sandbox | b384f1c0b5f297fd4b84509a575f6766a48630a5 | [
"BSD-3-Clause"
] | null | null | null | src/oscar/apps/basket/middleware.py | akiyoko/oscar_sandbox | b384f1c0b5f297fd4b84509a575f6766a48630a5 | [
"BSD-3-Clause"
] | 28 | 2016-12-04T07:12:50.000Z | 2021-02-06T21:13:15.000Z | from django.conf import settings
from django.contrib import messages
from django.core.signing import BadSignature, Signer
from django.utils.functional import SimpleLazyObject, empty
from django.utils.translation import ugettext_lazy as _
from oscar.core.loading import get_class, get_model
Applicator = get_class('offer.utils', 'Applicator')
Basket = get_model('basket', 'basket')
Selector = get_class('partner.strategy', 'Selector')
selector = Selector()
class BasketMiddleware(object):
# Middleware interface methods
    def process_request(self, request):
        """Attach a lazily-loaded basket and basket hash to the request."""
        # Keep track of cookies that need to be deleted (which can only be done
        # when we're processing the response instance).
        request.cookies_to_delete = []

        # Load stock/price strategy and assign to request (it will later be
        # assigned to the basket too).
        strategy = selector.strategy(request=request, user=request.user)
        request.strategy = strategy

        # We lazily load the basket so use a private variable to hold the
        # cached instance.
        request._basket_cache = None

        def load_full_basket():
            """
            Return the basket after applying offers.
            """
            basket = self.get_basket(request)
            basket.strategy = request.strategy
            self.apply_offers_to_basket(request, basket)

            return basket

        def load_basket_hash():
            """
            Load the basket and return the basket hash

            Note that we don't apply offers or check that every line has a
            stockrecord here.
            """
            basket = self.get_basket(request)
            if basket.id:
                return self.get_basket_hash(basket.id)

        # Use Django's SimpleLazyObject to only perform the loading work
        # when the attribute is accessed.
        request.basket = SimpleLazyObject(load_full_basket)
        request.basket_hash = SimpleLazyObject(load_basket_hash)
    def process_response(self, request, response):
        """Delete stale basket cookies and persist an anonymous basket cookie."""
        # Delete any surplus cookies
        cookies_to_delete = getattr(request, 'cookies_to_delete', [])
        for cookie_key in cookies_to_delete:
            response.delete_cookie(cookie_key)

        if not hasattr(request, 'basket'):
            return response

        # If the basket was never initialized we can safely return
        if (isinstance(request.basket, SimpleLazyObject)
                and request.basket._wrapped is empty):
            return response

        cookie_key = self.get_cookie_key(request)
        # Check if we need to set a cookie. If the cookies is already available
        # but is set in the cookies_to_delete list then we need to re-set it.
        has_basket_cookie = (
            cookie_key in request.COOKIES
            and cookie_key not in cookies_to_delete)

        # If a basket has had products added to it, but the user is anonymous
        # then we need to assign it to a cookie
        if (request.basket.id and not request.user.is_authenticated()
                and not has_basket_cookie):
            # The cookie value is a signed basket id so it cannot be tampered with.
            cookie = self.get_basket_hash(request.basket.id)
            response.set_cookie(
                cookie_key, cookie,
                max_age=settings.OSCAR_BASKET_COOKIE_LIFETIME,
                secure=settings.OSCAR_BASKET_COOKIE_SECURE, httponly=True)
        return response
    def get_cookie_key(self, request):
        """
        Returns the cookie name to use for storing a cookie basket.

        The method serves as a useful hook in multi-site scenarios where
        different baskets might be needed.
        """
        # Default: single site-wide cookie name from settings.
        return settings.OSCAR_BASKET_COOKIE_OPEN
def process_template_response(self, request, response):
    """Inject the request's basket into a TemplateResponse's context.

    If the view already supplied its own ``basket`` context variable
    (e.g. a frozen checkout basket), it is left untouched and the
    request basket is exposed under ``request_basket`` instead.
    """
    if not hasattr(response, 'context_data'):
        # Not a TemplateResponse-style object; nothing to inject.
        return response
    context = response.context_data
    if context is None:
        context = response.context_data = {}
    if 'basket' in context:
        # A view may deliberately render an alternative basket (for
        # instance when the submitted basket is frozen during an
        # off-site payment redirect).  Keep the request basket reachable
        # under a secondary key instead of clobbering the explicit one.
        context['request_basket'] = request.basket
    else:
        context['basket'] = request.basket
    return response
# Helper methods
def get_basket(self, request):
    """
    Return the open basket for this request

    Resolution order:
      1. the per-request cache,
      2. for an authenticated user: their DB basket, merging in any
         anonymous cookie basket (they just signed in),
      3. for an anonymous user: the cookie basket, or a fresh unsaved
         ``Basket`` instance.
    """
    # NOTE(review): assumes request._basket_cache and
    # request.cookies_to_delete were initialised earlier (presumably in
    # process_request, not visible here) — confirm.
    if request._basket_cache is not None:
        return request._basket_cache
    num_baskets_merged = 0
    manager = Basket.open
    cookie_key = self.get_cookie_key(request)
    # Anonymous basket referenced by the signed cookie, if any.
    cookie_basket = self.get_cookie_basket(cookie_key, request, manager)
    if hasattr(request, 'user') and request.user.is_authenticated():
        # Signed-in user: if they have a cookie basket too, it means
        # that they have just signed in and we need to merge their cookie
        # basket into their user basket, then delete the cookie.
        try:
            basket, __ = manager.get_or_create(owner=request.user)
        except Basket.MultipleObjectsReturned:
            # Not sure quite how we end up here with multiple baskets.
            # We merge them and create a fresh one
            old_baskets = list(manager.filter(owner=request.user))
            basket = old_baskets[0]
            for other_basket in old_baskets[1:]:
                self.merge_baskets(basket, other_basket)
                num_baskets_merged += 1
        # Assign user onto basket to prevent further SQL queries when
        # basket.owner is accessed.
        basket.owner = request.user
        if cookie_basket:
            self.merge_baskets(basket, cookie_basket)
            num_baskets_merged += 1
            request.cookies_to_delete.append(cookie_key)
    elif cookie_basket:
        # Anonymous user with a basket tied to the cookie
        basket = cookie_basket
    else:
        # Anonymous user with no basket - instantiate a new basket
        # instance. No need to save yet.
        basket = Basket()
    # Cache basket instance for the duration of this request
    request._basket_cache = basket
    if num_baskets_merged > 0:
        # Let the user know their basket contents may have changed.
        messages.add_message(request, messages.WARNING,
            _("We have merged a basket from a previous session. Its contents "
              "might have changed."))
    return basket
def merge_baskets(self, master, slave):
    """Fold the contents of ``slave`` into ``master``.

    Kept as its own method purely as an override hook.  Quantities of
    lines present in both baskets are not summed.
    """
    master.merge(slave, add_quantities=False)
def get_cookie_basket(self, cookie_key, request, manager):
    """Return the anonymous, open basket referenced by the request cookie.

    Returns ``None`` when there is no cookie, and schedules the cookie
    for deletion when its signature is invalid or it points to a basket
    that no longer exists (or is no longer open/anonymous).
    """
    signed_id = request.COOKIES.get(cookie_key)
    if signed_id is None:
        return None
    try:
        basket_pk = Signer().unsign(signed_id)
        return Basket.objects.get(pk=basket_pk, owner=None,
                                  status=Basket.OPEN)
    except (BadSignature, Basket.DoesNotExist):
        # Stale or tampered cookie: flag it for removal on the response.
        request.cookies_to_delete.append(cookie_key)
        return None
def apply_offers_to_basket(self, request, basket):
    """Run the offer applicator over a non-empty basket to price it."""
    if basket.is_empty:
        # Nothing to price; skip the (potentially expensive) applicator.
        return
    Applicator().apply(basket, request.user, request)
def get_basket_hash(self, basket_id):
    """Return ``basket_id`` signed for storage in the basket cookie.

    Signing prevents clients from tampering with the id; the inverse
    operation lives in ``get_cookie_basket``.
    """
    signer = Signer()
    return signer.sign(basket_id)
| 39.285024 | 99 | 0.622356 |
a3fef552a3c7c0c51ab7b6c3635a9fa25c41c561 | 7,976 | py | Python | eggs/Paste-1.7.5.1-py2.7.egg/paste/modpython.py | salayhin/talkofacta | 8b5a14245dd467bb1fda75423074c4840bd69fb7 | [
"MIT"
] | 19 | 2015-05-01T19:59:03.000Z | 2021-12-09T08:03:16.000Z | eggs/Paste-1.7.5.1-py2.7.egg/paste/modpython.py | salayhin/talkofacta | 8b5a14245dd467bb1fda75423074c4840bd69fb7 | [
"MIT"
] | 1 | 2018-01-03T15:26:49.000Z | 2018-01-03T15:26:49.000Z | eggs/Paste-1.7.5.1-py2.7.egg/paste/modpython.py | salayhin/talkofacta | 8b5a14245dd467bb1fda75423074c4840bd69fb7 | [
"MIT"
] | 30 | 2015-03-25T19:40:07.000Z | 2021-05-28T22:59:26.000Z | """WSGI Paste wrapper for mod_python. Requires Python 2.2 or greater.
Example httpd.conf section for a Paste app with an ini file::
<Location />
SetHandler python-program
PythonHandler paste.modpython
PythonOption paste.ini /some/location/your/pasteconfig.ini
</Location>
Or if you want to load a WSGI application under /your/homedir in the module
``startup`` and the WSGI app is ``app``::
<Location />
SetHandler python-program
PythonHandler paste.modpython
PythonPath "['/virtual/project/directory'] + sys.path"
PythonOption wsgi.application startup::app
</Location>
If you'd like to use a virtual installation, make sure to add it in the path
like so::
<Location />
SetHandler python-program
PythonHandler paste.modpython
PythonPath "['/virtual/project/directory', '/virtual/lib/python2.4/'] + sys.path"
PythonOption paste.ini /virtual/project/directory/pasteconfig.ini
</Location>
Some WSGI implementations assume that the SCRIPT_NAME environ variable will
always be equal to "the root URL of the app"; Apache probably won't act as
you expect in that case. You can add another PythonOption directive to tell
modpython_gateway to force that behavior:
PythonOption SCRIPT_NAME /mcontrol
Some WSGI applications need to be cleaned up when Apache exits. You can
register a cleanup handler with yet another PythonOption directive:
PythonOption wsgi.cleanup module::function
The module.function will be called with no arguments on server shutdown,
once for each child process or thread.
This module highly based on Robert Brewer's, here:
http://projects.amor.org/misc/svn/modpython_gateway.py
"""
import traceback

try:
    from mod_python import apache
except Exception:
    # mod_python is only importable when running under Apache.  Swallow the
    # failure so this module can still be imported elsewhere (docs, tests).
    # Narrowed from a bare ``except:``, which would also trap
    # SystemExit/KeyboardInterrupt.
    pass

from paste.deploy import loadapp
class InputWrapper(object):
    """File-like ``wsgi.input`` adapter delegating reads to the request."""

    def __init__(self, req):
        self.req = req

    def close(self):
        # The underlying stream is owned by the request; nothing to do.
        pass

    def read(self, size=-1):
        return self.req.read(size)

    def readline(self, size=-1):
        return self.req.readline(size)

    def readlines(self, hint=-1):
        return self.req.readlines(hint)

    def __iter__(self):
        # Lazily yield one line at a time until EOF (empty string), without
        # prefetching the next line before the consumer asks for it.
        return iter(self.readline, '')
class ErrorWrapper(object):
    """``wsgi.errors`` adapter that routes writes to the request's error log."""

    def __init__(self, req):
        self.req = req

    def flush(self):
        # Writes go straight to the log; flushing is a no-op.
        pass

    def write(self, msg):
        self.req.log_error(msg)

    def writelines(self, seq):
        joined = ''.join(seq)
        self.req.log_error(joined)
# %-template for the error raised by Handler.__init__ when running under
# mod_python < 3.1 without an explicit 'multithread'/'multiprocess'
# PythonOption; the missing option name is interpolated in.
bad_value = ("You must provide a PythonOption '%s', either 'on' or 'off', "
             "when running a version of mod_python < 3.1")
class Handler(object):
    """Bridge one mod_python request to a WSGI application.

    Builds the WSGI environ from the Apache request, supplies the
    ``start_response``/``write`` callables, and streams the application's
    response back through mod_python.  Python 2 only — see the
    three-argument ``raise`` in ``start_response``.
    """

    def __init__(self, req):
        # Becomes True once the first body byte is written (headers sent).
        self.started = False
        options = req.get_options()
        # Threading and forking
        try:
            # mod_python >= 3.1 can query Apache's MPM directly.
            q = apache.mpm_query
            threaded = q(apache.AP_MPMQ_IS_THREADED)
            forked = q(apache.AP_MPMQ_IS_FORKED)
        except AttributeError:
            # Older mod_python: the deployer must declare both via
            # PythonOption 'multithread' / 'multiprocess' (see bad_value).
            threaded = options.get('multithread', '').lower()
            if threaded == 'on':
                threaded = True
            elif threaded == 'off':
                threaded = False
            else:
                raise ValueError(bad_value % "multithread")
            forked = options.get('multiprocess', '').lower()
            if forked == 'on':
                forked = True
            elif forked == 'off':
                forked = False
            else:
                raise ValueError(bad_value % "multiprocess")
        env = self.environ = dict(apache.build_cgi_env(req))
        if 'SCRIPT_NAME' in options:
            # Override SCRIPT_NAME and PATH_INFO if requested.
            env['SCRIPT_NAME'] = options['SCRIPT_NAME']
            env['PATH_INFO'] = req.uri[len(options['SCRIPT_NAME']):]
        else:
            env['SCRIPT_NAME'] = ''
            env['PATH_INFO'] = req.uri
        env['wsgi.input'] = InputWrapper(req)
        env['wsgi.errors'] = ErrorWrapper(req)
        env['wsgi.version'] = (1, 0)
        env['wsgi.run_once'] = False
        if env.get("HTTPS") in ('yes', 'on', '1'):
            env['wsgi.url_scheme'] = 'https'
        else:
            env['wsgi.url_scheme'] = 'http'
        env['wsgi.multithread'] = threaded
        env['wsgi.multiprocess'] = forked
        self.request = req

    def run(self, application):
        """Invoke *application* with this environ and stream its response."""
        try:
            result = application(self.environ, self.start_response)
            for data in result:
                self.write(data)
            if not self.started:
                # Empty body: still emit the headers with a zero length.
                self.request.set_content_length(0)
            if hasattr(result, 'close'):
                result.close()
        except:
            # Deliberate catch-all: log the traceback to wsgi.errors and,
            # if headers have not gone out yet, send a plain 500 page.
            traceback.print_exc(None, self.environ['wsgi.errors'])
            if not self.started:
                self.request.status = 500
                self.request.content_type = 'text/plain'
                data = "A server error occurred. Please contact the administrator."
                self.request.set_content_length(len(data))
                self.request.write(data)

    def start_response(self, status, headers, exc_info=None):
        """WSGI start_response: record status and headers on the request."""
        if exc_info:
            try:
                if self.started:
                    # Headers already sent: re-raise per PEP 333.
                    # (Python 2 three-argument raise syntax.)
                    raise exc_info[0], exc_info[1], exc_info[2]
            finally:
                # Break the traceback reference cycle (PEP 333 requirement).
                exc_info = None
        self.request.status = int(status[:3])
        for key, val in headers:
            if key.lower() == 'content-length':
                self.request.set_content_length(int(val))
            elif key.lower() == 'content-type':
                self.request.content_type = val
            else:
                self.request.headers_out.add(key, val)
        return self.write

    def write(self, data):
        """WSGI write callable; first use marks the response as started."""
        if not self.started:
            self.started = True
        self.request.write(data)
# Per-interpreter module state (mod_python keeps this module alive across
# requests within one interpreter/child):
startup = None   # resolved 'wsgi.startup' callable, run at most once
cleanup = None   # resolved 'wsgi.cleanup' callable, registered at most once
wsgiapps = {}    # cache: paste.ini path -> loaded WSGI application
def handler(req):
    """mod_python entry point: dispatch *req* to the configured WSGI app.

    PythonOption keys understood: 'wsgi.startup', 'wsgi.cleanup',
    'paste.ini' and 'wsgi.application' (both in 'module::object' form);
    'paste.ini' takes precedence over 'wsgi.application' when both are set.
    """
    options = req.get_options()
    # Run a startup function if requested (at most once per interpreter).
    global startup
    if 'wsgi.startup' in options and not startup:
        func = options['wsgi.startup']
        if func:
            module_name, object_str = func.split('::', 1)
            module = __import__(module_name, globals(), locals(), [''])
            startup = apache.resolve_object(module, object_str)
            startup(req)
    # Register a cleanup function if requested (at most once).
    global cleanup
    if 'wsgi.cleanup' in options and not cleanup:
        func = options['wsgi.cleanup']
        if func:
            module_name, object_str = func.split('::', 1)
            module = __import__(module_name, globals(), locals(), [''])
            cleanup = apache.resolve_object(module, object_str)
            def cleaner(data):
                # Adapter: Apache passes an argument the cleanup fn ignores.
                cleanup()
            try:
                # apache.register_cleanup wasn't available until 3.1.4.
                apache.register_cleanup(cleaner)
            except AttributeError:
                req.server.register_cleanup(req, cleaner)
    # Load (and cache) the app from a Paste ini file, if configured.
    global wsgiapps
    appini = options.get('paste.ini')
    app = None
    if appini:
        if appini not in wsgiapps:
            wsgiapps[appini] = loadapp("config:%s" % appini)
        app = wsgiapps[appini]
    # Otherwise import the wsgi 'application' callable directly.
    appwsgi = options.get('wsgi.application')
    if appwsgi and not appini:
        modname, objname = appwsgi.split('::', 1)
        module = __import__(modname, globals(), locals(), [''])
        app = getattr(module, objname)
    Handler(req).run(app)
    # status was set in Handler; always return apache.OK
    return apache.OK
| 31.525692 | 89 | 0.587262 |
ad768dc759142b771f842938fbdd318ada95944a | 1,574 | py | Python | tests/artifactcli/operation/test_list.py | arcizan/artifact-cli | 1b4ddcd8bb3c32899fa385eefd128829c8cdd6e5 | [
"Apache-2.0"
] | 10 | 2015-01-11T14:43:57.000Z | 2020-05-08T06:18:30.000Z | tests/artifactcli/operation/test_list.py | arcizan/artifact-cli | 1b4ddcd8bb3c32899fa385eefd128829c8cdd6e5 | [
"Apache-2.0"
] | 34 | 2015-01-04T17:37:54.000Z | 2019-11-11T17:11:24.000Z | tests/artifactcli/operation/test_list.py | arcizan/artifact-cli | 1b4ddcd8bb3c32899fa385eefd128829c8cdd6e5 | [
"Apache-2.0"
] | 4 | 2015-01-11T07:05:33.000Z | 2021-11-17T04:26:32.000Z | import unittest
from datetime import datetime
from artifactcli.artifact import *
from artifactcli.driver import *
from artifactcli.operation import *
from artifactcli.repository import Repository
class TestListOperation(unittest.TestCase):
def test_run(self):
arts = [
Artifact(BasicInfo('com.github.mogproject', 'art-test', '0.0.1', 'jar', 1),
FileInfo('host1', 'user1', 4567890, datetime(2014, 12, 31, 9, 12, 34),
'ffffeeeeddddccccbbbbaaaa99998888'),
GitInfo('master', ['release 0.0.1'], 'mogproject', 'x@example.com',
datetime(2014, 12, 30, 8, 11, 29), 'first commit',
'111122223333444455556666777788889999aaaa')),
Artifact(BasicInfo('com.github.mogproject', 'art-test', '0.0.1', 'jar', 2),
FileInfo('host1', 'user1', 4567891, datetime(2014, 12, 31, 9, 12, 34),
'ffffeeeeddddccccbbbbaaaa99998887'),
GitInfo('master', ['release 0.0.1'], 'mogproject', 'x@example.com',
datetime(2014, 12, 30, 8, 11, 29), 'first commit',
'111122223333444455556666777788889999aaaa')),
]
r = Repository(MockDriver(), 'com.github.mogproject')
r.upload('/path/to/art-test-0.0.1.jar', arts[0])
r.upload('/path/to/art-test-0.0.1.jar', arts[1])
r.artifacts = {}
rc = ListOperation('com.github.mogproject', []).run(r)
self.assertEqual(rc, 0)
| 46.294118 | 91 | 0.559085 |
2c759063e4527468359fa2020243efc55c5b071b | 10,281 | py | Python | tests_obsolete/extension/pipeline_/multi_output/test_pipeline_multi_output.py | akmaru/veriloggen | 74f998139e8cf613f7703fa4cffd571bbf069bbc | [
"Apache-2.0"
] | 232 | 2015-09-01T16:07:48.000Z | 2022-03-28T14:53:28.000Z | tests_obsolete/extension/pipeline_/multi_output/test_pipeline_multi_output.py | akmaru/veriloggen | 74f998139e8cf613f7703fa4cffd571bbf069bbc | [
"Apache-2.0"
] | 34 | 2015-08-21T09:13:03.000Z | 2022-03-21T23:52:44.000Z | tests_obsolete/extension/pipeline_/multi_output/test_pipeline_multi_output.py | akmaru/veriloggen | 74f998139e8cf613f7703fa4cffd571bbf069bbc | [
"Apache-2.0"
] | 46 | 2015-09-24T14:39:57.000Z | 2022-02-23T21:59:56.000Z | from __future__ import absolute_import
from __future__ import print_function
import veriloggen
import pipeline_multi_output
expected_verilog = """
module test;
reg CLK;
reg RST;
reg [32-1:0] x;
reg vx;
wire rx;
reg [32-1:0] y;
reg vy;
wire ry;
wire [32-1:0] z;
wire vz;
reg rz;
wire [32-1:0] a;
wire va;
reg ra;
blinkled
uut
(
.CLK(CLK),
.RST(RST),
.x(x),
.vx(vx),
.rx(rx),
.y(y),
.vy(vy),
.ry(ry),
.z(z),
.vz(vz),
.rz(rz),
.a(a),
.va(va),
.ra(ra)
);
reg reset_done;
initial begin
$dumpfile("uut.vcd");
$dumpvars(0, uut);
end
initial begin
CLK = 0;
forever begin
#5 CLK = !CLK;
end
end
initial begin
RST = 0;
reset_done = 0;
x = 0;
y = 0;
vx = 0;
vy = 0;
rz = 0;
ra = 0;
#100;
RST = 1;
#100;
RST = 0;
#1000;
reset_done = 1;
@(posedge CLK);
#1;
#10000;
$finish;
end
reg [32-1:0] _tmp_0;
reg [32-1:0] _tmp_1;
reg [32-1:0] _tmp_2;
reg [32-1:0] _tmp_3;
reg [32-1:0] xfsm;
localparam xfsm_init = 0;
localparam xfsm_1 = 1;
localparam xfsm_2 = 2;
always @(posedge CLK) begin
if(RST) begin
xfsm <= xfsm_init;
_tmp_0 <= 0;
end else begin
case(xfsm)
xfsm_init: begin
vx <= 0;
if(reset_done) begin
xfsm <= xfsm_1;
end
end
xfsm_1: begin
vx <= 1;
if(rx) begin
x <= x + 1;
end
if(rx) begin
_tmp_0 <= _tmp_0 + 1;
end
if((_tmp_0 == 10) && rx) begin
xfsm <= xfsm_2;
end
end
xfsm_2: begin
vx <= 0;
end
endcase
end
end
reg [32-1:0] yfsm;
localparam yfsm_init = 0;
localparam yfsm_1 = 1;
localparam yfsm_2 = 2;
localparam yfsm_3 = 3;
localparam yfsm_4 = 4;
localparam yfsm_5 = 5;
localparam yfsm_6 = 6;
localparam yfsm_7 = 7;
localparam yfsm_8 = 8;
localparam yfsm_9 = 9;
localparam yfsm_10 = 10;
localparam yfsm_11 = 11;
localparam yfsm_12 = 12;
localparam yfsm_13 = 13;
always @(posedge CLK) begin
if(RST) begin
yfsm <= yfsm_init;
_tmp_1 <= 0;
end else begin
case(yfsm)
yfsm_init: begin
vy <= 0;
if(reset_done) begin
yfsm <= yfsm_1;
end
end
yfsm_1: begin
yfsm <= yfsm_2;
end
yfsm_2: begin
yfsm <= yfsm_3;
end
yfsm_3: begin
yfsm <= yfsm_4;
end
yfsm_4: begin
yfsm <= yfsm_5;
end
yfsm_5: begin
yfsm <= yfsm_6;
end
yfsm_6: begin
yfsm <= yfsm_7;
end
yfsm_7: begin
yfsm <= yfsm_8;
end
yfsm_8: begin
yfsm <= yfsm_9;
end
yfsm_9: begin
yfsm <= yfsm_10;
end
yfsm_10: begin
yfsm <= yfsm_11;
end
yfsm_11: begin
vy <= 1;
yfsm <= yfsm_12;
end
yfsm_12: begin
if(ry) begin
y <= y + 2;
end
if(ry) begin
_tmp_1 <= _tmp_1 + 1;
end
if((_tmp_1 == 10) && ry) begin
yfsm <= yfsm_13;
end
end
yfsm_13: begin
vy <= 0;
end
endcase
end
end
reg [32-1:0] zfsm;
localparam zfsm_init = 0;
localparam zfsm_1 = 1;
localparam zfsm_2 = 2;
localparam zfsm_3 = 3;
localparam zfsm_4 = 4;
localparam zfsm_5 = 5;
localparam zfsm_6 = 6;
localparam zfsm_7 = 7;
localparam zfsm_8 = 8;
localparam zfsm_9 = 9;
localparam zfsm_10 = 10;
localparam zfsm_11 = 11;
localparam zfsm_12 = 12;
localparam zfsm_13 = 13;
always @(posedge CLK) begin
if(RST) begin
zfsm <= zfsm_init;
end else begin
case(zfsm)
zfsm_init: begin
rz <= 0;
if(reset_done) begin
zfsm <= zfsm_1;
end
end
zfsm_1: begin
zfsm <= zfsm_2;
end
zfsm_2: begin
if(vz) begin
rz <= 1;
end
if(vz) begin
zfsm <= zfsm_3;
end
end
zfsm_3: begin
rz <= 0;
zfsm <= zfsm_4;
end
zfsm_4: begin
rz <= 0;
zfsm <= zfsm_5;
end
zfsm_5: begin
rz <= 0;
zfsm <= zfsm_6;
end
zfsm_6: begin
rz <= 0;
zfsm <= zfsm_7;
end
zfsm_7: begin
rz <= 0;
zfsm <= zfsm_8;
end
zfsm_8: begin
rz <= 0;
zfsm <= zfsm_9;
end
zfsm_9: begin
rz <= 0;
zfsm <= zfsm_10;
end
zfsm_10: begin
rz <= 0;
zfsm <= zfsm_11;
end
zfsm_11: begin
rz <= 0;
zfsm <= zfsm_12;
end
zfsm_12: begin
rz <= 0;
zfsm <= zfsm_13;
end
zfsm_13: begin
zfsm <= zfsm_2;
end
endcase
end
end
reg [32-1:0] afsm;
localparam afsm_init = 0;
localparam afsm_1 = 1;
localparam afsm_2 = 2;
localparam afsm_3 = 3;
localparam afsm_4 = 4;
localparam afsm_5 = 5;
localparam afsm_6 = 6;
localparam afsm_7 = 7;
localparam afsm_8 = 8;
localparam afsm_9 = 9;
localparam afsm_10 = 10;
localparam afsm_11 = 11;
localparam afsm_12 = 12;
localparam afsm_13 = 13;
localparam afsm_14 = 14;
localparam afsm_15 = 15;
localparam afsm_16 = 16;
localparam afsm_17 = 17;
localparam afsm_18 = 18;
localparam afsm_19 = 19;
localparam afsm_20 = 20;
localparam afsm_21 = 21;
localparam afsm_22 = 22;
localparam afsm_23 = 23;
always @(posedge CLK) begin
if(RST) begin
afsm <= afsm_init;
end else begin
case(afsm)
afsm_init: begin
ra <= 0;
if(reset_done) begin
afsm <= afsm_1;
end
end
afsm_1: begin
afsm <= afsm_2;
end
afsm_2: begin
if(va) begin
ra <= 1;
end
if(va) begin
afsm <= afsm_3;
end
end
afsm_3: begin
ra <= 0;
afsm <= afsm_4;
end
afsm_4: begin
ra <= 0;
afsm <= afsm_5;
end
afsm_5: begin
ra <= 0;
afsm <= afsm_6;
end
afsm_6: begin
ra <= 0;
afsm <= afsm_7;
end
afsm_7: begin
ra <= 0;
afsm <= afsm_8;
end
afsm_8: begin
ra <= 0;
afsm <= afsm_9;
end
afsm_9: begin
ra <= 0;
afsm <= afsm_10;
end
afsm_10: begin
ra <= 0;
afsm <= afsm_11;
end
afsm_11: begin
ra <= 0;
afsm <= afsm_12;
end
afsm_12: begin
ra <= 0;
afsm <= afsm_13;
end
afsm_13: begin
ra <= 0;
afsm <= afsm_14;
end
afsm_14: begin
ra <= 0;
afsm <= afsm_15;
end
afsm_15: begin
ra <= 0;
afsm <= afsm_16;
end
afsm_16: begin
ra <= 0;
afsm <= afsm_17;
end
afsm_17: begin
ra <= 0;
afsm <= afsm_18;
end
afsm_18: begin
ra <= 0;
afsm <= afsm_19;
end
afsm_19: begin
ra <= 0;
afsm <= afsm_20;
end
afsm_20: begin
ra <= 0;
afsm <= afsm_21;
end
afsm_21: begin
ra <= 0;
afsm <= afsm_22;
end
afsm_22: begin
ra <= 0;
afsm <= afsm_23;
end
afsm_23: begin
afsm <= afsm_2;
end
endcase
end
end
always @(posedge CLK) begin
if(reset_done) begin
if(vx && rx) begin
$display("x=%d", x);
end
if(vy && ry) begin
$display("y=%d", y);
end
if(vz && rz) begin
$display("z=%d", z);
end
if(va && ra) begin
$display("a=%d", a);
end
end
end
endmodule
module blinkled
(
input CLK,
input RST,
input [32-1:0] x,
input vx,
output rx,
input [32-1:0] y,
input vy,
output ry,
output [32-1:0] z,
output vz,
input rz,
output [32-1:0] a,
output va,
input ra
);
reg [32-1:0] _df_data_0;
reg _df_valid_0;
wire _df_ready_0;
reg [32-1:0] _df_data_1;
reg _df_valid_1;
wire _df_ready_1;
assign ry = (_df_ready_0 || !_df_valid_0) && (vx && vy) && ((_df_ready_1 || !_df_valid_1) && (vy && vx));
assign rx = (_df_ready_0 || !_df_valid_0) && (vx && vy) && ((_df_ready_1 || !_df_valid_1) && (vy && vx));
assign z = _df_data_0;
assign vz = _df_valid_0;
assign _df_ready_0 = rz;
assign a = _df_data_1;
assign va = _df_valid_1;
assign _df_ready_1 = ra;
always @(posedge CLK) begin
if(RST) begin
_df_data_0 <= 0;
_df_valid_0 <= 0;
_df_data_1 <= 0;
_df_valid_1 <= 0;
end else begin
if(vx && vy && (rx && ry) && (_df_ready_0 || !_df_valid_0)) begin
_df_data_0 <= x + y;
end
if(_df_valid_0 && _df_ready_0) begin
_df_valid_0 <= 0;
end
if(rx && ry && (_df_ready_0 || !_df_valid_0)) begin
_df_valid_0 <= vx && vy;
end
if(vy && vx && (ry && rx) && (_df_ready_1 || !_df_valid_1)) begin
_df_data_1 <= y - x;
end
if(_df_valid_1 && _df_ready_1) begin
_df_valid_1 <= 0;
end
if(ry && rx && (_df_ready_1 || !_df_valid_1)) begin
_df_valid_1 <= vy && vx;
end
end
end
endmodule
"""
def test():
    """Check that the generated Verilog matches the golden reference.

    Both sides are normalised by parsing and re-emitting through
    pyverilog so the comparison is insensitive to formatting.
    """
    veriloggen.reset()
    code = pipeline_multi_output.mkTest().to_verilog()

    from pyverilog.vparser.parser import VerilogParser
    from pyverilog.ast_code_generator.codegen import ASTCodeGenerator

    reference_ast = VerilogParser().parse(expected_verilog)
    expected_code = ASTCodeGenerator().visit(reference_ast)
    assert(expected_code == code)
8d2c4c7fff4b901e1ebbf43a20958cbdc097198c | 6,102 | py | Python | utils/dataset_DCL.py | LichenYang-Jeffrey/DCL-with-Efficient-B7 | 84940c96a8c7926c630a7a6d5bfd5c6e52a57c2e | [
"MIT"
] | 4 | 2020-05-28T08:47:39.000Z | 2020-11-16T08:23:26.000Z | utils/dataset_DCL.py | LichenYang-Jeffrey/DCL-with-Efficient-B7 | 84940c96a8c7926c630a7a6d5bfd5c6e52a57c2e | [
"MIT"
] | null | null | null | utils/dataset_DCL.py | LichenYang-Jeffrey/DCL-with-Efficient-B7 | 84940c96a8c7926c630a7a6d5bfd5c6e52a57c2e | [
"MIT"
] | null | null | null | # coding=utf8
from __future__ import division
import os
import torch
import torch.utils.data as data
import pandas
import random
import numpy as np
import PIL.Image as Image
from PIL import ImageStat
import pdb
def random_sample(img_names, labels):
    """Randomly keep ~10% of the images of each class.

    Groups image names by label (preserving first-seen class order), then
    for every class draws ``len(class) // 10`` random indices.  Classes
    with fewer than 10 images contribute nothing.  Returns the sampled
    (image_names, labels) lists.
    """
    buckets = {}
    for name, anno in zip(img_names, labels):
        buckets.setdefault(anno, []).append(name)
    sampled_imgs = []
    sampled_annos = []
    for anno, names in buckets.items():
        # random.sample on an equal-length population consumes the RNG
        # stream identically regardless of element values.
        picked = random.sample(list(range(len(names))), len(names) // 10)
        sampled_imgs.extend(names[idx] for idx in picked)
        sampled_annos.extend(anno for _ in picked)
    return sampled_imgs, sampled_annos
class dataset(data.Dataset):
    """DCL dataset: serves original and region-shuffled ("swapped") images.

    ``anno`` may be a pandas DataFrame (columns 'ImageName'/'label') or a
    dict with keys 'img_name'/'label'.  Depending on the train/test flags,
    ``__getitem__`` returns tuples of different lengths (see its
    docstring), which the matching ``collate_fn4*`` helpers unpack.
    """

    def __init__(self, Config, anno, swap_size=[7,7], common_aug=None, swap=None, totensor=None, train=False, train_val=False, test=False):
        # NOTE(review): mutable default ``swap_size=[7,7]`` is benign here
        # (never mutated), but a tuple would be safer.
        self.root_path = Config.rawdata_root
        self.numcls = Config.numcls
        self.dataset = Config.dataset
        self.use_cls_2 = Config.cls_2
        self.use_cls_mul = Config.cls_2xmul
        if isinstance(anno, pandas.core.frame.DataFrame):
            self.paths = anno['ImageName'].tolist()
            self.labels = anno['label'].tolist()
        elif isinstance(anno, dict):
            self.paths = anno['img_name']
            self.labels = anno['label']
        # NOTE(review): if ``anno`` is neither a DataFrame nor a dict,
        # self.paths/self.labels are never set and later calls will fail.
        if train_val:
            # Validation-during-training: keep a random ~10% subset per class.
            self.paths, self.labels = random_sample(self.paths, self.labels)
        self.common_aug = common_aug
        self.swap = swap
        self.totensor = totensor
        self.cfg = Config
        self.train = train
        self.swap_size = swap_size
        self.test = test

    def __len__(self):
        # One sample per image path.
        return len(self.paths)

    def __getitem__(self, item):
        """Load and transform one sample.

        Returns:
          test:       (img, label, path)
          train:      (img, img_swap, label, label_swap, law1, law2, path)
          otherwise:  (img, label, label_swap, law1, law2, path)
        """
        img_path = os.path.join(self.root_path, self.paths[item])
        img = self.pil_loader(img_path)
        if self.test:
            img = self.totensor(img)
            label = self.labels[item]
            return img, label, self.paths[item]
        img_unswap = self.common_aug(img) if not self.common_aug is None else img
        image_unswap_list = self.crop_image(img_unswap, self.swap_size)
        swap_range = self.swap_size[0] * self.swap_size[1]
        # "Swap law": normalised patch positions, centred around zero.
        swap_law1 = [(i-(swap_range//2))/swap_range for i in range(swap_range)]
        if self.train:
            img_swap = self.swap(img_unswap)
            image_swap_list = self.crop_image(img_swap, self.swap_size)
            # Match each shuffled patch back to the most similar original
            # patch by mean intensity, recovering the permutation.
            unswap_stats = [sum(ImageStat.Stat(im).mean) for im in image_unswap_list]
            swap_stats = [sum(ImageStat.Stat(im).mean) for im in image_swap_list]
            swap_law2 = []
            for swap_im in swap_stats:
                distance = [abs(swap_im - unswap_im) for unswap_im in unswap_stats]
                index = distance.index(min(distance))
                swap_law2.append((index-(swap_range//2))/swap_range)
            img_swap = self.totensor(img_swap)
            label = self.labels[item]
            if self.use_cls_mul:
                # Shuffled image gets a class id shifted by numcls.
                label_swap = label + self.numcls
            if self.use_cls_2:
                # Binary destroyed/intact head: -1 marks the shuffled image.
                label_swap = -1
            # NOTE(review): if neither cls_2 nor cls_2xmul is set,
            # ``label_swap`` is unbound here -> UnboundLocalError.
            img_unswap = self.totensor(img_unswap)
            return img_unswap, img_swap, label, label_swap, swap_law1, swap_law2, self.paths[item]
        else:
            label = self.labels[item]
            swap_law2 = [(i-(swap_range//2))/swap_range for i in range(swap_range)]
            label_swap = label
            img_unswap = self.totensor(img_unswap)
            return img_unswap, label, label_swap, swap_law1, swap_law2, self.paths[item]

    def pil_loader(self,imgpath):
        """Open *imgpath* and return it as an RGB PIL image."""
        with open(imgpath, 'rb') as f:
            with Image.open(f) as img:
                return img.convert('RGB')

    def crop_image(self, image, cropnum):
        """Split *image* into a cropnum[0] x cropnum[1] grid of patches."""
        width, high = image.size
        crop_x = [int((width / cropnum[0]) * i) for i in range(cropnum[0] + 1)]
        crop_y = [int((high / cropnum[1]) * i) for i in range(cropnum[1] + 1)]
        im_list = []
        for j in range(len(crop_y) - 1):
            for i in range(len(crop_x) - 1):
                im_list.append(image.crop((crop_x[i], crop_y[j], min(crop_x[i + 1], width), min(crop_y[j + 1], high))))
        return im_list

    def get_weighted_sampler(self):
        """Build a WeightedRandomSampler over the dataset.

        NOTE(review): ``weights`` holds per-class sample counts indexed by
        class id, yet WeightedRandomSampler expects one weight per *sample*,
        and count-proportional weights favour majority classes — verify the
        intent against the training code.
        """
        img_nums = len(self.labels)
        weights = [self.labels.count(x) for x in range(self.numcls)]
        return torch.utils.data.sampler.WeightedRandomSampler(weights, num_samples=img_nums)
def collate_fn4train(batch):
    """Collate DCL training samples.

    Each sample is (img, img_swap, label, label_swap, law1, law2, name);
    the original and region-shuffled images are interleaved, so every
    sample contributes two consecutive batch entries.
    """
    imgs, labels, swap_labels, swap_laws, names = [], [], [], [], []
    for img, img_swap, label, label_swap, law1, law2, name in batch:
        imgs.extend((img, img_swap))
        labels.extend((label, label))
        # Original image: its label with four zeros appended (np.append);
        # shuffled image: the precomputed swapped label.
        swap_labels.append(np.append(label, np.array([0] * 4)))
        swap_labels.append(label_swap)
        swap_laws.extend((law1, law2))
        names.append(name)
    return torch.stack(imgs, 0), labels, swap_labels, swap_laws, names
def collate_fn4val(batch):
    """Collate validation samples (img, label, label_swap, law1, law2, name)."""
    imgs, labels, swap_labels, swap_laws, names = [], [], [], [], []
    for img, label, label_swap, law1, _law2, name in batch:
        imgs.append(img)
        labels.append(label)
        # Mirrors the original index-based check: the law entry is compared
        # against -1 (a list never equals -1, so label_swap is normally kept).
        swap_labels.append(1 if law1 == -1 else label_swap)
        swap_laws.append(law1)
        names.append(name)
    return torch.stack(imgs, 0), labels, swap_labels, swap_laws, names
def collate_fn4backbone(batch):
    """Collate for backbone-only training: (images, labels, names).

    Accepts both 7-tuple train samples (label at index 2) and the shorter
    val/test samples (label at index 1).
    """
    imgs = [sample[0] for sample in batch]
    labels = [sample[2] if len(sample) == 7 else sample[1] for sample in batch]
    names = [sample[-1] for sample in batch]
    return torch.stack(imgs, 0), labels, names
def collate_fn4test(batch):
    """Collate test samples of shape (img, label, name)."""
    imgs = [sample[0] for sample in batch]
    labels = [sample[1] for sample in batch]
    names = [sample[-1] for sample in batch]
    return torch.stack(imgs, 0), labels, names
| 34.089385 | 139 | 0.603573 |
949ede9cabc23cbb3bac987a4cfc2a30f47bdb68 | 1,540 | py | Python | r3c0n/engines/virustotal.py | markgacoka/r3c0n | ac64614d10d176b9de2170ce8758a6aa75d75f54 | [
"MIT"
] | 4 | 2022-03-06T16:42:23.000Z | 2022-03-09T02:29:08.000Z | r3c0n/engines/virustotal.py | markgacoka/r3c0n | ac64614d10d176b9de2170ce8758a6aa75d75f54 | [
"MIT"
] | null | null | null | r3c0n/engines/virustotal.py | markgacoka/r3c0n | ac64614d10d176b9de2170ce8758a6aa75d75f54 | [
"MIT"
] | 1 | 2022-03-07T03:37:51.000Z | 2022-03-07T03:37:51.000Z | from r3c0nutils.user_agent import GET_UA
import requests
class VirusTotal:
    """Subdomain enumeration backed by the VirusTotal v2 domain-report API."""

    def __init__(self, domain):
        self.domain = domain
        # Accumulates every subdomain discovered for ``domain``.
        self.subdomain_lst = []

    def virustotal_script(self, domain):
        """Query VirusTotal for *domain* and collect its subdomains.

        Returns the accumulated subdomain list.  Without a configured API
        key the (empty) list is returned immediately; transport failures
        are re-raised as ``Exception`` with a category prefix.
        """
        VT_API_KEY = ""
        if VT_API_KEY == "":
            # NOTE(review): the second argument looks like a leftover colour
            # name for a coloured-print helper — confirm intent.
            print("No VirusTotal API key configured", "red")
            return self.subdomain_lst
        parameters = {"domain": domain, "apikey": VT_API_KEY}
        headers = {"User-Agent": GET_UA()}
        try:
            res = requests.get("https://www.virustotal.com/vtapi/v2/domain/report",
                               params=parameters, headers=headers, timeout=10)
            res.raise_for_status()
            response_dict = res.json()
            if "subdomains" in response_dict:
                for sd in response_dict["subdomains"]:
                    self.subdomain_lst.append(sd)
        # Bug fix: the specific exception subclasses must be tested before
        # RequestException, their common base class — in the original order
        # the HTTPError/ConnectionError/Timeout branches were unreachable.
        except requests.exceptions.HTTPError as errh:
            raise Exception("HTTP Error:", errh)
        except requests.exceptions.ConnectionError as errc:
            raise Exception("Connection Error:", errc)
        except requests.exceptions.Timeout as errt:
            raise Exception("Timeout Error:", errt)
        except requests.exceptions.RequestException as err:
            raise Exception("Request Exception:", err)
        else:
            return self.subdomain_lst

    def subdomains(self):
        """Run the lookup for ``self.domain`` and return the subdomain list."""
        subdomain_result = self.virustotal_script(self.domain)
        return subdomain_result
014345399a4480b4e10c8d289234af1a57629612 | 8,633 | py | Python | docs/conf.py | fakedrtom/cancer_gemini | e2dd5cf056004409b58f1e2d98589f847a7e0e2f | [
"MIT"
] | 221 | 2015-01-13T20:04:50.000Z | 2022-02-02T10:41:02.000Z | docs/conf.py | fakedrtom/cancer_gemini | e2dd5cf056004409b58f1e2d98589f847a7e0e2f | [
"MIT"
] | 441 | 2015-01-12T20:00:41.000Z | 2022-01-21T08:58:41.000Z | docs/conf.py | fakedrtom/cancer_gemini | e2dd5cf056004409b58f1e2d98589f847a7e0e2f | [
"MIT"
] | 111 | 2015-01-27T10:53:47.000Z | 2021-12-22T14:21:19.000Z | # -*- coding: utf-8 -*-
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../'))
# from mpld3
def get_version():
    """Read ``__version__`` from gemini/version.py without importing the package."""
    import ast
    version_file = os.path.join(os.path.abspath('../'), "gemini", "version.py")
    with open(version_file, "r") as handle:
        tree = ast.parse(handle.read())
    # Scan the module AST for the first ``__version__ = ...`` assignment.
    for node in ast.walk(tree):
        if isinstance(node, ast.Assign) and node.targets[0].id == "__version__":
            return ast.literal_eval(node.value)
    raise ValueError("version could not be located")
version = get_version()
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo',
'sphinx.ext.coverage', 'sphinx.ext.pngmath', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'gemini'
copyright = u'2012,2013,2014,2015'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = version
# The full version, including alpha/beta/rc tags.
release = version
print version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'rtd'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["themes"]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = project + " v" + release
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = 'gemini.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = 'gemini.png'
html_style = 'labibi.css'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'index': ['sidebar-intro.html', 'sourcelink.html', 'searchbox.html']
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'gemini-docs'
# Google analytics
# googleanalytics_id = "UA-24167610-15"
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'gemini.tex', u'gemini Documentation', u'Quinlan lab @ UVa', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Additional stuff for the LaTeX preamble.
# latex_preamble = ''
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'gemini', u'gemini Documentation', [u'UVa'], 1)
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
class Mock(object):
    """Stand-in for modules that are unavailable on the docs build host.

    Instances absorb any construction arguments, are callable, and answer
    every attribute lookup: dunder file/path attributes map to a harmless
    filesystem path, capitalized names yield empty classes (so things like
    ``from matplotlib import Figure`` keep working under autodoc), and
    everything else yields a further ``Mock``.
    """

    def __init__(self, *args, **kwargs):
        # Accept and ignore whatever the mocked API would take.
        pass

    def __call__(self, *args, **kwargs):
        # Any "function" on a mocked module just produces another stub.
        return Mock()

    @classmethod
    def __getattr__(cls, name):
        if name in ('__file__', '__path__'):
            return '/dev/null'
        if name[0] == name[0].upper():
            # Looks like a class name: hand back an empty placeholder type.
            return type(name, (), {})
        return Mock()
# Register stubs for heavyweight/optional modules so autodoc can import
# the package on the docs build host without them being installed.
MOCK_MODULES = ['numpy', 'matplotlib', 'matplotlib.pyplot',
                'inheritance',
                'matplotlib.sphinxext', 'matplotlib.sphinxext.plot_directive']
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
| 32.454887 | 104 | 0.699062 |
91db43f3b84a9f3581c5d4311378cd4fc20fd8b7 | 591 | py | Python | Chapter 3/sequence-tuples.py | JoeBugajski/python-examples | c32472900a68aca43d699c610f6f50638b9ddb98 | [
"MIT"
] | null | null | null | Chapter 3/sequence-tuples.py | JoeBugajski/python-examples | c32472900a68aca43d699c610f6f50638b9ddb98 | [
"MIT"
] | null | null | null | Chapter 3/sequence-tuples.py | JoeBugajski/python-examples | c32472900a68aca43d699c610f6f50638b9ddb98 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright 2009-2017 BHG http://bw.org/

# A tuple is an immutable sequence.  It is delimited with parentheses
# rather than the square brackets used for lists:
x = ( 1, 2, 3, 4, 5 )

# Because tuples are immutable, item assignment is an error.
# Uncomment the next line and run the file to see the TypeError
# you would get for trying to mutate one:
# x[2] = 69

# That immutability is often an advantage over lists: a reasonable
# default is to reach for a tuple first, and switch to a list only
# when you have a specific need to mutate the values.
for value in x:
    print('i is {}'.format(value))
| 28.142857 | 80 | 0.698816 |
add98c61650b845bd60146215b0352b2338bfde4 | 67,425 | py | Python | tests/unit/modules/test_debian_ip.py | fake-name/salt | d8f04936e4407f51946e32e8166159778f6c31a5 | [
"Apache-2.0"
] | null | null | null | tests/unit/modules/test_debian_ip.py | fake-name/salt | d8f04936e4407f51946e32e8166159778f6c31a5 | [
"Apache-2.0"
] | null | null | null | tests/unit/modules/test_debian_ip.py | fake-name/salt | d8f04936e4407f51946e32e8166159778f6c31a5 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
"""
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
from collections import OrderedDict as odict
import tempfile
import os
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase, skipIf
# Import Salt Libs
import salt.modules.debian_ip as debian_ip
import salt.utils.files
import salt.utils.platform
# IPv4-only interface; single address
{'iface_name': 'eth1', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'proto': 'static',
'ipaddr': '192.168.4.9',
'netmask': '255.255.255.0',
'gateway': '192.168.4.1',
'enable_ipv6': False,
'noifupdown': True,
},
'get_interface': odict([('eth1', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'static'),
('filename', None),
('address', '192.168.4.9'),
('netmask', '255.255.255.0'),
('gateway', '192.168.4.1'),
])),
]))]))]),
'return': [
'auto eth1\n',
'iface eth1 inet static\n',
' address 192.168.4.9\n',
' netmask 255.255.255.0\n',
' gateway 192.168.4.1\n',
'\n']},
# IPv6-only; single address
{'iface_name': 'eth2', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'ipv6proto': 'static',
'ipv6ipaddr': '2001:db8:dead:beef::3',
'ipv6netmask': '64',
'ipv6gateway': '2001:db8:dead:beef::1',
'enable_ipv6': True,
'noifupdown': True,
},
'get_interface': odict([('eth2', odict([('enabled', True), ('data', odict([
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'static'),
('filename', None),
('address', '2001:db8:dead:beef::3'),
('netmask', 64),
('gateway', '2001:db8:dead:beef::1'),
])),
]))]))]),
'return': [
'auto eth2\n',
'iface eth2 inet6 static\n',
' address 2001:db8:dead:beef::3\n',
' netmask 64\n',
' gateway 2001:db8:dead:beef::1\n',
'\n']},
# IPv6-only; multiple addrs; no gw; first addr from ipv6addr
{'iface_name': 'eth3', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'ipv6proto': 'static',
'ipv6ipaddr': '2001:db8:dead:beef::5/64',
'ipv6ipaddrs': [
'2001:db8:dead:beef::7/64',
'2001:db8:dead:beef::8/64',
'2001:db8:dead:beef::9/64'],
'enable_ipv6': True,
'noifupdown': True,
},
'get_interface': odict([('eth3', odict([('enabled', True), ('data', odict([
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'static'),
('filename', None),
('address', '2001:db8:dead:beef::5/64'),
('addresses', [
'2001:db8:dead:beef::7/64',
'2001:db8:dead:beef::8/64',
'2001:db8:dead:beef::9/64',
]),
])),
]))]))]),
'return': [
'auto eth3\n',
'iface eth3 inet6 static\n',
' address 2001:db8:dead:beef::5/64\n',
' address 2001:db8:dead:beef::7/64\n',
' address 2001:db8:dead:beef::8/64\n',
' address 2001:db8:dead:beef::9/64\n',
'\n']},
# IPv6-only; multiple addresses
{'iface_name': 'eth4', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'ipv6proto': 'static',
'ipv6ipaddrs': [
'2001:db8:dead:beef::5/64',
'2001:db8:dead:beef::7/64',
'2001:db8:dead:beef::8/64',
'2001:db8:dead:beef::9/64'],
'ipv6gateway': '2001:db8:dead:beef::1',
'enable_ipv6': True,
'noifupdown': True,
},
'get_interface': odict([('eth4', odict([('enabled', True), ('data', odict([
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'static'),
('filename', None),
('address', '2001:db8:dead:beef::5/64'),
('addresses', [
'2001:db8:dead:beef::7/64',
'2001:db8:dead:beef::8/64',
'2001:db8:dead:beef::9/64',
]),
('gateway', '2001:db8:dead:beef::1'),
])),
]))]))]),
'return': [
'auto eth4\n',
'iface eth4 inet6 static\n',
' address 2001:db8:dead:beef::5/64\n',
' address 2001:db8:dead:beef::7/64\n',
' address 2001:db8:dead:beef::8/64\n',
' address 2001:db8:dead:beef::9/64\n',
' gateway 2001:db8:dead:beef::1\n',
'\n']},
# IPv4 and IPv6 settings with v4 disabled
{'iface_name': 'eth5', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'proto': 'static',
'ipaddr': '192.168.4.9',
'netmask': '255.255.255.0',
'gateway': '192.168.4.1',
'ipv6proto': 'static',
'ipv6ipaddr': '2001:db8:dead:beef::3',
'ipv6netmask': '64',
'ipv6gateway': '2001:db8:dead:beef::1',
'enable_ipv4': False,
'noifupdown': True,
},
'get_interface': odict([('eth5', odict([('enabled', True), ('data', odict([
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'static'),
('filename', None),
('address', '2001:db8:dead:beef::3'),
('netmask', 64),
('gateway', '2001:db8:dead:beef::1'),
])),
]))]))]),
'return': [
'auto eth5\n',
'iface eth5 inet6 static\n',
' address 2001:db8:dead:beef::3\n',
' netmask 64\n',
' gateway 2001:db8:dead:beef::1\n',
'\n']},
# IPv4 and IPv6 settings with v6 disabled
{'iface_name': 'eth6', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'proto': 'static',
'ipaddr': '192.168.4.9',
'netmask': '255.255.255.0',
'gateway': '192.168.4.1',
'ipv6proto': 'static',
'ipv6ipaddr': '2001:db8:dead:beef::3',
'ipv6netmask': '64',
'ipv6gateway': '2001:db8:dead:beef::1',
'enable_ipv6': False,
'noifupdown': True,
},
'get_interface': odict([('eth6', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'static'),
('filename', None),
('address', '192.168.4.9'),
('netmask', '255.255.255.0'),
('gateway', '192.168.4.1'),
])),
]))]))]),
'return': [
'auto eth6\n',
'iface eth6 inet static\n',
' address 192.168.4.9\n',
' netmask 255.255.255.0\n',
' gateway 192.168.4.1\n',
'\n']},
# IPv4 and IPv6; shared/overridden settings
{'iface_name': 'eth7', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'proto': 'static',
'ipaddr': '192.168.4.9',
'netmask': '255.255.255.0',
'gateway': '192.168.4.1',
'ipv6proto': 'static',
'ipv6ipaddr': '2001:db8:dead:beef::3',
'ipv6netmask': '64',
'ipv6gateway': '2001:db8:dead:beef::1',
'ttl': '18', # shared
'ipv6ttl': '15', # overridden for v6
'mtu': '1480', # shared
'enable_ipv6': True,
'noifupdown': True,
},
'get_interface': odict([('eth7', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'static'),
('filename', None),
('address', '192.168.4.9'),
('netmask', '255.255.255.0'),
('gateway', '192.168.4.1'),
('ttl', 18),
('mtu', 1480),
])),
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'static'),
('filename', None),
('address', '2001:db8:dead:beef::3'),
('netmask', 64),
('gateway', '2001:db8:dead:beef::1'),
('ttl', 15),
('mtu', 1480),
])),
]))]))]),
'return': [
'auto eth7\n',
'iface eth7 inet static\n',
' address 192.168.4.9\n',
' netmask 255.255.255.0\n',
' gateway 192.168.4.1\n',
' ttl 18\n',
' mtu 1480\n',
'iface eth7 inet6 static\n',
' address 2001:db8:dead:beef::3\n',
' netmask 64\n',
' gateway 2001:db8:dead:beef::1\n',
' ttl 15\n',
' mtu 1480\n',
'\n']},
# Slave iface
{'iface_name': 'eth8', 'iface_type': 'slave', 'enabled': True,
'build_interface': {
'master': 'bond0',
'noifupdown': True,
},
'get_interface': odict([('eth8', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'manual'),
('filename', None),
('bonding', odict([
('master', 'bond0'),
])),
('bonding_keys', ['master']),
])),
]))]))]),
'return': [
'auto eth8\n',
'iface eth8 inet manual\n',
' bond-master bond0\n',
'\n']},
# Bond; with address IPv4 and IPv6 address; slaves as string
{'iface_name': 'bond9', 'iface_type': 'bond', 'enabled': True,
'build_interface': {
'proto': 'static',
'ipaddr': '10.1.0.14',
'netmask': '255.255.255.0',
'gateway': '10.1.0.1',
'ipv6proto': 'static',
'ipv6ipaddr': '2001:db8:dead:c0::3',
'ipv6netmask': '64',
'ipv6gateway': '2001:db8:dead:c0::1',
'mode': '802.3ad',
'slaves': 'eth4 eth5',
'enable_ipv6': True,
'noifupdown': True,
},
'get_interface': odict([('bond9', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'static'),
('filename', None),
('address', '10.1.0.14'),
('netmask', '255.255.255.0'),
('gateway', '10.1.0.1'),
('bonding', odict([
('ad_select', '0'),
('downdelay', '200'),
('lacp_rate', '0'),
('miimon', '100'),
('mode', '4'),
('slaves', 'eth4 eth5'),
('updelay', '0'),
('use_carrier', 'on'),
])),
('bonding_keys', [
'ad_select',
'downdelay',
'lacp_rate',
'miimon',
'mode',
'slaves',
'updelay',
'use_carrier',
]),
])),
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'static'),
('filename', None),
('address', '2001:db8:dead:c0::3'),
('netmask', 64),
('gateway', '2001:db8:dead:c0::1'),
('bonding', odict([
('ad_select', '0'),
('downdelay', '200'),
('lacp_rate', '0'),
('miimon', '100'),
('mode', '4'),
('slaves', 'eth4 eth5'),
('updelay', '0'),
('use_carrier', 'on'),
])),
('bonding_keys', [
'ad_select',
'downdelay',
'lacp_rate',
'miimon',
'mode',
'slaves',
'updelay',
'use_carrier',
]),
])),
]))]))]),
'return': [
'auto bond9\n',
'iface bond9 inet static\n',
' address 10.1.0.14\n',
' netmask 255.255.255.0\n',
' gateway 10.1.0.1\n',
' bond-ad_select 0\n',
' bond-downdelay 200\n',
' bond-lacp_rate 0\n',
' bond-miimon 100\n',
' bond-mode 4\n',
' bond-slaves eth4 eth5\n',
' bond-updelay 0\n',
' bond-use_carrier on\n',
'iface bond9 inet6 static\n',
' address 2001:db8:dead:c0::3\n',
' netmask 64\n',
' gateway 2001:db8:dead:c0::1\n',
' bond-ad_select 0\n',
' bond-downdelay 200\n',
' bond-lacp_rate 0\n',
' bond-miimon 100\n',
' bond-mode 4\n',
' bond-slaves eth4 eth5\n',
' bond-updelay 0\n',
' bond-use_carrier on\n',
'\n']},
# Bond; with address IPv4 and IPv6 address; slaves as list
{'iface_name': 'bond10', 'iface_type': 'bond', 'enabled': True,
'build_interface': {
'proto': 'static',
'ipaddr': '10.1.0.14',
'netmask': '255.255.255.0',
'gateway': '10.1.0.1',
'ipv6proto': 'static',
'ipv6ipaddr': '2001:db8:dead:c0::3',
'ipv6netmask': '64',
'ipv6gateway': '2001:db8:dead:c0::1',
'mode': '802.3ad',
'slaves': ['eth4', 'eth5'],
'enable_ipv6': True,
'noifupdown': True,
},
'get_interface': odict([('bond10', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'static'),
('filename', None),
('address', '10.1.0.14'),
('netmask', '255.255.255.0'),
('gateway', '10.1.0.1'),
('bonding', odict([
('ad_select', '0'),
('downdelay', '200'),
('lacp_rate', '0'),
('miimon', '100'),
('mode', '4'),
('slaves', 'eth4 eth5'),
('updelay', '0'),
('use_carrier', 'on'),
])),
('bonding_keys', [
'ad_select',
'downdelay',
'lacp_rate',
'miimon',
'mode',
'slaves',
'updelay',
'use_carrier',
]),
])),
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'static'),
('filename', None),
('address', '2001:db8:dead:c0::3'),
('netmask', 64),
('gateway', '2001:db8:dead:c0::1'),
('bonding', odict([
('ad_select', '0'),
('downdelay', '200'),
('lacp_rate', '0'),
('miimon', '100'),
('mode', '4'),
('slaves', 'eth4 eth5'),
('updelay', '0'),
('use_carrier', 'on'),
])),
('bonding_keys', [
'ad_select',
'downdelay',
'lacp_rate',
'miimon',
'mode',
'slaves',
'updelay',
'use_carrier',
]),
])),
]))]))]),
'return': [
'auto bond10\n',
'iface bond10 inet static\n',
' address 10.1.0.14\n',
' netmask 255.255.255.0\n',
' gateway 10.1.0.1\n',
' bond-ad_select 0\n',
' bond-downdelay 200\n',
' bond-lacp_rate 0\n',
' bond-miimon 100\n',
' bond-mode 4\n',
' bond-slaves eth4 eth5\n',
' bond-updelay 0\n',
' bond-use_carrier on\n',
'iface bond10 inet6 static\n',
' address 2001:db8:dead:c0::3\n',
' netmask 64\n',
' gateway 2001:db8:dead:c0::1\n',
' bond-ad_select 0\n',
' bond-downdelay 200\n',
' bond-lacp_rate 0\n',
' bond-miimon 100\n',
' bond-mode 4\n',
' bond-slaves eth4 eth5\n',
' bond-updelay 0\n',
' bond-use_carrier on\n',
'\n']},
# Bond VLAN; with IPv4 address
{'iface_name': 'bond0.11', 'iface_type': 'vlan', 'enabled': True,
'build_interface': {
'proto': 'static',
'ipaddr': '10.7.0.8',
'netmask': '255.255.255.0',
'gateway': '10.7.0.1',
'slaves': 'eth6 eth7',
'mode': '802.3ad',
'enable_ipv6': False,
'noifupdown': True,
},
'get_interface': odict([('bond0.11', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'static'),
('filename', None),
('vlan_raw_device', 'bond1'),
('address', '10.7.0.8'),
('netmask', '255.255.255.0'),
('gateway', '10.7.0.1'),
('mode', '802.3ad'),
])),
]))]))]),
'return': [
'auto bond0.11\n',
'iface bond0.11 inet static\n',
' vlan-raw-device bond1\n',
' address 10.7.0.8\n',
' netmask 255.255.255.0\n',
' gateway 10.7.0.1\n',
' mode 802.3ad\n',
'\n']},
# Bond; without address
{'iface_name': 'bond0.12', 'iface_type': 'vlan', 'enabled': True,
'build_interface': {
'proto': 'static',
'slaves': 'eth6 eth7',
'mode': '802.3ad',
'enable_ipv6': False,
'noifupdown': True,
},
'get_interface': odict([('bond0.12', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'static'),
('filename', None),
('vlan_raw_device', 'bond1'),
('mode', '802.3ad'),
])),
]))]))]),
'return': [
'auto bond0.12\n',
'iface bond0.12 inet static\n',
' vlan-raw-device bond1\n',
' mode 802.3ad\n',
'\n']},
# Big pile of interface data for unit tests
# To skip, search for 'DebianIpTestCase'
test_interfaces = [
# Structure
#{'iface_name': 'ethX', 'iface_type': 'eth', 'enabled': True,
# 'skip_test': bool(), # True to disable this test
# 'build_interface': dict(), # data read from sls
# 'get_interface(): OrderedDict(), # data read from interfaces file
# 'return': list()}, # jinja-rendered data
# IPv4-only interface; single address
{'iface_name': 'eth1', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'proto': 'static',
'ipaddr': '192.168.4.9',
'netmask': '255.255.255.0',
'gateway': '192.168.4.1',
'enable_ipv6': False,
'noifupdown': True,
},
'get_interface': odict([('eth1', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'static'),
('filename', None),
('address', '192.168.4.9'),
('netmask', '255.255.255.0'),
('gateway', '192.168.4.1'),
])),
]))]))]),
'return': [
'auto eth1\n',
'iface eth1 inet static\n',
' address 192.168.4.9\n',
' netmask 255.255.255.0\n',
' gateway 192.168.4.1\n',
'\n']},
# IPv6-only; single address
{'iface_name': 'eth2', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'ipv6proto': 'static',
'ipv6ipaddr': '2001:db8:dead:beef::3',
'ipv6netmask': '64',
'ipv6gateway': '2001:db8:dead:beef::1',
'enable_ipv6': True,
'noifupdown': True,
},
'get_interface': odict([('eth2', odict([('enabled', True), ('data', odict([
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'static'),
('filename', None),
('address', '2001:db8:dead:beef::3'),
('netmask', 64),
('gateway', '2001:db8:dead:beef::1'),
])),
]))]))]),
'return': [
'auto eth2\n',
'iface eth2 inet6 static\n',
' address 2001:db8:dead:beef::3\n',
' netmask 64\n',
' gateway 2001:db8:dead:beef::1\n',
'\n']},
# IPv6-only; multiple addrs; no gw; first addr from ipv6addr
{'iface_name': 'eth3', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'ipv6proto': 'static',
'ipv6ipaddr': '2001:db8:dead:beef::5/64',
'ipv6ipaddrs': [
'2001:db8:dead:beef::7/64',
'2001:db8:dead:beef::8/64',
'2001:db8:dead:beef::9/64'],
'enable_ipv6': True,
'noifupdown': True,
},
'get_interface': odict([('eth3', odict([('enabled', True), ('data', odict([
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'static'),
('filename', None),
('address', '2001:db8:dead:beef::5/64'),
('addresses', [
'2001:db8:dead:beef::7/64',
'2001:db8:dead:beef::8/64',
'2001:db8:dead:beef::9/64',
]),
])),
]))]))]),
'return': [
'auto eth3\n',
'iface eth3 inet6 static\n',
' address 2001:db8:dead:beef::5/64\n',
' address 2001:db8:dead:beef::7/64\n',
' address 2001:db8:dead:beef::8/64\n',
' address 2001:db8:dead:beef::9/64\n',
'\n']},
# IPv6-only; multiple addresses
{'iface_name': 'eth4', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'ipv6proto': 'static',
'ipv6ipaddrs': [
'2001:db8:dead:beef::5/64',
'2001:db8:dead:beef::7/64',
'2001:db8:dead:beef::8/64',
'2001:db8:dead:beef::9/64'],
'ipv6gateway': '2001:db8:dead:beef::1',
'enable_ipv6': True,
'noifupdown': True,
},
'get_interface': odict([('eth4', odict([('enabled', True), ('data', odict([
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'static'),
('filename', None),
('address', '2001:db8:dead:beef::5/64'),
('addresses', [
'2001:db8:dead:beef::7/64',
'2001:db8:dead:beef::8/64',
'2001:db8:dead:beef::9/64',
]),
('gateway', '2001:db8:dead:beef::1'),
])),
]))]))]),
'return': [
'auto eth4\n',
'iface eth4 inet6 static\n',
' address 2001:db8:dead:beef::5/64\n',
' address 2001:db8:dead:beef::7/64\n',
' address 2001:db8:dead:beef::8/64\n',
' address 2001:db8:dead:beef::9/64\n',
' gateway 2001:db8:dead:beef::1\n',
'\n']},
# IPv4 and IPv6 settings with v4 disabled
{'iface_name': 'eth5', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'proto': 'static',
'ipaddr': '192.168.4.9',
'netmask': '255.255.255.0',
'gateway': '192.168.4.1',
'ipv6proto': 'static',
'ipv6ipaddr': '2001:db8:dead:beef::3',
'ipv6netmask': '64',
'ipv6gateway': '2001:db8:dead:beef::1',
'enable_ipv4': False,
'noifupdown': True,
},
'get_interface': odict([('eth5', odict([('enabled', True), ('data', odict([
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'static'),
('filename', None),
('address', '2001:db8:dead:beef::3'),
('netmask', 64),
('gateway', '2001:db8:dead:beef::1'),
])),
]))]))]),
'return': [
'auto eth5\n',
'iface eth5 inet6 static\n',
' address 2001:db8:dead:beef::3\n',
' netmask 64\n',
' gateway 2001:db8:dead:beef::1\n',
'\n']},
# IPv4 and IPv6 settings with v6 disabled
{'iface_name': 'eth6', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'proto': 'static',
'ipaddr': '192.168.4.9',
'netmask': '255.255.255.0',
'gateway': '192.168.4.1',
'ipv6proto': 'static',
'ipv6ipaddr': '2001:db8:dead:beef::3',
'ipv6netmask': '64',
'ipv6gateway': '2001:db8:dead:beef::1',
'enable_ipv6': False,
'noifupdown': True,
},
'get_interface': odict([('eth6', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'static'),
('filename', None),
('address', '192.168.4.9'),
('netmask', '255.255.255.0'),
('gateway', '192.168.4.1'),
])),
]))]))]),
'return': [
'auto eth6\n',
'iface eth6 inet static\n',
' address 192.168.4.9\n',
' netmask 255.255.255.0\n',
' gateway 192.168.4.1\n',
'\n']},
# IPv4 and IPv6; shared/overridden settings
{'iface_name': 'eth7', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'proto': 'static',
'ipaddr': '192.168.4.9',
'netmask': '255.255.255.0',
'gateway': '192.168.4.1',
'ipv6proto': 'static',
'ipv6ipaddr': '2001:db8:dead:beef::3',
'ipv6netmask': '64',
'ipv6gateway': '2001:db8:dead:beef::1',
'ttl': '18', # shared
'ipv6ttl': '15', # overriden for v6
'mtu': '1480', # shared
'enable_ipv6': True,
'noifupdown': True,
},
'get_interface': odict([('eth7', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'static'),
('filename', None),
('address', '192.168.4.9'),
('netmask', '255.255.255.0'),
('gateway', '192.168.4.1'),
('ttl', 18),
('mtu', 1480),
])),
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'static'),
('filename', None),
('address', '2001:db8:dead:beef::3'),
('netmask', 64),
('gateway', '2001:db8:dead:beef::1'),
('ttl', 15),
('mtu', 1480),
])),
]))]))]),
'return': [
'auto eth7\n',
'iface eth7 inet static\n',
' address 192.168.4.9\n',
' netmask 255.255.255.0\n',
' gateway 192.168.4.1\n',
' ttl 18\n',
' mtu 1480\n',
'iface eth7 inet6 static\n',
' address 2001:db8:dead:beef::3\n',
' netmask 64\n',
' gateway 2001:db8:dead:beef::1\n',
' ttl 15\n',
' mtu 1480\n',
'\n']},
# Slave iface
{'iface_name': 'eth8', 'iface_type': 'slave', 'enabled': True,
'build_interface': {
'master': 'bond0',
'noifupdown': True,
},
'get_interface': odict([('eth8', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'manual'),
('filename', None),
('bonding', odict([
('master', 'bond0'),
])),
('bonding_keys', ['master']),
])),
]))]))]),
'return': [
'auto eth8\n',
'iface eth8 inet manual\n',
' bond-master bond0\n',
'\n']},
# Bond; with address IPv4 and IPv6 address; slaves as string
{'iface_name': 'bond9', 'iface_type': 'bond', 'enabled': True,
'build_interface': {
'proto': 'static',
'ipaddr': '10.1.0.14',
'netmask': '255.255.255.0',
'gateway': '10.1.0.1',
'ipv6proto': 'static',
'ipv6ipaddr': '2001:db8:dead:c0::3',
'ipv6netmask': '64',
'ipv6gateway': '2001:db8:dead:c0::1',
'mode': '802.3ad',
'slaves': 'eth4 eth5',
'enable_ipv6': True,
'noifupdown': True,
},
'get_interface': odict([('bond9', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'static'),
('filename', None),
('address', '10.1.0.14'),
('netmask', '255.255.255.0'),
('gateway', '10.1.0.1'),
('bonding', odict([
('ad_select', '0'),
('downdelay', '200'),
('lacp_rate', '0'),
('miimon', '100'),
('mode', '4'),
('slaves', 'eth4 eth5'),
('updelay', '0'),
('use_carrier', 'on'),
])),
('bonding_keys', [
'ad_select',
'downdelay',
'lacp_rate',
'miimon',
'mode',
'slaves',
'updelay',
'use_carrier',
]),
])),
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'static'),
('filename', None),
('address', '2001:db8:dead:c0::3'),
('netmask', 64),
('gateway', '2001:db8:dead:c0::1'),
('bonding', odict([
('ad_select', '0'),
('downdelay', '200'),
('lacp_rate', '0'),
('miimon', '100'),
('mode', '4'),
('slaves', 'eth4 eth5'),
('updelay', '0'),
('use_carrier', 'on'),
])),
('bonding_keys', [
'ad_select',
'downdelay',
'lacp_rate',
'miimon',
'mode',
'slaves',
'updelay',
'use_carrier',
]),
])),
]))]))]),
'return': [
'auto bond9\n',
'iface bond9 inet static\n',
' address 10.1.0.14\n',
' netmask 255.255.255.0\n',
' gateway 10.1.0.1\n',
' bond-ad_select 0\n',
' bond-downdelay 200\n',
' bond-lacp_rate 0\n',
' bond-miimon 100\n',
' bond-mode 4\n',
' bond-slaves eth4 eth5\n',
' bond-updelay 0\n',
' bond-use_carrier on\n',
'iface bond9 inet6 static\n',
' address 2001:db8:dead:c0::3\n',
' netmask 64\n',
' gateway 2001:db8:dead:c0::1\n',
' bond-ad_select 0\n',
' bond-downdelay 200\n',
' bond-lacp_rate 0\n',
' bond-miimon 100\n',
' bond-mode 4\n',
' bond-slaves eth4 eth5\n',
' bond-updelay 0\n',
' bond-use_carrier on\n',
'\n']},
# Bond; with address IPv4 and IPv6 address; slaves as list
{'iface_name': 'bond10', 'iface_type': 'bond', 'enabled': True,
'build_interface': {
'proto': 'static',
'ipaddr': '10.1.0.14',
'netmask': '255.255.255.0',
'gateway': '10.1.0.1',
'ipv6proto': 'static',
'ipv6ipaddr': '2001:db8:dead:c0::3',
'ipv6netmask': '64',
'ipv6gateway': '2001:db8:dead:c0::1',
'mode': '802.3ad',
'slaves': ['eth4', 'eth5'],
'enable_ipv6': True,
'noifupdown': True,
},
'get_interface': odict([('bond10', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'static'),
('filename', None),
('address', '10.1.0.14'),
('netmask', '255.255.255.0'),
('gateway', '10.1.0.1'),
('bonding', odict([
('ad_select', '0'),
('downdelay', '200'),
('lacp_rate', '0'),
('miimon', '100'),
('mode', '4'),
('slaves', 'eth4 eth5'),
('updelay', '0'),
('use_carrier', 'on'),
])),
('bonding_keys', [
'ad_select',
'downdelay',
'lacp_rate',
'miimon',
'mode',
'slaves',
'updelay',
'use_carrier',
]),
])),
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'static'),
('filename', None),
('address', '2001:db8:dead:c0::3'),
('netmask', 64),
('gateway', '2001:db8:dead:c0::1'),
('bonding', odict([
('ad_select', '0'),
('downdelay', '200'),
('lacp_rate', '0'),
('miimon', '100'),
('mode', '4'),
('slaves', 'eth4 eth5'),
('updelay', '0'),
('use_carrier', 'on'),
])),
('bonding_keys', [
'ad_select',
'downdelay',
'lacp_rate',
'miimon',
'mode',
'slaves',
'updelay',
'use_carrier',
]),
])),
]))]))]),
'return': [
'auto bond10\n',
'iface bond10 inet static\n',
' address 10.1.0.14\n',
' netmask 255.255.255.0\n',
' gateway 10.1.0.1\n',
' bond-ad_select 0\n',
' bond-downdelay 200\n',
' bond-lacp_rate 0\n',
' bond-miimon 100\n',
' bond-mode 4\n',
' bond-slaves eth4 eth5\n',
' bond-updelay 0\n',
' bond-use_carrier on\n',
'iface bond10 inet6 static\n',
' address 2001:db8:dead:c0::3\n',
' netmask 64\n',
' gateway 2001:db8:dead:c0::1\n',
' bond-ad_select 0\n',
' bond-downdelay 200\n',
' bond-lacp_rate 0\n',
' bond-miimon 100\n',
' bond-mode 4\n',
' bond-slaves eth4 eth5\n',
' bond-updelay 0\n',
' bond-use_carrier on\n',
'\n']},
# Bond VLAN; with IPv4 address
{'iface_name': 'bond0.11', 'iface_type': 'vlan', 'enabled': True,
'build_interface': {
'proto': 'static',
'ipaddr': '10.7.0.8',
'netmask': '255.255.255.0',
'gateway': '10.7.0.1',
'slaves': 'eth6 eth7',
'mode': '802.3ad',
'enable_ipv6': False,
'noifupdown': True,
},
'get_interface': odict([('bond0.11', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'static'),
('filename', None),
('vlan_raw_device', 'bond1'),
('address', '10.7.0.8'),
('netmask', '255.255.255.0'),
('gateway', '10.7.0.1'),
('mode', '802.3ad'),
])),
]))]))]),
'return': [
'auto bond0.11\n',
'iface bond0.11 inet static\n',
' vlan-raw-device bond1\n',
' address 10.7.0.8\n',
' netmask 255.255.255.0\n',
' gateway 10.7.0.1\n',
' mode 802.3ad\n',
'\n']},
# Bond; without address
{'iface_name': 'bond0.12', 'iface_type': 'vlan', 'enabled': True,
'build_interface': {
'proto': 'static',
'slaves': 'eth6 eth7',
'mode': '802.3ad',
'enable_ipv6': False,
'noifupdown': True,
},
'get_interface': odict([('bond0.12', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'static'),
('filename', None),
('vlan_raw_device', 'bond1'),
('mode', '802.3ad'),
])),
]))]))]),
'return': [
'auto bond0.12\n',
'iface bond0.12 inet static\n',
' vlan-raw-device bond1\n',
' mode 802.3ad\n',
'\n']},
# DNS NS as list
{'iface_name': 'eth13', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'proto': 'static',
'ipaddr': '192.168.4.9',
'netmask': '255.255.255.0',
'gateway': '192.168.4.1',
'enable_ipv6': False,
'noifupdown': True,
'dns': ['8.8.8.8', '8.8.4.4'],
},
'get_interface': odict([('eth13', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'static'),
('filename', None),
('address', '192.168.4.9'),
('netmask', '255.255.255.0'),
('gateway', '192.168.4.1'),
('dns_nameservers', ['8.8.8.8', '8.8.4.4']),
])),
]))]))]),
'return': [
'auto eth13\n',
'iface eth13 inet static\n',
' address 192.168.4.9\n',
' netmask 255.255.255.0\n',
' gateway 192.168.4.1\n',
' dns-nameservers 8.8.8.8 8.8.4.4\n',
'\n']},
# DNS NS as string
{'iface_name': 'eth14', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'proto': 'static',
'ipaddr': '192.168.4.9',
'netmask': '255.255.255.0',
'gateway': '192.168.4.1',
'enable_ipv6': False,
'noifupdown': True,
'dns': '8.8.8.8 8.8.4.4',
},
'get_interface': odict([('eth14', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'static'),
('filename', None),
('address', '192.168.4.9'),
('netmask', '255.255.255.0'),
('gateway', '192.168.4.1'),
('dns_nameservers', ['8.8.8.8', '8.8.4.4']),
])),
]))]))]),
'return': [
'auto eth14\n',
'iface eth14 inet static\n',
' address 192.168.4.9\n',
' netmask 255.255.255.0\n',
' gateway 192.168.4.1\n',
' dns-nameservers 8.8.8.8 8.8.4.4\n',
'\n']},
# Loopback; with IPv4 and IPv6 address
{'iface_name': 'lo15', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'proto': 'loopback',
'ipaddr': '192.168.4.9',
'netmask': '255.255.255.0',
'gateway': '192.168.4.1',
'enable_ipv6': True,
'ipv6proto': 'loopback',
'ipv6ipaddr': 'fc00::1',
'ipv6netmask': '128',
'ipv6_autoconf': False,
'noifupdown': True,
},
'get_interface': odict([('lo15', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'loopback'),
('filename', None),
('address', '192.168.4.9'),
('netmask', '255.255.255.0'),
('gateway', '192.168.4.1'),
])),
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'loopback'),
('filename', None),
('address', 'fc00::1'),
('netmask', 128),
])),
]))]))]),
'return': [
'auto lo15\n',
'iface lo15 inet loopback\n',
' address 192.168.4.9\n',
' netmask 255.255.255.0\n',
' gateway 192.168.4.1\n',
'iface lo15 inet6 loopback\n',
' address fc00::1\n',
' netmask 128\n',
'\n']},
# Loopback; with only IPv6 address; enabled=False
{'iface_name': 'lo16', 'iface_type': 'eth', 'enabled': False,
'build_interface': {
'enable_ipv6': True,
'ipv6proto': 'loopback',
'ipv6ipaddr': 'fc00::1',
'ipv6netmask': '128',
'ipv6_autoconf': False,
'noifupdown': True,
},
'get_interface': odict([('lo16', odict([('data', odict([
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'loopback'),
('filename', None),
('address', 'fc00::1'),
('netmask', 128),
])),
]))]))]),
'return': [
'iface lo16 inet6 loopback\n',
' address fc00::1\n',
' netmask 128\n',
'\n']},
# Loopback; without address
{'iface_name': 'lo17', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'proto': 'loopback',
'enable_ipv6': False,
'noifupdown': True,
},
'get_interface': odict([('lo17', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'loopback'),
('filename', None),
])),
]))]))]),
'return': [
'auto lo17\n',
'iface lo17 inet loopback\n',
'\n']},
# IPv4=DHCP; IPv6=Static; with IPv6 netmask
{'iface_name': 'eth18', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'proto': 'dhcp',
'enable_ipv6': True,
'ipv6proto': 'static',
'ipv6ipaddr': '2001:db8:dead:c0::3',
'ipv6netmask': '64',
'ipv6gateway': '2001:db8:dead:c0::1',
'noifupdown': True,
},
'get_interface': odict([('eth18', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'dhcp'),
('filename', None),
])),
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'static'),
('filename', None),
('address', '2001:db8:dead:c0::3'),
('netmask', 64),
('gateway', '2001:db8:dead:c0::1'),
])),
]))]))]),
'return': [
'auto eth18\n',
'iface eth18 inet dhcp\n',
'iface eth18 inet6 static\n',
' address 2001:db8:dead:c0::3\n',
' netmask 64\n',
' gateway 2001:db8:dead:c0::1\n',
'\n']},
# IPv4=DHCP; IPv6=Static; without IPv6 netmask
{'iface_name': 'eth19', 'iface_type': 'eth', 'enabled': True,
'build_interface': {
'proto': 'dhcp',
'enable_ipv6': True,
'ipv6proto': 'static',
'ipv6ipaddr': '2001:db8:dead:c0::3/64',
'ipv6gateway': '2001:db8:dead:c0::1',
'noifupdown': True,
},
'get_interface': odict([('eth19', odict([('enabled', True), ('data', odict([
('inet', odict([
('addrfam', 'inet'),
('proto', 'dhcp'),
('filename', None),
])),
('inet6', odict([
('addrfam', 'inet6'),
('proto', 'static'),
('filename', None),
('address', '2001:db8:dead:c0::3/64'),
('gateway', '2001:db8:dead:c0::1'),
])),
]))]))]),
'return': [
'auto eth19\n',
'iface eth19 inet dhcp\n',
'iface eth19 inet6 static\n',
' address 2001:db8:dead:c0::3/64\n',
' gateway 2001:db8:dead:c0::1\n',
'\n']},
]
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(salt.utils.platform.is_windows(), 'Do not run these tests on Windows')
class DebianIpTestCase(TestCase, LoaderModuleMockMixin):
    """
    Test cases for salt.modules.debian_ip
    """

    def setup_loader_modules(self):
        return {debian_ip: {}}

    # 'build_bond' function tests: 3

    def test_build_bond(self):
        """
        Test if it create a bond script in /etc/modprobe.d with the passed
        settings and load the bonding kernel module.
        """
        with patch(
            "salt.modules.debian_ip._parse_settings_bond", MagicMock(return_value={})
        ), patch("salt.modules.debian_ip._write_file", MagicMock(return_value=True)):
            mock = MagicMock(return_value=1)
            with patch.dict(debian_ip.__grains__, {"osrelease": mock}):
                mock = MagicMock(return_value=True)
                with patch.dict(
                    debian_ip.__salt__, {"kmod.load": mock, "pkg.install": mock}
                ):
                    self.assertEqual(debian_ip.build_bond("bond0"), "")

    # NOTE(review): the two _error_msg tests below (and test_parse_interfaces
    # further down) appeared twice in this file, once double-quoted and once
    # single-quoted -- merge-conflict residue.  The later duplicate silently
    # shadowed the earlier definition, so only one copy of each is kept.
    def test_error_message_iface_should_process_non_str_expected(self):
        values = [1, True, False, "no-kaboom"]
        iface = "ethtest"
        option = "test"
        msg = debian_ip._error_msg_iface(iface, option, values)
        self.assertTrue(msg.endswith("[1|True|False|no-kaboom]"), msg)

    def test_error_message_network_should_process_non_str_expected(self):
        values = [1, True, False, "no-kaboom"]
        msg = debian_ip._error_msg_network("fnord", values)
        self.assertTrue(msg.endswith("[1|True|False|no-kaboom]"), msg)

    def test_build_bond_exception(self):
        """
        Test if it create a bond script in /etc/modprobe.d with the passed
        settings and load the bonding kernel module.
        """
        with patch(
            "salt.modules.debian_ip._parse_settings_bond", MagicMock(return_value={})
        ):
            mock = MagicMock(return_value=1)
            with patch.dict(debian_ip.__grains__, {"osrelease": mock}):
                mock = MagicMock(
                    side_effect=jinja2.exceptions.TemplateNotFound("error")
                )
                with patch.object(jinja2.Environment, "get_template", mock):
                    self.assertEqual(debian_ip.build_bond("bond0"), "")

    def test_build_bond_data(self):
        """
        Test if it create a bond script in /etc/modprobe.d with the passed
        settings and load the bonding kernel module.
        """
        with patch(
            "salt.modules.debian_ip._parse_settings_bond", MagicMock(return_value={})
        ), patch("salt.modules.debian_ip._read_temp", MagicMock(return_value=True)):
            mock = MagicMock(return_value=1)
            with patch.dict(debian_ip.__grains__, {"osrelease": mock}):
                self.assertTrue(debian_ip.build_bond("bond0", test="True"))

    # 'build_routes' function tests: 2

    def test_build_routes(self):
        """
        Test if it add route scripts for a network interface using up commands.
        """
        with patch(
            "salt.modules.debian_ip._parse_routes",
            MagicMock(return_value={"routes": []}),
        ), patch(
            "salt.modules.debian_ip._write_file_routes", MagicMock(return_value=True)
        ), patch(
            "salt.modules.debian_ip._read_file", MagicMock(return_value="salt")
        ):
            # _read_file is consulted for both the v4 and v6 route files,
            # hence the concatenated "saltsalt".
            self.assertEqual(debian_ip.build_routes("eth0"), "saltsalt")

    def test_build_routes_exception(self):
        """
        Test if it add route scripts for a network interface using up commands.
        """
        with patch(
            "salt.modules.debian_ip._parse_routes",
            MagicMock(return_value={"routes": []}),
        ):
            self.assertTrue(debian_ip.build_routes("eth0", test="True"))

            mock = MagicMock(side_effect=jinja2.exceptions.TemplateNotFound("err"))
            with patch.object(jinja2.Environment, "get_template", mock):
                self.assertEqual(debian_ip.build_routes("eth0"), "")

    # 'down' function tests: 1

    def test_down(self):
        """
        Test if it shutdown a network interface
        """
        # 'slave' interfaces are ignored: nothing to run, returns None.
        self.assertEqual(debian_ip.down("eth0", "slave"), None)

        mock = MagicMock(return_value="Salt")
        with patch.dict(debian_ip.__salt__, {"cmd.run": mock}):
            self.assertEqual(debian_ip.down("eth0", "eth"), "Salt")

    # 'get_bond' function tests: 1

    def test_get_bond(self):
        """
        Test if it return the content of a bond script
        """
        self.assertEqual(debian_ip.get_bond("bond0"), "")

    # '_parse_interfaces' function tests: 1

    def test_parse_interfaces(self):
        """
        Test if it returns the correct data for parsed configuration file
        """
        with tempfile.NamedTemporaryFile(mode="r", delete=True) as tfile:
            for iface in test_interfaces:
                iname = iface["iface_name"]
                if iface.get("skip_test", False):
                    continue
                # Write the expected rendered config, then parse it back.
                with salt.utils.files.fopen(str(tfile.name), "w") as fh:
                    fh.writelines(iface["return"])
                # The parser records the source filename per address family.
                for inet in ["inet", "inet6"]:
                    if inet in iface["get_interface"][iname]["data"]:
                        iface["get_interface"][iname]["data"][inet]["filename"] = str(
                            tfile.name
                        )
                self.assertDictEqual(
                    debian_ip._parse_interfaces([str(tfile.name)]),
                    iface["get_interface"],
                )

    # 'get_interface' function tests: 1

    def test_get_interface(self):
        """
        Test if it return the contents of an interface script
        """
        for iface in test_interfaces:
            if iface.get("skip_test", False):
                continue
            with patch.object(
                debian_ip,
                "_parse_interfaces",
                MagicMock(return_value=iface["get_interface"]),
            ):
                self.assertListEqual(
                    debian_ip.get_interface(iface["iface_name"]), iface["return"]
                )

    # 'build_interface' function tests: 1

    def test_build_interface(self):
        """
        Test if it builds an interface script for a network interface.
        """
        with patch(
            "salt.modules.debian_ip._write_file_ifaces", MagicMock(return_value="salt")
        ):
            self.assertEqual(
                debian_ip.build_interface("eth0", "eth", "enabled"),
                ["s\n", "a\n", "l\n", "t\n"],
            )

            self.assertTrue(
                debian_ip.build_interface("eth0", "eth", "enabled", test="True")
            )

            with patch.object(
                debian_ip, "_parse_settings_eth", MagicMock(return_value={"routes": []})
            ):
                for eth_t in ["bridge", "slave", "bond"]:
                    self.assertRaises(
                        AttributeError,
                        debian_ip.build_interface,
                        "eth0",
                        eth_t,
                        "enabled",
                    )

                self.assertTrue(
                    debian_ip.build_interface("eth0", "eth", "enabled", test="True")
                )

        # End-to-end: render each sample interface into a real temp file and
        # compare against the expected script text.
        with tempfile.NamedTemporaryFile(mode="r", delete=False) as tfile:
            with patch("salt.modules.debian_ip._DEB_NETWORK_FILE", str(tfile.name)):
                for iface in test_interfaces:
                    if iface.get("skip_test", False):
                        continue
                    # Skip tests that require __salt__['pkg.install']()
                    if iface["iface_type"] in ["bridge", "pppoe", "vlan"]:
                        continue
                    self.assertListEqual(
                        debian_ip.build_interface(
                            iface=iface["iface_name"],
                            iface_type=iface["iface_type"],
                            enabled=iface["enabled"],
                            interface_file=tfile.name,
                            **iface["build_interface"]
                        ),
                        iface["return"],
                    )
        os.remove(tfile.name)

    # 'up' function tests: 1

    def test_up(self):
        """
        Test if it start up a network interface
        """
        self.assertEqual(debian_ip.down("eth0", "slave"), None)

        mock = MagicMock(return_value="Salt")
        with patch.dict(debian_ip.__salt__, {"cmd.run": mock}):
            self.assertEqual(debian_ip.up("eth0", "eth"), "Salt")

    # 'get_network_settings' function tests: 1

    def test_get_network_settings(self):
        """
        Test if it return the contents of the global network script.
        """
        with patch.dict(
            debian_ip.__grains__, {"osfullname": "Ubuntu", "osrelease": "14"}
        ), patch(
            "salt.modules.debian_ip._parse_hostname",
            MagicMock(return_value="SaltStack"),
        ), patch(
            "salt.modules.debian_ip._parse_domainname",
            MagicMock(return_value="saltstack.com"),
        ):
            mock_avai = MagicMock(return_value=True)
            with patch.dict(
                debian_ip.__salt__,
                {"service.available": mock_avai, "service.status": mock_avai},
            ):
                self.assertEqual(
                    debian_ip.get_network_settings(),
                    [
                        "NETWORKING=yes\n",
                        "HOSTNAME=SaltStack\n",
                        "DOMAIN=saltstack.com\n",
                    ],
                )

                mock = MagicMock(
                    side_effect=jinja2.exceptions.TemplateNotFound("error")
                )
                with patch.object(jinja2.Environment, "get_template", mock):
                    self.assertEqual(debian_ip.get_network_settings(), "")

    # 'get_routes' function tests: 1

    def test_get_routes(self):
        """
        Test if it return the routes for the interface
        """
        with patch("salt.modules.debian_ip._read_file", MagicMock(return_value="salt")):
            self.assertEqual(debian_ip.get_routes("eth0"), "saltsalt")

    # 'apply_network_settings' function tests: 1

    @skipIf(True, "SLOWTEST skip")
    def test_apply_network_settings(self):
        """
        Test if it apply global network configuration.
        """
        mock = MagicMock(return_value=True)
        with patch.dict(
            debian_ip.__salt__,
            {"network.mod_hostname": mock, "service.stop": mock, "service.start": mock},
        ):
            self.assertEqual(debian_ip.apply_network_settings(), True)

    # 'build_network_settings' function tests: 1

    def test_build_network_settings(self):
        """
        Test if it build the global network script.
        """
        with patch(
            "salt.modules.debian_ip._parse_network_settings",
            MagicMock(
                return_value={
                    "networking": "yes",
                    "hostname": "Salt.saltstack.com",
                    "domainname": "saltstack.com",
                    "search": "test.saltstack.com",
                }
            ),
        ), patch(
            "salt.modules.debian_ip._write_file_network", MagicMock(return_value=True)
        ):
            with patch.dict(
                debian_ip.__grains__, {"osfullname": "Ubuntu", "osrelease": "14"}
            ):
                mock = MagicMock(return_value=True)
                with patch.dict(
                    debian_ip.__salt__,
                    {
                        "service.available": mock,
                        "service.disable": mock,
                        "service.enable": mock,
                    },
                ):
                    # Hostname is truncated to its first label.
                    self.assertEqual(
                        debian_ip.build_network_settings(),
                        [
                            "NETWORKING=yes\n",
                            "HOSTNAME=Salt\n",
                            "DOMAIN=saltstack.com\n",
                            "SEARCH=test.saltstack.com\n",
                        ],
                    )

                    mock = MagicMock(
                        side_effect=jinja2.exceptions.TemplateNotFound("error")
                    )
                    with patch.object(jinja2.Environment, "get_template", mock):
                        self.assertEqual(debian_ip.build_network_settings(), "")

            with patch.dict(
                debian_ip.__grains__, {"osfullname": "Ubuntu", "osrelease": "10"}
            ):
                mock = MagicMock(return_value=True)
                with patch.dict(
                    debian_ip.__salt__,
                    {
                        "service.available": mock,
                        "service.disable": mock,
                        "service.enable": mock,
                    },
                ):
                    mock = MagicMock(
                        side_effect=jinja2.exceptions.TemplateNotFound("error")
                    )
                    with patch.object(jinja2.Environment, "get_template", mock):
                        self.assertEqual(debian_ip.build_network_settings(), "")

                    with patch.object(
                        debian_ip, "_read_temp", MagicMock(return_value=True)
                    ):
                        self.assertTrue(debian_ip.build_network_settings(test="True"))
| 40.181764 | 106 | 0.39033 |
57afe9c903c677907e50d2e094d56d5750c56899 | 4,566 | py | Python | scripts/histogram.py | souryadey/mlp-ondevice-training | 5d740f9051c1d4348b43780bb3f3b67a8d888a21 | [
"MIT"
] | 2 | 2021-11-17T04:21:28.000Z | 2021-12-17T17:16:53.000Z | scripts/histogram.py | souryadey/mlp-ondevice-training | 5d740f9051c1d4348b43780bb3f3b67a8d888a21 | [
"MIT"
] | null | null | null | scripts/histogram.py | souryadey/mlp-ondevice-training | 5d740f9051c1d4348b43780bb3f3b67a8d888a21 | [
"MIT"
] | null | null | null | #Mahdi
#This file reads the output from the simulator
#and generates the histogram based on the values and accuracy
import matplotlib.pyplot as plt
import struct
import math
import sys
########################## ONLY CHANGE THIS SECTION ###########################
# Fixed-point layout used throughout this script:
# 1 sign digit + `int_bits` integer digits + `frac_bits` fractional digits,
# all interpreted in radix `base`.
int_bits = 5
frac_bits = 10
base = 2
###############################################################################
def calcul(val, int_bits=None, frac_bits=None, base=None):
    """Decode a sign-magnitude fixed-point digit string into a float.

    *val* is a string of digit characters laid out as::

        1 sign digit + int_bits integer digits + frac_bits fractional digits

    A leading '0' means positive; anything else means negative.

    The layout parameters default to the module-level configuration section
    (``int_bits``/``frac_bits``/``base``), so existing ``calcul(val)`` call
    sites behave exactly as before; passing them explicitly generalizes the
    function to any fixed-point layout.
    """
    # Fall back to the module-level "ONLY CHANGE THIS SECTION" constants.
    if int_bits is None:
        int_bits = globals()["int_bits"]
    if frac_bits is None:
        frac_bits = globals()["frac_bits"]
    if base is None:
        base = globals()["base"]
    # Integer field: digits 1 .. int_bits (digit 0 is the sign).
    intval = int(val[1:int_bits + 1], base)
    # Fractional field: digit at position int_bits+k has weight base**-k.
    totalfrac = 0
    for i in range(int_bits + 1, frac_bits + int_bits + 1):
        totalfrac += int(val[i], base) * base ** (-(i - int_bits))
    magnitude = intval + totalfrac
    # Sign-magnitude representation: '0' sign digit => positive.
    return magnitude if int(val[0], 0) == 0 else -magnitude
def calcul2(val, int_bits=None, frac_bits=None, base=None):
    """Decode the wider fixed-point format used for multiplication results.

    Same sign-magnitude layout as :func:`calcul`, but with one extra integer
    digit: ``1 sign digit + (int_bits + 1) integer digits + fractional
    digits``.  Layout parameters default to the module-level configuration,
    keeping the original ``calcul2(val)`` call sites unchanged.

    NOTE(review): the fractional loop stops at ``frac_bits + int_bits + 1``
    exactly as the original did, so for a 17-character input the final
    fractional digit is never read -- confirm against the hardware format.
    """
    if int_bits is None:
        int_bits = globals()["int_bits"]
    if frac_bits is None:
        frac_bits = globals()["frac_bits"]
    if base is None:
        base = globals()["base"]
    # NOTE(review): the integer field is parsed with a hard-coded radix of 2
    # (preserved from the original), even when *base* differs.
    intval = int(val[1:int_bits + 2], 2)
    totalfrac = 0
    for i in range(int_bits + 2, frac_bits + int_bits + 1):
        # int(..., 0) auto-detects the radix of a single digit character;
        # behaves like base 10 for '0'..'9' (preserved from the original).
        totalfrac += int(val[i], 0) * base ** (-(i - int_bits - 1))
    magnitude = intval + totalfrac
    # Sign-magnitude representation: '0' sign digit => positive.
    return magnitude if int(val[0], 0) == 0 else -magnitude
def data_parser(text, dic):
    """Return *text* with every key of *dic* replaced by its value.

    Replacements are applied sequentially in dict order, so later
    substitutions see the output of earlier ones.
    """
    # Bug fix: dict.iteritems() is Python 2 only and raises AttributeError
    # under Python 3, which the rest of this script targets (print()
    # function calls, input()).  Use items() instead.
    for old, new in dic.items():
        text = text.replace(old, new)
    return text
# Demo data left over from earlier matplotlib experimentation; neither
# variable is referenced below.  TODO(review): remove if really dead.
values = [1.2, 2.3, 1.2]
bins=[0.2, 1.1, 2.4, 3.5]

# Per-sample results collected for the final scatter plot:
#   r   = |X - Y| (distance between the two log-domain operands)
#   acc = accuracy ratio of the hardware approximation vs. the exact value
r = []
acc = []

# Separator characters normalised to spaces (newlines dropped) before
# splitting each input line into fields.
reps = {'*':' ','+':' ',':':' ','=':' ',' ':' ','\n':''}

realreps = open("realreps.dat","r")
inputfile = open("out.dat","r")
# NOTE(review): out2.dat is opened for writing but never written to.
outputfile = open("out2.dat","w")

#read the values in fixed.point format and do the computation
#every line has arguments
for line in realreps:
    line2 = data_parser(line, reps)
    temp = line2.split(" ")
    # Reference results computed directly in the linear domain.
    print("Here is real rep values:")
    print(float(temp[0]) + float(temp[2]))
    print(float(temp[0]) * float(temp[2]))
    #Exact values from computation: now in log domain
    # Log-domain addition uses the exact correction factor
    # cf = log_b(1 + b**-|x-y|)  (i.e. max(x, y) + cf).
    print("Here is real rep values in log:")
    num1 = math.log(float(temp[0]), base)
    num2 = math.log(float(temp[2]),base)
    cf = math.log(1 + base ** (-abs(num1-num2)), base)
    print(math.pow(base, max(num1, num2) + cf ))
    print(math.pow(base, math.log(float(temp[0]),base) + math.log(float(temp[2]),base)) )
    #Approximation: values from computation in log domain
    # Hardware approximation replaces the log-of-sum correction factor
    # with a single shift: cf ~= b**-|x-y|.
    print("Approximated cf in log:")
    num1 = math.log(float(temp[0]), base)
    num2 = math.log(float(temp[2]),base)
    print(num1, num2)
    cf = base ** (-abs(num1-num2)) #in hardware approx. using a shifter
    print(math.pow(base, max(num1, num2) + cf ))
    print(math.pow(base, math.log(float(temp[0]),base) + math.log(float(temp[2]),base)) )

# Which hardware operation does out.dat contain?  ("add" or "mult")
# NOTE(review): under Python 2, input() would eval the typed text; the
# script otherwise uses Python 3 print() -- confirm it runs on Python 3.
opType = input()

#read back the generated values from verilog computation
print("Here is values read back from FPGA:")
if(opType == "add"):
    for line in inputfile:
        line2 = data_parser(line, reps)
        temp = line2.split(" ")
        #unpack the string values to binary
        # NOTE(review): struct.unpack requires bytes under Python 3 but
        # temp[i] is str here -- this raises TypeError on Python 3; the
        # fields would need .encode() first.  Verify intended interpreter.
        val1 = struct.unpack('16s', temp[0])[0] #X
        val2 = struct.unpack('16s', temp[8])[0] #Y
        res = struct.unpack('16s', temp[13])[0]
        #compute exact and approx values
        exact = (base ** calcul(res))
        approx = (base**calcul(val1) + base**calcul(val2))
        dev = abs(approx - exact)
        print( calcul(val1) ,"+", calcul(val2), "=", calcul(res))
        print( "Exact number:" , (base**calcul(val1) + base**calcul(val2)) )
        print( "Hardware Approx. number:", base ** calcul(res))
        print( "r is:", abs(calcul(val1) - calcul(val2)), "acc rate is:", abs(exact - dev)/exact )
        r.insert (0, abs(calcul(val1) - calcul(val2))) # r = |X - Y|
        acc.insert (0, abs(exact - dev) / exact) # acc% = approx./exact
elif (opType == "mult"):
    for line in inputfile:
        line2 = data_parser(line, reps)
        temp = line2.split(" ")
        #unpack the string values to binary
        val1 = struct.unpack('16s', temp[0])[0] #X
        val2 = struct.unpack('16s', temp[8])[0] #Y
        # Multiplication results carry one extra integer bit (17 chars).
        res = struct.unpack('17s', temp[13])[0]
        #compute exact and approx values
        exact = (base ** calcul2(res))
        approx = (base**calcul(val1) * base**calcul(val2))
        dev = abs(approx - exact)
        print( calcul(val1) ,"*", calcul(val2), "=", calcul2(res), "which was", res)
        print( "Exact number:" , (base**calcul(val1) * base**calcul(val2)) )
        print( "Hardware Approx. number:", base ** calcul2(res))
        r.insert (0, abs(calcul(val1) - calcul(val2))) # r = |X - Y|
        acc.insert (0, abs(exact - dev) / exact) # acc% = approx./exact
else:
    print("Sorry, operator not supported.")
    sys.exit(0)

# Scatter-plot accuracy against operand distance.
plt.scatter(r,acc)
plt.xlabel('r = |X - Y|', fontsize=18)
plt.ylabel('accuracy %', fontsize=16)
plt.show()

#close files
inputfile.close()
outputfile.close()
c2f2d69d040c8b646e369fa93196145d1c869ced | 2,225 | py | Python | changewall.py | bisho1995/python-change-wallpaper | 7b7aedf0b51236ed382252baa2a6a3d4a19b593e | [
"MIT"
] | null | null | null | changewall.py | bisho1995/python-change-wallpaper | 7b7aedf0b51236ed382252baa2a6a3d4a19b593e | [
"MIT"
] | null | null | null | changewall.py | bisho1995/python-change-wallpaper | 7b7aedf0b51236ed382252baa2a6a3d4a19b593e | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from urllib.request import Request, urlopen
import urllib
from bs4 import BeautifulSoup
import json
from random import randint
import requests
import subprocess
import imghdr
import os
# --- argument validation -----------------------------------------------------
if len(sys.argv)<2:
    print("Provide Name.")
    exit()
if len(sys.argv)>2:
    print("Provide single argument only, if more than word, enclose in \"\"")
    exit()

#Replace search string with + in place of whitespace
# Append a wallpaper qualifier, then make the query URL-safe.
SEARCH_NAME = sys.argv[1]
SEARCH_NAME +=" HD DESKTOP WALLPAPER"
SEARCH_NAME = SEARCH_NAME.replace(' ','+')

#prepare google search url
# Image search restricted to exactly 1920x1080 results (tbs=isz:ex,...).
SEARCH_URL = "https://www.google.co.in/search?q={}&source=lnms&tbm=isch&tbs=isz:ex,iszw:1920,iszh:1080".format(SEARCH_NAME)
print(SEARCH_URL)

#path for storing the photo
PHOTO_PATH = 'photo.jpg'

#manipulate user agent so as to make them believe we are just normal human downloading some image
hdr = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}

#make a request and get html in beautiful suop
req=urllib.request.Request(SEARCH_URL, headers=hdr)
page =urllib.request.urlopen(req).read()
#req = urllib.request(SEARCH_URL,headers=hdr)
#page = urllib2.urlopen(req)
soup = BeautifulSoup(page, 'html.parser')
#print soup.prettify()

#scrap the html to find link of image (just refer to html printed in above prettify line to find what html google returns)
#google image will give us total 99 images (I think), just pick one randomly. And hope for the best.
# NOTE(review): this scrapes Google's result markup ('data-ri'/'rg_meta'),
# which changes often -- soup.find() can return None and raise
# AttributeError when the markup no longer matches.
invalid_jpeg = True
while invalid_jpeg:
    # A new random result index is drawn on every retry.
    for child in soup.find("div", {"data-ri":"{}".format(str(randint(0,99)))}).find("div", {"class":"rg_meta"}).children:
        data_content = json.loads(child)
        LINK = data_content["ou"]  # 'ou' carries the original image URL
    #dowload the photo
    print(LINK)
    res = requests.get(LINK, headers=hdr)
    with open(PHOTO_PATH, 'wb') as W:
        W.write(res.content)
    #check if photo is valid, if not, try another photo, keep trying until you get one
    invalid_jpeg = (imghdr.what(PHOTO_PATH)!="jpeg")

abs_path_of_image=os.path.join(os.getcwd(),PHOTO_PATH)
print(abs_path_of_image)

# Windows-only: SystemParametersInfoW with SPI_SETDESKWALLPAPER (= 20)
# sets the desktop wallpaper to the downloaded file.
import ctypes
ctypes.windll.user32.SystemParametersInfoW(20, 0, abs_path_of_image , 0)
7c86d4ac2ac986614bf92c4cdf46e03dd5003456 | 33,311 | py | Python | pypy/lib-python/3/test/test_contextlib.py | Clear-Sight/cython-vs-pypy-performance | a81df5e1dbc115468ddfd60670ddfad448a5c992 | [
"MIT"
] | 1 | 2021-06-02T23:02:09.000Z | 2021-06-02T23:02:09.000Z | pypy/lib-python/3/test/test_contextlib.py | Clear-Sight/cython-vs-pypy-performance | a81df5e1dbc115468ddfd60670ddfad448a5c992 | [
"MIT"
] | 1 | 2021-03-30T18:08:41.000Z | 2021-03-30T18:08:41.000Z | pypy/lib-python/3/test/test_contextlib.py | Clear-Sight/cython-vs-pypy-performance | a81df5e1dbc115468ddfd60670ddfad448a5c992 | [
"MIT"
] | null | null | null | """Unit tests for contextlib.py, and other context managers."""
import asyncio
import io
import sys
import tempfile
import threading
import unittest
from contextlib import * # Tests __all__
from test import support
import weakref
class TestAbstractContextManager(unittest.TestCase):
    """Behaviour of the contextlib.AbstractContextManager ABC."""

    def test_enter(self):
        # The default __enter__ hands back the manager object itself.
        class EnterByDefault(AbstractContextManager):
            def __exit__(self, *args):
                super().__exit__(*args)

        cm = EnterByDefault()
        self.assertIs(cm.__enter__(), cm)

    def test_exit_is_abstract(self):
        # __exit__ is abstract, so a subclass without it is not instantiable.
        class MissingExit(AbstractContextManager):
            pass

        with self.assertRaises(TypeError):
            MissingExit()

    def test_structural_subclassing(self):
        # issubclass() works structurally via __subclasshook__: any class
        # providing both __enter__ and __exit__ counts as a subclass.
        class HandRolled:
            def __enter__(self):
                return self

            def __exit__(self, exc_type, exc_value, traceback):
                return None

        self.assertTrue(issubclass(HandRolled, AbstractContextManager))

        class EnterByDefault(AbstractContextManager):
            def __exit__(self, *args):
                super().__exit__(*args)

        self.assertTrue(issubclass(EnterByDefault, AbstractContextManager))

        # Setting either hook to None opts the class back out.
        class WithoutEnter(HandRolled):
            __enter__ = None

        self.assertFalse(issubclass(WithoutEnter, AbstractContextManager))

        class WithoutExit(HandRolled):
            __exit__ = None

        self.assertFalse(issubclass(WithoutExit, AbstractContextManager))
class ContextManagerTestCase(unittest.TestCase):
    """Tests for the @contextmanager decorator itself."""

    def test_contextmanager_plain(self):
        # Normal flow: the generator runs up to its yield on __enter__,
        # the yielded value is bound by ``as``, and the rest runs on exit.
        state = []
        @contextmanager
        def woohoo():
            state.append(1)
            yield 42
            state.append(999)
        with woohoo() as x:
            self.assertEqual(state, [1])
            self.assertEqual(x, 42)
            state.append(x)
        self.assertEqual(state, [1, 42, 999])

    def test_contextmanager_finally(self):
        # The finally clause around the yield runs even when the with body
        # raises.
        state = []
        @contextmanager
        def woohoo():
            state.append(1)
            try:
                yield 42
            finally:
                state.append(999)
        with self.assertRaises(ZeroDivisionError):
            with woohoo() as x:
                self.assertEqual(state, [1])
                self.assertEqual(x, 42)
                state.append(x)
                raise ZeroDivisionError()
        self.assertEqual(state, [1, 42, 999])

    def test_contextmanager_no_reraise(self):
        # When the generator finishes after the exception is thrown in,
        # __exit__ simply returns a false value (no re-raise here).
        @contextmanager
        def whee():
            yield
        ctx = whee()
        ctx.__enter__()
        # Calling __exit__ should not result in an exception
        self.assertFalse(ctx.__exit__(TypeError, TypeError("foo"), None))

    def test_contextmanager_trap_yield_after_throw(self):
        # Yielding again after the exception was thrown in is a protocol
        # violation and must surface as RuntimeError.
        @contextmanager
        def whoo():
            try:
                yield
            except:
                yield
        ctx = whoo()
        ctx.__enter__()
        self.assertRaises(
            RuntimeError, ctx.__exit__, TypeError, TypeError("foo"), None
        )

    def test_contextmanager_except(self):
        # An exception raised in the with body is thrown into the generator
        # at the yield point and can be handled there.
        state = []
        @contextmanager
        def woohoo():
            state.append(1)
            try:
                yield 42
            except ZeroDivisionError as e:
                state.append(e.args[0])
                self.assertEqual(state, [1, 42, 999])
        with woohoo() as x:
            self.assertEqual(state, [1])
            self.assertEqual(x, 42)
            state.append(x)
            raise ZeroDivisionError(999)
        self.assertEqual(state, [1, 42, 999])

    def test_contextmanager_except_stopiter(self):
        # StopIteration raised in the body must propagate out (with a
        # DeprecationWarning), not be swallowed by the generator machinery.
        stop_exc = StopIteration('spam')
        @contextmanager
        def woohoo():
            yield
        try:
            with self.assertWarnsRegex(DeprecationWarning,
                                       "StopIteration"):
                with woohoo():
                    raise stop_exc
        except Exception as ex:
            self.assertIs(ex, stop_exc)
        else:
            self.fail('StopIteration was suppressed')

    def test_contextmanager_except_pep479(self):
        # Under PEP 479 semantics (generator_stop), StopIteration from the
        # body propagates unchanged, without any warning.
        # NOTE(review): the indentation inside this exec'd snippet was lost
        # in extraction and has been reconstructed (the ``yield`` belongs to
        # the function body).
        code = """\
from __future__ import generator_stop
from contextlib import contextmanager
@contextmanager
def woohoo():
    yield
"""
        locals = {}
        exec(code, locals, locals)
        woohoo = locals['woohoo']

        stop_exc = StopIteration('spam')
        try:
            with woohoo():
                raise stop_exc
        except Exception as ex:
            self.assertIs(ex, stop_exc)
        else:
            self.fail('StopIteration was suppressed')

    def test_contextmanager_do_not_unchain_non_stopiteration_exceptions(self):
        # bpo-29692: only the StopIteration replacement may be unchained;
        # an explicitly chained RuntimeError keeps its __cause__.
        @contextmanager
        def test_issue29692():
            try:
                yield
            except Exception as exc:
                raise RuntimeError('issue29692:Chained') from exc

        try:
            with test_issue29692():
                raise ZeroDivisionError
        except Exception as ex:
            self.assertIs(type(ex), RuntimeError)
            self.assertEqual(ex.args[0], 'issue29692:Chained')
            self.assertIsInstance(ex.__cause__, ZeroDivisionError)

        try:
            with test_issue29692():
                raise StopIteration('issue29692:Unchained')
        except Exception as ex:
            self.assertIs(type(ex), StopIteration)
            self.assertEqual(ex.args[0], 'issue29692:Unchained')
            self.assertIsNone(ex.__cause__)

    def _create_contextmanager_attribs(self):
        # Helper: build a @contextmanager-wrapped function that carries
        # extra attributes and a docstring, to check both are preserved.
        def attribs(**kw):
            def decorate(func):
                for k,v in kw.items():
                    setattr(func,k,v)
                return func
            return decorate
        @contextmanager
        @attribs(foo='bar')
        def baz(spam):
            """Whee!"""
        return baz

    def test_contextmanager_attribs(self):
        # functools.wraps must carry __name__ and custom attributes through.
        baz = self._create_contextmanager_attribs()
        self.assertEqual(baz.__name__,'baz')
        self.assertEqual(baz.foo, 'bar')

    @support.requires_docstrings
    def test_contextmanager_doc_attrib(self):
        baz = self._create_contextmanager_attribs()
        self.assertEqual(baz.__doc__, "Whee!")

    @support.requires_docstrings
    def test_instance_docstring_given_cm_docstring(self):
        # The helper instance itself reports the wrapped function's doc.
        baz = self._create_contextmanager_attribs()(None)
        self.assertEqual(baz.__doc__, "Whee!")

    def test_keywords(self):
        # Ensure no keyword arguments are inhibited
        @contextmanager
        def woohoo(self, func, args, kwds):
            yield (self, func, args, kwds)
        with woohoo(self=11, func=22, args=33, kwds=44) as target:
            self.assertEqual(target, (11, 22, 33, 44))

    def test_nokeepref(self):
        # The helper must not keep the original call arguments alive once
        # the generator is running (they would otherwise leak).
        class A:
            pass

        @contextmanager
        def woohoo(a, b):
            a = weakref.ref(a)
            b = weakref.ref(b)
            support.gc_collect()
            self.assertIsNone(a())
            self.assertIsNone(b())
            yield

        with woohoo(A(), b=A()):
            pass

    def test_param_errors(self):
        # Signature errors surface at call time as TypeError.
        @contextmanager
        def woohoo(a, *, b):
            yield
        with self.assertRaises(TypeError):
            woohoo()
        with self.assertRaises(TypeError):
            woohoo(3, 5)
        with self.assertRaises(TypeError):
            woohoo(b=3)

    def test_recursive(self):
        # Used as a decorator, the context manager must recreate its
        # generator per call, so recursive use nests correctly.
        depth = 0
        @contextmanager
        def woohoo():
            nonlocal depth
            before = depth
            depth += 1
            yield
            depth -= 1
            self.assertEqual(depth, before)

        @woohoo()
        def recursive():
            if depth < 10:
                recursive()

        recursive()
        self.assertEqual(depth, 0)
class ClosingTestCase(unittest.TestCase):
    """Tests for contextlib.closing."""

    @support.requires_docstrings
    def test_instance_docs(self):
        # Issue 19330: ensure context manager instances have good docstrings
        self.assertEqual(closing(None).__doc__, closing.__doc__)

    def test_closing(self):
        # close() is invoked exactly once, after the with body finishes.
        calls = []

        class Resource:
            def close(self):
                calls.append(1)

        res = Resource()
        self.assertEqual(calls, [])
        with closing(res) as entered:
            self.assertEqual(res, entered)
        self.assertEqual(calls, [1])

    def test_closing_error(self):
        # close() is still invoked when the with body raises.
        calls = []

        class Resource:
            def close(self):
                calls.append(1)

        res = Resource()
        self.assertEqual(calls, [])
        with self.assertRaises(ZeroDivisionError):
            with closing(res) as entered:
                self.assertEqual(res, entered)
                1 / 0
        self.assertEqual(calls, [1])
class NullcontextTestCase(unittest.TestCase):
    """Tests for contextlib.nullcontext."""

    def test_nullcontext(self):
        # nullcontext(obj) yields exactly the object it was given.
        class Payload:
            pass

        payload = Payload()
        with nullcontext(payload) as entered:
            self.assertIs(entered, payload)
class FileContextTestCase(unittest.TestCase):
    """File objects used as context managers close themselves on exit."""

    def testWithOpen(self):
        tfn = tempfile.mktemp()
        try:
            f = None
            with open(tfn, "w") as f:
                self.assertFalse(f.closed)
                f.write("Booh\n")
            # Closed on normal exit.
            self.assertTrue(f.closed)
            f = None
            with self.assertRaises(ZeroDivisionError):
                with open(tfn, "r") as f:
                    self.assertFalse(f.closed)
                    self.assertEqual(f.read(), "Booh\n")
                    1 / 0
            # Closed even though the body raised.
            self.assertTrue(f.closed)
        finally:
            support.unlink(tfn)
class LockContextTestCase(unittest.TestCase):
    """Every threading synchronization primitive must work as a context manager."""
    def boilerPlate(self, lock, locked):
        # Not held initially, held inside the block, released afterwards,
        # and released even when the body raises.
        self.assertFalse(locked())
        with lock:
            self.assertTrue(locked())
        self.assertFalse(locked())
        with self.assertRaises(ZeroDivisionError):
            with lock:
                self.assertTrue(locked())
                1 / 0
        self.assertFalse(locked())
    def testWithLock(self):
        lock = threading.Lock()
        self.boilerPlate(lock, lock.locked)
    def testWithRLock(self):
        lock = threading.RLock()
        self.boilerPlate(lock, lock._is_owned)
    def testWithCondition(self):
        cond = threading.Condition()
        self.boilerPlate(cond, lambda: cond._is_owned())
    def testWithSemaphore(self):
        sem = threading.Semaphore()
        def locked():
            # A semaphore is "locked" when a non-blocking acquire fails.
            acquired = sem.acquire(False)
            if acquired:
                sem.release()
            return not acquired
        self.boilerPlate(sem, locked)
    def testWithBoundedSemaphore(self):
        sem = threading.BoundedSemaphore()
        def locked():
            acquired = sem.acquire(False)
            if acquired:
                sem.release()
            return not acquired
        self.boilerPlate(sem, locked)
class mycontext(ContextDecorator):
    """Example decoration-compatible context manager for testing"""
    started = False
    exc = None
    catch = False
    def __enter__(self):
        # Record that the context was actually entered.
        self.started = True
        return self
    def __exit__(self, *exc_info):
        # Stash the exception details; suppress iff ``catch`` is set.
        self.exc = exc_info
        return self.catch
class TestContextDecorator(unittest.TestCase):
    """Tests for ContextDecorator: using context managers as decorators."""
    @support.requires_docstrings
    def test_instance_docs(self):
        # Issue 19330: ensure context manager instances have good docstrings
        cm_docstring = mycontext.__doc__
        obj = mycontext()
        self.assertEqual(obj.__doc__, cm_docstring)
    def test_contextdecorator(self):
        context = mycontext()
        with context as result:
            self.assertIs(result, context)
        self.assertTrue(context.started)
        self.assertEqual(context.exc, (None, None, None))
    def test_contextdecorator_with_exception(self):
        context = mycontext()
        with self.assertRaisesRegex(NameError, 'foo'):
            with context:
                raise NameError('foo')
        self.assertIsNotNone(context.exc)
        self.assertIs(context.exc[0], NameError)
        # With catch=True the manager suppresses the exception entirely.
        context = mycontext()
        context.catch = True
        with context:
            raise NameError('foo')
        self.assertIsNotNone(context.exc)
        self.assertIs(context.exc[0], NameError)
    def test_decorator(self):
        context = mycontext()
        @context
        def test():
            self.assertIsNone(context.exc)
            self.assertTrue(context.started)
        test()
        self.assertEqual(context.exc, (None, None, None))
    def test_decorator_with_exception(self):
        context = mycontext()
        @context
        def test():
            self.assertIsNone(context.exc)
            self.assertTrue(context.started)
            raise NameError('foo')
        with self.assertRaisesRegex(NameError, 'foo'):
            test()
        self.assertIsNotNone(context.exc)
        self.assertIs(context.exc[0], NameError)
    def test_decorating_method(self):
        context = mycontext()
        class Test(object):
            @context
            def method(self, a, b, c=None):
                self.a = a
                self.b = b
                self.c = c
        # these tests are for argument passing when used as a decorator
        test = Test()
        test.method(1, 2)
        self.assertEqual(test.a, 1)
        self.assertEqual(test.b, 2)
        self.assertEqual(test.c, None)
        test = Test()
        test.method('a', 'b', 'c')
        self.assertEqual(test.a, 'a')
        self.assertEqual(test.b, 'b')
        self.assertEqual(test.c, 'c')
        test = Test()
        test.method(a=1, b=2)
        self.assertEqual(test.a, 1)
        self.assertEqual(test.b, 2)
    def test_typo_enter(self):
        # A misspelled __enter__ must surface as AttributeError at use.
        class mycontext(ContextDecorator):
            def __unter__(self):
                pass
            def __exit__(self, *exc):
                pass
        with self.assertRaises(AttributeError):
            with mycontext():
                pass
    def test_typo_exit(self):
        # A misspelled __exit__ must surface as AttributeError at use.
        class mycontext(ContextDecorator):
            def __enter__(self):
                pass
            def __uxit__(self, *exc):
                pass
        with self.assertRaises(AttributeError):
            with mycontext():
                pass
    def test_contextdecorator_as_mixin(self):
        # ContextDecorator must compose with an ordinary CM base class.
        class somecontext(object):
            started = False
            exc = None
            def __enter__(self):
                self.started = True
                return self
            def __exit__(self, *exc):
                self.exc = exc
        class mycontext(somecontext, ContextDecorator):
            pass
        context = mycontext()
        @context
        def test():
            self.assertIsNone(context.exc)
            self.assertTrue(context.started)
        test()
        self.assertEqual(context.exc, (None, None, None))
    def test_contextmanager_as_decorator(self):
        @contextmanager
        def woohoo(y):
            state.append(y)
            yield
            state.append(999)
        state = []
        @woohoo(1)
        def test(x):
            self.assertEqual(state, [1])
            state.append(x)
        test('something')
        self.assertEqual(state, [1, 'something', 999])
        # Issue #11647: Ensure the decorated function is 'reusable'
        state = []
        test('something else')
        self.assertEqual(state, [1, 'something else', 999])
class TestBaseExitStack:
    """Shared test body for ExitStack-like classes.

    Subclasses set ``exit_stack`` to the concrete class under test.
    Many of these tests pin the exact exception-chaining behaviour, so
    the statement ordering here is deliberate and must not change.
    """
    exit_stack = None
    @support.requires_docstrings
    def test_instance_docs(self):
        # Issue 19330: ensure context manager instances have good docstrings
        cm_docstring = self.exit_stack.__doc__
        obj = self.exit_stack()
        self.assertEqual(obj.__doc__, cm_docstring)
    def test_no_resources(self):
        with self.exit_stack():
            pass
    def test_callback(self):
        # Callbacks run in LIFO order and pass through args/kwargs intact.
        expected = [
            ((), {}),
            ((1,), {}),
            ((1,2), {}),
            ((), dict(example=1)),
            ((1,), dict(example=1)),
            ((1,2), dict(example=1)),
            ((1,2), dict(self=3, callback=4)),
        ]
        result = []
        def _exit(*args, **kwds):
            """Test metadata propagation"""
            result.append((args, kwds))
        with self.exit_stack() as stack:
            for args, kwds in reversed(expected):
                if args and kwds:
                    f = stack.callback(_exit, *args, **kwds)
                elif args:
                    f = stack.callback(_exit, *args)
                elif kwds:
                    f = stack.callback(_exit, **kwds)
                else:
                    f = stack.callback(_exit)
                self.assertIs(f, _exit)
            for wrapper in stack._exit_callbacks:
                self.assertIs(wrapper[1].__wrapped__, _exit)
                self.assertNotEqual(wrapper[1].__name__, _exit.__name__)
                self.assertIsNone(wrapper[1].__doc__, _exit.__doc__)
        self.assertEqual(result, expected)
        result = []
        with self.exit_stack() as stack:
            with self.assertRaises(TypeError):
                stack.callback(arg=1)
            with self.assertRaises(TypeError):
                self.exit_stack.callback(arg=2)
            stack.callback(callback=_exit, arg=3)
        self.assertEqual(result, [((), {'arg': 3})])
    def test_push(self):
        # push() accepts both raw __exit__-style callables and CM objects;
        # __enter__ must never be invoked for pushed CMs.
        exc_raised = ZeroDivisionError
        def _expect_exc(exc_type, exc, exc_tb):
            self.assertIs(exc_type, exc_raised)
        def _suppress_exc(*exc_details):
            return True
        def _expect_ok(exc_type, exc, exc_tb):
            self.assertIsNone(exc_type)
            self.assertIsNone(exc)
            self.assertIsNone(exc_tb)
        class ExitCM(object):
            def __init__(self, check_exc):
                self.check_exc = check_exc
            def __enter__(self):
                self.fail("Should not be called!")
            def __exit__(self, *exc_details):
                self.check_exc(*exc_details)
        with self.exit_stack() as stack:
            stack.push(_expect_ok)
            self.assertIs(stack._exit_callbacks[-1][1], _expect_ok)
            cm = ExitCM(_expect_ok)
            stack.push(cm)
            self.assertIs(stack._exit_callbacks[-1][1].__self__, cm)
            stack.push(_suppress_exc)
            self.assertIs(stack._exit_callbacks[-1][1], _suppress_exc)
            cm = ExitCM(_expect_exc)
            stack.push(cm)
            self.assertIs(stack._exit_callbacks[-1][1].__self__, cm)
            stack.push(_expect_exc)
            self.assertIs(stack._exit_callbacks[-1][1], _expect_exc)
            stack.push(_expect_exc)
            self.assertIs(stack._exit_callbacks[-1][1], _expect_exc)
            1/0
    def test_enter_context(self):
        class TestCM(object):
            def __enter__(self):
                result.append(1)
            def __exit__(self, *exc_details):
                result.append(3)
        result = []
        cm = TestCM()
        with self.exit_stack() as stack:
            @stack.callback # Registered first => cleaned up last
            def _exit():
                result.append(4)
            self.assertIsNotNone(_exit)
            stack.enter_context(cm)
            self.assertIs(stack._exit_callbacks[-1][1].__self__, cm)
            result.append(2)
        self.assertEqual(result, [1, 2, 3, 4])
    def test_close(self):
        result = []
        with self.exit_stack() as stack:
            @stack.callback
            def _exit():
                result.append(1)
            self.assertIsNotNone(_exit)
            stack.close()
            result.append(2)
        self.assertEqual(result, [1, 2])
    def test_pop_all(self):
        # pop_all() transfers the callbacks to a fresh stack.
        result = []
        with self.exit_stack() as stack:
            @stack.callback
            def _exit():
                result.append(3)
            self.assertIsNotNone(_exit)
            new_stack = stack.pop_all()
            result.append(1)
        result.append(2)
        new_stack.close()
        self.assertEqual(result, [1, 2, 3])
    def test_exit_raise(self):
        with self.assertRaises(ZeroDivisionError):
            with self.exit_stack() as stack:
                stack.push(lambda *exc: False)
                1/0
    def test_exit_suppress(self):
        with self.exit_stack() as stack:
            stack.push(lambda *exc: True)
            1/0
    def test_exit_exception_chaining_reference(self):
        # Sanity check to make sure that ExitStack chaining matches
        # actual nested with statements
        class RaiseExc:
            def __init__(self, exc):
                self.exc = exc
            def __enter__(self):
                return self
            def __exit__(self, *exc_details):
                raise self.exc
        class RaiseExcWithContext:
            def __init__(self, outer, inner):
                self.outer = outer
                self.inner = inner
            def __enter__(self):
                return self
            def __exit__(self, *exc_details):
                try:
                    raise self.inner
                except:
                    raise self.outer
        class SuppressExc:
            def __enter__(self):
                return self
            def __exit__(self, *exc_details):
                type(self).saved_details = exc_details
                return True
        try:
            with RaiseExc(IndexError):
                with RaiseExcWithContext(KeyError, AttributeError):
                    with SuppressExc():
                        with RaiseExc(ValueError):
                            1 / 0
        except IndexError as exc:
            self.assertIsInstance(exc.__context__, KeyError)
            self.assertIsInstance(exc.__context__.__context__, AttributeError)
            # Inner exceptions were suppressed
            self.assertIsNone(exc.__context__.__context__.__context__)
        else:
            self.fail("Expected IndexError, but no exception was raised")
        # Check the inner exceptions
        inner_exc = SuppressExc.saved_details[1]
        self.assertIsInstance(inner_exc, ValueError)
        self.assertIsInstance(inner_exc.__context__, ZeroDivisionError)
    def test_exit_exception_chaining(self):
        # Ensure exception chaining matches the reference behaviour
        def raise_exc(exc):
            raise exc
        saved_details = None
        def suppress_exc(*exc_details):
            nonlocal saved_details
            saved_details = exc_details
            return True
        try:
            with self.exit_stack() as stack:
                stack.callback(raise_exc, IndexError)
                stack.callback(raise_exc, KeyError)
                stack.callback(raise_exc, AttributeError)
                stack.push(suppress_exc)
                stack.callback(raise_exc, ValueError)
                1 / 0
        except IndexError as exc:
            self.assertIsInstance(exc.__context__, KeyError)
            self.assertIsInstance(exc.__context__.__context__, AttributeError)
            # Inner exceptions were suppressed
            self.assertIsNone(exc.__context__.__context__.__context__)
        else:
            self.fail("Expected IndexError, but no exception was raised")
        # Check the inner exceptions
        inner_exc = saved_details[1]
        self.assertIsInstance(inner_exc, ValueError)
        self.assertIsInstance(inner_exc.__context__, ZeroDivisionError)
    def test_exit_exception_non_suppressing(self):
        # http://bugs.python.org/issue19092
        def raise_exc(exc):
            raise exc
        def suppress_exc(*exc_details):
            return True
        try:
            with self.exit_stack() as stack:
                stack.callback(lambda: None)
                stack.callback(raise_exc, IndexError)
        except Exception as exc:
            self.assertIsInstance(exc, IndexError)
        else:
            self.fail("Expected IndexError, but no exception was raised")
        try:
            with self.exit_stack() as stack:
                stack.callback(raise_exc, KeyError)
                stack.push(suppress_exc)
                stack.callback(raise_exc, IndexError)
        except Exception as exc:
            self.assertIsInstance(exc, KeyError)
        else:
            self.fail("Expected KeyError, but no exception was raised")
    def test_exit_exception_with_correct_context(self):
        # http://bugs.python.org/issue20317
        @contextmanager
        def gets_the_context_right(exc):
            try:
                yield
            finally:
                raise exc
        exc1 = Exception(1)
        exc2 = Exception(2)
        exc3 = Exception(3)
        exc4 = Exception(4)
        # The contextmanager already fixes the context, so prior to the
        # fix, ExitStack would try to fix it *again* and get into an
        # infinite self-referential loop
        try:
            with self.exit_stack() as stack:
                stack.enter_context(gets_the_context_right(exc4))
                stack.enter_context(gets_the_context_right(exc3))
                stack.enter_context(gets_the_context_right(exc2))
                raise exc1
        except Exception as exc:
            self.assertIs(exc, exc4)
            self.assertIs(exc.__context__, exc3)
            self.assertIs(exc.__context__.__context__, exc2)
            self.assertIs(exc.__context__.__context__.__context__, exc1)
            self.assertIsNone(
                exc.__context__.__context__.__context__.__context__)
    def test_exit_exception_with_existing_context(self):
        # Addresses a lack of test coverage discovered after checking in a
        # fix for issue 20317 that still contained debugging code.
        def raise_nested(inner_exc, outer_exc):
            try:
                raise inner_exc
            finally:
                raise outer_exc
        exc1 = Exception(1)
        exc2 = Exception(2)
        exc3 = Exception(3)
        exc4 = Exception(4)
        exc5 = Exception(5)
        try:
            with self.exit_stack() as stack:
                stack.callback(raise_nested, exc4, exc5)
                stack.callback(raise_nested, exc2, exc3)
                raise exc1
        except Exception as exc:
            self.assertIs(exc, exc5)
            self.assertIs(exc.__context__, exc4)
            self.assertIs(exc.__context__.__context__, exc3)
            self.assertIs(exc.__context__.__context__.__context__, exc2)
            self.assertIs(
                exc.__context__.__context__.__context__.__context__, exc1)
            self.assertIsNone(
                exc.__context__.__context__.__context__.__context__.__context__)
    def test_body_exception_suppress(self):
        def suppress_exc(*exc_details):
            return True
        try:
            with self.exit_stack() as stack:
                stack.push(suppress_exc)
                1/0
        except IndexError as exc:
            self.fail("Expected no exception, got IndexError")
    def test_exit_exception_chaining_suppress(self):
        with self.exit_stack() as stack:
            stack.push(lambda *exc: True)
            stack.push(lambda *exc: 1/0)
            stack.push(lambda *exc: {}[1])
    def test_excessive_nesting(self):
        # The original implementation would die with RecursionError here
        with self.exit_stack() as stack:
            for i in range(10000):
                stack.callback(int)
    def test_instance_bypass(self):
        # A non-callable instance attribute __exit__ bypasses type lookup:
        # enter_context must reject it, push must store it as-is.
        class Example(object): pass
        cm = Example()
        cm.__exit__ = object()
        stack = self.exit_stack()
        self.assertRaises(AttributeError, stack.enter_context, cm)
        stack.push(cm)
        self.assertIs(stack._exit_callbacks[-1][1], cm)
    def test_dont_reraise_RuntimeError(self):
        # https://bugs.python.org/issue27122
        class UniqueException(Exception): pass
        class UniqueRuntimeError(RuntimeError): pass
        @contextmanager
        def second():
            try:
                yield 1
            except Exception as exc:
                raise UniqueException("new exception") from exc
        @contextmanager
        def first():
            try:
                yield 1
            except Exception as exc:
                raise exc
        # The UniqueRuntimeError should be caught by second()'s exception
        # handler which chain raised a new UniqueException.
        with self.assertRaises(UniqueException) as err_ctx:
            with self.exit_stack() as es_ctx:
                es_ctx.enter_context(second())
                es_ctx.enter_context(first())
                raise UniqueRuntimeError("please no infinite loop.")
        exc = err_ctx.exception
        self.assertIsInstance(exc, UniqueException)
        self.assertIsInstance(exc.__context__, UniqueRuntimeError)
        self.assertIsNone(exc.__context__.__context__)
        self.assertIsNone(exc.__context__.__cause__)
        self.assertIs(exc.__cause__, exc.__context__)
class TestExitStack(TestBaseExitStack, unittest.TestCase):
    # Run the shared suite against the synchronous ExitStack.
    exit_stack = ExitStack
class TestRedirectStream:
    """Shared tests for redirect_stdout/redirect_stderr.

    Subclasses set ``redirect_stream`` (the CM class) and ``orig_stream``
    (the ``sys`` attribute name it redirects).
    """
    redirect_stream = None
    orig_stream = None
    @support.requires_docstrings
    def test_instance_docs(self):
        # Issue 19330: ensure context manager instances have good docstrings
        cm_docstring = self.redirect_stream.__doc__
        obj = self.redirect_stream(None)
        self.assertEqual(obj.__doc__, cm_docstring)
    def test_no_redirect_in_init(self):
        # Constructing the CM must not touch the stream; only __enter__ may.
        orig_stdout = getattr(sys, self.orig_stream)
        self.redirect_stream(None)
        self.assertIs(getattr(sys, self.orig_stream), orig_stdout)
    def test_redirect_to_string_io(self):
        f = io.StringIO()
        msg = "Consider an API like help(), which prints directly to stdout"
        orig_stdout = getattr(sys, self.orig_stream)
        with self.redirect_stream(f):
            print(msg, file=getattr(sys, self.orig_stream))
        self.assertIs(getattr(sys, self.orig_stream), orig_stdout)
        s = f.getvalue().strip()
        self.assertEqual(s, msg)
    def test_enter_result_is_target(self):
        f = io.StringIO()
        with self.redirect_stream(f) as enter_result:
            self.assertIs(enter_result, f)
    def test_cm_is_reusable(self):
        # The same CM instance may be used in two sequential with blocks.
        f = io.StringIO()
        write_to_f = self.redirect_stream(f)
        orig_stdout = getattr(sys, self.orig_stream)
        with write_to_f:
            print("Hello", end=" ", file=getattr(sys, self.orig_stream))
        with write_to_f:
            print("World!", file=getattr(sys, self.orig_stream))
        self.assertIs(getattr(sys, self.orig_stream), orig_stdout)
        s = f.getvalue()
        self.assertEqual(s, "Hello World!\n")
    def test_cm_is_reentrant(self):
        # The same CM instance may also be nested within itself.
        f = io.StringIO()
        write_to_f = self.redirect_stream(f)
        orig_stdout = getattr(sys, self.orig_stream)
        with write_to_f:
            print("Hello", end=" ", file=getattr(sys, self.orig_stream))
            with write_to_f:
                print("World!", file=getattr(sys, self.orig_stream))
        self.assertIs(getattr(sys, self.orig_stream), orig_stdout)
        s = f.getvalue()
        self.assertEqual(s, "Hello World!\n")
class TestRedirectStdout(TestRedirectStream, unittest.TestCase):
    # Concrete run of the shared redirection tests against sys.stdout.
    redirect_stream = redirect_stdout
    orig_stream = "stdout"
class TestRedirectStderr(TestRedirectStream, unittest.TestCase):
    # Concrete run of the shared redirection tests against sys.stderr.
    redirect_stream = redirect_stderr
    orig_stream = "stderr"
class TestSuppress(unittest.TestCase):
    """Tests for contextlib.suppress."""
    @support.requires_docstrings
    def test_instance_docs(self):
        # Issue 19330: instances must expose the class docstring.
        self.assertEqual(suppress().__doc__, suppress.__doc__)
    def test_no_result_from_enter(self):
        # __enter__ yields nothing useful.
        with suppress(ValueError) as enter_result:
            self.assertIsNone(enter_result)
    def test_no_exception(self):
        with suppress(ValueError):
            self.assertEqual(pow(2, 5), 32)
    def test_exact_exception(self):
        with suppress(TypeError):
            len(5)
    def test_exception_hierarchy(self):
        # Suppressing a base class also suppresses its subclasses.
        with suppress(LookupError):
            'Hello'[50]
    def test_other_exception(self):
        # Unrelated exceptions must propagate untouched.
        with self.assertRaises(ZeroDivisionError):
            with suppress(TypeError):
                1 / 0
    def test_no_args(self):
        # With nothing listed, nothing is suppressed.
        with self.assertRaises(ZeroDivisionError):
            with suppress():
                1 / 0
    def test_multiple_exception_args(self):
        with suppress(ZeroDivisionError, TypeError):
            1 / 0
        with suppress(ZeroDivisionError, TypeError):
            len(5)
    def test_cm_is_reentrant(self):
        ignore_exceptions = suppress(Exception)
        with ignore_exceptions:
            pass
        with ignore_exceptions:
            len(5)
        with ignore_exceptions:
            with ignore_exceptions:  # Check nested usage
                len(5)
            outer_continued = True
            1 / 0
        self.assertTrue(outer_continued)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| 31.455146 | 80 | 0.576957 |
9dc7f10269fd139752b5183078e9f7461746cc4a | 30,214 | py | Python | data_gen/test3.py | UILXELA/Cooperative-3D-Object-Detection-Using-Shared-Raw-LIDAR-Data | 84b3c792fcea5c618737855cd0d65c7b7b6e16f6 | [
"MIT"
] | 6 | 2021-03-04T06:16:55.000Z | 2022-01-11T07:12:16.000Z | data_gen/test3.py | UILXELA/Cooperative-3D-Object-Detection-Using-Shared-Raw-LIDAR-Data | 84b3c792fcea5c618737855cd0d65c7b7b6e16f6 | [
"MIT"
] | null | null | null | data_gen/test3.py | UILXELA/Cooperative-3D-Object-Detection-Using-Shared-Raw-LIDAR-Data | 84b3c792fcea5c618737855cd0d65c7b7b6e16f6 | [
"MIT"
] | 2 | 2021-04-07T01:43:19.000Z | 2021-12-06T14:47:36.000Z | #!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.
import math
import glob
import os
import sys
import argparse
import logging
import random
from constants import *
try:
sys.path.append(glob.glob('/home/alex/thesis_ssd/CARLA_nightly/PythonAPI/carla/dist/carla-*%d.%d-%s.egg' % (
sys.version_info.major,
sys.version_info.minor,
'win-amd64' if os.name == 'nt' else 'linux-x86_64'))[0])
except IndexError:
pass
import carla
import random
try:
import pygame
except ImportError:
raise RuntimeError('cannot import pygame, make sure pygame package is installed')
try:
import numpy as np
except ImportError:
raise RuntimeError('cannot import numpy, make sure numpy package is installed')
try:
import queue
except ImportError:
import Queue as queue
from utils import vector3d_to_array, degrees_to_radians
from datadescriptor import KittiDescriptor
from dataexport import *
from bbox import create_kitti_datapoint
#from carla_utils import KeyboardHelper, MeasurementsDisplayHelper
from constants import *
import lidar_utils # from lidar_utils import project_point_cloud
import time
from math import cos, sin, ceil
from image_converter import *
class CarlaSyncMode(object):
    """
    Context manager to synchronize output from different sensors. Synchronous
    mode is enabled as long as we are inside this context

    with CarlaSyncMode(world, sensors) as sync_mode:
        while True:
            data = sync_mode.tick(timeout=1.0)

    tick() returns one datum per registered queue (world snapshot first,
    then one per sensor), all guaranteed to belong to the same frame.
    """
    def __init__(self, world, *sensors, **kwargs):
        self.world = world
        self.sensors = sensors
        self.frame = None
        # Fixed simulation step; defaults to 10 FPS.
        self.delta_seconds = 1.0 / kwargs.get('fps', 10)
        self._queues = []
        self._settings = None
    def __enter__(self):
        # Remember current settings so __exit__ can restore them, then
        # switch the world to synchronous fixed-step mode.
        self._settings = self.world.get_settings()
        self.frame = self.world.apply_settings(carla.WorldSettings(
            no_rendering_mode=False,
            synchronous_mode=True,
            fixed_delta_seconds=self.delta_seconds))
        def make_queue(register_event):
            # Funnel every callback-based event source into its own queue.
            q = queue.Queue()
            register_event(q.put)
            self._queues.append(q)
        make_queue(self.world.on_tick)
        for sensor in self.sensors:
            make_queue(sensor.listen)
        return self
    def tick(self, timeout):
        # NOTE(review): synchronous mode normally requires world.tick()
        # (commented out below); wait_for_tick() is the asynchronous API.
        # Confirm this combination behaves as intended on the target
        # CARLA version.
        #self.frame = self.world.tick()
        world_snapshot = self.world.wait_for_tick()
        self.frame=world_snapshot.frame
        data = [self._retrieve_data(q, timeout) for q in self._queues]
        # All sensor data must belong to the frame we just advanced to.
        assert all(x.frame == self.frame for x in data)
        return data
    def __exit__(self, *args, **kwargs):
        # Restore whatever settings were active before entering.
        self.world.apply_settings(self._settings)
    def _retrieve_data(self, sensor_queue, timeout):
        # Drain stale entries until we find the datum for the current frame.
        while True:
            data = sensor_queue.get(timeout=timeout)
            if data.frame == self.frame:
                return data
# KITTI-style dataset output layout.
PHASE = "training"
OUTPUT_FOLDER = os.path.join("/home/alex/thesis_ssd/CARLA_nightly/test_test/_out3", PHASE)
folders = ['calib', 'image_2', 'label_2', 'velodyne', 'planes', 'locational']
def maybe_create_dir(path):
    """Create directory *path* (including parents) if it does not exist."""
    # Bug fix: the original body tested and created the *global* variable
    # ``directory`` instead of the ``path`` argument, so the function only
    # worked by accident of the calling loop's binding.
    os.makedirs(path, exist_ok=True)
for folder in folders:
    directory = os.path.join(OUTPUT_FOLDER, folder)
    maybe_create_dir(directory)
""" DATA SAVE PATHS """
GROUNDPLANE_PATH = os.path.join(OUTPUT_FOLDER, 'planes/{0:06}.txt')
LIDAR_PATH = os.path.join(OUTPUT_FOLDER, 'velodyne/{0:06}.bin')
LABEL_PATH = os.path.join(OUTPUT_FOLDER, 'label_2/{0:06}.txt')
IMAGE_PATH = os.path.join(OUTPUT_FOLDER, 'image_2/{0:06}.png')
CALIBRATION_PATH = os.path.join(OUTPUT_FOLDER, 'calib/{0:06}.txt')
LOCATIONAL_PATH = os.path.join(OUTPUT_FOLDER, 'locational/{0:06}.txt')
def draw_image(surface, image, blend=False):
    """Blit a raw HxWx3 uint8 byte buffer onto *surface* at the origin.

    When *blend* is true the image is drawn semi-transparent.
    """
    pixels = np.frombuffer(image, dtype=np.uint8).reshape(
        (WINDOW_HEIGHT, WINDOW_WIDTH, 3))
    # pygame surfaces are indexed (width, height), so swap the axes.
    img_surface = pygame.surfarray.make_surface(pixels.swapaxes(0, 1))
    if blend:
        img_surface.set_alpha(100)
    surface.blit(img_surface, (0, 0))
def get_font():
    """Return a 14pt pygame Font, preferring 'ubuntumono' when available."""
    available = [x for x in pygame.font.get_fonts()]
    name = 'ubuntumono' if 'ubuntumono' in available else available[0]
    return pygame.font.Font(pygame.font.match_font(name), 14)
def should_quit():
    """Return True when the user closed the window or released Escape."""
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            return True
        if event.type == pygame.KEYUP and event.key == pygame.K_ESCAPE:
            return True
    return False
def sensor_setting(world):
    """Create the sensor blueprints and calibration matrices for the dataset.

    Configures an RGB camera, a depth camera and a ray-cast LIDAR, plus the
    camera intrinsic matrix ``k`` and the sensor-to-car transforms (with the
    Unreal/KITTI axis conversions baked in).

    Returns (k, cam_rgb_bp, cam_depth_bp, lidar_bp, camera_to_car_transform,
    lidar_to_car_transform, rgb_transform, depth_transform, lidar_transform).
    """
    cam_rgb_bp = world.get_blueprint_library().find('sensor.camera.rgb')
    # Modify the attributes of the cam_rgb_bp to set image resolution and field of view.
    cam_rgb_bp.set_attribute('image_size_x', str(WINDOW_WIDTH))
    cam_rgb_bp.set_attribute('image_size_y', str(WINDOW_HEIGHT))
    cam_rgb_bp.set_attribute('fov', '90.0')
    # Set the time in seconds between sensor captures
    #print cam_rgb_bp.get_attribute('sensor_tick')
    #cam_rgb_bp.set_attribute('sensor_tick', '1.0')
    # Provide the position of the sensor relative to the vehicle.
    rgb_transform = carla.Transform(carla.Location(x=0, y=0, z=CAMERA_HEIGHT_POS),carla.Rotation(yaw=0,pitch=0))
    cam_depth_bp = world.get_blueprint_library().find('sensor.camera.depth')
    # Modify the attributes of the cam_depth_bp to set image resolution and field of view.
    cam_depth_bp.set_attribute('image_size_x', str(WINDOW_WIDTH))
    cam_depth_bp.set_attribute('image_size_y', str(WINDOW_HEIGHT))
    cam_depth_bp.set_attribute('fov', '90.0')
    # Set the time in seconds between sensor captures
    #cam_depth_bp.set_attribute('sensor_tick', '1.0')
    # Provide the position of the sensor relative to the vehicle.
    depth_transform = carla.Transform(carla.Location(x=0, y=0, z=CAMERA_HEIGHT_POS),carla.Rotation(yaw=0,pitch=0))
    lidar_bp = world.get_blueprint_library().find('sensor.lidar.ray_cast')
    lidar_bp.set_attribute('channels', '40')
    lidar_bp.set_attribute('range', str(MAX_RENDER_DEPTH_IN_METERS*100)) # m to cm
    lidar_bp.set_attribute('points_per_second', '720000')
    lidar_bp.set_attribute('rotation_frequency', '10.0')
    lidar_bp.set_attribute('upper_fov', '7')
    lidar_bp.set_attribute('lower_fov', '-16')
    #lidar_bp.set_attribute('sensor_tick', '0.0')
    lidar_transform = carla.Transform(carla.Location(x=0, y=0, z=LIDAR_HEIGHT_POS),carla.Rotation(yaw=0,pitch=0))
    # (Intrinsic) K Matrix
    # | f 0 Cu
    # | 0 f Cv
    # | 0 0 1
    # (Cu, Cv) is center of image
    k = np.identity(3)
    k[0, 2] = WINDOW_WIDTH_HALF
    k[1, 2] = WINDOW_HEIGHT_HALF
    # Focal length from the 90-degree horizontal field of view.
    f = WINDOW_WIDTH / \
        (2.0 * math.tan(90.0 * math.pi / 360.0))
    k[0, 0] = k[1, 1] = f
    # Compose the camera/LIDAR mounting transforms with the axis-flip
    # matrices that convert from Unreal's left-handed coordinate frame.
    camera_to_car_transform = get_matrix(rgb_transform)
    to_unreal_transform = get_matrix(carla.Transform(carla.Location(x=0, y=0, z=0),carla.Rotation(yaw=90,roll=-90)), -1.0,1.0,1.0)
    camera_to_car_transform = np.dot(camera_to_car_transform,to_unreal_transform)
    lidar_to_car_transform = get_matrix(lidar_transform)
    to_unreal_transform2 = get_matrix(carla.Transform(carla.Location(x=0, y=0, z=0),carla.Rotation(yaw=90)), 1.0,1.0,-1.0)
    lidar_to_car_transform = np.dot(lidar_to_car_transform,to_unreal_transform2)
    #returning as matrices
    return k, cam_rgb_bp, cam_depth_bp, lidar_bp, camera_to_car_transform, lidar_to_car_transform, rgb_transform, depth_transform, lidar_transform
def get_matrix(transform, sc_x=1.0, sc_y=1.0, sc_z=1.0):
    """Build a 4x4 homogeneous transform matrix from a carla.Transform.

    The optional per-axis scale factors allow axis flips (pass -1.0) when
    converting between coordinate conventions.
    """
    yaw = np.radians(transform.rotation.yaw)
    roll = np.radians(transform.rotation.roll)
    pitch = np.radians(transform.rotation.pitch)
    c_y, s_y = np.cos(yaw), np.sin(yaw)
    c_r, s_r = np.cos(roll), np.sin(roll)
    c_p, s_p = np.cos(pitch), np.sin(pitch)
    matrix = np.matrix(np.identity(4))
    # Translation column.
    matrix[0, 3] = transform.location.x
    matrix[1, 3] = transform.location.y
    matrix[2, 3] = transform.location.z
    # Rotation block (roll/pitch/yaw) with optional per-axis scaling.
    matrix[0, 0] = sc_x * c_p * c_y
    matrix[0, 1] = sc_y * (c_y * s_p * s_r - s_y * c_r)
    matrix[0, 2] = -sc_z * (c_y * s_p * c_r + s_y * s_r)
    matrix[1, 0] = sc_x * s_y * c_p
    matrix[1, 1] = sc_y * (s_y * s_p * s_r + c_y * c_r)
    matrix[1, 2] = sc_z * (-s_y * s_p * c_r + c_y * s_r)
    matrix[2, 0] = sc_x * s_p
    matrix[2, 1] = -sc_y * (c_p * s_r)
    matrix[2, 2] = sc_z * (c_p * c_r)
    return matrix
def transform_points(points, txm_mat):
    """Apply a 4x4 homogeneous transform to an (N, 3) array of points.

    Returns the transformed points in the same (N, 3) layout.
    """
    # Homogeneous coordinates: [[X0..Xn], [Y0..Yn], [Z0..Zn], [1..1]].
    homogeneous = np.append(points.transpose(),
                            np.ones((1, points.shape[0])), axis=0)
    transformed = txm_mat * homogeneous
    # Drop the homogeneous row and restore the (N, 3) layout.
    return transformed[0:3].transpose()
def generate_datapoints(world,image, intrinsic, extrinsic, depth_image,player, agents, gen_time):
    """Build KITTI label datapoints for the current frame.

    Returns ``(image, datapoints)`` where *image* may have bounding boxes
    drawn onto it. When ``gen_time`` is false, returns the copied image
    with an empty datapoint list. Falls through (implicitly returning
    None) only if *image* is None — callers appear to rely on image being
    non-None; TODO confirm.
    """
    datapoints = []
    image = image.copy()
    # Stores all datapoints for the current frames
    #print(agents, flush=True)
    if image is not None and gen_time:
        # Iterates every actor in the world rather than the passed-in
        # ``agents`` list (older variant kept below, commented out).
        #for agent_id in agents:
        # agent=world.get_actor(agent_id)
        for agent in world.get_actors():
            if "vehicle" not in agent.type_id:
                continue
            # NOTE(review): class filtering is disabled — the original
            # ``should_detect_class(agent) and GEN_DATA`` guard was
            # replaced by ``if True``; confirm this is intentional.
            #if should_detect_class(agent) and GEN_DATA:
            if True:
                #print("asdsadsadsadsadsadsad", flush=True)
                image, kitti_datapoint = create_kitti_datapoint(
                    agent, intrinsic, extrinsic, image, depth_image, player)
                if kitti_datapoint:
                    datapoints.append(kitti_datapoint)
        if image is not None and datapoints is not None:
            return image, datapoints
        else:
            logging.debug(
                "Datapoints or Image is None during gen time")
    else:
        if image is not None:
            return image, datapoints
        else:
            logging.debug(
                "Datapoints or Image is None")
def processing(world,image_rgb, image_depth, image_lidar, intrinsic, player,agents,camera_to_car_transform,lidar_to_car_transform,gen_time):
    """Turn one frame's raw sensor output into labeled training data.

    Converts the RGB image, generates KITTI datapoints, and transforms the
    raw LIDAR buffer into the car frame compensated for vehicle pitch/roll.

    Returns ``(image, datapoints, point_cloud, extrinsic)``, or implicitly
    None when both camera images are missing or the final image is None.
    """
    if image_rgb is not None and image_depth is not None:
        # Convert main image
        image = to_rgb_array(image_rgb)
        # Camera extrinsic = vehicle pose composed with camera mounting.
        extrinsic=get_matrix(player.get_transform())*camera_to_car_transform
        # Retrieve and draw datapoints
        #IMAGE IS AN ARRAY!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        image, datapoints = generate_datapoints(world,image, intrinsic, extrinsic, image_depth, player, agents,gen_time)
        #Lidar signal processing
        # Calculation to shift bboxes relative to pitch and roll of player
        rotation = player.get_transform().rotation
        pitch, roll, yaw = rotation.pitch, rotation.roll, rotation.yaw
        # Since measurements are in degrees, convert to radians
        pitch = degrees_to_radians(pitch)
        roll = degrees_to_radians(roll)
        yaw = degrees_to_radians(yaw)
        #print('pitch: ', pitch)
        #print('roll: ', roll)
        #print('yaw: ', yaw)
        # Rotation matrix for pitch
        rotP = np.array([[cos(pitch), 0, sin(pitch)],
                         [0, 1, 0],
                         [-sin(pitch), 0, cos(pitch)]])
        # Rotation matrix for roll
        rotR = np.array([[1, 0, 0],
                         [0, cos(roll), -sin(roll)],
                         [0, sin(roll), cos(roll)]])
        # combined rotation matrix, must be in order roll, pitch, yaw
        rotRP = np.matmul(rotR, rotP)
        # Take the points from the point cloud and transform to car space
        # Raw buffer is packed float32 (x, y, z, intensity) quadruplets.
        pc_arr_cpy=np.frombuffer(image_lidar.raw_data, dtype=np.dtype('f4'))
        #print(pc_arr.shape)
        pc_arr=np.reshape(pc_arr_cpy,(int(pc_arr_cpy.shape[0]/4),4))[:,:3].copy()
        #print(pc_arr.shape)
        # Axis conversion: flip x/z and swap x/y to match the car frame.
        pc_arr[:,[0,2]]=-pc_arr[:,[0,2]]
        pc_arr[:,[0,1]]=pc_arr[:,[1,0]]
        #print(pc_arr.shape)
        point_cloud = np.array(transform_points(
            pc_arr,lidar_to_car_transform))
        #print(lidar_to_car_transform)
        point_cloud[:, 2] -= LIDAR_HEIGHT_POS
        #print(point_cloud.shape)
        # Undo the vehicle's pitch/roll so the cloud is gravity-aligned.
        point_cloud = np.matmul(rotRP, point_cloud.T).T
        # print(self._lidar_to_car_transform.matrix)
        # print(self._camera_to_car_transform.matrix)
        # Draw lidar
        # Camera coordinate system is left, up, forwards
        if VISUALIZE_LIDAR:
            # Transform to camera space by the inverse of camera_to_car transform
            point_cloud_cam = transform_points(point_cloud,np.linalg.inv(camera_to_car_transform))
            point_cloud_cam[:, 1] += LIDAR_HEIGHT_POS
            image = lidar_utils.project_point_cloud(
                image, point_cloud_cam, intrinsic, 1)
        #determine whether to save data
        #TO_DO!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        if image is not None:
            return image, datapoints, point_cloud, extrinsic
        else:
            logging.debug(
                "Image is None")
def save_training_files(player, image, datapoints, point_cloud,intrinsic,extrinsic,captured_frame_no):
    """Write one complete KITTI-format training sample to disk.

    Saves ground plane, reference files, RGB image, labels, LIDAR cloud,
    calibration matrices and the ego-vehicle pose, all keyed by
    ``captured_frame_no``.
    """
    logging.warning("Test3 Attempting to save frame no: {}".format(
        captured_frame_no))
    groundplane_fname = GROUNDPLANE_PATH.format(captured_frame_no)
    lidar_fname = LIDAR_PATH.format(captured_frame_no)
    kitti_fname = LABEL_PATH.format(captured_frame_no)
    img_fname = IMAGE_PATH.format(captured_frame_no)
    calib_filename = CALIBRATION_PATH.format(captured_frame_no)
    loc_filename = LOCATIONAL_PATH.format(captured_frame_no)
    save_groundplanes(
        groundplane_fname, player, LIDAR_HEIGHT_POS)
    save_ref_files(OUTPUT_FOLDER, captured_frame_no)
    save_image_data(
        img_fname, to_rgb_array(image))
    save_kitti_data(kitti_fname, datapoints)
    save_lidar_data(lidar_fname, point_cloud,
                    LIDAR_HEIGHT_POS, LIDAR_DATA_FORMAT)
    save_calibration_matrices(
        calib_filename, intrinsic, extrinsic)
    save_locational(loc_filename,player)
def should_detect_class(agent):
    """ Returns true if the agent is of the classes that we want to detect.
    Note that Carla has class types in lowercase """
    return any(class_type in agent.type_id for class_type in CLASSES_TO_LABEL)
def current_captured_frame_num():
    # Figures out which frame number we currently are on
    # This is run once, when we start the simulator in case we already have a dataset.
    # The user can then choose to overwrite or append to the dataset.
    label_path = os.path.join(OUTPUT_FOLDER, 'label_2/')
    print(os.path.abspath(label_path))
    num_existing_data_files = len(
        [name for name in os.listdir(label_path) if name.endswith('.txt')])
    print(num_existing_data_files)
    if num_existing_data_files == 0:
        return 0
    # NOTE(review): the interactive prompt is disabled — the answer is
    # hard-coded to "A" (append), so the overwrite branch below is dead
    # code unless the input() call is restored.
    #answer = input(
    #    "There already exists a dataset in {}. Would you like to (O)verwrite or (A)ppend the dataset? (O/A)".format(OUTPUT_FOLDER))
    answer = "A"
    print("There already exists a dataset in {}. Would you like to (O)verwrite or (A)ppend the dataset? (O/A)".format(OUTPUT_FOLDER))
    if answer.upper() == "O":
        logging.info(
            "Resetting frame number to 0 and overwriting existing")
        # Overwrite the data
        return 0
    logging.info("Continuing recording data on frame number {}".format(
        num_existing_data_files))
    return num_existing_data_files
def pause_time(prev_save, client):
    """Freeze every vehicle when the next save instant is (almost) due.

    Disables autopilot and zeroes the velocity of all vehicles so the
    captured frame is consistent; otherwise does nothing.
    """
    now = ceil(time.time())
    if now - prev_save < SAVE_GAP - 1:
        return
    vehicles = list(client.get_world().get_actors().filter('vehicle.*'))
    commands = []
    for actor in vehicles:
        commands.append(carla.command.SetAutopilot(actor.id, False))
    for actor in vehicles:
        commands.append(carla.command.ApplyVelocity(actor.id, carla.Vector3D(0)))
    client.apply_batch(commands)
    print("Freezed all cars at " + (time.asctime( time.localtime(now ))))
def restore_ap(client):
    """Hand control of every vehicle back to the autopilot after a freeze."""
    commands = [carla.command.SetAutopilot(actor.id, True)
                for actor in client.get_world().get_actors().filter('vehicle.*')]
    client.apply_batch(commands)
    print("AP restored")
def saving_time(prev_time):
    """Return the current (rounded-up) epoch second when it is time to save.

    A save instant is a second that is a multiple of SAVE_GAP and has not
    already been reported (``now != prev_time``); otherwise returns None.
    """
    now = ceil(time.time())
    if now % SAVE_GAP != 0 or now == prev_time:
        return None
    print(now)
    print(prev_time)
    print("Test3 Saving data at " + (time.asctime( time.localtime(now ))))
    return now
def main():
    """Entry point: spawn the ego sensor rig plus NPC traffic and record
    KITTI-style training data in synchronous mode until the user quits.

    BUG FIX: the client now honours the --host/--port command-line options;
    the original hard-coded ``carla.Client('localhost', 2000)`` and silently
    ignored them. Dead commented-out code and unused locals were removed.
    """
    argparser = argparse.ArgumentParser(
        description=__doc__)
    argparser.add_argument(
        '--host',
        metavar='H',
        default='127.0.0.1',
        help='IP of the host server (default: 127.0.0.1)')
    argparser.add_argument(
        '-p', '--port',
        metavar='P',
        default=2000,
        type=int,
        help='TCP port to listen to (default: 2000)')
    argparser.add_argument(
        '-n', '--number-of-vehicles',
        metavar='N',
        default=10,
        type=int,
        help='number of vehicles (default: 10)')
    argparser.add_argument(
        '-w', '--number-of-walkers',
        metavar='W',
        default=50,
        type=int,
        help='number of walkers (default: 50)')
    argparser.add_argument(
        '--safe',
        action='store_true',
        help='avoid spawning vehicles prone to accidents')
    argparser.add_argument(
        '--filterv',
        metavar='PATTERN',
        default='vehicle.audi.*',
        help='vehicles filter (default: "vehicle.audi.*")')
    argparser.add_argument(
        '--filterw',
        metavar='PATTERN',
        default='walker.pedestrian.*',
        help='pedestrians filter (default: "walker.pedestrian.*")')
    args = argparser.parse_args()

    prev_time = 0
    actor_list = []
    pygame.init()

    display = pygame.display.set_mode(
        (WINDOW_WIDTH, WINDOW_HEIGHT),
        pygame.HWSURFACE | pygame.DOUBLEBUF)
    font = get_font()
    clock = pygame.time.Clock()

    # Honour the parsed command-line options (previously hard-coded).
    client = carla.Client(args.host, args.port)
    client.set_timeout(5.0)

    world = client.get_world()

    try:
        m = world.get_map()
        # Fixed spawn location for the ego rig.
        start_pose = carla.Transform(carla.Location(x=-97, y=-0.4, z=0))
        waypoint = m.get_waypoint(start_pose.location)

        blueprint_library = world.get_blueprint_library()

        vehicles_list = []
        walkers_list = []
        all_id = []

        # NOTE(review): player_bp is never used — the ego actor below is
        # spawned from the RGB camera blueprint; confirm that is intended.
        player_bp = blueprint_library.find('vehicle.tesla.model3')
        intrinsic, cam_rgb_bp, cam_depth_bp, lidar_bp, camera_to_car_transform, lidar_to_car_transform, rgb_transform, depth_transform, lidar_transform = sensor_setting(world)

        # The "vehicle" actor acts as the attachment root for the sensors.
        vehicle = world.spawn_actor(
            cam_rgb_bp,
            start_pose)
        vehicle.set_transform(waypoint.transform)
        actor_list.append(vehicle)
        print(start_pose)

        camera_rgb = world.spawn_actor(
            cam_rgb_bp,
            rgb_transform,
            attach_to=vehicle)
        actor_list.append(camera_rgb)

        camera_depth = world.spawn_actor(
            cam_depth_bp,
            depth_transform,
            attach_to=vehicle)
        actor_list.append(camera_depth)

        lidar = world.spawn_actor(
            lidar_bp,
            lidar_transform,
            attach_to=vehicle)
        actor_list.append(lidar)

        # --------------
        # Spawning NPCs
        # --------------
        spawn_points = m.get_spawn_points()
        number_of_spawn_points = len(spawn_points)

        if args.number_of_vehicles < number_of_spawn_points:
            random.shuffle(spawn_points)
        elif args.number_of_vehicles > number_of_spawn_points:
            msg = 'requested %d vehicles, but could only find %d spawn points'
            logging.warning(msg, args.number_of_vehicles, number_of_spawn_points)
            args.number_of_vehicles = number_of_spawn_points

        # @todo cannot import these directly.
        SpawnActor = carla.command.SpawnActor
        SetAutopilot = carla.command.SetAutopilot
        FutureActor = carla.command.FutureActor

        # --------------
        # Spawn vehicles
        # --------------
        blueprints = world.get_blueprint_library().filter(args.filterv)
        batch = []
        for n, transform in enumerate(spawn_points):
            if n >= args.number_of_vehicles:
                break
            blueprint = random.choice(blueprints)
            if blueprint.has_attribute('color'):
                color = random.choice(blueprint.get_attribute('color').recommended_values)
                blueprint.set_attribute('color', color)
            if blueprint.has_attribute('driver_id'):
                driver_id = random.choice(blueprint.get_attribute('driver_id').recommended_values)
                blueprint.set_attribute('driver_id', driver_id)
            blueprint.set_attribute('role_name', 'autopilot')
            batch.append(SpawnActor(blueprint, transform).then(SetAutopilot(FutureActor, True)))

        for response in client.apply_batch_sync(batch):
            if response.error:
                logging.error(response.error)
            else:
                vehicles_list.append(response.actor_id)

        # -------------
        # Spawn Walkers
        # -------------
        blueprintsWalkers = world.get_blueprint_library().filter(args.filterw)
        # 1. take all the random locations to spawn
        spawn_points = []
        for i in range(args.number_of_walkers):
            spawn_point = carla.Transform()
            loc = world.get_random_location_from_navigation()
            if loc is not None:
                spawn_point.location = loc
                spawn_points.append(spawn_point)
        # 2. we spawn the walker object
        batch = []
        for spawn_point in spawn_points:
            walker_bp = random.choice(blueprintsWalkers)
            # set as not invincible
            if walker_bp.has_attribute('is_invincible'):
                walker_bp.set_attribute('is_invincible', 'false')
            batch.append(SpawnActor(walker_bp, spawn_point))
        results = client.apply_batch_sync(batch, True)
        for i in range(len(results)):
            if results[i].error:
                logging.error(results[i].error)
            else:
                walkers_list.append({"id": results[i].actor_id})
        # 3. we spawn the walker controller
        batch = []
        walker_controller_bp = world.get_blueprint_library().find('controller.ai.walker')
        for i in range(len(walkers_list)):
            batch.append(SpawnActor(walker_controller_bp, carla.Transform(), walkers_list[i]["id"]))
        results = client.apply_batch_sync(batch, True)
        for i in range(len(results)):
            if results[i].error:
                logging.error(results[i].error)
            else:
                walkers_list[i]["con"] = results[i].actor_id
        # 4. we put altogether the walkers and controllers id to get the objects from their id
        for i in range(len(walkers_list)):
            all_id.append(walkers_list[i]["con"])
            all_id.append(walkers_list[i]["id"])
        all_actors = world.get_actors(all_id)

        # wait for a tick to ensure client receives the last transform of the walkers we have just created
        world.wait_for_tick()

        # 5. initialize each controller and set target to walk to
        # (list is [controller, actor, controller, actor ...])
        for i in range(0, len(all_id), 2):
            # start walker
            all_actors[i].start()
            # set walk to random point
            all_actors[i].go_to_location(world.get_random_location_from_navigation())
            # random max speed between 1 and 2 (default is 1.4 m/s)
            all_actors[i].set_max_speed(1 + random.random())

        captured_frame_no = current_captured_frame_num()
        print(lidar)
        with CarlaSyncMode(world, lidar, camera_rgb, camera_depth, fps=10) as sync_mode:
            while True:
                if should_quit():
                    return
                clock.tick()

                # Advance the simulation and wait for the sensor data.
                snapshot, image_lidar, image_rgb, image_depth = sync_mode.tick(timeout=5.0)

                now = saving_time(prev_time)
                image, datapoints, point_cloud, extrinsic = processing(
                    world, image_rgb, image_depth, image_lidar, intrinsic, vehicle,
                    vehicles_list, camera_to_car_transform, lidar_to_car_transform,
                    (now is not None))

                # Freeze traffic shortly before each save instant so the
                # captured frame is self-consistent.
                pause_time(prev_time, client)

                # Save the frame when it is a save instant and the lidar
                # actually returned points.
                if now is not None and point_cloud.shape[0] > 0 and datapoints is not None:
                    save_training_files(vehicle, image_rgb, datapoints, point_cloud,
                                        intrinsic, extrinsic, captured_frame_no)
                    captured_frame_no += 1
                    prev_time = now
                    restore_ap(client)

    finally:
        print('\ndestroying %d vehicles' % len(vehicles_list))
        client.apply_batch([carla.command.DestroyActor(x) for x in vehicles_list])

        # stop walker controllers (list is [controler, actor, controller, actor ...])
        for i in range(0, len(all_id), 2):
            all_actors[i].stop()

        print('\ndestroying %d walkers' % len(walkers_list))
        client.apply_batch([carla.command.DestroyActor(x) for x in all_id])

        print('destroying actors.')
        for actor in actor_list:
            actor.destroy()
        pygame.quit()
        print('done.')
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # Allow a clean Ctrl-C exit without dumping a traceback.
        print('\nCancelled by user. Bye!')
| 38.39136 | 213 | 0.624611 |
b8ff61eaa94c563e39e5005924c400ab1d5b2ef3 | 485 | py | Python | models/__init__.py | Skhaki18/APS360 | 4a386fc8ee14802bcb5ef25eefc234160673896a | [
"MIT"
] | 1 | 2022-02-09T20:35:05.000Z | 2022-02-09T20:35:05.000Z | models/__init__.py | Skhaki18/APS360 | 4a386fc8ee14802bcb5ef25eefc234160673896a | [
"MIT"
] | null | null | null | models/__init__.py | Skhaki18/APS360 | 4a386fc8ee14802bcb5ef25eefc234160673896a | [
"MIT"
] | null | null | null | from models.pretrain import pretrainedResNet, pretrainedMobileNetLarge,pretrainedMobileLowRes, fine_tuningResNet, fine_tuningMobileNet
def get_model(model_type, **kwargs):
    """Look up a model constructor by its short name and build it.

    Any extra keyword arguments are forwarded to the constructor.
    Raises KeyError for an unknown ``model_type``.
    """
    registry = {
        # 'ResNet': pretrainedResNet,
        'resnet50': pretrainedResNet,
        'mobile1': pretrainedMobileNetLarge,
        'mobile2': pretrainedMobileLowRes,
        'finetuneRN': fine_tuningResNet,
        'finetuneMobile': fine_tuningMobileNet,
    }
    constructor = registry[model_type]
    return constructor(**kwargs)
85ad17f6bbe7a36a8065e804cc6a6bd07f64a755 | 1,118 | py | Python | winlogbeat/tests/system/test_config.py | dneg/beats | d4817cdfd5b451093db8bfbf91d3493b84de23d0 | [
"Apache-2.0"
] | null | null | null | winlogbeat/tests/system/test_config.py | dneg/beats | d4817cdfd5b451093db8bfbf91d3493b84de23d0 | [
"Apache-2.0"
] | null | null | null | winlogbeat/tests/system/test_config.py | dneg/beats | d4817cdfd5b451093db8bfbf91d3493b84de23d0 | [
"Apache-2.0"
] | 1 | 2019-11-29T12:31:15.000Z | 2019-11-29T12:31:15.000Z | from winlogbeat import BaseTest
"""
Contains tests for config parsing.
"""
class Test(BaseTest):
    # System tests for winlogbeat's -configtest flag: a valid config must
    # exit 0, a broken one must exit non-zero and log the offending value.

    def test_valid_config(self):
        """
        With -configtest and a valid configuration, the process should
        exit with a zero return code.
        """
        self.render_config_template(
            ignore_older="1h",
            event_logs=[
                {"name": "Application", "ignore_older": "48h"}
            ]
        )

        proc = self.start_beat(extra_args=["-configtest"])
        exit_code = proc.wait()
        assert exit_code == 0

    def test_invalid_ignore_older(self):
        """
        With -configtest and an error in the configuration (a malformed
        ignore_older duration), it should return a non-zero error code.
        """
        self.render_config_template(
            ignore_older="1 hour",
            event_logs=[
                {"name": "Application"}
            ]
        )

        proc = self.start_beat(extra_args=["-configtest"])
        exit_code = proc.wait()
        assert exit_code == 1
        # The log must name the rejected value so the user can fix it.
        assert self.log_contains(
            "Invalid top level ignore_older value '1 hour'")
c4b8065b96b697f577a41ba80648f28d69efe6b7 | 13,843 | py | Python | tests/test_unit/test_detect_type.py | baldurmen/CleverCSV | a7c7c812f2dc220b8f45f3409daac6e933bc44a2 | [
"MIT"
] | 989 | 2019-02-22T12:14:17.000Z | 2022-03-28T01:33:20.000Z | tests/test_unit/test_detect_type.py | baldurmen/CleverCSV | a7c7c812f2dc220b8f45f3409daac6e933bc44a2 | [
"MIT"
] | 27 | 2019-12-22T00:09:28.000Z | 2022-03-30T22:45:50.000Z | tests/test_unit/test_detect_type.py | baldurmen/CleverCSV | a7c7c812f2dc220b8f45f3409daac6e933bc44a2 | [
"MIT"
] | 55 | 2019-10-22T13:09:53.000Z | 2022-01-03T04:28:26.000Z | # -*- coding: utf-8 -*-
"""
Unit tests for the type detection.
Author: Gertjan van den Burg
"""
import unittest
from clevercsv.detect_type import TypeDetector
from clevercsv.detect_type import type_score
from clevercsv.dialect import SimpleDialect
class TypeDetectorTestCase(unittest.TestCase):
    """Exercises TypeDetector's per-type predicates and the dialect-level
    type_score helper."""

    def setUp(self):
        # Fresh detector per test to guard against cross-test contamination.
        self.td = TypeDetector()
# NUMBERS
def test_number(self):
yes_number = [
"1",
"2",
"34",
"56",
"123",
"789",
"132.",
"0.123",
"0.10800212",
"0.1231e-087",
"10.789e09",
"123.256e+08",
"0.1231E-087",
"10.789E09",
"123.256E+08",
"123,456,798.00",
"23,456.798",
"1,234.56",
"1,123.",
"1e5",
"1.23e5",
"-1",
"-2",
"-34",
"-56",
"-123",
"-789",
"-0.123",
"-0.10800212",
"-0.1231e-087",
"-10.789e09",
"-123.256e+08",
"-0.1231E-087",
"-10.789E09",
"-123.256E+08",
"-123,456,798.00",
"-23,456.798",
"-1,234.56",
"+1",
"+2",
"+34",
"+56",
"+123",
"+789",
"+0.123",
"+0.10800212",
"+0.1231e-087",
"+10.789e09",
"+123.256e+08",
"+0.1231E-087",
"+10.789E09",
"+123.256E+08",
"+123,456,798.00",
"+23,456.798",
"+1,234.56",
".707",
"-.707",
"50,000.123",
"1.000,123",
"37.e88",
"1.",
]
for num in yes_number:
with self.subTest(num=num):
self.assertTrue(self.td.is_number(num))
no_number = [
"0000.213654",
"123.465.798",
"0.5e0.5",
"1,23.45",
"12,34.56",
"+00003",
"0,132.6",
"1,",
"",
"E14000537",
"0e",
".",
",",
"+E3",
"1,",
]
for num in no_number:
with self.subTest(num=num):
self.assertFalse(self.td.is_number(num))
# DATES
def test_date(self):
yes_date = [
"031219",
"03122019",
"03-12-19",
"03-12-2019",
"03-5-19",
"03-5-2019",
"120319",
"12032019",
"12-03-19",
"02-03-2019",
"02-3-19",
"02-3-2019",
"19-12-3",
"19-12-03",
"19-2-3",
"19-2-03",
"8-21-19",
"8-21-2019",
"8-9-19",
"8-9-2019",
"7-12-19",
"7-12-2019",
"3-9-19",
"3-9-2019",
"191203",
"20191121",
"2019-12-3",
"2019-12-21",
"2019-3-9",
"2019-3-21",
"2019年11月21日",
"2019年11月1日",
"2019年3月21日",
"2019年3月1日",
"19年03月11日",
"19年03月1日",
"19年3月31日",
"19年3月1日",
"2019년11월21일",
"2019년11월1일",
"2019년3월21일",
"2019년3월1일",
"19년03월11일",
"19년03월1일",
"19년3월31일",
"19년3월1일",
]
for date in yes_date:
with self.subTest(date=date):
self.assertTrue(self.td.is_date(date))
no_date = [
"2018|01|02",
"30/07-88",
"12.01-99",
"5.024.2896",
"2512-012.1",
"12 01/2542",
]
for date in no_date:
with self.subTest(date=date):
self.assertFalse(self.td.is_date(date))
# DATETIME
def test_datetime(self):
    """Check is_datetime on ISO-8601 style timestamps."""
    yes_dt = ["2019-01-12T04:01:23Z", "2021-09-26T12:13:31+01:00"]
    for dt in yes_dt:
        with self.subTest(dt=dt):
            self.assertTrue(self.td.is_datetime(dt))

    # BUG FIX: the negative loop previously asserted on the stale loop
    # variable ``dt`` left over from the positive loop instead of the
    # current item; the mistake was masked only because the list is empty.
    no_dt = []
    for dt in no_dt:
        with self.subTest(dt=dt):
            self.assertFalse(self.td.is_datetime(dt))
# URLs
def test_url(self):
# Some cases copied from https://mathiasbynens.be/demo/url-regex
yes_url = [
"Cocoal.icio.us",
"Websquash.com",
"bbc.co.uk",
"ebay.com",
"en.wikipedia.com",
"ftp://foo.bar/baz",
"http://127.0.0.1",
"http://127.0.0.1/uoshostel/web/app_dev.php/assets/img/size2.jpg",
"http://1337.net",
"http://142.42.1.1/",
"http://142.42.1.1:8080/",
"http://223.255.255.254",
"http://a.b-c.de",
"http://code.google.com/events/#&product=browser",
"http://en.wikipedia.com",
"http://experiment.local/frameworks/symphony2/web/app_dev.php/admin/categories",
"http://foo.bar/?q=Test%20URL-encoded%20stuff",
"http://foo.com/(something)?after=parens",
"http://foo.com/blah_(wikipedia)#cite-1",
"http://foo.com/blah_(wikipedia)_blah#cite-1",
"http://foo.com/blah_blah",
"http://foo.com/blah_blah/",
"http://foo.com/blah_blah_(wikipedia)",
"http://foo.com/blah_blah_(wikipedia)_(again)",
"http://fridley-tigers.com",
"http://gertjan.dev",
"http://hi.fridley-tigers.com",
"http://j.mp",
"http://localhost/1234.html",
"http://localhost/Symfony/web/app_dev.php/index",
"http://localhost/pidev/WebSmartravel/web/app_dev.php/travel_admin/1/js/bootstrap.js",
"http://localhost/webSmartravel/web/app_dev.php/admin",
"http://mainhostel.localdev.com/app_dev.php/location",
"http://simplegreensmoothies.com/Recipes/kiwi-strawberry-twist",
"http://t.co/VSD0L81Yrt",
"http://t.co/VSD0L81Yrt.html",
"http://www.bbc.co.uk",
"http://www.beloithistoricalsociety.com/hanchett.htm",
"http://www.co-operativefood.co.uk/find-us/?q=UK&lat=52.451935&long=-1.887871&filters=Food&options=",
"http://www.deutsche-wein-strasse.de/Panorama/Flemlingen/flemlingen.htm",
"http://www.example.com/wpstyle/?p=364",
"http://www.google.com/url?q=http%3A%2F%2Fbit.ly%2F1sneR2w&sa=D&sntz=1&usg=AFQjCNGs2NJSTFm8Dzx-755C0K0_KDuiew",
"http://www.iceland.co.uk/",
"http://www.iceland.co.uk/store-finder/",
"http://www.iceland.co.uk/store-finder/?StoreFinderSearch=S45%209JE",
"http://www.marksandspencer.com/MSStoreDetailsView?SAPStoreId=2804",
"http://www.sainsburys.co.uk/sol/storelocator/storelocator_detail_view.jsp?storeId=4526&bmForm=store_details",
"http://www.stackoverflow.com/",
"https://en.wikipedia.com",
"https://gertjan.dev",
"https://google.com",
"https://localhost",
"https://www.example.com/foo/?bar=baz&inga=42&quux",
"test.example.com/~name",
"www.google.com",
"www.google.com/",
"www.menominee-nsn.gov/",
"http://arxiv.org/abs/arXiv:1908.03213",
"https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3474301/",
"https://dl.acm.org/citation.cfm?id=3025626",
"https://openreview.net/forum?id=S1x4ghC9tQ",
"https://link.springer.com/article/10.1007/s10618-019-00631-5",
"http://proceedings.mlr.press/v48/zhangf16.html",
"https://papers.nips.cc/paper/7796-middle-out-decoding",
"http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.89.6548",
"http://localhost:81/test/web/app_dev.php/fr/intranet/admin/client/2",
]
for url in yes_url:
with self.subTest(url=url):
self.assertTrue(self.td.is_url(url))
no_url = [
"//",
"///",
"///a",
"//a",
":// should fail",
"a@b.com",
"ftps://foo.bar/",
"h://test",
"http:// shouldfail.com",
"http://",
"http://#",
"http://##",
"http://##/",
"http://-a.b.co",
"http://-error-.invalid/",
"http://.",
"http://..",
"http://../",
"http://.www.foo.bar./",
"http://.www.foo.bar/",
"http:///a",
"http://1.1.1.1.1",
"http://123.123.123",
"http://3628126748",
"http://?",
"http://??",
"http://??/",
"http://foo.bar/foo(bar)baz quux",
"http://foo.bar?q=Spaces should be encoded",
"http://www.foo.bar./",
"rdar://1234",
]
for url in no_url:
with self.subTest(url=url):
self.assertFalse(self.td.is_url(url))
# Unicode_alphanum
def test_unicode_alphanum(self):
# These tests are by no means inclusive and ought to be extended in the
# future.
yes_alphanum = ["this is a cell", "1231 pounds"]
for unicode_alphanum in yes_alphanum:
with self.subTest(unicode_alphanum=unicode_alphanum):
self.assertTrue(self.td.is_unicode_alphanum(unicode_alphanum))
self.assertTrue(
self.td.is_unicode_alphanum(
unicode_alphanum, is_quoted=True
)
)
no_alphanum = ["https://www.gertjan.dev"]
for unicode_alpanum in no_alphanum:
with self.subTest(unicode_alpanum=unicode_alpanum):
self.assertFalse(self.td.is_unicode_alphanum(unicode_alpanum))
self.assertFalse(
self.td.is_unicode_alphanum(
unicode_alpanum, is_quoted=True
)
)
only_quoted = ["this string, with a comma"]
for unicode_alpanum in only_quoted:
with self.subTest(unicode_alpanum=unicode_alpanum):
self.assertFalse(
self.td.is_unicode_alphanum(
unicode_alpanum,
)
)
self.assertTrue(
self.td.is_unicode_alphanum(
unicode_alpanum, is_quoted=True
)
)
def test_bytearray(self):
    # Valid values are full repr-style "bytearray(b'...')" wrappers,
    # including awkward content inside the quotes.
    yes_bytearray = [
        "bytearray(b'')",
        "bytearray(b'abc,*&@\"')",
        "bytearray(b'bytearray(b'')')",
    ]
    # Unbalanced wrappers, a missing b-prefix, or leading extra text
    # must all be rejected.
    no_bytearray = [
        "bytearray(b'abc",
        "bytearray(b'abc'",
        "bytearray('abc')",
        "abc,bytearray(b'def')",
    ]
    for case in yes_bytearray:
        with self.subTest(case=case):
            self.assertTrue(self.td.is_bytearray(case))
    for case in no_bytearray:
        with self.subTest(case=case):
            self.assertFalse(self.td.is_bytearray(case))
# Unix path
def test_unix_path(self):
    # Absolute, home-relative (~/) and dot-relative (./) paths should all
    # be recognised as Unix paths.
    yes_path = [
        "/Users/person/abc/def-ghi/blabla.csv.test",
        "/home/username/share/a/_b/c_d/e.py",
        "/home/username/share",
        "/home/username",
        "/home/username/",
        "~/share/",
        "./share",
    ]
    for path in yes_path:
        with self.subTest(path=path):
            self.assertTrue(self.td.is_unix_path(path))

    # An empty string or a ~/. prefix without a separator is not a path.
    no_path = ["", "~share", ".share"]
    for path in no_path:
        with self.subTest(path=path):
            self.assertFalse(self.td.is_unix_path(path))
"""
Type Score tests
"""
def test_type_score_1(self):
    # theta_1 from paper: comma dialect, no quoting/escaping.
    cells = [
        ["7", "5; Mon", " Jan 12;6", "40"],
        ["100; Fri", " Mar 21;8", "23"],
        ["8", "2; Thu", " Sep 17; 2", "71"],
        ["538", "0;;7", "26"],
        ['"NA"; Wed', " Oct 4;6", "93"],
    ]
    data = "\n".join([",".join(x) for x in cells])
    dialect = SimpleDialect(delimiter=",", quotechar="", escapechar="")
    out = type_score(data, dialect)
    # 8 of the 17 cells parse as a known type under this dialect.
    exp = 8 / 17
    self.assertAlmostEqual(exp, out)
def test_type_score_2(self):
    # theta_2 from paper: semicolon dialect, CRLF line endings.
    cells = [
        ["7,5", " Mon, Jan 12", "6,40"],
        ["100", " Fri, Mar 21", "8,23"],
        ["8,2", " Thu, Sep 17", "2,71"],
        ["538,0", "", "7,26"],
        ['"N/A"', " Wed, Oct 4", "6,93"],
    ]
    data = "\r\n".join([";".join(x) for x in cells])
    dialect = SimpleDialect(delimiter=";", quotechar="", escapechar="")
    out = type_score(data, dialect)
    # 10 of the 15 cells parse as a known type under this dialect.
    exp = 10 / 15
    self.assertAlmostEqual(exp, out)
def test_type_score_3(self):
    # theta_3 from paper: semicolon dialect with double-quote quoting,
    # bare CR line endings.
    cells = [
        ["7,5", " Mon, Jan 12", "6,40"],
        ["100", " Fri, Mar 21", "8,23"],
        ["8,2", " Thu, Sep 17", "2,71"],
        ["538,0", "", "7,26"],
        ["N/A", " Wed, Oct 4", "6,93"],
    ]
    data = "\r".join([";".join(x) for x in cells])
    dialect = SimpleDialect(delimiter=";", quotechar='"', escapechar="")
    out = type_score(data, dialect)
    # 11 of the 15 cells parse as a known type under this dialect.
    exp = 11 / 15
    self.assertAlmostEqual(exp, out)
if __name__ == "__main__":
unittest.main()
| 31.822989 | 123 | 0.453803 |
4a48723370220f5bab345a2e81cf021b7de575a7 | 1,408 | py | Python | data-hub-api/apps/cdms_api/tests/integration/customer_addresses/test_delete.py | uktrade/data-hub-api-old | 5ecf093d88692870982a638ced45de6a82d55672 | [
"MIT"
] | null | null | null | data-hub-api/apps/cdms_api/tests/integration/customer_addresses/test_delete.py | uktrade/data-hub-api-old | 5ecf093d88692870982a638ced45de6a82d55672 | [
"MIT"
] | 18 | 2016-04-04T12:42:45.000Z | 2016-09-01T07:21:05.000Z | data-hub-api/apps/cdms_api/tests/integration/customer_addresses/test_delete.py | uktrade/data-hub-api-old | 5ecf093d88692870982a638ced45de6a82d55672 | [
"MIT"
] | 1 | 2016-06-01T15:45:21.000Z | 2016-06-01T15:45:21.000Z | from ....exceptions import CDMSNotFoundException
from .existing_customer_address_test_case import ExistingCustomerAddressTestCase
class TestDelete(ExistingCustomerAddressTestCase):
    # Integration tests for CDMS delete behaviour on an Account with two
    # existing child CustomerAddress records (guids set up by the base class).

    def test_delete_cascade(self):
        """
        Client delete on Account deletes 2 child CustomerAddresses
        """
        self.client.delete('Account', self.guids['account'])

        self.assertServiceCountEqual('CustomerAddress', 0)
        # Both child addresses must now be gone from the service.
        with self.assertRaises(CDMSNotFoundException):
            self.client.get('CustomerAddress', self.guids['address1'])
        with self.assertRaises(CDMSNotFoundException):
            self.client.get('CustomerAddress', self.guids['address2'])

    def test_delete_stand_alone(self):
        """
        Client deletes single CustomerAddress from Account, Address left None
        """
        self.client.delete('CustomerAddress', self.guids['address1'])

        self.assertServiceCountEqual('CustomerAddress', 1)
        with self.assertRaises(CDMSNotFoundException):
            self.client.get('CustomerAddress', self.guids['address1'])
        # The second address must survive the targeted delete.
        self.client.get('CustomerAddress', self.guids['address2'])

        # The Account's first-address denormalised fields are cleared,
        # while the second address block is untouched.
        account = self.client.get('Account', self.guids['account'])
        self.assertIsNone(account['Address1_AddressId'])
        self.assertIsNone(account['Address1_Line1'])
        self.assertEqual(account['Address2_Line1'], 'Victoria St W & Federal St')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.